lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
src/bin/main.rs
DanielJoyce/websocket-serial-server
27eee51e364d8852ad4f995272a30241d2ac6ab9
#[macro_use] extern crate log; use std::io::Write; use std::net::TcpStream; use std::sync::mpsc::{channel, Sender}; use std::thread; use hyper::net::Fresh; use hyper::server::request::Request; use hyper::server::response::Response; use hyper::Server as HttpServer; use rand::{thread_rng, Rng}; use websocket::client::Writer; use websocket::message::Type; use websocket::result::WebSocketError; use websocket::server::upgrade::WsUpgrade; use websocket::{Message, Server}; use lib::cfg::*; use lib::dynamic_sleep::DynamicSleep; use lib::errors as e; use lib::manager::Manager; use lib::messages::*; pub const MAX_SEND_ERROR_COUNT: u32 = 5; pub fn main() { env_logger::init().expect("Initialization of logging system failed!"); let cfg = WsssConfig::load(); let websocket_html = include_str!("websockets.html").replace( "__WS_PORT__ = 8081", &format!("__WS_PORT__ = {}", cfg.ws_port), ); let http_handler = move |_: Request, response: Response<Fresh>| { let mut response = response.start().expect(&"Could not start response"); response .write_all(websocket_html.as_bytes()) .expect(&"Could not get template as bytes"); response.end().expect(&"Send response failed"); }; info!("Using ports {} {}", cfg.http_port, cfg.ws_port); let (sub_tx, sub_rx) = channel::<SubscriptionRequest>(); let (sreq_tx, sreq_rx) = channel::<(String, SerialRequest)>(); Manager::spawn(sreq_rx, sub_rx); let http_server = HttpServer::http(format!("{}:{}", cfg.bind_address, cfg.http_port)).expect( &format!("Failed to create http server on port {}", cfg.http_port), ); thread::spawn(move || { http_server.handle(http_handler).expect(&"Failed to listen"); }); let ws_server = Server::bind(format!("{}:{}", cfg.bind_address, cfg.ws_port)) .expect(&format!("Failed bind on websocket port {}", cfg.ws_port)); for connection in ws_server.filter_map(Result::ok) { let prefix: String = thread_rng().gen_ascii_chars().take(8).collect(); let sub_id = format!("thread-{}-{}", prefix, rand::random::<u16>()); debug!("{}: spawned.", 
sub_id); let sub_tx_clone = sub_tx.clone(); let sreq_tx_clone = sreq_tx.clone(); spawn_ws_handler(sub_id, sub_tx_clone, sreq_tx_clone, connection); } } fn spawn_ws_handler( sub_id: String, sub_tx_clone: Sender<SubscriptionRequest>, sreq_tx_clone: Sender<(String, SerialRequest)>, connection: WsUpgrade<TcpStream>, ) { thread::spawn(move || ws_handler(sub_id, &sub_tx_clone, &sreq_tx_clone, connection)); } fn ws_handler( sub_id: String, sub_tx: &Sender<SubscriptionRequest>, sreq_tx: &Sender<(String, SerialRequest)>, connection: WsUpgrade<TcpStream>, ) { if !connection .protocols() .contains(&"websocket-serial-json".to_string()) { connection.reject().expect(&"Connection rejection failed."); return; } connection .tcp_stream() .set_nonblocking(true) .expect(&"Setting stream non-blocking failed."); let (sub_resp_tx, sub_resp_rx) = channel::<SerialResponse>(); sub_tx .send(SubscriptionRequest { sub_id: sub_id.clone(), subscriber: sub_resp_tx, }) .expect(&format!("{}: Registering with manager failed.", sub_id)); let client = connection .use_protocol(format!("websocket-serial-json")) .accept() .expect(&format!("{}: Accept protocol failed.", sub_id)); let ip = client .peer_addr() .expect(&format!("{}: Could not get peer address", sub_id)); info!("{}: Connection from {}", sub_id, ip); let (mut receiver, mut sender) = client .split() .expect(&format!("{}: WS client error", sub_id)); let mut send_error_count = 0; let mut dynamic_sleep = DynamicSleep::new("main"); 'msg_loop: loop { dynamic_sleep.sleep(); match receiver.recv_message::<Message, _, _>() { Ok(message) => { match message.opcode { Type::Close => { let message = Message::close(); sender .send_message(&message) .unwrap_or(info!("{}: Client {} hung up!", sub_id, ip)); sreq_tx .send((sub_id.clone(), SerialRequest::Close { port: None })) .unwrap_or_else(|e| { warn!( "Client exit cleanup failed for sub_id '{}', cause '{}'", sub_id, e ) }); info!("{}: Client {} disconnected", sub_id, ip); break 'msg_loop; } Type::Ping => { let 
message = Message::pong(message.payload); sender .send_message(&message) .unwrap_or(info!("{}: Could not ping client {}!", sub_id, ip)); } _ => { let msg = String::from_utf8_lossy(&message.payload); match serde_json::from_str(&msg) { Ok(req) => { match sreq_tx.send((sub_id.clone(), req)) { Err(err) => { let error = e::ErrorKind::SendRequest(err).into(); send_serial_response_error(&sub_id, &mut sender, error); } _ => {} }; } Err(err) => { let error = e::ErrorKind::Json(err).into(); send_serial_response_error(&sub_id, &mut sender, error); } }; } }; } Err(e) => { match e { WebSocketError::NoDataAvailable => { /*Logging?*/ } _ => { /*Logging?*/ } }; } } match sub_resp_rx.try_recv() { Ok(resp) => match serde_json::to_string(&resp) { Ok(json) => { let reply = Message::text(json.clone()); sender.send_message(&reply).unwrap_or_else(|e| { send_error_count += 1; info!( "{}: Could not send message '{}' to client '{}', cause '{}'", sub_id, json, ip, e ) }); } Err(_) => {} }, _ => { /*Logging*/ } }; if send_error_count > MAX_SEND_ERROR_COUNT { warn!( "{}: Client send error count exceeded! Shutting down msg loop.", &sub_id ); break 'msg_loop; } } info!("{}: Shutting down!", sub_id); } fn send_serial_response_error(sub_id: &String, sender: &mut Writer<TcpStream>, error: e::Error) { let error = e::to_serial_response_error(error); serde_json::to_string(&error) .map_err(|err| e::ErrorKind::Json(err)) .map(|json| Message::text(json)) .map(|msg| { sender .send_message::<Message, _>(&msg) .map_err::<e::Error, _>(|err| e::ErrorKind::SendWsMessage(err).into()) }) .unwrap_or_else(|_| { warn!("{}: Problem sending bad json error response", sub_id); Ok(()) }) .is_ok(); }
#[macro_use] extern crate log; use std::io::Write; use std::net::TcpStream; use std::sync::mpsc::{channel, Sender};
pStream>, ) { thread::spawn(move || ws_handler(sub_id, &sub_tx_clone, &sreq_tx_clone, connection)); } fn ws_handler( sub_id: String, sub_tx: &Sender<SubscriptionRequest>, sreq_tx: &Sender<(String, SerialRequest)>, connection: WsUpgrade<TcpStream>, ) { if !connection .protocols() .contains(&"websocket-serial-json".to_string()) { connection.reject().expect(&"Connection rejection failed."); return; } connection .tcp_stream() .set_nonblocking(true) .expect(&"Setting stream non-blocking failed."); let (sub_resp_tx, sub_resp_rx) = channel::<SerialResponse>(); sub_tx .send(SubscriptionRequest { sub_id: sub_id.clone(), subscriber: sub_resp_tx, }) .expect(&format!("{}: Registering with manager failed.", sub_id)); let client = connection .use_protocol(format!("websocket-serial-json")) .accept() .expect(&format!("{}: Accept protocol failed.", sub_id)); let ip = client .peer_addr() .expect(&format!("{}: Could not get peer address", sub_id)); info!("{}: Connection from {}", sub_id, ip); let (mut receiver, mut sender) = client .split() .expect(&format!("{}: WS client error", sub_id)); let mut send_error_count = 0; let mut dynamic_sleep = DynamicSleep::new("main"); 'msg_loop: loop { dynamic_sleep.sleep(); match receiver.recv_message::<Message, _, _>() { Ok(message) => { match message.opcode { Type::Close => { let message = Message::close(); sender .send_message(&message) .unwrap_or(info!("{}: Client {} hung up!", sub_id, ip)); sreq_tx .send((sub_id.clone(), SerialRequest::Close { port: None })) .unwrap_or_else(|e| { warn!( "Client exit cleanup failed for sub_id '{}', cause '{}'", sub_id, e ) }); info!("{}: Client {} disconnected", sub_id, ip); break 'msg_loop; } Type::Ping => { let message = Message::pong(message.payload); sender .send_message(&message) .unwrap_or(info!("{}: Could not ping client {}!", sub_id, ip)); } _ => { let msg = String::from_utf8_lossy(&message.payload); match serde_json::from_str(&msg) { Ok(req) => { match sreq_tx.send((sub_id.clone(), req)) { Err(err) => 
{ let error = e::ErrorKind::SendRequest(err).into(); send_serial_response_error(&sub_id, &mut sender, error); } _ => {} }; } Err(err) => { let error = e::ErrorKind::Json(err).into(); send_serial_response_error(&sub_id, &mut sender, error); } }; } }; } Err(e) => { match e { WebSocketError::NoDataAvailable => { /*Logging?*/ } _ => { /*Logging?*/ } }; } } match sub_resp_rx.try_recv() { Ok(resp) => match serde_json::to_string(&resp) { Ok(json) => { let reply = Message::text(json.clone()); sender.send_message(&reply).unwrap_or_else(|e| { send_error_count += 1; info!( "{}: Could not send message '{}' to client '{}', cause '{}'", sub_id, json, ip, e ) }); } Err(_) => {} }, _ => { /*Logging*/ } }; if send_error_count > MAX_SEND_ERROR_COUNT { warn!( "{}: Client send error count exceeded! Shutting down msg loop.", &sub_id ); break 'msg_loop; } } info!("{}: Shutting down!", sub_id); } fn send_serial_response_error(sub_id: &String, sender: &mut Writer<TcpStream>, error: e::Error) { let error = e::to_serial_response_error(error); serde_json::to_string(&error) .map_err(|err| e::ErrorKind::Json(err)) .map(|json| Message::text(json)) .map(|msg| { sender .send_message::<Message, _>(&msg) .map_err::<e::Error, _>(|err| e::ErrorKind::SendWsMessage(err).into()) }) .unwrap_or_else(|_| { warn!("{}: Problem sending bad json error response", sub_id); Ok(()) }) .is_ok(); }
use std::thread; use hyper::net::Fresh; use hyper::server::request::Request; use hyper::server::response::Response; use hyper::Server as HttpServer; use rand::{thread_rng, Rng}; use websocket::client::Writer; use websocket::message::Type; use websocket::result::WebSocketError; use websocket::server::upgrade::WsUpgrade; use websocket::{Message, Server}; use lib::cfg::*; use lib::dynamic_sleep::DynamicSleep; use lib::errors as e; use lib::manager::Manager; use lib::messages::*; pub const MAX_SEND_ERROR_COUNT: u32 = 5; pub fn main() { env_logger::init().expect("Initialization of logging system failed!"); let cfg = WsssConfig::load(); let websocket_html = include_str!("websockets.html").replace( "__WS_PORT__ = 8081", &format!("__WS_PORT__ = {}", cfg.ws_port), ); let http_handler = move |_: Request, response: Response<Fresh>| { let mut response = response.start().expect(&"Could not start response"); response .write_all(websocket_html.as_bytes()) .expect(&"Could not get template as bytes"); response.end().expect(&"Send response failed"); }; info!("Using ports {} {}", cfg.http_port, cfg.ws_port); let (sub_tx, sub_rx) = channel::<SubscriptionRequest>(); let (sreq_tx, sreq_rx) = channel::<(String, SerialRequest)>(); Manager::spawn(sreq_rx, sub_rx); let http_server = HttpServer::http(format!("{}:{}", cfg.bind_address, cfg.http_port)).expect( &format!("Failed to create http server on port {}", cfg.http_port), ); thread::spawn(move || { http_server.handle(http_handler).expect(&"Failed to listen"); }); let ws_server = Server::bind(format!("{}:{}", cfg.bind_address, cfg.ws_port)) .expect(&format!("Failed bind on websocket port {}", cfg.ws_port)); for connection in ws_server.filter_map(Result::ok) { let prefix: String = thread_rng().gen_ascii_chars().take(8).collect(); let sub_id = format!("thread-{}-{}", prefix, rand::random::<u16>()); debug!("{}: spawned.", sub_id); let sub_tx_clone = sub_tx.clone(); let sreq_tx_clone = sreq_tx.clone(); spawn_ws_handler(sub_id, sub_tx_clone, 
sreq_tx_clone, connection); } } fn spawn_ws_handler( sub_id: String, sub_tx_clone: Sender<SubscriptionRequest>, sreq_tx_clone: Sender<(String, SerialRequest)>, connection: WsUpgrade<Tc
random
[ { "content": "//! The serial support library contains all\n\n//! the functionality to read ports, and send data\n\n//! between threads reading serial port data\n\n//! and threads handling websocket requests\n\n\n\n#![recursion_limit = \"1024\"]\n\n#![allow(dead_code)]\n\n#![allow(unused_variables)]\n\nextern cr...
Rust
src/shader/service.rs
vojd/skuggbox
724ddb025623345f634634f4ea70cb022fc20e2b
use log::{error, info}; use std::path::PathBuf; use crate::shader::VERTEX_SHADER; use crate::shader::{find_included_files, PreProcessor, Shader, ShaderError}; use crate::uniforms::{read_uniforms, Uniform}; use crate::utils::cstr_with_len; pub fn create_program(fragment_src: String) -> Result<ShaderProgram, ShaderError> { let vertex_shader = Shader::from_source(String::from(VERTEX_SHADER), gl::VERTEX_SHADER)?; let frag_shader = Shader::from_source(fragment_src, gl::FRAGMENT_SHADER)?; info!( "Creating shader program: {} {}", vertex_shader.id, frag_shader.id ); Ok(ShaderProgram::new(vertex_shader, frag_shader)) } pub struct ShaderProgram { pub id: gl::types::GLuint, } impl ShaderProgram { pub fn new(vert_shader: Shader, frag_shader: Shader) -> Self { let id = unsafe { gl::CreateProgram() }; unsafe { gl::AttachShader(id, vert_shader.id); gl::AttachShader(id, frag_shader.id); gl::LinkProgram(id); } let mut success: gl::types::GLint = 1; unsafe { gl::GetProgramiv(id, gl::LINK_STATUS, &mut success); } if success == 0 { let mut len: gl::types::GLint = 0; unsafe { gl::GetProgramiv(id, gl::INFO_LOG_LENGTH, &mut len); } let error = cstr_with_len(len as usize); unsafe { gl::GetProgramInfoLog( id, len, std::ptr::null_mut(), error.as_ptr() as *mut gl::types::GLchar, ); } error!("linker error {}", error.to_string_lossy()); panic!("linker error"); } unsafe { gl::DetachShader(id, vert_shader.id); gl::DetachShader(id, frag_shader.id); } Self { id } } } impl Drop for ShaderProgram { fn drop(&mut self) { unsafe { gl::DeleteProgram(self.id); } } } pub struct ShaderService { pre_processor: Box<PreProcessor>, fs: PathBuf, pub program: Option<ShaderProgram>, pub files: Vec<PathBuf>, pub use_camera_integration: bool, uniforms: Vec<Uniform>, } impl ShaderService { pub fn new(fs: PathBuf) -> Self { let mut pre_processor = PreProcessor::new(fs.clone()); pre_processor.reload(); let program = create_program(pre_processor.shader_src.clone()).unwrap(); let files = if let Some(f) = 
find_included_files(fs.clone()) { vec![fs.clone(), f.iter().collect()] } else { vec![fs.clone()] }; Self { pre_processor: pre_processor.into(), fs, program: Some(program), files, use_camera_integration: false, uniforms: vec![], } } pub fn reload(&mut self) { self.pre_processor.use_camera_integration = self.use_camera_integration; self.pre_processor.reload(); match create_program(self.pre_processor.shader_src.clone()) { Ok(new_program) => { self.program = Some(new_program); self.uniforms = read_uniforms(self.fs.clone()); info!("Shader recreated without errors") } _ => { error!("Compilation failed - not binding failed program"); } }; } }
use log::{error, info}; use std::path::PathBuf; use crate::shader::VERTEX_SHADER; use crate::shader::{find_included_files, PreProcessor, Shader, ShaderError}; use crate::uniforms::{read_uniforms, Uniform}; use crate::utils::cstr_with_len; pub fn create_program(fragment_src: String) -> Result<ShaderProgram, ShaderError> { let vertex_shader = Shader::from_source(String::from(VERTEX_SHADER), gl::VERTEX_SHADER)?; let frag_shader = Shader::from_source(fragment_src, gl::FRAGMENT_SHADER)?; info!( "Creating shader program: {} {}", vertex_shader.id, frag_shader.id ); Ok(ShaderProgram::new(vertex_shader, frag_shader)) } pub struct ShaderProgram { pub id: gl::types::GLuint, } impl ShaderProgram { pub fn new(vert_shader: Shader, frag_shader: Shader) -> Self { let id = unsafe { gl::CreateProgram() }; unsafe { gl::AttachShader(id, vert_shader.id); gl::AttachShader(id, frag_shader.id); gl::LinkProgram(id); } let mut success: gl::types::GLint = 1; unsafe { gl::GetProgramiv(id, gl::LINK_STATUS, &mut success); } if success == 0 { let mut len: gl::types::GLint = 0; unsafe { gl::GetProgramiv(id, gl::INFO_LOG_LENGTH, &mut len); } let error = cstr_with_len(len as usize); unsafe { gl::GetProgramInfoLog( id, len, std::ptr::null_mut(), error.as_ptr() as *mut gl::types::GLchar, ); } error!("linker error {}", error.to_string_lossy()); panic!("linker error"); } unsafe { gl::DetachShader(id, vert
= self.use_camera_integration; self.pre_processor.reload(); match create_program(self.pre_processor.shader_src.clone()) { Ok(new_program) => { self.program = Some(new_program); self.uniforms = read_uniforms(self.fs.clone()); info!("Shader recreated without errors") } _ => { error!("Compilation failed - not binding failed program"); } }; } }
_shader.id); gl::DetachShader(id, frag_shader.id); } Self { id } } } impl Drop for ShaderProgram { fn drop(&mut self) { unsafe { gl::DeleteProgram(self.id); } } } pub struct ShaderService { pre_processor: Box<PreProcessor>, fs: PathBuf, pub program: Option<ShaderProgram>, pub files: Vec<PathBuf>, pub use_camera_integration: bool, uniforms: Vec<Uniform>, } impl ShaderService { pub fn new(fs: PathBuf) -> Self { let mut pre_processor = PreProcessor::new(fs.clone()); pre_processor.reload(); let program = create_program(pre_processor.shader_src.clone()).unwrap(); let files = if let Some(f) = find_included_files(fs.clone()) { vec![fs.clone(), f.iter().collect()] } else { vec![fs.clone()] }; Self { pre_processor: pre_processor.into(), fs, program: Some(program), files, use_camera_integration: false, uniforms: vec![], } } pub fn reload(&mut self) { self.pre_processor.use_camera_integration
random
[ { "content": "pub fn cstr_with_len(len: usize) -> CString {\n\n let mut buffer: Vec<u8> = Vec::with_capacity(len + 1);\n\n buffer.extend([b' '].iter().cycle().take(len));\n\n unsafe { CString::from_vec_unchecked(buffer) }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 1, "score": 136967....
Rust
libsplinter/src/peer/notification.rs
rbuysse/splinter
1864eb39be8c44f910dc0ce79693fea7f136fd5a
use std::collections::{HashMap, VecDeque}; use std::sync::mpsc::{Receiver, TryRecvError}; use super::error::PeerManagerError; use super::PeerTokenPair; #[derive(Debug, PartialEq, Clone)] pub enum PeerManagerNotification { Connected { peer: PeerTokenPair }, Disconnected { peer: PeerTokenPair }, } pub struct PeerNotificationIter { pub(super) recv: Receiver<PeerManagerNotification>, } impl PeerNotificationIter { pub fn try_next(&self) -> Result<Option<PeerManagerNotification>, PeerManagerError> { match self.recv.try_recv() { Ok(notifications) => Ok(Some(notifications)), Err(TryRecvError::Empty) => Ok(None), Err(TryRecvError::Disconnected) => Err(PeerManagerError::SendMessageError( "The peer manager is no longer running".into(), )), } } } impl Iterator for PeerNotificationIter { type Item = PeerManagerNotification; fn next(&mut self) -> Option<Self::Item> { match self.recv.recv() { Ok(notification) => Some(notification), Err(_) => { None } } } } pub type SubscriberId = usize; pub(super) type Subscriber = Box<dyn Fn(PeerManagerNotification) -> Result<(), Box<dyn std::error::Error>> + Send>; pub(super) struct SubscriberMap { queue: VecDeque<PeerManagerNotification>, queue_limit: usize, subscribers: HashMap<SubscriberId, Subscriber>, next_id: SubscriberId, } impl SubscriberMap { pub fn new() -> Self { Self::new_with_queue_limit(std::u16::MAX as usize) } pub fn new_with_queue_limit(limit: usize) -> Self { Self { queue: VecDeque::new(), queue_limit: limit, subscribers: HashMap::new(), next_id: 0, } } pub fn broadcast(&mut self, notification: PeerManagerNotification) { self.queue.push_back(notification); if self.queue.len() > self.queue_limit { self.queue.pop_front(); } if self.subscribers.is_empty() { return; } while let Some(notification) = self.queue.pop_front() { let mut failures = vec![]; for (id, callback) in self.subscribers.iter() { if let Err(err) = (*callback)(notification.clone()) { failures.push(*id); debug!("Dropping subscriber ({}): {}", id, err); } } for id in 
failures { self.subscribers.remove(&id); } } } pub fn add_subscriber(&mut self, subscriber: Subscriber) -> SubscriberId { let subscriber_id = self.next_id; self.next_id += 1; if self.subscribers.is_empty() { while let Some(notification) = self.queue.pop_front() { if let Err(err) = (*subscriber)(notification) { debug!("Dropping subscriber on add ({}): {}", subscriber_id, err); return subscriber_id; } } } self.subscribers.insert(subscriber_id, subscriber); subscriber_id } pub fn remove_subscriber(&mut self, subscriber_id: SubscriberId) { self.subscribers.remove(&subscriber_id); } } #[cfg(test)] pub mod tests { use super::*; use std::sync::mpsc::channel; use std::thread; use crate::peer::PeerAuthorizationToken; #[test] fn test_peer_manager_notifications() { let (send, recv) = channel(); let notifcation_iter = PeerNotificationIter { recv }; let join_handle = thread::spawn(move || { for i in 0..5 { send.send(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: format!("test_peer{}", i), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }) .unwrap(); } }); let mut notifications_sent = 0; for notifcation in notifcation_iter { assert_eq!( notifcation, PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: format!("test_peer{}", notifications_sent), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), } ); notifications_sent += 1; } assert_eq!(notifications_sent, 5); join_handle.join().unwrap(); } #[test] fn test_broadcast_queue() { let mut subscriber_map = SubscriberMap::new(); for i in 0..3 { subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: format!("test_peer_{}", i), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }) } let (tx, sub1) = channel(); let _sub1_id = subscriber_map.add_subscriber(Box::new(move |notification| { 
tx.send(notification).map_err(Box::from) })); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_0".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_1".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_2".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert!(matches!( sub1.try_recv(), Err(std::sync::mpsc::TryRecvError::Empty) )); let (tx, sub2) = channel(); let _sub2_id = subscriber_map.add_subscriber(Box::new(move |notification| { tx.send(notification).map_err(Box::from) })); subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into(), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert_eq!( sub2.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); } #[test] fn test_broadcast_queue_limit() { let mut subscriber_map = SubscriberMap::new_with_queue_limit(1); for i in 0..3 { subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( 
PeerAuthorizationToken::Trust { peer_id: format!("test_peer_{}", i), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }) } let (tx, sub1) = channel(); let _sub1_id = subscriber_map.add_subscriber(Box::new(move |notification| { tx.send(notification).map_err(Box::from) })); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_2".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert!(matches!( sub1.try_recv(), Err(std::sync::mpsc::TryRecvError::Empty) )); subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into(), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }); subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_4".into(), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_4".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert!(matches!( sub1.try_recv(), Err(std::sync::mpsc::TryRecvError::Empty) )); } }
use std::collections::{HashMap, VecDeque}; use std::sync::mpsc::{Receiver, TryRecvError}; use super::error::PeerManagerError; use super::PeerTokenPair; #[derive(Debug, PartialEq, Clone)] pub enum PeerManagerNotification { Connected { peer: PeerTokenPair }, Disconnected { peer: PeerTokenPair }, } pub struct PeerNotificationIter { pub(super) recv: Receiver<PeerManagerNotification>, } impl PeerNotificationIter { pub fn try_next(&self) -> Result<Option<PeerManagerNotification>, PeerManagerError> { match self.recv.try_recv() { Ok(notifications) => Ok(Some(notifications)), Err(TryRecvError::Empty) => Ok(None), Err(TryRecvError::Disconnected) => Err(PeerManagerError::SendMessageError( "The peer manager is no longer running".into(), )), } } } impl Iterator for PeerNotificationIter { type Item = PeerManagerNotification; fn next(&mut self) -> Option<Self::Item> { match self.recv.recv() { Ok(notification) => Some(notification), Err(_) => { None } } } } pub type SubscriberId = usize; pub(super) type Subscriber = Box<dyn Fn(PeerManagerNotification) -> Result<(), Box<dyn std::error::Error>> + Send>; pub(super) struct SubscriberMap { queue: VecDeque<PeerManagerNotification>, queue_limit: usize, subscribers: HashMap<SubscriberId, Subscriber>, next_id: SubscriberId, } impl SubscriberMap { pub fn new() -> Self { Self::new_with_queue_limit(std::u16::MAX as usize) } pub fn new_with_queue_limit(limit: usize) -> Self { Self { queue: VecDeque::new(), queue_limit: limit, subscribers: HashMap::new(), next_id: 0, } } pub fn broadcast(&mut self, notification: PeerManagerNotification) { self.queue.push_back(notification); if self.queue.len() > self.queue_limit { self.queue.pop_front(); } if self.subscribers.is_empty() { return; } while let Some(notification) = self.queue.pop_front() { let mut failures = vec![]; for (id, callback) in self.subscribers.iter() { if let Err(err) = (*callback)(notification.clone()) { failures.push(*id); debug!("Dropping subscriber ({}): {}", id, err); } } for id in 
failures { self.subscribers.remove(&id); } } } pub fn add_subscriber(&mut self, subscriber: Subscriber) -> SubscriberId { let subscriber_id = self.next_id; self.next_id += 1; if self.subscribers.is_empty() { while let Some(notification) = self.queue.pop_front() { if let Err(err) = (*subscriber)(notification) { debug!("Dropping subscriber on add ({}): {}", subscriber_id, err); return subscriber_id; } } } self.subscribers.insert(subscriber_id, subscriber); subscriber_id } pub fn remove_subscriber(&mut self, subscriber_id: SubscriberId) { self.subscribers.remove(&subscriber_id); } } #[cfg(test)] pub mod tests { use super::*; use std::sync::mpsc::channel; use std::thread; use crate::peer::PeerAuthorizationToken; #[test] fn test_peer_manager_notifications() { let (send, recv) = channel(); let notifcation_iter = PeerNotificationIter { recv }; let join_handle = thread::spawn(move || { for i in 0..5 { send.send(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: format!("test_peer{}", i), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }) .unwrap(); } }); let mut notifications_sent = 0; for notifcation in notifcation_iter { assert_eq!( notifcation, PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: format!("test_peer{}", notifications_sent), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), } ); notifications_sent += 1; } assert_eq!(notifications_sent, 5); join_handle.join().unwrap(); } #[test] fn test_broadcast_queue() { let mut subscriber_map = SubscriberMap::new(); for i in 0..3 { subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: format!("test_peer_{}", i), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }) } let (tx, sub1) = channel(); let _sub1_id = subscriber_map.add_subscriber(Box::new(move |notification| { 
tx.send(notification).map_err(Box::from) })); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_0".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_1".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } );
#[test] fn test_broadcast_queue_limit() { let mut subscriber_map = SubscriberMap::new_with_queue_limit(1); for i in 0..3 { subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: format!("test_peer_{}", i), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }) } let (tx, sub1) = channel(); let _sub1_id = subscriber_map.add_subscriber(Box::new(move |notification| { tx.send(notification).map_err(Box::from) })); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_2".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert!(matches!( sub1.try_recv(), Err(std::sync::mpsc::TryRecvError::Empty) )); subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into(), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }); subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_4".into(), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_4".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert!(matches!( sub1.try_recv(), Err(std::sync::mpsc::TryRecvError::Empty) )); } }
assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_2".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert!(matches!( sub1.try_recv(), Err(std::sync::mpsc::TryRecvError::Empty) )); let (tx, sub2) = channel(); let _sub2_id = subscriber_map.add_subscriber(Box::new(move |notification| { tx.send(notification).map_err(Box::from) })); subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into(), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert_eq!( sub2.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); }
function_block-function_prefix_line
[ { "content": "#[cfg(not(any(feature = \"trust-authorization\", feature = \"challenge-authorization\")))]\n\nfn connect_msg_bytes() -> Result<Vec<u8>, AuthorizationManagerError> {\n\n let connect_msg = AuthorizationMessage::ConnectRequest(ConnectRequest::Bidirectional);\n\n\n\n IntoBytes::<network::Network...
Rust
crates/platform/src/input/mouse.rs
gents83/NRG
62743a54ac873a8dea359f3816e24c189a323ebb
use std::collections::HashMap; use sabi_commands::CommandParser; use sabi_messenger::{implement_message, Message, MessageFromString}; #[derive(Debug, Hash, Ord, PartialOrd, PartialEq, Eq, Clone, Copy)] pub enum MouseButton { None, Left, Right, Middle, Other(u16), } #[derive(Debug, Hash, Ord, PartialOrd, PartialEq, Eq, Clone, Copy)] pub enum MouseState { Invalid, Move, DoubleClick, Down, Up, } #[derive(Debug, PartialOrd, PartialEq, Clone, Copy)] pub struct MouseEvent { pub x: f64, pub y: f64, pub normalized_x: f32, pub normalized_y: f32, pub button: MouseButton, pub state: MouseState, } implement_message!(MouseEvent); impl MessageFromString for MouseEvent { fn from_command_parser(command_parser: CommandParser) -> Option<Box<dyn Message>> where Self: Sized, { if command_parser.has("mouse_move") { let values = command_parser.get_values_of("mouse_move"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::None, state: MouseState::Move, } .as_boxed(), ); } else if command_parser.has("mouse_left_down") { let values = command_parser.get_values_of("mouse_left_down"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Left, state: MouseState::Down, } .as_boxed(), ); } else if command_parser.has("mouse_right_down") { let values = command_parser.get_values_of("mouse_right_down"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Right, state: MouseState::Down, } .as_boxed(), ); } else if command_parser.has("mouse_left_up") { let values = command_parser.get_values_of("mouse_left_up"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Left, state: MouseState::Up, } .as_boxed(), ); } else if command_parser.has("mouse_right_up") { let values = 
command_parser.get_values_of("mouse_right_up"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Right, state: MouseState::Up, } .as_boxed(), ); } None } } impl Default for MouseEvent { #[inline] fn default() -> Self { Self { x: 0., y: 0., normalized_x: 0., normalized_y: 0., button: MouseButton::None, state: MouseState::Move, } } } pub struct MouseData { pub(super) pos_x: f64, pub(super) pos_y: f64, pub(super) move_x: f64, pub(super) move_y: f64, pub(super) is_pressed: bool, pub(super) buttons: HashMap<MouseButton, MouseState>, } impl Default for MouseData { fn default() -> Self { Self { pos_x: 0.0, pos_y: 0.0, move_x: 0.0, move_y: 0.0, is_pressed: false, buttons: HashMap::new(), } } } impl MouseData { pub fn get_x(&self) -> f64 { self.pos_x } pub fn get_y(&self) -> f64 { self.pos_y } pub fn movement_x(&self) -> f64 { self.move_x } pub fn movement_y(&self) -> f64 { self.move_y } pub fn is_pressed(&self) -> bool { self.is_pressed } pub fn get_button_state(&self, button: MouseButton) -> MouseState { if let Some(button) = self.buttons.get(&button) { *button } else { MouseState::Invalid } } pub fn is_button_down(&self, button: MouseButton) -> bool { if let Some(button) = self.buttons.get(&button) { *button == MouseState::Down } else { false } } pub fn is_button_up(&self, button: MouseButton) -> bool { if let Some(button) = self.buttons.get(&button) { *button == MouseState::Up } else { false } } }
use std::collections::HashMap; use sabi_commands::CommandParser; use sabi_messenger::{implement_message, Message, MessageFromString}; #[derive(Debug, Hash, Ord, PartialOrd, PartialEq, Eq, Clone, Copy)] pub enum MouseButton { None, Left, Right, Middle, Other(u16), } #
ues_of("mouse_move"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::None, state: MouseState::Move, } .as_boxed(), ); } else if command_parser.has("mouse_left_down") { let values = command_parser.get_values_of("mouse_left_down"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Left, state: MouseState::Down, } .as_boxed(), ); } else if command_parser.has("mouse_right_down") { let values = command_parser.get_values_of("mouse_right_down"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Right, state: MouseState::Down, } .as_boxed(), ); } else if command_parser.has("mouse_left_up") { let values = command_parser.get_values_of("mouse_left_up"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Left, state: MouseState::Up, } .as_boxed(), ); } else if command_parser.has("mouse_right_up") { let values = command_parser.get_values_of("mouse_right_up"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Right, state: MouseState::Up, } .as_boxed(), ); } None } } impl Default for MouseEvent { #[inline] fn default() -> Self { Self { x: 0., y: 0., normalized_x: 0., normalized_y: 0., button: MouseButton::None, state: MouseState::Move, } } } pub struct MouseData { pub(super) pos_x: f64, pub(super) pos_y: f64, pub(super) move_x: f64, pub(super) move_y: f64, pub(super) is_pressed: bool, pub(super) buttons: HashMap<MouseButton, MouseState>, } impl Default for MouseData { fn default() -> Self { Self { pos_x: 0.0, pos_y: 0.0, move_x: 0.0, move_y: 0.0, is_pressed: false, buttons: HashMap::new(), } } } impl MouseData { pub fn get_x(&self) -> f64 { self.pos_x } pub fn 
get_y(&self) -> f64 { self.pos_y } pub fn movement_x(&self) -> f64 { self.move_x } pub fn movement_y(&self) -> f64 { self.move_y } pub fn is_pressed(&self) -> bool { self.is_pressed } pub fn get_button_state(&self, button: MouseButton) -> MouseState { if let Some(button) = self.buttons.get(&button) { *button } else { MouseState::Invalid } } pub fn is_button_down(&self, button: MouseButton) -> bool { if let Some(button) = self.buttons.get(&button) { *button == MouseState::Down } else { false } } pub fn is_button_up(&self, button: MouseButton) -> bool { if let Some(button) = self.buttons.get(&button) { *button == MouseState::Up } else { false } } }
[derive(Debug, Hash, Ord, PartialOrd, PartialEq, Eq, Clone, Copy)] pub enum MouseState { Invalid, Move, DoubleClick, Down, Up, } #[derive(Debug, PartialOrd, PartialEq, Clone, Copy)] pub struct MouseEvent { pub x: f64, pub y: f64, pub normalized_x: f32, pub normalized_y: f32, pub button: MouseButton, pub state: MouseState, } implement_message!(MouseEvent); impl MessageFromString for MouseEvent { fn from_command_parser(command_parser: CommandParser) -> Option<Box<dyn Message>> where Self: Sized, { if command_parser.has("mouse_move") { let values = command_parser.get_val
random
[]
Rust
tools/ldr2img/src/main.rs
segfault87/ldraw.rs
f181317cf1505ca64456cf67efdf3ac406d05831
use std::{ collections::HashMap, env, fs::File, io::BufReader, path::Path, rc::Rc, }; use bincode::deserialize_from; use clap::{App, Arg}; use glutin::event_loop::EventLoop; use ldraw::{ parser::{parse_color_definition, parse_multipart_document}, }; use ldraw_ir::{ part::PartBuilder, }; use ldraw_olr::{ context::{create_headless_context, create_osmesa_context}, ops::render_display_list, }; use ldraw_renderer::{ display_list::DisplayList, part::Part, }; fn main() { let matches = App::new("ldr2img") .about("Render LDraw model into still image") .arg(Arg::with_name("ldraw_dir") .long("ldraw-dir") .value_name("PATH") .takes_value(true) .help("Path to LDraw directory")) .arg(Arg::with_name("parts_path") .short("p") .value_name("PATH") .takes_value(true) .help("Path to baked LDraw parts")) .arg(Arg::with_name("use_window_system") .short("w") .help("Use window system to utilize GPU rendering")) .arg(Arg::with_name("output") .short("o") .takes_value(true) .help("Output file name")) .arg(Arg::with_name("input") .takes_value(true) .required(true) .index(1) .help("Input file name")) .arg(Arg::with_name("size") .short("s") .takes_value(true) .help("Maximum width/height pixel size")) .get_matches(); let ldrawdir = match matches.value_of("ldraw_dir") { Some(v) => v.to_string(), None => { match env::var("LDRAWDIR") { Ok(v) => v, Err(_) => { panic!("--ldraw-dir option or LDRAWDIR environment variable is required."); } } } }; let ldrawdir = Path::new(&ldrawdir); let bakeddir = match matches.value_of("parts_path") { Some(v) => Path::new(v).to_path_buf(), None => { let baked = Path::new(&ldrawdir).join("baked"); if baked.exists() { baked } else { panic!("Parts path is not provided.") } } }; let use_window_system = matches.is_present("use_window_system"); let size = matches.value_of("size").unwrap_or("1024").parse::<usize>().unwrap(); let mut context = if use_window_system { let evloop = EventLoop::new(); create_headless_context(evloop, size, size) } else { create_osmesa_context(size, 
size) }.unwrap(); let gl = Rc::clone(&context.gl); let colors = parse_color_definition(&mut BufReader::new( File::open(ldrawdir.join("LDConfig.ldr")).unwrap(), )).unwrap(); let input = matches.value_of("input").unwrap(); let output = matches.value_of("output").unwrap_or("image.png"); let document = parse_multipart_document( &colors, &mut BufReader::new(File::open(&input).unwrap()) ).unwrap(); let mut parts = HashMap::new(); for dep in document.list_dependencies() { let path = bakeddir.join(format!("{}.part", dep.normalized)); let file = match File::open(&path) { Ok(f) => f, Err(_) => { println!("Could not open part file {}.", path.to_str().unwrap_or("")); continue }, }; let mut part = deserialize_from::<_, PartBuilder>(&mut BufReader::new(file)).unwrap(); part.part_builder.resolve_colors(&colors); let part = Part::create(&part, Rc::clone(&gl)); parts.insert(dep.clone(), part); } let mut display_list = DisplayList::from_multipart_document(Rc::clone(&gl), &document); { let mut rc = context.rendering_context.borrow_mut(); rc.set_initial_state(); rc.resize(size as _, size as _); rc.upload_shading_data(); } let image = render_display_list(&context, &parts, &mut display_list); image.save(&Path::new(output)).unwrap(); }
use std::{ collections::HashMap, env, fs::File, io::BufReader, path::Path, rc::Rc, }; use bincode::deserialize_from; use clap::{App, Arg}; use glutin::event_loop::EventLoop; use ldraw::{ parser::{parse_color_definition, parse_multipart_document}, }; use ldraw_ir::{ part::PartBuilder, }; use ldraw_olr::{ context::{create_headless_context, create_osmesa_context}, ops::render_display_list, }; use ldraw_renderer::{ display_list::DisplayList, part::Part, };
fn main() { let matches = App::new("ldr2img") .about("Render LDraw model into still image") .arg(Arg::with_name("ldraw_dir") .long("ldraw-dir") .value_name("PATH") .takes_value(true) .help("Path to LDraw directory")) .arg(Arg::with_name("parts_path") .short("p") .value_name("PATH") .takes_value(true) .help("Path to baked LDraw parts")) .arg(Arg::with_name("use_window_system") .short("w") .help("Use window system to utilize GPU rendering")) .arg(Arg::with_name("output") .short("o") .takes_value(true) .help("Output file name")) .arg(Arg::with_name("input") .takes_value(true) .required(true) .index(1) .help("Input file name")) .arg(Arg::with_name("size") .short("s") .takes_value(true) .help("Maximum width/height pixel size")) .get_matches(); let ldrawdir = match matches.value_of("ldraw_dir") { Some(v) => v.to_string(), None => { match env::var("LDRAWDIR") { Ok(v) => v, Err(_) => { panic!("--ldraw-dir option or LDRAWDIR environment variable is required."); } } } }; let ldrawdir = Path::new(&ldrawdir); let bakeddir = match matches.value_of("parts_path") { Some(v) => Path::new(v).to_path_buf(), None => { let baked = Path::new(&ldrawdir).join("baked"); if baked.exists() { baked } else { panic!("Parts path is not provided.") } } }; let use_window_system = matches.is_present("use_window_system"); let size = matches.value_of("size").unwrap_or("1024").parse::<usize>().unwrap(); let mut context = if use_window_system { let evloop = EventLoop::new(); create_headless_context(evloop, size, size) } else { create_osmesa_context(size, size) }.unwrap(); let gl = Rc::clone(&context.gl); let colors = parse_color_definition(&mut BufReader::new( File::open(ldrawdir.join("LDConfig.ldr")).unwrap(), )).unwrap(); let input = matches.value_of("input").unwrap(); let output = matches.value_of("output").unwrap_or("image.png"); let document = parse_multipart_document( &colors, &mut BufReader::new(File::open(&input).unwrap()) ).unwrap(); let mut parts = HashMap::new(); for dep in 
document.list_dependencies() { let path = bakeddir.join(format!("{}.part", dep.normalized)); let file = match File::open(&path) { Ok(f) => f, Err(_) => { println!("Could not open part file {}.", path.to_str().unwrap_or("")); continue }, }; let mut part = deserialize_from::<_, PartBuilder>(&mut BufReader::new(file)).unwrap(); part.part_builder.resolve_colors(&colors); let part = Part::create(&part, Rc::clone(&gl)); parts.insert(dep.clone(), part); } let mut display_list = DisplayList::from_multipart_document(Rc::clone(&gl), &document); { let mut rc = context.rendering_context.borrow_mut(); rc.set_initial_state(); rc.resize(size as _, size as _); rc.upload_shading_data(); } let image = render_display_list(&context, &parts, &mut display_list); image.save(&Path::new(output)).unwrap(); }
function_block-full_function
[ { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{parse_color_definition, parse_multipart_document, parse_single_document};\n\n use crate::color::MaterialRegistry;\n\n use crate::error::{ColorDefinitionParseError, ParseError};\n\n use std::fs::File;\n\n use std::io::BufReader;\n\...
Rust
src/components/message.rs
jtakalai/sanuli
e21e1f887f88ab7c9716b8cba98b1fe59727cf24
use yew::prelude::*; use crate::manager::GameMode; use crate::Msg as GameMsg; const FORMS_LINK_TEMPLATE_ADD: &str = "https://docs.google.com/forms/d/e/1FAIpQLSfH8gs4sq-Ynn8iGOvlc99J_zOG2rJEC4m8V0kCgF_en3RHFQ/viewform?usp=pp_url&entry.461337706=Lis%C3%A4yst%C3%A4&entry.560255602="; const FORMS_LINK_TEMPLATE_DEL: &str = "https://docs.google.com/forms/d/e/1FAIpQLSfH8gs4sq-Ynn8iGOvlc99J_zOG2rJEC4m8V0kCgF_en3RHFQ/viewform?usp=pp_url&entry.461337706=Poistoa&entry.560255602="; const DICTIONARY_LINK_TEMPLATE: &str = "https://www.kielitoimistonsanakirja.fi/#/"; #[derive(Properties, Clone, PartialEq)] pub struct MessageProps { pub message: String, pub is_unknown: bool, pub is_winner: bool, pub is_guessing: bool, pub is_hidden: bool, pub is_emojis_copied: bool, pub is_link_copied: bool, pub word: String, pub last_guess: String, pub game_mode: GameMode, pub callback: Callback<GameMsg>, } #[function_component(Message)] pub fn message(props: &MessageProps) -> Html { html! { <div class="message"> { &props.message } <div class="message-small">{ if props.is_hidden { let callback = props.callback.clone(); let reveal_hidden_tiles = Callback::from(move |e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::RevealHiddenTiles); }); let callback = props.callback.clone(); let reset_game = Callback::from(move |e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::ResetGame); }); html! { <> <a class="link" href={"javascript:void(0)"} onclick={reset_game}> {"Kokeile ratkaista"} </a> {" | "} <a class="link" href={"javascript:void(0)"} onclick={reveal_hidden_tiles}> {"Paljasta"} </a> </> } } else if !props.is_guessing { html! { <SubMessage is_winner={props.is_winner} is_emojis_copied={props.is_emojis_copied} is_link_copied={props.is_link_copied} word={props.word.clone()} game_mode={props.game_mode} callback={props.callback.clone()} /> } } else if props.is_guessing && props.is_unknown { let last_guess = props.last_guess.to_lowercase(); html! 
{ <a class="link" href={format!("{}{}", FORMS_LINK_TEMPLATE_ADD, last_guess)} target="_blank">{ "Ehdota lisäystä?" } </a> } } else { html! {} } } </div> </div> } } #[derive(Properties, Clone, PartialEq)] pub struct SubMessageProps { pub is_winner: bool, pub is_emojis_copied: bool, pub is_link_copied: bool, pub word: String, pub game_mode: GameMode, pub callback: Callback<GameMsg>, } #[function_component(SubMessage)] fn sub_message(props: &SubMessageProps) -> Html { let word = props.word.to_lowercase(); let callback = props.callback.clone(); let share_emojis = Callback::from(move |e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::ShareEmojis); }); let callback = props.callback.clone(); let share_link = Callback::from(move |e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::ShareLink); }); if props.game_mode == GameMode::Quadruple { return html!{} } html! { <> <a class="link" href={format!("{}{}?searchMode=all", DICTIONARY_LINK_TEMPLATE, word)} target="_blank">{ "Sanakirja" } </a> {" | "} <a class="link" href={"javascript:void(0)"} onclick={share_link}> { if !props.is_link_copied { {"Kopioi linkki"} } else { {"Kopioitu!"} } } </a> { if matches!(props.game_mode, GameMode::DailyWord(_)) { html! { <> {" | "} <a class="link" href={"javascript:void(0)"} onclick={share_emojis}> { if !props.is_emojis_copied { {"Kopioi tulos"} } else { {"Kopioitu!"} } } </a> </> } } else if !props.is_winner { html! { <> {" | "} <a class="link" href={format!("{}{}", FORMS_LINK_TEMPLATE_DEL, word)} target="_blank">{ "Ehdota poistoa?" } </a> </> } } else { html! {} } } </> } }
use yew::prelude::*; use crate::manager::GameMode; use crate::Msg as GameMsg; const FORMS_LINK_TEMPLATE_ADD: &str = "https://docs.google.com/forms/d/e/1FAIpQLSfH8gs4sq-Ynn8iGOvlc99J_zOG2rJEC4m8V0kCgF_en3RHFQ/viewform?usp=pp_url&entry.461337706=Lis%C3%A4yst%C3%A4&entry.560255602="; const FORMS_LINK_TEMPLATE_DEL: &str = "https://docs.google.com/forms/d/e/1FAIpQLSfH8gs4sq-Ynn8iGOvlc99J_zOG2rJEC4m8V0kCgF_en3RHFQ/viewform?usp=pp_url&entry.461337706=Poistoa&entry.560255602="; const DICTIONARY_LINK_TEMPLATE: &str = "https://www.kielitoimistonsanakirja.fi/#/"; #[derive(Properties, Clone, PartialEq)] pub struct MessageProps { pub message: String, pub is_unknown: bool, pub is_winner: bool, pub is_guessing: bool, pub is_hidden: bool, pub is_emojis_copied: bool, pub is_link_copied: bool, pub word: String, pub last_guess: String, pub game_mode: GameMode, pub callback: Callback<Ga
|e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::ShareEmojis); }); let callback = props.callback.clone(); let share_link = Callback::from(move |e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::ShareLink); }); if props.game_mode == GameMode::Quadruple { return html!{} } html! { <> <a class="link" href={format!("{}{}?searchMode=all", DICTIONARY_LINK_TEMPLATE, word)} target="_blank">{ "Sanakirja" } </a> {" | "} <a class="link" href={"javascript:void(0)"} onclick={share_link}> { if !props.is_link_copied { {"Kopioi linkki"} } else { {"Kopioitu!"} } } </a> { if matches!(props.game_mode, GameMode::DailyWord(_)) { html! { <> {" | "} <a class="link" href={"javascript:void(0)"} onclick={share_emojis}> { if !props.is_emojis_copied { {"Kopioi tulos"} } else { {"Kopioitu!"} } } </a> </> } } else if !props.is_winner { html! { <> {" | "} <a class="link" href={format!("{}{}", FORMS_LINK_TEMPLATE_DEL, word)} target="_blank">{ "Ehdota poistoa?" } </a> </> } } else { html! {} } } </> } }
meMsg>, } #[function_component(Message)] pub fn message(props: &MessageProps) -> Html { html! { <div class="message"> { &props.message } <div class="message-small">{ if props.is_hidden { let callback = props.callback.clone(); let reveal_hidden_tiles = Callback::from(move |e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::RevealHiddenTiles); }); let callback = props.callback.clone(); let reset_game = Callback::from(move |e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::ResetGame); }); html! { <> <a class="link" href={"javascript:void(0)"} onclick={reset_game}> {"Kokeile ratkaista"} </a> {" | "} <a class="link" href={"javascript:void(0)"} onclick={reveal_hidden_tiles}> {"Paljasta"} </a> </> } } else if !props.is_guessing { html! { <SubMessage is_winner={props.is_winner} is_emojis_copied={props.is_emojis_copied} is_link_copied={props.is_link_copied} word={props.word.clone()} game_mode={props.game_mode} callback={props.callback.clone()} /> } } else if props.is_guessing && props.is_unknown { let last_guess = props.last_guess.to_lowercase(); html! { <a class="link" href={format!("{}{}", FORMS_LINK_TEMPLATE_ADD, last_guess)} target="_blank">{ "Ehdota lisäystä?" } </a> } } else { html! {} } } </div> </div> } } #[derive(Properties, Clone, PartialEq)] pub struct SubMessageProps { pub is_winner: bool, pub is_emojis_copied: bool, pub is_link_copied: bool, pub word: String, pub game_mode: GameMode, pub callback: Callback<GameMsg>, } #[function_component(SubMessage)] fn sub_message(props: &SubMessageProps) -> Html { let word = props.word.to_lowercase(); let callback = props.callback.clone(); let share_emojis = Callback::from(move
random
[ { "content": "pub trait Game {\n\n fn title(&self) -> String;\n\n fn next_word(&mut self);\n\n fn keyboard_tilestate(&self, key: &char) -> KeyState;\n\n fn submit_guess(&mut self);\n\n fn push_character(&mut self, character: char);\n\n fn pop_character(&mut self);\n\n fn share_emojis(&self,...
Rust
src/corroboy/gpu/background.rs
squidboylan/corroboy
4c264c2604eb0cc6830add3c9e98dee0cb054c40
use gfx_device_gl; use image::*; use piston_window; use piston_window::PistonWindow as Window; use piston_window::Texture; use piston_window::TextureSettings; use sdl2_window::Sdl2Window; use crate::corroboy::mmu::Mmu; struct Tile { raw_val: [[u8; 8]; 8], } impl Tile { pub fn new() -> Tile { Tile { raw_val: [[0; 8]; 8], } } #[allow(dead_code)] fn display_ascii(&self) { for i in 0..8 { for j in 0..8 { print!("{} ", self.raw_val[i][j]); } println!(""); } println!(""); } } pub struct Background { background_data_bot: usize, background_data_top: usize, bg_tiles: Vec<Tile>, bg_tile_map_bot: usize, bg_tile_map: [[u8; 32]; 32], window_tile_map_bot: usize, window_tile_map: [[u8; 32]; 32], window_enabled: bool, window_x: u8, window_y: u8, bg_palette: [usize; 4], pub base_tex: Texture<gfx_device_gl::Resources>, pub tex: Texture<gfx_device_gl::Resources>, base_pixel_map: [[usize; 160]; 144], pixel_map: [[usize; 160]; 144], last_pixel_map: [[usize; 160]; 144], pub enabled: u8, } impl Background { pub fn new(window: &mut Window<Sdl2Window>) -> Background { let mut factory = window.factory.clone(); let mut tiles = Vec::with_capacity(256); for _i in 0..256 { let new = Tile::new(); tiles.push(new); } Background { background_data_bot: 0, background_data_top: 0, bg_tiles: tiles, bg_tile_map_bot: 0, bg_tile_map: [[0; 32]; 32], bg_palette: [0; 4], window_tile_map_bot: 0, window_tile_map: [[0; 32]; 32], window_enabled: false, window_x: 0, window_y: 0, base_tex: Texture::empty(&mut factory).unwrap(), tex: Texture::empty(&mut factory).unwrap(), base_pixel_map: [[0; 160]; 144], pixel_map: [[0; 160]; 144], last_pixel_map: [[0; 160]; 144], enabled: 0, } } pub fn initialize(&mut self, mem: &mut Mmu) { let ff40 = mem.get_io_register(0xFF40); if (ff40 & 0b00010000) >> 4 == 0 { self.background_data_bot = 0x8800; self.background_data_top = 0x97FF; } else { self.background_data_bot = 0x8000; self.background_data_top = 0x8FFF; } if ff40 & 0b00001000 == 0 { self.bg_tile_map_bot = 0x9800; } else { 
self.bg_tile_map_bot = 0x9C00; } if ff40 & 0b00100000 == 0 { self.window_enabled = false; } else { self.window_enabled = true; } if ff40 & 0b01000000 == 0 { self.window_tile_map_bot = 0x9800; } else { self.window_tile_map_bot = 0x9C00; } self.window_y = mem.get_mem_u8(0xFF4A); self.window_x = mem.get_mem_u8(0xFF4B); self.build_tile_data(mem); } #[allow(dead_code)] fn print_bg_tile_map(&self) { for i in 0..32 { for j in 0..32 { print!("{} ", self.bg_tile_map[i][j]); } println!(""); } } #[allow(dead_code)] fn display_ascii(&self) { if self.bg_tiles.len() == 256 { for i in 0..32 { for k in 0..8 { for j in 0..32 { for l in 0..8 { print!( "{} ", self.bg_tiles[self.bg_tile_map[i][j] as usize].raw_val[k][l] ); } } println!(""); } } println!(""); } } pub fn generate_tex(&mut self, window: &mut Window<Sdl2Window>) { const SCREEN_SIZE_X: u32 = 160; const SCREEN_SIZE_Y: u32 = 144; let mut new_map = false; let mut x = 0; let mut y = 0; while y < SCREEN_SIZE_Y as usize && new_map == false { while x < SCREEN_SIZE_X as usize && new_map == false { if self.pixel_map[y][x] != self.last_pixel_map[y][x] { new_map = true; } x += 1; } x = 0; y += 1; } if new_map == true { let mut img: RgbaImage = ImageBuffer::new(SCREEN_SIZE_X, SCREEN_SIZE_Y); let mut base_img: RgbaImage = ImageBuffer::new(SCREEN_SIZE_X, SCREEN_SIZE_Y); let colors = [ [255, 255, 255, 255], [169, 169, 169, 255], [128, 128, 128, 255], [0, 0, 0, 255], [0, 0, 0, 0], ]; let mut x = 0; let mut y = 0; while y < SCREEN_SIZE_Y as usize { while x < SCREEN_SIZE_X as usize { let color = colors[self.pixel_map[y][x]]; self.last_pixel_map[y][x] = self.pixel_map[y][x]; img.put_pixel(x as u32, y as u32, Rgba { data: color }); let color = colors[self.base_pixel_map[y][x]]; base_img.put_pixel(x as u32, y as u32, Rgba { data: color }); x += 1; } x = 0; y += 1; } let mut tex_settings = TextureSettings::new(); tex_settings.set_mag(piston_window::Filter::Nearest); self.tex = Texture::from_image(&mut window.factory, &img, 
&tex_settings).unwrap(); self.base_tex = Texture::from_image(&mut window.factory, &base_img, &tex_settings).unwrap(); } } pub fn build_bg_tile_map(&mut self, mem: &mut Mmu) { for i in 0..32 { for j in 0..32 { self.bg_tile_map[i][j] = mem.get_mem_u8(self.bg_tile_map_bot + (i * 32) + j); } } } pub fn build_window_tile_map(&mut self, mem: &mut Mmu) { for i in 0..32 { for j in 0..32 { self.window_tile_map[i][j] = mem.get_mem_u8(self.window_tile_map_bot + (i * 32) + j); } } } pub fn set_bg_palette(&mut self, mem: &mut Mmu) { let ff47 = mem.get_io_register(0xFF47); self.bg_palette[0] = (ff47 & 0b00000011) as usize; self.bg_palette[1] = ((ff47 & 0b00001100) >> 2) as usize; self.bg_palette[2] = ((ff47 & 0b00110000) >> 4) as usize; self.bg_palette[3] = ((ff47 & 0b11000000) >> 6) as usize; } pub fn build_tile_data(&mut self, mem: &mut Mmu) { if self.background_data_bot == 0x8000 { for i in 0..256 { for j in 0..8 { let left = mem.get_vram(self.background_data_bot + (i * 16) + (j * 2)); let right = mem.get_vram(self.background_data_bot + (i * 16) + 1 + (j * 2)); self.bg_tiles[i].raw_val[j as usize][0] = ((right & 0b10000000) >> 6) + ((left & 0b10000000) >> 7); self.bg_tiles[i].raw_val[j as usize][1] = ((right & 0b01000000) >> 5) + ((left & 0b01000000) >> 6); self.bg_tiles[i].raw_val[j as usize][2] = ((right & 0b00100000) >> 4) + ((left & 0b00100000) >> 5); self.bg_tiles[i].raw_val[j as usize][3] = ((right & 0b00010000) >> 3) + ((left & 0b00010000) >> 4); self.bg_tiles[i].raw_val[j as usize][4] = ((right & 0b00001000) >> 2) + ((left & 0b00001000) >> 3); self.bg_tiles[i].raw_val[j as usize][5] = ((right & 0b00000100) >> 1) + ((left & 0b00000100) >> 2); self.bg_tiles[i].raw_val[j as usize][6] = (right & 0b00000010) + ((left & 0b00000010) >> 1); self.bg_tiles[i].raw_val[j as usize][7] = ((right & 0b00000001) << 1) + (left & 0b00000001); } } } else { let curr = 0x9000; for i in 0..128 { for j in 0..8 { let left = mem.get_vram(curr + (i * 16) + (j * 2)); let right = 
mem.get_vram(curr + (i * 16) + 1 + (j * 2)); self.bg_tiles[i].raw_val[j as usize][0] = ((right & 0b10000000) >> 6) + ((left & 0b10000000) >> 7); self.bg_tiles[i].raw_val[j as usize][1] = ((right & 0b01000000) >> 5) + ((left & 0b01000000) >> 6); self.bg_tiles[i].raw_val[j as usize][2] = ((right & 0b00100000) >> 4) + ((left & 0b00100000) >> 5); self.bg_tiles[i].raw_val[j as usize][3] = ((right & 0b00010000) >> 3) + ((left & 0b00010000) >> 4); self.bg_tiles[i].raw_val[j as usize][4] = ((right & 0b00001000) >> 2) + ((left & 0b00001000) >> 3); self.bg_tiles[i].raw_val[j as usize][5] = ((right & 0b00000100) >> 1) + ((left & 0b00000100) >> 2); self.bg_tiles[i].raw_val[j as usize][6] = (right & 0b00000010) + ((left & 0b00000010) >> 1); self.bg_tiles[i].raw_val[j as usize][7] = ((right & 0b00000001) << 1) + (left & 0b00000001); } } let curr = 0x8800; for i in 128..256 { for j in 0..8 { let left = mem.get_vram(curr + ((i - 128) * 16) + (j * 2)); let right = mem.get_vram(curr + ((i - 128) * 16) + 1 + (j * 2)); self.bg_tiles[i].raw_val[j as usize][0] = ((right & 0b10000000) >> 6) + ((left & 0b10000000) >> 7); self.bg_tiles[i].raw_val[j as usize][1] = ((right & 0b01000000) >> 5) + ((left & 0b01000000) >> 6); self.bg_tiles[i].raw_val[j as usize][2] = ((right & 0b00100000) >> 4) + ((left & 0b00100000) >> 5); self.bg_tiles[i].raw_val[j as usize][3] = ((right & 0b00010000) >> 3) + ((left & 0b00010000) >> 4); self.bg_tiles[i].raw_val[j as usize][4] = ((right & 0b00001000) >> 2) + ((left & 0b00001000) >> 3); self.bg_tiles[i].raw_val[j as usize][5] = ((right & 0b00000100) >> 1) + ((left & 0b00000100) >> 2); self.bg_tiles[i].raw_val[j as usize][6] = (right & 0b00000010) + ((left & 0b00000010) >> 1); self.bg_tiles[i].raw_val[j as usize][7] = ((right & 0b00000001) << 1) + (left & 0b00000001); } } } } pub fn update_background_line(&mut self, line_lcd: u8, mem: &mut Mmu) { self.enabled = mem.get_io_register(0xFF40) & 0x01; let scy = mem.get_io_register(0xFF42); let scx = 
mem.get_io_register(0xFF43); if self.enabled == 1 { let line = line_lcd + scy; let tile_y = ((line / 8) % 32) as usize; let line_in_tile = (line % 8) as usize; for i in 0..160 { let x = i + scx; let tile_x = ((x / 8) % 32) as usize; let x_in_tile = (x % 8) as usize; let tile_num = self.bg_tile_map[tile_y][tile_x] as usize; let palette_num = self.bg_tiles[tile_num].raw_val[line_in_tile][x_in_tile] as usize; if palette_num != 0 { let pixel_val = self.bg_palette[palette_num]; self.pixel_map[line_lcd as usize][i as usize] = pixel_val; } else { self.pixel_map[line_lcd as usize][i as usize] = 4; } let base_pixel_val = self.bg_palette[0]; self.base_pixel_map[line_lcd as usize][i as usize] = base_pixel_val; } } else { for i in 0..160 { let pixel_val = self.bg_palette[0]; self.pixel_map[line_lcd as usize][i as usize] = 4; self.base_pixel_map[line_lcd as usize][i as usize] = pixel_val; } } } pub fn update_window_line(&mut self, line_lcd: u8) { if self.window_enabled == true { if self.window_y <= line_lcd { let y = line_lcd - self.window_y; let line_in_tile = (y % 8) as usize; let tile_y = ((y / 8) % 32) as usize; for i in 0..160 { if i + 7 >= self.window_x { let x = i - self.window_x + 7; let tile_x = ((x / 8) % 32) as usize; let x_in_tile = (x % 8) as usize; let tile_num = self.window_tile_map[tile_y][tile_x] as usize; let palette_num = self.bg_tiles[tile_num].raw_val[line_in_tile][x_in_tile] as usize; let pixel_val = self.bg_palette[palette_num]; self.pixel_map[line_lcd as usize][i as usize] = pixel_val; } } } } } }
use gfx_device_gl; use image::*; use piston_window; use piston_window::PistonWindow as Window; use piston_window::Texture; use piston_window::TextureSettings; use sdl2_window::Sdl2Window; use crate::corroboy::mmu::Mmu; struct Tile { raw_val: [[u8; 8]; 8], } impl Tile { pub fn new() -> Tile { Tile { raw_val: [[0; 8]; 8], } } #[allow(dead_code)] fn display_ascii(&self) { for i in 0..8 { for j in 0..8 { print!("{} ", self.raw_val[i][j]); } println!(""); } println!(""); } } pub struct Background { background_data_bot: usize, background_data_top: usize, bg_tiles: Vec<Tile>, bg_tile_map_bot: usize, bg_tile_map: [[u8; 32]; 32], window_tile_map_bot: usize, window_tile_map: [[u8; 32]; 32], window_enabled: bool, window_x: u8, window_y: u8, bg_palette: [usize; 4], pub base_tex: Texture<gfx_device_gl::Resources>, pub tex: Texture<gfx_device_gl::Resources>, base_pixel_map: [[usize; 160]; 144], pixel_map: [[usize; 160]; 144], last_pixel_map: [[usize; 160]; 144], pub enabled: u8, } impl Background { pub fn new(window: &mut Window<Sdl2Window>) -> Background { let mut factory = window.factory.clone(); let mut tiles = Vec::with_capacity(256); for _i in 0..256 { let new = Tile::new(); tiles.push(new); } Background { background_data_bot: 0, background_data_top: 0, bg_tiles: tiles, bg_tile_map_bot: 0, bg_tile_map: [[0; 32]; 32], bg_palette: [0; 4], window_tile_map_bot: 0, window_tile_map: [[0; 32]; 32], window_enabled: false, window_x: 0, window_y: 0, base_tex: Texture::empty(&mut factory).unwrap(), tex: Texture::empty(&mut factory).unwrap(), base_pixel_map: [[0; 160]; 144], pixel_map: [[0; 160]; 144], last_pixel_map: [[0; 160]; 144], enabled: 0, } } pub fn initialize(&mut self, mem: &mut Mmu) { let ff40 = mem.get_io_register(0xFF40); if (ff40 & 0b00010000) >> 4 == 0 { self.background_data_bot = 0x8800; self.background_data_top = 0x97FF; } else { self.background_data_bot = 0x8000; self.background_data_top = 0x8FFF; } if ff40 & 0b00001000 == 0 { self.bg_tile_map_bot = 0x9800; } else { 
self.bg_tile_map_bot = 0x9C00; } if ff40 & 0b00100000 == 0 { self.window_enabled = false; } else { self.window_enabled = true; } if ff40 & 0b01000000 == 0 { self.window_tile_map_bot = 0x9800; } else { self.window_tile_map_bot = 0x9C00; } self.window_y = mem.get_mem_u8(0xFF4A); self.window_x = mem.get_mem_u8(0xFF4B); self.build_tile_data(mem); } #[allow(dead_code)] fn print_bg_tile_map(&self) { for i in 0..32 { for j in 0..32 { print!("{} ", self.bg_tile_map[i][j]); } println!(""); } } #[allow(dead_code)] fn display_ascii(&self) { if self.bg_tiles.len() == 256 { for i in 0..32 { for k in 0..8 { for j in 0..32 { for l in 0..8 { print!( "{} ", self.bg_tiles[self.bg_tile_map[i][j] as usize].raw_val[k][l] ); } } println!(""); } } println!(""); } } pub fn generate_tex(&mut self, window: &mut Window<Sdl2Window>) { const SCREEN_SIZE_X: u32 = 160; const SCREEN_SIZE_Y: u32 = 144; let mut new_map = false; let mut x = 0; let mut y = 0; while y < SCREEN_SIZE_Y as usize && new_map == false { while x < SCREEN_SIZE_X as usize && new_map == false { if self.pixel_map[y][x] != self.last_pixel_map[y][x] { new_map = true; } x += 1; } x = 0; y += 1; } if new_map == true { let mut img: RgbaImage = ImageBuffer::new(SCREEN_SIZE_X, SCREEN_SIZE_Y); let mut base_img: RgbaImage = ImageBuffer::new(SCREEN_SIZE_X, SCREEN_SIZE_Y); let colors = [ [255, 255, 255, 255], [169, 169, 169, 255], [128, 128, 128, 255], [0, 0, 0, 255], [0, 0, 0, 0], ]; let mut x = 0; let mut y = 0; while y < SCREEN_SIZE_Y as usize { while x < SCREEN_SIZE_X as usize { let color = colors[self.pixel_map[y][x]]; self.last_pixel_map[y][x] = self.pixel_map[y][x]; img.put_pixel(x as u32, y as u32, Rgba { data: color }); let color = colors[self.base_pixel_map[y][x]]; base_img.put_pixel(x as u32, y as u32, Rgba { data: color }); x += 1; } x = 0; y += 1; } let mut tex_settings = TextureSettings::new(); tex_settings.set_mag(piston_window::Filter::Nearest); self.tex = Texture::from_image(&mut window.factory, &img, 
&tex_settings).unwrap(); self.base_tex = Texture::from_image(&mut window.factory, &base_img, &tex_settings).unwrap(); } } pub fn build_bg_tile_map(&mut self, mem: &mut Mmu) { for i in 0..32 { for j in 0..32 { self.bg_tile_map[i][j] = mem.get_mem_u8(self.bg_tile_map_bot + (i * 32) + j); } } } pub fn build_window_tile_map(&mut self, mem: &mut Mmu) { for i in 0..32 { for j in 0..32 { self.window_tile_map[i][j] = mem.get_mem_u8(self.window_tile_map_bot + (i * 32) + j); } } } pub fn set_bg_palette(&mut self, mem: &mut Mmu) { let ff47 = mem.get_io_register(0xFF47); self.bg_palette[0] = (ff47 & 0b00000011) as usize; self.bg_palette[1] = ((ff47 & 0b00001100) >> 2) as usize;
pub fn build_tile_data(&mut self, mem: &mut Mmu) { if self.background_data_bot == 0x8000 { for i in 0..256 { for j in 0..8 { let left = mem.get_vram(self.background_data_bot + (i * 16) + (j * 2)); let right = mem.get_vram(self.background_data_bot + (i * 16) + 1 + (j * 2)); self.bg_tiles[i].raw_val[j as usize][0] = ((right & 0b10000000) >> 6) + ((left & 0b10000000) >> 7); self.bg_tiles[i].raw_val[j as usize][1] = ((right & 0b01000000) >> 5) + ((left & 0b01000000) >> 6); self.bg_tiles[i].raw_val[j as usize][2] = ((right & 0b00100000) >> 4) + ((left & 0b00100000) >> 5); self.bg_tiles[i].raw_val[j as usize][3] = ((right & 0b00010000) >> 3) + ((left & 0b00010000) >> 4); self.bg_tiles[i].raw_val[j as usize][4] = ((right & 0b00001000) >> 2) + ((left & 0b00001000) >> 3); self.bg_tiles[i].raw_val[j as usize][5] = ((right & 0b00000100) >> 1) + ((left & 0b00000100) >> 2); self.bg_tiles[i].raw_val[j as usize][6] = (right & 0b00000010) + ((left & 0b00000010) >> 1); self.bg_tiles[i].raw_val[j as usize][7] = ((right & 0b00000001) << 1) + (left & 0b00000001); } } } else { let curr = 0x9000; for i in 0..128 { for j in 0..8 { let left = mem.get_vram(curr + (i * 16) + (j * 2)); let right = mem.get_vram(curr + (i * 16) + 1 + (j * 2)); self.bg_tiles[i].raw_val[j as usize][0] = ((right & 0b10000000) >> 6) + ((left & 0b10000000) >> 7); self.bg_tiles[i].raw_val[j as usize][1] = ((right & 0b01000000) >> 5) + ((left & 0b01000000) >> 6); self.bg_tiles[i].raw_val[j as usize][2] = ((right & 0b00100000) >> 4) + ((left & 0b00100000) >> 5); self.bg_tiles[i].raw_val[j as usize][3] = ((right & 0b00010000) >> 3) + ((left & 0b00010000) >> 4); self.bg_tiles[i].raw_val[j as usize][4] = ((right & 0b00001000) >> 2) + ((left & 0b00001000) >> 3); self.bg_tiles[i].raw_val[j as usize][5] = ((right & 0b00000100) >> 1) + ((left & 0b00000100) >> 2); self.bg_tiles[i].raw_val[j as usize][6] = (right & 0b00000010) + ((left & 0b00000010) >> 1); self.bg_tiles[i].raw_val[j as usize][7] = ((right & 0b00000001) << 1) + 
(left & 0b00000001); } } let curr = 0x8800; for i in 128..256 { for j in 0..8 { let left = mem.get_vram(curr + ((i - 128) * 16) + (j * 2)); let right = mem.get_vram(curr + ((i - 128) * 16) + 1 + (j * 2)); self.bg_tiles[i].raw_val[j as usize][0] = ((right & 0b10000000) >> 6) + ((left & 0b10000000) >> 7); self.bg_tiles[i].raw_val[j as usize][1] = ((right & 0b01000000) >> 5) + ((left & 0b01000000) >> 6); self.bg_tiles[i].raw_val[j as usize][2] = ((right & 0b00100000) >> 4) + ((left & 0b00100000) >> 5); self.bg_tiles[i].raw_val[j as usize][3] = ((right & 0b00010000) >> 3) + ((left & 0b00010000) >> 4); self.bg_tiles[i].raw_val[j as usize][4] = ((right & 0b00001000) >> 2) + ((left & 0b00001000) >> 3); self.bg_tiles[i].raw_val[j as usize][5] = ((right & 0b00000100) >> 1) + ((left & 0b00000100) >> 2); self.bg_tiles[i].raw_val[j as usize][6] = (right & 0b00000010) + ((left & 0b00000010) >> 1); self.bg_tiles[i].raw_val[j as usize][7] = ((right & 0b00000001) << 1) + (left & 0b00000001); } } } } pub fn update_background_line(&mut self, line_lcd: u8, mem: &mut Mmu) { self.enabled = mem.get_io_register(0xFF40) & 0x01; let scy = mem.get_io_register(0xFF42); let scx = mem.get_io_register(0xFF43); if self.enabled == 1 { let line = line_lcd + scy; let tile_y = ((line / 8) % 32) as usize; let line_in_tile = (line % 8) as usize; for i in 0..160 { let x = i + scx; let tile_x = ((x / 8) % 32) as usize; let x_in_tile = (x % 8) as usize; let tile_num = self.bg_tile_map[tile_y][tile_x] as usize; let palette_num = self.bg_tiles[tile_num].raw_val[line_in_tile][x_in_tile] as usize; if palette_num != 0 { let pixel_val = self.bg_palette[palette_num]; self.pixel_map[line_lcd as usize][i as usize] = pixel_val; } else { self.pixel_map[line_lcd as usize][i as usize] = 4; } let base_pixel_val = self.bg_palette[0]; self.base_pixel_map[line_lcd as usize][i as usize] = base_pixel_val; } } else { for i in 0..160 { let pixel_val = self.bg_palette[0]; self.pixel_map[line_lcd as usize][i as usize] = 4; 
self.base_pixel_map[line_lcd as usize][i as usize] = pixel_val; } } } pub fn update_window_line(&mut self, line_lcd: u8) { if self.window_enabled == true { if self.window_y <= line_lcd { let y = line_lcd - self.window_y; let line_in_tile = (y % 8) as usize; let tile_y = ((y / 8) % 32) as usize; for i in 0..160 { if i + 7 >= self.window_x { let x = i - self.window_x + 7; let tile_x = ((x / 8) % 32) as usize; let x_in_tile = (x % 8) as usize; let tile_num = self.window_tile_map[tile_y][tile_x] as usize; let palette_num = self.bg_tiles[tile_num].raw_val[line_in_tile][x_in_tile] as usize; let pixel_val = self.bg_palette[palette_num]; self.pixel_map[line_lcd as usize][i as usize] = pixel_val; } } } } } }
self.bg_palette[2] = ((ff47 & 0b00110000) >> 4) as usize; self.bg_palette[3] = ((ff47 & 0b11000000) >> 6) as usize; }
function_block-function_prefix_line
[ { "content": "pub fn ret_z(mem: &mut Mmu, flags: u8, pc: &mut u16, sp: &mut u16) -> bool {\n\n if flags & 0b10000000 != 0 {\n\n *pc = mem.pop_u16(sp);\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/ret.rs", "rank": 0, "score": 298418.8...
Rust
crates/holochain/tests/gossip_test.rs
zdb999/holochain
45dd3f827caea18f41f77486ca2c37149a18b4ca
use ::fixt::prelude::*; use hdk3::prelude::*; use holochain::conductor::{ api::{AppInterfaceApi, AppRequest, AppResponse, RealAppInterfaceApi}, dna_store::MockDnaStore, }; use holochain::core::ribosome::ZomeCallInvocation; use holochain::{ fixt::*, test_utils::{install_app, setup_app}, }; use holochain_types::app::InstalledCell; use holochain_types::cell::CellId; use holochain_types::dna::DnaDef; use holochain_types::dna::DnaFile; use holochain_types::test_utils::fake_agent_pubkey_1; use holochain_types::{observability, test_utils::fake_agent_pubkey_2}; use holochain_wasm_test_utils::TestWasm; use holochain_zome_types::ExternInput; use matches::assert_matches; use test_wasm_common::{AnchorInput, TestString}; #[tokio::test(threaded_scheduler)] async fn gossip_test() { observability::test_run().ok(); const NUM: usize = 1; let dna_file = DnaFile::new( DnaDef { name: "need_for_speed_test".to_string(), uuid: "ba1d046d-ce29-4778-914b-47e6010d2faf".to_string(), properties: SerializedBytes::try_from(()).unwrap(), zomes: vec![TestWasm::Anchor.into()].into(), }, vec![TestWasm::Anchor.into()], ) .await .unwrap(); let alice_agent_id = fake_agent_pubkey_1(); let alice_cell_id = CellId::new(dna_file.dna_hash().to_owned(), alice_agent_id.clone()); let alice_installed_cell = InstalledCell::new(alice_cell_id.clone(), "alice_handle".into()); let bob_agent_id = fake_agent_pubkey_2(); let bob_cell_id = CellId::new(dna_file.dna_hash().to_owned(), bob_agent_id.clone()); let bob_installed_cell = InstalledCell::new(bob_cell_id.clone(), "bob_handle".into()); let mut dna_store = MockDnaStore::new(); dna_store.expect_get().return_const(Some(dna_file.clone())); dna_store .expect_add_dnas::<Vec<_>>() .times(2) .return_const(()); dna_store .expect_add_entry_defs::<Vec<_>>() .times(2) .return_const(()); dna_store.expect_get_entry_def().return_const(None); let (_tmpdir, app_api, handle) = setup_app( vec![("alice app", vec![(alice_installed_cell, None)])], dna_store, ) .await; let 
anchor_invocation = |anchor: &str, cell_id, i: usize| { let anchor = AnchorInput(anchor.into(), i.to_string()); new_invocation(cell_id, "anchor", anchor) }; for i in 0..NUM { let invocation = anchor_invocation("alice", alice_cell_id.clone(), i).unwrap(); let response = call(&app_api, invocation).await; assert_matches!(response, AppResponse::ZomeCallInvocation(_)); } tokio::time::delay_for(std::time::Duration::from_secs(1)).await; let cell_data = vec![(bob_installed_cell, None)]; install_app("bob_app", cell_data, handle.clone()).await; tokio::time::delay_for(std::time::Duration::from_secs(1)).await; let invocation = new_invocation( bob_cell_id.clone(), "list_anchor_addresses", TestString("alice".into()), ) .unwrap(); let response = call(&app_api, invocation).await; match response { AppResponse::ZomeCallInvocation(r) => { let response: SerializedBytes = r.into_inner(); let hashes: EntryHashes = response.try_into().unwrap(); assert_eq!(hashes.0.len(), NUM); } _ => unreachable!(), } let shutdown = handle.take_shutdown_handle().await.unwrap(); handle.shutdown().await; shutdown.await.unwrap(); } async fn call(app_api: &RealAppInterfaceApi, invocation: ZomeCallInvocation) -> AppResponse { let request = AppRequest::ZomeCallInvocation(Box::new(invocation)); app_api.handle_app_request(request).await } fn new_invocation<P>( cell_id: CellId, func: &str, payload: P, ) -> Result<ZomeCallInvocation, SerializedBytesError> where P: TryInto<SerializedBytes, Error = SerializedBytesError>, { Ok(ZomeCallInvocation { cell_id: cell_id.clone(), zome_name: TestWasm::Anchor.into(), cap: Some(CapSecretFixturator::new(Unpredictable).next().unwrap()), fn_name: func.into(), payload: ExternInput::new(payload.try_into()?), provenance: cell_id.agent_pubkey().clone(), }) }
use ::fixt::prelude::*; use hdk3::prelude::*; use holochain::conductor::{ api::{AppInterfaceApi, AppRequest, AppResponse, RealAppInterfaceApi}, dna_store::MockDnaStore, }; use holochain::core::ribosome::ZomeCallInvocation; use holochain::{ fixt::*, test_utils::{install_app, setup_app}, }; use holochain_types::app::InstalledCell; use holochain_types::cell::CellId; use holochain_types::dna::DnaDef; use holochain_types::dna::DnaFile; use holochain_types::test_utils::fake_agent_pubkey_1; use holochain_types::{observability, test_utils::fake_agent_pubkey_2}; use holochain_wasm_test_utils::TestWasm; use holochain_zome_types::ExternInput; use matches::assert_matches; use test_wasm_common::{AnchorInput, TestString}; #[tokio::test(threaded_scheduler)] async fn gossip_test() { observability::test_run().ok(); const NUM: usize = 1; let dna_file = DnaFile::new( DnaDef { name: "need_for_speed_test".to_string(), uuid: "ba1d046d-ce29-4778-914b-47e6010d2faf".to_string(), properties: SerializedBytes::try_from(()).unwrap(), zomes: vec![TestWasm::Anchor.into()].into(), }, vec![TestWasm::Anchor.into()], ) .await .unwrap(); let alice_agent_id = fake_agent_pubkey_1(); let alice_cell_id = CellId::new(dna_file.dna_hash().to_owned(), alice_agent_id.clone()); let alice_installed_cell = InstalledCell::new(alice_cell_id.clone(), "alice_handle".into()); let bob_agent_id = fake_agent_pubkey_2(); let bob_cell_id = CellId::new(dna_file.dna_hash().to_owned(), bob_agent_id.clone()); let bob_installed_cell = InstalledCell::new(bob_cell_id.clone(), "bob_handle".into()); let mut dna_store = MockDnaStore::new(); dna_store.expect_get().return_const(Some(dna_file.clone())); dna_store .expect_add_dnas::<Vec<_>>() .times(2) .return_const(()); dna_store .expect_add_entry_defs::<Vec<_>>() .times(2) .return_const(()); dna_store.expect_get_entry_def().return_const(None); let (_tmpdir, app_api, handle) = setup_app( vec![("alice app", vec![(alice_installed_cell, None)])], dna_store, ) .await; let 
anchor_invocation = |anchor: &str, cell_id, i: usize| { let anchor = AnchorInput(anchor.into(), i.to_string()); new_invocation(cell_id, "anchor", anchor) }; for i in 0..NUM { let invocation = anchor_invocation("alice", alice_cell_id.clone(), i).unwrap(); let response = call(&app_api, invocation).await; assert_matches!(response, AppResponse::ZomeCallInvocation(_)); } tokio::time::delay_for(std::time::Duration::from_secs(1)).await; let cell_data = vec![(bob_installed_cell, None)]; install_app("bob_app", cell_data, handle.clone()).await; tokio::time::delay_for(std::time::Duration::from_secs(1)).await; let invocation = new_invocation( bob_cell_id.clone(), "list_anchor_addresses", TestString("alice".into()), ) .unwrap(); let response = call(&app_api, invocation).await; match response { AppResponse::ZomeCallInvocation(r) => { let response: SerializedBytes = r.into_inner(); let hashes: EntryHashes = response.try_into().unwrap(); assert_eq!(hashes.0.len(), NUM); } _ => unreachable!(), } let shutdown = handle.take_shutdown_handle().await.unwrap(); handle.shutdown().await; shutdown.await.unwrap(); } async fn call(app_api: &RealAppInterfaceApi, invocation: ZomeCallInvocation) -> AppResponse { let request = AppRequest::ZomeCallInvocation(Box::new(invocation)); app_api.handle_app_request(request).await } fn new_invocation<P>( cell_id: CellId, func: &str, payload: P, ) -> Result<ZomeCallInvocation, SerializedBytesError> where P: TryInto<Serialized
Bytes, Error = SerializedBytesError>, { Ok(ZomeCallInvocation { cell_id: cell_id.clone(), zome_name: TestWasm::Anchor.into(), cap: Some(CapSecretFixturator::new(Unpredictable).next().unwrap()), fn_name: func.into(), payload: ExternInput::new(payload.try_into()?), provenance: cell_id.agent_pubkey().clone(), }) }
function_block-function_prefixed
[ { "content": "/// Helper to create a zome invocation for tests\n\npub fn new_invocation<P, Z: Into<ZomeName>>(\n\n cell_id: &CellId,\n\n func: &str,\n\n payload: P,\n\n zome_name: Z,\n\n) -> Result<ZomeCallInvocation, SerializedBytesError>\n\nwhere\n\n P: TryInto<SerializedBytes, Error = Serializ...
Rust
query/src/servers/mysql/mysql_interactive_worker.rs
CNLHC/databend
fad0df2843c148c9c74793dadd38a9e5db274a36
use std::marker::PhantomData; use std::time::Instant; use common_datablocks::DataBlock; use common_exception::ErrorCode; use common_exception::Result; use common_runtime::tokio; use metrics::histogram; use msql_srv::ErrorKind; use msql_srv::InitWriter; use msql_srv::MysqlShim; use msql_srv::ParamParser; use msql_srv::QueryResultWriter; use msql_srv::StatementMetaWriter; use rand::RngCore; use tokio_stream::StreamExt; use crate::interpreters::InterpreterFactory; use crate::servers::mysql::writers::DFInitResultWriter; use crate::servers::mysql::writers::DFQueryResultWriter; use crate::servers::server::mock::get_mock_user; use crate::sessions::DatabendQueryContextRef; use crate::sessions::SessionRef; use crate::sql::DfHint; use crate::sql::PlanParser; struct InteractiveWorkerBase<W: std::io::Write>(PhantomData<W>); pub struct InteractiveWorker<W: std::io::Write> { base: InteractiveWorkerBase<W>, session: SessionRef, version: String, salt: [u8; 20], } impl<W: std::io::Write> MysqlShim<W> for InteractiveWorker<W> { type Error = ErrorCode; fn on_prepare(&mut self, query: &str, writer: StatementMetaWriter<W>) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return Err(ErrorCode::AbortedSession( "Aborting this connection. because we are try aborting server.", )); } self.base .do_prepare(query, writer, self.session.create_context()) } fn on_execute( &mut self, id: u32, param: ParamParser, writer: QueryResultWriter<W>, ) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return Err(ErrorCode::AbortedSession( "Aborting this connection. 
because we are try aborting server.", )); } self.base .do_execute(id, param, writer, self.session.create_context()) } fn on_close(&mut self, id: u32) { self.base.do_close(id, self.session.create_context()); } fn on_query(&mut self, query: &str, writer: QueryResultWriter<W>) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return Err(ErrorCode::AbortedSession( "Aborting this connection. because we are try aborting server.", )); } let start = Instant::now(); let context = self.session.create_context(); context.attach_query_str(query); if let Err(cause) = DFQueryResultWriter::create(writer).write(self.base.do_query(query, context)) { let new_error = cause.add_message(query); return Err(new_error); }; histogram!( super::mysql_metrics::METRIC_MYSQL_PROCESSOR_REQUEST_DURATION, start.elapsed() ); Ok(()) } fn on_init(&mut self, database_name: &str, writer: InitWriter<W>) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return Err(ErrorCode::AbortedSession( "Aborting this connection. 
because we are try aborting server.", )); } let context = self.session.create_context(); DFInitResultWriter::create(writer).write(self.base.do_init(database_name, context)) } fn version(&self) -> &str { self.version.as_str() } fn connect_id(&self) -> u32 { u32::from_le_bytes([0x08, 0x00, 0x00, 0x00]) } fn default_auth_plugin(&self) -> &str { "mysql_native_password" } fn auth_plugin_for_username(&self, _user: &[u8]) -> &str { "mysql_native_password" } fn salt(&self) -> [u8; 20] { self.salt } fn authenticate( &self, auth_plugin: &str, username: &[u8], salt: &[u8], auth_data: &[u8], ) -> bool { let user = String::from_utf8_lossy(username); if let Ok(user) = get_mock_user(&user) { let encode_password = match auth_plugin { "mysql_native_password" => { if auth_data.is_empty() { vec![] } else { let mut m = sha1::Sha1::new(); m.update(salt); m.update(&user.password); let result = m.digest().bytes(); if auth_data.len() != result.len() { return false; } let mut s = Vec::with_capacity(result.len()); for i in 0..result.len() { s.push(auth_data[i] ^ result[i]); } s } } _ => auth_data.to_vec(), }; return user.authenticate_user(encode_password); } false } } impl<W: std::io::Write> InteractiveWorkerBase<W> { fn do_prepare( &mut self, _: &str, writer: StatementMetaWriter<'_, W>, _: DatabendQueryContextRef, ) -> Result<()> { writer.error( ErrorKind::ER_UNKNOWN_ERROR, "Prepare is not support in Databend.".as_bytes(), )?; Ok(()) } fn do_execute( &mut self, _: u32, _: ParamParser<'_>, writer: QueryResultWriter<'_, W>, _: DatabendQueryContextRef, ) -> Result<()> { writer.error( ErrorKind::ER_UNKNOWN_ERROR, "Execute is not support in Databend.".as_bytes(), )?; Ok(()) } fn do_close(&mut self, _: u32, _: DatabendQueryContextRef) {} fn do_query( &mut self, query: &str, context: DatabendQueryContextRef, ) -> Result<Vec<DataBlock>> { log::debug!("{}", query); let runtime = Self::build_runtime()?; let (plan, hints) = PlanParser::create(context.clone()).build_with_hint_from_sql(query); let 
fetch_query_blocks = || -> Result<Vec<DataBlock>> { let start = Instant::now(); let interpreter = InterpreterFactory::get(context.clone(), plan?)?; let name = interpreter.name().to_string(); let data_stream = runtime.block_on(interpreter.execute())?; histogram!( super::mysql_metrics::METRIC_INTERPRETER_USEDTIME, start.elapsed(), "interpreter" => name ); runtime.block_on(data_stream.collect::<Result<Vec<DataBlock>>>()) }; let blocks = fetch_query_blocks(); match blocks { Ok(v) => Ok(v), Err(e) => { let hint = hints.iter().find(|v| v.error_code.is_some()); if let Some(DfHint { error_code: Some(code), .. }) = hint { if *code == e.code() { Ok(vec![DataBlock::empty()]) } else { let actual_code = e.code(); Err(e.add_message(format!( "Expected server error code: {} but got: {}.", code, actual_code ))) } } else { Err(e) } } } } fn do_init(&mut self, database_name: &str, context: DatabendQueryContextRef) -> Result<()> { self.do_query(&format!("USE {};", database_name), context)?; Ok(()) } fn build_runtime() -> Result<tokio::runtime::Runtime> { tokio::runtime::Builder::new_multi_thread() .enable_all() .build() .map_err(|tokio_error| ErrorCode::TokioError(format!("{}", tokio_error))) } } impl<W: std::io::Write> InteractiveWorker<W> { pub fn create(session: SessionRef) -> InteractiveWorker<W> { let mut bs = vec![0u8; 20]; let mut rng = rand::thread_rng(); rng.fill_bytes(bs.as_mut()); let mut scramble: [u8; 20] = [0; 20]; for i in 0..20 { scramble[i] = bs[i]; if scramble[i] == b'\0' || scramble[i] == b'$' { scramble[i] += 1; } } let context = session.create_context(); InteractiveWorker::<W> { session, base: InteractiveWorkerBase::<W>(PhantomData::<W>), salt: scramble, version: context.get_fuse_version(), } } }
use std::marker::PhantomData; use std::time::Instant; use common_datablocks::DataBlock; use common_exception::ErrorCode; use common_exception::Result; use common_runtime::tokio; use metrics::histogram; use msql_srv::ErrorKind; use msql_srv::InitWriter; use msql_srv::MysqlShim; use msql_srv::ParamParser; use msql_srv::QueryResultWriter; use msql_srv::StatementMetaWriter; use rand::RngCore; use tokio_stream::StreamExt; use crate::interpreters::InterpreterFactory; use crate::servers::mysql::writers::DFInitResultWriter; use crate::servers::mysql::writers::DFQueryResultWriter; use crate::servers::server::mock::get_mock_user; use crate::sessions::DatabendQueryContextRef; use crate::sessions::SessionRef; use crate::sql::DfHint; use crate::sql::PlanParser; struct InteractiveWorkerBase<W: std::io::Write>(PhantomData<W>); pub struct InteractiveWorker<W: std::io::Write> { base: InteractiveWorkerBase<W>, session: SessionRef, version: String, salt: [u8; 20], } impl<W: std::io::Write> MysqlShim<W> for InteractiveWorker<W> { type Error = ErrorCode; fn on_prepare(&mut self, query: &str, writer: StatementMetaWriter<W>) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return Err(ErrorCode::AbortedSession( "Aborting this connection. because we are try aborting server.", )); } self.base .do_prepare(query, writer, self.session.create_context()) } fn on_execute( &mut self, id: u32, param: ParamParser, writer: QueryResultWriter<W>, ) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return
; } self.base .do_execute(id, param, writer, self.session.create_context()) } fn on_close(&mut self, id: u32) { self.base.do_close(id, self.session.create_context()); } fn on_query(&mut self, query: &str, writer: QueryResultWriter<W>) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return Err(ErrorCode::AbortedSession( "Aborting this connection. because we are try aborting server.", )); } let start = Instant::now(); let context = self.session.create_context(); context.attach_query_str(query); if let Err(cause) = DFQueryResultWriter::create(writer).write(self.base.do_query(query, context)) { let new_error = cause.add_message(query); return Err(new_error); }; histogram!( super::mysql_metrics::METRIC_MYSQL_PROCESSOR_REQUEST_DURATION, start.elapsed() ); Ok(()) } fn on_init(&mut self, database_name: &str, writer: InitWriter<W>) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return Err(ErrorCode::AbortedSession( "Aborting this connection. 
because we are try aborting server.", )); } let context = self.session.create_context(); DFInitResultWriter::create(writer).write(self.base.do_init(database_name, context)) } fn version(&self) -> &str { self.version.as_str() } fn connect_id(&self) -> u32 { u32::from_le_bytes([0x08, 0x00, 0x00, 0x00]) } fn default_auth_plugin(&self) -> &str { "mysql_native_password" } fn auth_plugin_for_username(&self, _user: &[u8]) -> &str { "mysql_native_password" } fn salt(&self) -> [u8; 20] { self.salt } fn authenticate( &self, auth_plugin: &str, username: &[u8], salt: &[u8], auth_data: &[u8], ) -> bool { let user = String::from_utf8_lossy(username); if let Ok(user) = get_mock_user(&user) { let encode_password = match auth_plugin { "mysql_native_password" => { if auth_data.is_empty() { vec![] } else { let mut m = sha1::Sha1::new(); m.update(salt); m.update(&user.password); let result = m.digest().bytes(); if auth_data.len() != result.len() { return false; } let mut s = Vec::with_capacity(result.len()); for i in 0..result.len() { s.push(auth_data[i] ^ result[i]); } s } } _ => auth_data.to_vec(), }; return user.authenticate_user(encode_password); } false } } impl<W: std::io::Write> InteractiveWorkerBase<W> { fn do_prepare( &mut self, _: &str, writer: StatementMetaWriter<'_, W>, _: DatabendQueryContextRef, ) -> Result<()> { writer.error( ErrorKind::ER_UNKNOWN_ERROR, "Prepare is not support in Databend.".as_bytes(), )?; Ok(()) } fn do_execute( &mut self, _: u32, _: ParamParser<'_>, writer: QueryResultWriter<'_, W>, _: DatabendQueryContextRef, ) -> Result<()> { writer.error( ErrorKind::ER_UNKNOWN_ERROR, "Execute is not support in Databend.".as_bytes(), )?; Ok(()) } fn do_close(&mut self, _: u32, _: DatabendQueryContextRef) {} fn do_query( &mut self, query: &str, context: DatabendQueryContextRef, ) -> Result<Vec<DataBlock>> { log::debug!("{}", query); let runtime = Self::build_runtime()?; let (plan, hints) = PlanParser::create(context.clone()).build_with_hint_from_sql(query); let 
fetch_query_blocks = || -> Result<Vec<DataBlock>> { let start = Instant::now(); let interpreter = InterpreterFactory::get(context.clone(), plan?)?; let name = interpreter.name().to_string(); let data_stream = runtime.block_on(interpreter.execute())?; histogram!( super::mysql_metrics::METRIC_INTERPRETER_USEDTIME, start.elapsed(), "interpreter" => name ); runtime.block_on(data_stream.collect::<Result<Vec<DataBlock>>>()) }; let blocks = fetch_query_blocks(); match blocks { Ok(v) => Ok(v), Err(e) => { let hint = hints.iter().find(|v| v.error_code.is_some()); if let Some(DfHint { error_code: Some(code), .. }) = hint { if *code == e.code() { Ok(vec![DataBlock::empty()]) } else { let actual_code = e.code(); Err(e.add_message(format!( "Expected server error code: {} but got: {}.", code, actual_code ))) } } else { Err(e) } } } } fn do_init(&mut self, database_name: &str, context: DatabendQueryContextRef) -> Result<()> { self.do_query(&format!("USE {};", database_name), context)?; Ok(()) } fn build_runtime() -> Result<tokio::runtime::Runtime> { tokio::runtime::Builder::new_multi_thread() .enable_all() .build() .map_err(|tokio_error| ErrorCode::TokioError(format!("{}", tokio_error))) } } impl<W: std::io::Write> InteractiveWorker<W> { pub fn create(session: SessionRef) -> InteractiveWorker<W> { let mut bs = vec![0u8; 20]; let mut rng = rand::thread_rng(); rng.fill_bytes(bs.as_mut()); let mut scramble: [u8; 20] = [0; 20]; for i in 0..20 { scramble[i] = bs[i]; if scramble[i] == b'\0' || scramble[i] == b'$' { scramble[i] += 1; } } let context = session.create_context(); InteractiveWorker::<W> { session, base: InteractiveWorkerBase::<W>(PhantomData::<W>), salt: scramble, version: context.get_fuse_version(), } } }
Err(ErrorCode::AbortedSession( "Aborting this connection. because we are try aborting server.", ))
call_expression
[ { "content": "fn query<T: FromRow>(connection: &mut Conn, query: &str) -> Result<Vec<T>> {\n\n connection\n\n .query::<T, &str>(query)\n\n .map_err_to_code(ErrorCode::UnknownException, || \"Query error\")\n\n}\n\n\n", "file_path": "query/src/servers/mysql/mysql_handler_test.rs", "rank":...
Rust
beacon_node/network/src/sync/range_sync/chain_collection.rs
protolambda/lighthouse
3acb3cc640c7a1fe4aab94ce35be1c300a4146d8
use super::chain::{ChainSyncingState, ProcessingResult, SyncingChain}; use crate::message_processor::PeerSyncInfo; use crate::sync::network_context::SyncNetworkContext; use beacon_chain::{BeaconChain, BeaconChainTypes}; use eth2_libp2p::PeerId; use slog::{debug, warn}; use std::sync::Weak; use types::EthSpec; use types::{Hash256, Slot}; pub enum SyncState { Finalized, Head, Idle, } pub struct ChainCollection<T: BeaconChainTypes> { finalized_chains: Vec<SyncingChain<T>>, head_chains: Vec<SyncingChain<T>>, sync_state: SyncState, } impl<T: BeaconChainTypes> ChainCollection<T> { pub fn new() -> Self { ChainCollection { sync_state: SyncState::Idle, finalized_chains: Vec::new(), head_chains: Vec::new(), } } pub fn sync_state(&self) -> &SyncState { &self.sync_state } pub fn fully_synced_peer_found(&mut self) { if let SyncState::Head = self.sync_state { if self.head_chains.is_empty() { self.sync_state = SyncState::Idle; } } } pub fn set_head_sync(&mut self) { if let SyncState::Idle = self.sync_state { self.sync_state = SyncState::Head; } } fn finalized_syncing_index(&self) -> Option<usize> { self.finalized_chains .iter() .enumerate() .find_map(|(index, chain)| { if chain.state == ChainSyncingState::Syncing { Some(index) } else { None } }) } pub fn purge_finalized(&mut self, local_finalized_slot: Slot) { self.finalized_chains .retain(|chain| chain.target_head_slot > local_finalized_slot); } pub fn purge_head(&mut self, head_slot: Slot) { self.head_chains .retain(|chain| chain.target_head_slot > head_slot); } fn get_chain<'a>( chain: &'a mut [SyncingChain<T>], target_head_root: Hash256, target_head_slot: Slot, ) -> Option<&'a mut SyncingChain<T>> { chain.iter_mut().find(|iter_chain| { iter_chain.target_head_root == target_head_root && iter_chain.target_head_slot == target_head_slot }) } pub fn get_finalized_mut( &mut self, target_head_root: Hash256, target_head_slot: Slot, ) -> Option<&mut SyncingChain<T>> { ChainCollection::get_chain( self.finalized_chains.as_mut(), 
target_head_root, target_head_slot, ) } pub fn get_head_mut( &mut self, target_head_root: Hash256, target_head_slot: Slot, ) -> Option<&mut SyncingChain<T>> { ChainCollection::get_chain( self.head_chains.as_mut(), target_head_root, target_head_slot, ) } pub fn update_finalized( &mut self, beacon_chain: Weak<BeaconChain<T>>, network: &mut SyncNetworkContext, log: &slog::Logger, ) { let local_info = match beacon_chain.upgrade() { Some(chain) => PeerSyncInfo::from(&chain), None => { warn!(log, "Beacon chain dropped. Chains not updated"); return; } }; let local_slot = local_info .finalized_epoch .start_slot(T::EthSpec::slots_per_epoch()); self.purge_finalized(local_slot); self.finalized_chains .retain(|chain| !chain.peer_pool.is_empty()); self.purge_head(local_info.head_slot); self.finalized_chains .retain(|chain| !chain.peer_pool.is_empty()); if let Some(index) = self.finalized_syncing_index() { let syncing_chain_peer_count = self.finalized_chains[index].peer_pool.len(); if let Some((new_index, chain)) = self.finalized_chains .iter_mut() .enumerate() .find(|(iter_index, chain)| { *iter_index != index && chain.peer_pool.len() > syncing_chain_peer_count }) { debug!(log, "Switching finalized chains to sync"; "new_target_root" => format!("{}", chain.target_head_root), "new_end_slot" => chain.target_head_slot, "new_start_slot"=> chain.start_slot); self.finalized_chains[index].stop_syncing(); self.finalized_chains[new_index].start_syncing(network, local_slot, log); self.sync_state = SyncState::Finalized; } } else if let Some(chain) = self .finalized_chains .iter_mut() .max_by_key(|chain| chain.peer_pool.len()) { debug!(log, "New finalized chain started syncing"; "new_target_root" => format!("{}", chain.target_head_root), "new_end_slot" => chain.target_head_slot, "new_start_slot"=> chain.start_slot); chain.start_syncing(network, local_slot, log); self.sync_state = SyncState::Finalized; } else { if self.head_chains.is_empty() { self.sync_state = SyncState::Idle; } else { 
self.sync_state = SyncState::Head; } } } pub fn new_finalized_chain( &mut self, local_finalized_slot: Slot, target_head: Hash256, target_slot: Slot, peer_id: PeerId, ) { self.finalized_chains.push(SyncingChain::new( local_finalized_slot, target_slot, target_head, peer_id, )); } pub fn new_head_chain( &mut self, network: &mut SyncNetworkContext, remote_finalized_slot: Slot, target_head: Hash256, target_slot: Slot, peer_id: PeerId, log: &slog::Logger, ) { self.head_chains.iter_mut().for_each(|chain| { chain.peer_pool.remove(&peer_id); }); self.head_chains.retain(|chain| !chain.peer_pool.is_empty()); let mut new_head_chain = SyncingChain::new(remote_finalized_slot, target_slot, target_head, peer_id); new_head_chain.start_syncing(network, remote_finalized_slot, log); self.head_chains.push(new_head_chain); } pub fn is_finalizing_sync(&self) -> bool { !self.finalized_chains.is_empty() } fn request_function<'a, F, I>(chain: I, mut func: F) -> Option<(usize, ProcessingResult)> where I: Iterator<Item = &'a mut SyncingChain<T>>, F: FnMut(&'a mut SyncingChain<T>) -> Option<ProcessingResult>, { chain .enumerate() .find_map(|(index, chain)| Some((index, func(chain)?))) } pub fn finalized_request<F>(&mut self, func: F) -> Option<(usize, ProcessingResult)> where F: FnMut(&mut SyncingChain<T>) -> Option<ProcessingResult>, { ChainCollection::request_function(self.finalized_chains.iter_mut(), func) } pub fn head_request<F>(&mut self, func: F) -> Option<(usize, ProcessingResult)> where F: FnMut(&mut SyncingChain<T>) -> Option<ProcessingResult>, { ChainCollection::request_function(self.head_chains.iter_mut(), func) } #[allow(dead_code)] pub fn head_finalized_request<F>(&mut self, func: F) -> Option<(usize, ProcessingResult)> where F: FnMut(&mut SyncingChain<T>) -> Option<ProcessingResult>, { ChainCollection::request_function( self.finalized_chains .iter_mut() .chain(self.head_chains.iter_mut()), func, ) } pub fn remove_finalized_chain(&mut self, index: usize) -> SyncingChain<T> { 
self.finalized_chains.swap_remove(index) } pub fn remove_head_chain(&mut self, index: usize) -> SyncingChain<T> { self.head_chains.swap_remove(index) } pub fn remove_chain(&mut self, index: usize) -> SyncingChain<T> { if index >= self.finalized_chains.len() { let index = index - self.finalized_chains.len(); self.head_chains.swap_remove(index) } else { self.finalized_chains.swap_remove(index) } } }
use super::chain::{ChainSyncingState, ProcessingResult, SyncingChain}; use crate::message_processor::PeerSyncInfo; use crate::sync::network_context::SyncNetworkContext; use beacon_chain::{BeaconChain, BeaconChainTypes}; use eth2_libp2p::PeerId; use slog::{debug, warn}; use std::sync::Weak; use types::EthSpec; use types::{Hash256, Slot}; pub enum SyncState { Finalized, Head, Idle, } pub struct ChainCollection<T: BeaconChainTypes> { finalized_chains: Vec<SyncingChain<T>>, head_chains: Vec<SyncingChain<T>>, sync_state: SyncState, } impl<T: BeaconChainTypes> ChainCollection<T> { pub fn new() -> Self { ChainCollection { sync_state: SyncState::Idle, finalized_chains: Vec::new(), head_chains: Vec::new(), } } pub fn sync_state(&self) -> &SyncState { &self.sync_state } pub fn fully_synced_peer_found(&mut self) { if let SyncState::Head = self.sync_state { if self.head_chains.is_empty() { self.sync_state = SyncState::Idle; } } } pub fn set_head_sync(&mut self) { if let SyncState::Idle = self.sync_state { self.sync_state = SyncState::Head; } } fn finalized_syncing_index(&self) -> Option<usize> { self.finalized_chains .iter() .enumerate() .find_map(|(index, chain)| { if chain.state == ChainSyncingState::Syncing { Some(index) } else { None } }) } pub fn purge_finalized(&mut self, local_finalized_slot: Slot) { self.finalized_chains .retain(|chain| chain.target_head_slot > local_finalized_slot); } pub fn purge_head(&mut self, head_slot: Slot) { self.head_chains .retain(|chain| chain.target_head_slot > head_slot); } fn get_chain<'a>( chain: &'a mut [SyncingChain<T>], target_head_root: Hash256, target_head_slot: Slot, ) -> Option<&'a mut SyncingChain<T>> { chain.iter_mut().find(|iter_chain| { iter_chain.target_head_root == target_head_root && iter_chain.target_head_slot == target_head_slot }) } pub fn get_finalized_mut( &mut self, target_head_root: Hash256, target_head_slot: Slot, ) -> Option<&mut SyncingChain<T>> { ChainCollection::get_chain( self.finalized_chains.as_mut(), 
target_head_root, target_head_slot, ) }
pub fn update_finalized( &mut self, beacon_chain: Weak<BeaconChain<T>>, network: &mut SyncNetworkContext, log: &slog::Logger, ) { let local_info = match beacon_chain.upgrade() { Some(chain) => PeerSyncInfo::from(&chain), None => { warn!(log, "Beacon chain dropped. Chains not updated"); return; } }; let local_slot = local_info .finalized_epoch .start_slot(T::EthSpec::slots_per_epoch()); self.purge_finalized(local_slot); self.finalized_chains .retain(|chain| !chain.peer_pool.is_empty()); self.purge_head(local_info.head_slot); self.finalized_chains .retain(|chain| !chain.peer_pool.is_empty()); if let Some(index) = self.finalized_syncing_index() { let syncing_chain_peer_count = self.finalized_chains[index].peer_pool.len(); if let Some((new_index, chain)) = self.finalized_chains .iter_mut() .enumerate() .find(|(iter_index, chain)| { *iter_index != index && chain.peer_pool.len() > syncing_chain_peer_count }) { debug!(log, "Switching finalized chains to sync"; "new_target_root" => format!("{}", chain.target_head_root), "new_end_slot" => chain.target_head_slot, "new_start_slot"=> chain.start_slot); self.finalized_chains[index].stop_syncing(); self.finalized_chains[new_index].start_syncing(network, local_slot, log); self.sync_state = SyncState::Finalized; } } else if let Some(chain) = self .finalized_chains .iter_mut() .max_by_key(|chain| chain.peer_pool.len()) { debug!(log, "New finalized chain started syncing"; "new_target_root" => format!("{}", chain.target_head_root), "new_end_slot" => chain.target_head_slot, "new_start_slot"=> chain.start_slot); chain.start_syncing(network, local_slot, log); self.sync_state = SyncState::Finalized; } else { if self.head_chains.is_empty() { self.sync_state = SyncState::Idle; } else { self.sync_state = SyncState::Head; } } } pub fn new_finalized_chain( &mut self, local_finalized_slot: Slot, target_head: Hash256, target_slot: Slot, peer_id: PeerId, ) { self.finalized_chains.push(SyncingChain::new( local_finalized_slot, target_slot, 
target_head, peer_id, )); } pub fn new_head_chain( &mut self, network: &mut SyncNetworkContext, remote_finalized_slot: Slot, target_head: Hash256, target_slot: Slot, peer_id: PeerId, log: &slog::Logger, ) { self.head_chains.iter_mut().for_each(|chain| { chain.peer_pool.remove(&peer_id); }); self.head_chains.retain(|chain| !chain.peer_pool.is_empty()); let mut new_head_chain = SyncingChain::new(remote_finalized_slot, target_slot, target_head, peer_id); new_head_chain.start_syncing(network, remote_finalized_slot, log); self.head_chains.push(new_head_chain); } pub fn is_finalizing_sync(&self) -> bool { !self.finalized_chains.is_empty() } fn request_function<'a, F, I>(chain: I, mut func: F) -> Option<(usize, ProcessingResult)> where I: Iterator<Item = &'a mut SyncingChain<T>>, F: FnMut(&'a mut SyncingChain<T>) -> Option<ProcessingResult>, { chain .enumerate() .find_map(|(index, chain)| Some((index, func(chain)?))) } pub fn finalized_request<F>(&mut self, func: F) -> Option<(usize, ProcessingResult)> where F: FnMut(&mut SyncingChain<T>) -> Option<ProcessingResult>, { ChainCollection::request_function(self.finalized_chains.iter_mut(), func) } pub fn head_request<F>(&mut self, func: F) -> Option<(usize, ProcessingResult)> where F: FnMut(&mut SyncingChain<T>) -> Option<ProcessingResult>, { ChainCollection::request_function(self.head_chains.iter_mut(), func) } #[allow(dead_code)] pub fn head_finalized_request<F>(&mut self, func: F) -> Option<(usize, ProcessingResult)> where F: FnMut(&mut SyncingChain<T>) -> Option<ProcessingResult>, { ChainCollection::request_function( self.finalized_chains .iter_mut() .chain(self.head_chains.iter_mut()), func, ) } pub fn remove_finalized_chain(&mut self, index: usize) -> SyncingChain<T> { self.finalized_chains.swap_remove(index) } pub fn remove_head_chain(&mut self, index: usize) -> SyncingChain<T> { self.head_chains.swap_remove(index) } pub fn remove_chain(&mut self, index: usize) -> SyncingChain<T> { if index >= 
self.finalized_chains.len() { let index = index - self.finalized_chains.len(); self.head_chains.swap_remove(index) } else { self.finalized_chains.swap_remove(index) } } }
pub fn get_head_mut( &mut self, target_head_root: Hash256, target_head_slot: Slot, ) -> Option<&mut SyncingChain<T>> { ChainCollection::get_chain( self.head_chains.as_mut(), target_head_root, target_head_slot, ) }
function_block-full_function
[ { "content": "/// Ensures that the finalized root can be set to all values in `roots`.\n\nfn test_update_finalized_root(roots: &[(Hash256, Slot)]) {\n\n let harness = &FORKED_HARNESS;\n\n\n\n let lmd = harness.new_fork_choice();\n\n\n\n for (root, _slot) in roots.iter().rev() {\n\n let block = h...
Rust
truck-meshalgo/src/analyzers/collision.rs
leomcelroy/truck
081a6938f479b37f3516c3b380ce69e403f64d42
use super::*; pub trait Collision { fn collide_with(&self, other: &PolygonMesh) -> Option<(Point3, Point3)>; fn extract_interference(&self, other: &PolygonMesh) -> Vec<(Point3, Point3)>; } impl Collision for PolygonMesh { #[inline(always)] fn collide_with(&self, other: &PolygonMesh) -> Option<(Point3, Point3)> { are_colliding(self, other) } #[inline(always)] fn extract_interference(&self, other: &PolygonMesh) -> Vec<(Point3, Point3)> { collision(self, other) } } #[repr(u8)] #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] enum EndPointType { Front, Back, } #[derive(Clone, Copy, Debug, PartialEq, PartialOrd)] struct EndPoint { entity: f64, r#type: EndPointType, segnum: usize, index: usize, } impl EndPoint { #[inline(always)] fn new(entity: f64, r#type: EndPointType, segnum: usize, index: usize) -> EndPoint { EndPoint { entity, r#type, segnum, index, } } #[inline(always)] fn from_seg(seg: (f64, f64), segnum: usize, index: usize) -> Vec<EndPoint> { vec![ EndPoint::new(seg.0, EndPointType::Front, segnum, index), EndPoint::new(seg.1, EndPointType::Back, segnum, index), ] } } fn take_one_unit() -> Vector3 { loop { let normal = Vector3::new( 2.0 * rand::random::<f64>() - 1.0, 2.0 * rand::random::<f64>() - 1.0, 2.0 * rand::random::<f64>() - 1.0, ); if !normal.so_small() { return normal.normalize(); } } } fn tri_to_seg(tri: [Point3; 3], unit: Vector3) -> (f64, f64) { let a = tri[0].to_vec().dot(unit); let b = tri[1].to_vec().dot(unit); let c = tri[2].to_vec().dot(unit); (f64::min(f64::min(a, b), c), f64::max(f64::max(a, b), c)) } fn sorted_endpoints<I, J>(iter0: I, iter1: J) -> Vec<EndPoint> where I: IntoIterator<Item = [Point3; 3]>, J: IntoIterator<Item = [Point3; 3]>, { let unit = take_one_unit(); let mut res: Vec<EndPoint> = iter0 .into_iter() .enumerate() .filter(|(_, tri)| !(tri[1] - tri[0]).cross(tri[2] - tri[0]).so_small()) .flat_map(|(i, tri)| EndPoint::from_seg(tri_to_seg(tri, unit), 0, i)) .chain( iter1 .into_iter() .enumerate() .filter(|(_, tri)| 
!(tri[1] - tri[0]).cross(tri[2] - tri[0]).so_small()) .flat_map(|(i, tri)| EndPoint::from_seg(tri_to_seg(tri, unit), 1, i)), ) .collect(); res.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Greater)); res } fn colliding_segment_pairs(sort_endpoints: Vec<EndPoint>) -> impl Iterator<Item = (usize, usize)> { let mut current = [Vec::<usize>::new(), Vec::<usize>::new()]; sort_endpoints .into_iter() .filter_map( move |EndPoint { r#type, segnum, index, .. }| match r#type { EndPointType::Front => { current[segnum].push(index); Some(current[1 - segnum].clone().into_iter().map(move |i| { if segnum == 0 { (index, i) } else { (i, index) } })) } EndPointType::Back => { let i = current[segnum] .iter() .enumerate() .find(|(_, idx)| **idx == index) .unwrap() .0; current[segnum].swap_remove(i); None } }, ) .flatten() } fn disjoint_bdbs(tri0: [Point3; 3], tri1: [Point3; 3]) -> bool { let bdb0: BoundingBox<Point3> = tri0.iter().collect(); let bdb1: BoundingBox<Point3> = tri1.iter().collect(); bdb0.max()[0] < bdb1.min()[0] || bdb1.max()[0] < bdb0.min()[0] || bdb0.max()[1] < bdb1.min()[1] || bdb1.max()[1] < bdb0.min()[1] || bdb0.max()[2] < bdb1.min()[2] || bdb1.max()[2] < bdb0.min()[2] } fn collide_seg_triangle(seg: [Point3; 2], tri: [Point3; 3]) -> Option<Point3> { let ab = tri[1] - tri[0]; let bc = tri[2] - tri[1]; let ca = tri[0] - tri[2]; let nor = ab.cross(ca); if nor.so_small() { return None; } let ap = seg[0] - tri[0]; let aq = seg[1] - tri[0]; let dotapnor = ap.dot(nor); let dotaqnor = aq.dot(nor); if dotapnor * dotaqnor > 0.0 { return None; } let h = seg[0] + dotapnor / (dotapnor - dotaqnor) * (seg[1] - seg[0]); if f64::signum(ab.cross(nor).dot(h - tri[0]) + TOLERANCE2) + f64::signum(bc.cross(nor).dot(h - tri[1]) + TOLERANCE2) + f64::signum(ca.cross(nor).dot(h - tri[2]) + TOLERANCE2) >= 2.0 { Some(h) } else { None } } fn collide_triangles(tri0: [Point3; 3], tri1: [Point3; 3]) -> Option<(Point3, Point3)> { let mut tuple = (None, None); [ 
collide_seg_triangle([tri0[0], tri0[1]], tri1), collide_seg_triangle([tri0[1], tri0[2]], tri1), collide_seg_triangle([tri0[2], tri0[0]], tri1), collide_seg_triangle([tri1[0], tri1[1]], tri0), collide_seg_triangle([tri1[1], tri1[2]], tri0), collide_seg_triangle([tri1[2], tri1[0]], tri0), ] .iter() .for_each(|pt| match tuple { (None, _) => tuple.0 = *pt, (Some(_), None) => tuple.1 = *pt, (Some(ref mut p), Some(ref mut q)) => { if let Some(pt) = pt { let dist0 = pt.distance2(*p); let dist1 = pt.distance2(*q); let dist2 = p.distance2(*q); if dist2 < dist0 { *q = *pt; } else if dist2 < dist1 { *p = *pt; } } } }); match tuple { (Some(a), Some(b)) => Some((a, b)), _ => None, } } fn collision(poly0: &PolygonMesh, poly1: &PolygonMesh) -> Vec<(Point3, Point3)> { let tris0 = poly0.faces().triangle_iter().collect::<Vec<_>>(); let tris1 = poly1.faces().triangle_iter().collect::<Vec<_>>(); let iter0 = tris0.iter().map(|face| { [ poly0.positions()[face[0].pos], poly0.positions()[face[1].pos], poly0.positions()[face[2].pos], ] }); let iter1 = tris1.iter().map(|face| { [ poly1.positions()[face[0].pos], poly1.positions()[face[1].pos], poly1.positions()[face[2].pos], ] }); colliding_segment_pairs(sorted_endpoints(iter0, iter1)) .filter_map(|(idx0, idx1)| { let face0 = tris0[idx0]; let tri0 = [ poly0.positions()[face0[0].pos], poly0.positions()[face0[1].pos], poly0.positions()[face0[2].pos], ]; let face1 = tris1[idx1]; let tri1 = [ poly1.positions()[face1[0].pos], poly1.positions()[face1[1].pos], poly1.positions()[face1[2].pos], ]; if disjoint_bdbs(tri0, tri1) { None } else { collide_triangles(tri0, tri1) } }) .collect() } fn are_colliding(poly0: &PolygonMesh, poly1: &PolygonMesh) -> Option<(Point3, Point3)> { let tris0 = poly0.faces().triangle_iter().collect::<Vec<_>>(); let tris1 = poly1.faces().triangle_iter().collect::<Vec<_>>(); let iter0 = tris0.iter().map(|face| { [ poly0.positions()[face[0].pos], poly0.positions()[face[1].pos], poly0.positions()[face[2].pos], ] }); let iter1 = 
tris1.iter().map(|face| { [ poly1.positions()[face[0].pos], poly1.positions()[face[1].pos], poly1.positions()[face[2].pos], ] }); colliding_segment_pairs(sorted_endpoints(iter0, iter1)).find_map(|(idx0, idx1)| { let face0 = tris0[idx0]; let tri0 = [ poly0.positions()[face0[0].pos], poly0.positions()[face0[1].pos], poly0.positions()[face0[2].pos], ]; let face1 = tris1[idx1]; let tri1 = [ poly1.positions()[face1[0].pos], poly1.positions()[face1[1].pos], poly1.positions()[face1[2].pos], ]; if disjoint_bdbs(tri0, tri1) { None } else { collide_triangles(tri0, tri1) } }) } #[test] fn collide_triangles_test() { let tri0 = [ Point3::origin(), Point3::new(1.0, 0.0, 0.0), Point3::new(0.0, 1.0, 0.0), ]; let tri1 = [ Point3::new(0.0, 0.0, -1.0), Point3::new(-1.0, -1.0, 1.0), Point3::new(1.0, 1.0, 1.0), ]; assert!(collide_triangles(tri0, tri1).is_some()); let tri0 = [ Point3::new(0.0, 0.0, 0.0), Point3::new(1.0, 0.0, 0.0), Point3::new(0.0, 1.0, 0.0), ]; let tri1 = [ Point3::new(0.0, 0.0, 0.5), Point3::new(1.0, 0.0, 1.0), Point3::new(1.0, 1.0, 1.0), ]; assert!(collide_triangles(tri0, tri1).is_none()); }
use super::*; pub trait Collision { fn collide_with(&self, other: &PolygonMesh) -> Option<(Point3, Point3)>; fn extract_interference(&self, other: &PolygonMesh) -> Vec<(Point3, Point3)>; } impl Collision for PolygonMesh { #[inline(always)] fn collide_with(&self, other: &PolygonMesh) -> Option<(Point3, Point3)> { are_colliding(self, other) } #[inline(always)] fn extract_interference(&self, other: &PolygonMesh) -> Vec<(Point3, Point3)> { collision(self, other) } } #[repr(u8)] #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] enum EndPointType { Front, Back, } #[derive(Clone, Copy, Debug, PartialEq, PartialOrd)] struct EndPoint { entity: f64, r#type: EndPointType, segnum: usize, index: usize, } impl EndPoint { #[inline(always)] fn new(entity: f64, r#type: EndPointType, segnum: usize, index: usize) -> EndPoint { EndPoint { entity, r#type, segnum, index, } } #[inline(always)] fn from_seg(seg: (f64, f64), segnum: usize, index: usize) -> Vec<EndPoint> { vec![ EndPoint::new(seg.0, EndPointType::Front, segnum, index), EndPoint::new(seg.1, EndPointType::Back, segnum, index), ] } } fn take_one_unit() -> Vector3 { loop { let normal = Vector3::new( 2.0 * rand::random::<f64>() - 1.0, 2.0 * rand::random::<f64>() - 1.0, 2.0 * rand::random::<f64>() - 1.0, ); if !normal.so_small() { return normal.normalize(); } } } fn tri_to_seg(tri: [Point3; 3], unit: Vector3) -> (f64, f64) { let a = tri[0].to_vec().dot(unit); let b = tri[1].to_vec().dot(unit); let c = tri[2].to_vec().dot(unit); (f64::min(f64::min(a, b), c), f64::max(f64::max(a, b), c)) } fn sorted_endpoints<I, J>(iter0: I, iter1: J) -> Vec<EndPoint> where I: IntoIterator<Item = [Point3; 3]>, J: IntoIterator<Item = [Point3; 3]>, { let unit = take_one_unit(); let mut res: Vec<EndPoint> = iter0 .into_iter() .enumerate() .filter(|(_, tri)| !(tri[1] - tri[0]).cross(tri[2] - tri[0]).so_small()) .flat_map(|(i, tri)| EndPoint::from_seg(tri_to_seg(tri, unit), 0, i)) .chain( iter1 .into_iter() .enumerate() .filter(|(_, tri)| 
!(tri[1] - tri[0]).cross(tri[2] - tri[0]).so_small()) .flat_map(|(i, tri)| EndPoint::from_seg(tri_to_seg(tri, unit), 1, i)), ) .collect(); res.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Greater)); res } fn colliding_segment_pairs(sort_endpoints: Vec<EndPoint>) -> impl Iterator<Item = (usize, usize)> { let mut current = [Vec::<usize>::new(), Vec::<usize>::new()]; sort_endpoints .into_iter() .filter_map( move |EndPoint { r#type, segnum, index, .. }| match r#type { EndPointType::Front => { current[segnum].push(index); Some(current[1 - segnum].clone().into_iter().map(move |i| { if segnum == 0 { (index, i) } else { (i, index) } })) } EndPointType::Back => { let i = current[segnum] .iter() .enumerate() .find(|(_, idx)| **idx == index) .unwrap() .0; current[segnum].swap_remove(i); None } }, ) .flatten() } fn disjoint_bdbs(tri0: [Point3; 3], tri1: [Point3; 3]) -> bool { let bdb0: BoundingBox<Point3> = tri0.iter().collect(); let bdb1: BoundingBox<Point3> = tri1.iter().collect(); bdb0.max()[0] < bdb1.min()[0] || bdb1.max()[0] < bdb0.min()[0] || bdb0.max()[1] < bdb1.min()[1] || bdb1.max()[1] < bdb0.min()[1] || bdb0.max()[2] < bdb1.min()[2] || bdb1.max()[2] < bdb0.min()[2] } fn collide_seg_triangle(seg: [Point3; 2], tri: [Point3; 3]) -> Option<Point3> { let ab = tri[1] - tri[0]; let bc = tri[2] - tri[1]; let ca = tri[0] - tri[2]; let nor = ab.cross(ca); if nor.so_small() { return None; } let ap = seg[0] - tri[0]; let aq = seg[1] - tri[0]; let dotapnor = ap.dot(nor); let dotaqnor = aq.dot(nor); if dotapnor * dotaqnor > 0.0 { return None; } let h = seg[0] + dotapnor / (dotapnor - dotaqnor) * (seg[1] - seg[0]); if f64::signum(ab.cross(nor).dot(h - tri[0]) + TOLERANCE2) + f64::signum(bc.cross(nor).dot(h - tri[1]) + TOLERANCE2) + f64::signum(ca.cross(nor).dot(h - tri[2]) + TOLERANCE2) >= 2.0 { Some(h) } else { None } } fn collide_triangles(tri0: [Point3; 3], tri1: [Point3; 3]) -> Option<(Point3, Point3)> { let mut tuple = (None, None); [ 
collide_seg_triangle([tri0[0], tri0[1]], tri1), collide_seg_triangle([tri0[1], tri0[2]], tri1), collide_seg_triangle([tri0[2], tri0[0]], tri1),
each(|pt| match tuple { (None, _) => tuple.0 = *pt, (Some(_), None) => tuple.1 = *pt, (Some(ref mut p), Some(ref mut q)) => { if let Some(pt) = pt { let dist0 = pt.distance2(*p); let dist1 = pt.distance2(*q); let dist2 = p.distance2(*q); if dist2 < dist0 { *q = *pt; } else if dist2 < dist1 { *p = *pt; } } } }); match tuple { (Some(a), Some(b)) => Some((a, b)), _ => None, } } fn collision(poly0: &PolygonMesh, poly1: &PolygonMesh) -> Vec<(Point3, Point3)> { let tris0 = poly0.faces().triangle_iter().collect::<Vec<_>>(); let tris1 = poly1.faces().triangle_iter().collect::<Vec<_>>(); let iter0 = tris0.iter().map(|face| { [ poly0.positions()[face[0].pos], poly0.positions()[face[1].pos], poly0.positions()[face[2].pos], ] }); let iter1 = tris1.iter().map(|face| { [ poly1.positions()[face[0].pos], poly1.positions()[face[1].pos], poly1.positions()[face[2].pos], ] }); colliding_segment_pairs(sorted_endpoints(iter0, iter1)) .filter_map(|(idx0, idx1)| { let face0 = tris0[idx0]; let tri0 = [ poly0.positions()[face0[0].pos], poly0.positions()[face0[1].pos], poly0.positions()[face0[2].pos], ]; let face1 = tris1[idx1]; let tri1 = [ poly1.positions()[face1[0].pos], poly1.positions()[face1[1].pos], poly1.positions()[face1[2].pos], ]; if disjoint_bdbs(tri0, tri1) { None } else { collide_triangles(tri0, tri1) } }) .collect() } fn are_colliding(poly0: &PolygonMesh, poly1: &PolygonMesh) -> Option<(Point3, Point3)> { let tris0 = poly0.faces().triangle_iter().collect::<Vec<_>>(); let tris1 = poly1.faces().triangle_iter().collect::<Vec<_>>(); let iter0 = tris0.iter().map(|face| { [ poly0.positions()[face[0].pos], poly0.positions()[face[1].pos], poly0.positions()[face[2].pos], ] }); let iter1 = tris1.iter().map(|face| { [ poly1.positions()[face[0].pos], poly1.positions()[face[1].pos], poly1.positions()[face[2].pos], ] }); colliding_segment_pairs(sorted_endpoints(iter0, iter1)).find_map(|(idx0, idx1)| { let face0 = tris0[idx0]; let tri0 = [ poly0.positions()[face0[0].pos], 
poly0.positions()[face0[1].pos], poly0.positions()[face0[2].pos], ]; let face1 = tris1[idx1]; let tri1 = [ poly1.positions()[face1[0].pos], poly1.positions()[face1[1].pos], poly1.positions()[face1[2].pos], ]; if disjoint_bdbs(tri0, tri1) { None } else { collide_triangles(tri0, tri1) } }) } #[test] fn collide_triangles_test() { let tri0 = [ Point3::origin(), Point3::new(1.0, 0.0, 0.0), Point3::new(0.0, 1.0, 0.0), ]; let tri1 = [ Point3::new(0.0, 0.0, -1.0), Point3::new(-1.0, -1.0, 1.0), Point3::new(1.0, 1.0, 1.0), ]; assert!(collide_triangles(tri0, tri1).is_some()); let tri0 = [ Point3::new(0.0, 0.0, 0.0), Point3::new(1.0, 0.0, 0.0), Point3::new(0.0, 1.0, 0.0), ]; let tri1 = [ Point3::new(0.0, 0.0, 0.5), Point3::new(1.0, 0.0, 1.0), Point3::new(1.0, 1.0, 1.0), ]; assert!(collide_triangles(tri0, tri1).is_none()); }
collide_seg_triangle([tri1[0], tri1[1]], tri0), collide_seg_triangle([tri1[1], tri1[2]], tri0), collide_seg_triangle([tri1[2], tri1[0]], tri0), ] .iter() .for_
function_block-random_span
[ { "content": "fn tri_to_seg(tri: [Point3; 3], unit: Vector3, tol: f64) -> (f64, f64) {\n\n let a = tri[0].to_vec().dot(unit);\n\n let b = tri[1].to_vec().dot(unit);\n\n let c = tri[2].to_vec().dot(unit);\n\n (\n\n f64::min(f64::min(a, b), c) - tol,\n\n f64::max(f64::max(a, b), c) + tol...
Rust
testspace/src/history.rs
galacticfungus/Egg
a8d80d046f28ae9688432c69890dd44bf6516315
use std::fs; use std::path; use std::vec; #[derive(Clone, Debug)] pub enum FileItem { Directory(path::PathBuf), File(path::PathBuf), } #[derive(Debug, Clone)] pub struct FileHistory { history: vec::Vec<FileItem>, allow_cleanup: bool, } impl Default for FileHistory { fn default() -> FileHistory { FileHistory { history: vec::Vec::default(), allow_cleanup: true, } } } impl FileHistory { pub fn allow_cleanup(&mut self, cleanup: bool) { self.allow_cleanup = cleanup; } pub fn record_directory<P: AsRef<path::Path>>(&mut self, path: P) { let path = path.as_ref(); self.history.push(FileItem::Directory(path.to_path_buf())); } pub fn record_file<P: AsRef<path::Path>>(&mut self, path: P) { let path = path.as_ref(); self.history.push(FileItem::File(path.to_path_buf())); } pub fn cleanup(&mut self) { while let Some(file_item) = self.history.pop() { match file_item { FileItem::Directory(path) => { fs::remove_dir_all(&path).unwrap_or_else(|err| { eprintln!( "Failed to cleanup the test directory {}, error was {}", path.display(), err ); }); eprintln!("Deleted {}", path.display()); } FileItem::File(path) => { fs::remove_file(path.as_path()).unwrap_or_else(|err| { eprintln!( "Failed to cleanup the test file {}, error was {}", path.display(), err ); }); eprintln!("Deleted {}", path.display()); } } } self.history.clear(); } } #[cfg(test)] mod tests { use super::FileHistory; use std::fs; #[test] fn test_history() { let mut history = FileHistory::default(); let mut temp_dir = std::env::temp_dir(); temp_dir.push("remove_me"); let temp_path = temp_dir.as_path(); fs::create_dir(temp_path).unwrap_or_else(|err| { panic!( "Failed to create the temporary directory, error was {}", err ); }); assert!(temp_path.exists()); history.record_directory(temp_path); history.cleanup(); assert_eq!(temp_path.exists(), false); } #[test] fn test_file_history() { use byteorder::{self, LittleEndian, WriteBytesExt}; let mut history = FileHistory::default(); let mut temp_file = std::env::temp_dir(); 
temp_file.push("remove_me_123"); let temp_path = temp_file.as_path(); { let mut file = fs::OpenOptions::new() .create(true) .create_new(true) .write(true) .read(true) .open(temp_path) .unwrap_or_else(|err| { panic!("Failed to create the temp file, error was {}", err); }); file.write_u64::<LittleEndian>(12345).unwrap_or_else(|err| { panic!( "Failed writing test data during history test, error was {}", err ); }); } assert!(temp_path.exists()); history.record_file(temp_path); history.cleanup(); assert_eq!(temp_path.exists(), false); } }
use std::fs; use std::path; use std::vec; #[derive(Clone, Debug)] pub enum FileItem { Directory(path::PathBuf), File(path::PathBuf), } #[derive(Debug, Clone)] pub struct FileHistory { history: vec::Vec<FileItem>, allow_cleanup: bool, } impl Default for FileHistory { fn default() -> FileHistory { FileHistory { history: vec::Vec::default(), allow_cleanup: true, } } } impl FileHistory { pub fn allow_cleanup(&mut self, cleanup: bool) { self.allow_cleanup = cleanup; } pub fn record_directory<P: AsRef<path::Path>>(&mut self, path: P) { let path = path.as_ref(); self.history.push(FileItem::Directory(path.to_path_buf())); } pub fn record_file<P: AsRef<path::Path>>(&mut self, path: P) { let path = path.as_ref(); self.history.push(FileItem::File(path.to_path_buf())); }
} #[cfg(test)] mod tests { use super::FileHistory; use std::fs; #[test] fn test_history() { let mut history = FileHistory::default(); let mut temp_dir = std::env::temp_dir(); temp_dir.push("remove_me"); let temp_path = temp_dir.as_path(); fs::create_dir(temp_path).unwrap_or_else(|err| { panic!( "Failed to create the temporary directory, error was {}", err ); }); assert!(temp_path.exists()); history.record_directory(temp_path); history.cleanup(); assert_eq!(temp_path.exists(), false); } #[test] fn test_file_history() { use byteorder::{self, LittleEndian, WriteBytesExt}; let mut history = FileHistory::default(); let mut temp_file = std::env::temp_dir(); temp_file.push("remove_me_123"); let temp_path = temp_file.as_path(); { let mut file = fs::OpenOptions::new() .create(true) .create_new(true) .write(true) .read(true) .open(temp_path) .unwrap_or_else(|err| { panic!("Failed to create the temp file, error was {}", err); }); file.write_u64::<LittleEndian>(12345).unwrap_or_else(|err| { panic!( "Failed writing test data during history test, error was {}", err ); }); } assert!(temp_path.exists()); history.record_file(temp_path); history.cleanup(); assert_eq!(temp_path.exists(), false); } }
pub fn cleanup(&mut self) { while let Some(file_item) = self.history.pop() { match file_item { FileItem::Directory(path) => { fs::remove_dir_all(&path).unwrap_or_else(|err| { eprintln!( "Failed to cleanup the test directory {}, error was {}", path.display(), err ); }); eprintln!("Deleted {}", path.display()); } FileItem::File(path) => { fs::remove_file(path.as_path()).unwrap_or_else(|err| { eprintln!( "Failed to cleanup the test file {}, error was {}", path.display(), err ); }); eprintln!("Deleted {}", path.display()); } } } self.history.clear(); }
function_block-full_function
[ { "content": "/// Represents a file in the working directory that we may wish to snapshot or otherwise investigate,\n\n/// This structure does not contain the path of the file since the path is used as a key inside a map of WorkingFiles\n\nstruct WorkingFile {\n\n hash: Option<Hash>,\n\n file_size: u64,\n...
Rust
src/main.rs
RyanBluth/mangy2
2a64daf7c911f5af52ace6e185f6cb47deed8b3d
extern crate clap; extern crate term_table; use clap::{App, AppSettings, Arg, SubCommand}; use std::process; use std::fmt::Display; use std::fs::{metadata, File, OpenOptions}; use std::io::prelude::*; use std::process::{Command, Stdio}; use std::collections::HashMap; use term_table::row::Row; use term_table::Table; use term_table::cell::Cell; const GO: &'static str = "go"; const SET: &'static str = "set"; const KEY: &'static str = "key"; const VALUE: &'static str = "value"; const LIST: &'static str = "list"; const RUN: &'static str = "run"; const RUN_ARGS: &'static str = "run_args"; const DELETE: &'static str = "delete"; const STORE_FILE: &'static str = ".managed-alias-store"; #[derive(Debug)] struct GenericError { description: String, } impl GenericError { pub fn new(description: String) -> GenericError { return GenericError { description }; } } impl<T> From<T> for GenericError where T: Display, { fn from(x: T) -> Self { return GenericError::new(format!("{}", x)); } } fn main() { let matches = App::new("managed-alias") .version("1.0") .author("Ryan Bluth") .setting(AppSettings::ArgsNegateSubcommands) .arg( Arg::with_name(KEY) .help("Variable key") .required(false) .index(1) ) .arg( Arg::with_name(RUN_ARGS) .help("Arguments to pass to the command stored in the variable matching the provided key") .required(false) .multiple(true) ) .subcommand( SubCommand::with_name(GO) .alias("g") .about("Navigates to the value of the specified key") .arg( Arg::with_name(KEY) .help("Variable key") .required(true) .index(1), ), ) .subcommand( SubCommand::with_name(LIST) .alias("l") .about("Lists all variables"), ) .subcommand( SubCommand::with_name(RUN) .alias("r") .about("Execute the matching value for the provided key") .arg( Arg::with_name(KEY) .help("Variable key") .required(true) .index(1), ) .arg( Arg::with_name(RUN_ARGS) .help("Arguments to pass to the command stored in the variable matching the provided key") .required(false) .multiple(true) ) ) .subcommand( 
SubCommand::with_name(SET) .alias("s") .about("Sets the specified key to the specified value") .setting(AppSettings::AllowLeadingHyphen) .arg(Arg::with_name(KEY).help("Variable key").required(true)) .arg( Arg::with_name(VALUE) .help("Variable value") .required(true) .multiple(true) .allow_hyphen_values(true), ), ) .subcommand(SubCommand::with_name(DELETE) .alias("d") .about("Delete a key value pair") .arg(Arg::with_name(KEY).help("Variable key").required(true)) ) .get_matches(); if let Some(sub_matches) = matches.subcommand_matches(GO) { match sub_matches.value_of(KEY) { Some(key) => go(key), None => exit_with_message("go requires a variable key"), } } else if let Some(sub_matches) = matches.subcommand_matches(SET) { if sub_matches.is_present(KEY) && sub_matches.is_present(VALUE) { let key = sub_matches.value_of(KEY).unwrap(); let values = sub_matches.values_of(VALUE).unwrap(); set(key, values); } else { exit_with_message("A key and value must be provided") } } else if matches.is_present(LIST) { list(); } else if let Some(sub_matches) = matches.subcommand_matches(RUN) { match sub_matches.value_of(KEY) { Some(key) => run(key, sub_matches.values_of_lossy(RUN_ARGS)), None => exit_with_message("go requires a variable key"), } } else if let Some(sub_matches) = matches.subcommand_matches(DELETE) { match sub_matches.value_of(KEY) { Some(key) => delete(key), None => exit_with_message("Delete requires a variable key"), } } else if let Some(key) = matches.value_of(KEY) { let value = lookup(key); if let Some(value) = value { let metadata = metadata(value); match metadata { Ok(metadata) => { if metadata.is_dir() { go(key); } else { run(key, matches.values_of_lossy(RUN_ARGS)); } } Err(_) => run(key, matches.values_of_lossy(RUN_ARGS)), } } else { exit_with_message(format!("Invalid key {}", key)) } } } fn list() { let entries = get_entries(); let mut commands = Vec::new(); let mut paths = Vec::new(); for entry in entries.iter().collect::<Vec<(&String, &String)>>() { let metadata 
= metadata(entry.1); match metadata { Ok(_) => paths.push(entry), Err(_) => commands.push(entry), } } let mut table = Table::new(); table.add_row(Row::new(vec![Cell::new("COMMANDS", 2)])); for command in commands { table.add_row(Row::new(vec![Cell::new(command.0, 1), Cell::new(command.1, 1)])); } println!("{}\n", table.as_string()); table = Table::new(); table.add_row(Row::new(vec![Cell::new("PATHS", 2)])); for path in paths { table.add_row(Row::new(vec![Cell::new(path.0, 1), Cell::new(path.1, 1)])); } println!("{}", table.as_string()); } fn run(key: &str, args: Option<Vec<String>>) { match lookup(key) { Some(value) => for command in value.split("&") { let mut out_args: Vec<String> = command .split_whitespace() .map(|s| String::from(s)) .collect::<Vec<String>>(); if let Some(arg_vec) = args.clone() { let mut joined_args = String::new(); for arg in &arg_vec { joined_args.push_str(arg.clone().as_str()); joined_args.push(' '); } joined_args.pop(); for arg in out_args.clone().iter().enumerate() { let mut current = arg.1.clone(); for i in 0..arg_vec.len() { let token = format!("${}", i); current = current.replace(token.as_str(), arg_vec[i].as_str()); } current = current.replace("$*", joined_args.as_str()); out_args[arg.0] = current; } } let mut arg_iter = out_args.iter(); match Command::new(arg_iter.next().unwrap()) .args(arg_iter) .stdout(Stdio::inherit()) .spawn() { Ok(mut child) => { if let Err(e) = child.wait() { exit_with_message(format!( "Failed to wait for command {}. Error: {}", command, e )); } } Err(e) => exit_with_message(format!("Failed to execute {}. 
Error: {}", command, e)), }; }, None => exit_invalid_key(key), } } fn go(key: &str) { match lookup(key) { Some(value) => println!("*{}", value), None => exit_invalid_key(key), } } fn set(key: &str, mut values: clap::Values) { let mut entries = get_entries(); let mut combined = String::from(values.next().unwrap()); for v in values { combined.push_str(" "); combined.push_str(v); } entries.insert(String::from(key), combined); write_entries(entries); } fn delete(key: &str) { let mut entries = get_entries(); entries.remove(&String::from(key)); write_entries(entries); } fn lookup(key: &str) -> Option<String> { let entries = get_entries(); match entries.get(&String::from(key)) { None => None, Some(entry) => Some(entry.clone()), } } fn get_file_contents() -> String { let mut file: File = match File::open(get_file_dir()) { Ok(file) => file, Err(_) => { return String::new(); } }; let mut buf = String::new(); file.read_to_string(&mut buf).unwrap(); return buf; } fn get_file_dir() -> String { let mut exe_path = std::env::current_exe().unwrap(); exe_path.pop(); exe_path.push(STORE_FILE); let path = String::from(exe_path.to_path_buf().to_string_lossy()); return path; } fn get_entries() -> HashMap<String, String> { let mut result: HashMap<String, String> = HashMap::new(); let contents = get_file_contents(); let lines = contents.split('\n'); for line in lines { let mut pair = line.split("\":\""); let key = pair.next(); let val = pair.next(); if key.is_some() && val.is_some() { result.insert(String::from(key.unwrap()), String::from(val.unwrap())); } } return result; } fn write_entries(entries: HashMap<String, String>) { let mut out = String::new(); for entry in entries { out.push_str(format_entry(&entry.0, &entry.1).as_str()); } let mut file = match OpenOptions::new() .write(true) .create(true) .truncate(true) .read(true) .open(get_file_dir()) { Ok(file) => file, Err(e) => { exit_with_message(format!( "Failed to create file {}. 
Error: {}", STORE_FILE, e )); return; } }; if let Err(e) = file.write_all(out.as_bytes()) { exit_with_message(format!( "Failed to write value to {}. Error: {}", STORE_FILE, e )); }; } fn format_entry(key: &String, val: &String) -> String { return format!("{}\":\"{}\n", key, val); } fn exit_invalid_key(key: &str) { exit_with_message(format!("No value was found for key '{}'", key)); } fn exit_with_message<T>(message: T) where T: Display, { println!("{}", message); process::exit(1); }
extern crate clap; extern crate term_table; use clap::{App, AppSettings, Arg, SubCommand}; use std::process; use std::fmt::Display; use std::fs::{metadata, File, OpenOptions}; use std::io::prelude::*; use std::process::{Command, Stdio}; use std::collections::HashMap; use term_table::row::Row; use term_table::Table; use term_table::cell::Cell; const GO: &'static str = "go"; const SET: &'static str = "set"; const KEY: &'static str = "key"; const VALUE: &'static str = "value"; const LIST: &'static str = "list"; const RUN: &'static str = "run"; const RUN_ARGS: &'static str = "run_args"; const DELETE: &'static str = "delete"; const STORE_FILE: &'static str = ".managed-alias-store"; #[derive(Debug)] struct GenericError { description: String, } impl GenericError { pub fn new(description: String) -> GenericError { return GenericError { description }; } } impl<T> From<T> for GenericError where T: Display, { fn from(x: T) -> Self { return GenericError::new(format!("{}", x)); } } fn main() { let matches = App::new("managed-alias") .version("1.0") .author("Ryan Bluth") .setting(AppSettings::ArgsNegateSubcommands) .arg( Arg::with_name(KEY) .help("Variable key") .required(false) .index(1) ) .arg( Arg::with_name(RUN_ARGS) .help("Arguments to pass to the command stored in the variable matching the provided key") .required(false) .multiple(true) ) .subcommand( SubCommand::with_name(GO) .alias("g") .about("Navigates to the value of the specified key") .arg( Arg::with_name(KEY) .help("Variable key") .required(true) .index(1), ), ) .subcommand( SubCommand::with_name(LIST) .alias("l") .about("Lists all variables"), ) .subcommand( SubCommand::with_name(RUN) .alias("r") .about("Execute the matching value for the provided key") .arg( Arg::with_name(KEY) .help("Variable key") .required(true) .index(1), ) .arg( Arg::with_name(RUN_ARGS) .help("Arguments to pass to the command stored in the variable matching the provided key") .required(false) .multiple(true) ) ) .subcommand( 
SubCommand::with_name(SET) .alias("s") .about("Sets the specified key to the specified value") .setting(AppSettings::AllowLeadingHyphen) .arg(Arg::with_name(KEY).help("Variable key").required(true)) .arg( Arg::with_name(VALUE) .help("Variable value") .required(true) .multiple(true) .allow_hyphen_values(true), ), ) .subcommand(SubCommand::with_name(DELETE) .alias("d") .about("Delete a key value pair") .arg(Arg::with_name(KEY).help("Variable key").required(true)) ) .get_matches(); if let Some(sub_matches) = matches.subcommand_matches(GO) { match sub_matches.value_of(KEY) { Some(key) => go(key), None => exit_with_message("go requires a variable key"), } } else if let Some(sub_matches) = matches.subcommand_matches(SET) { if sub_matches.is_present(KEY) && sub_matches.is_present(VALUE) { let key = sub_matches.value_of(KEY).unwrap(); let values = sub_matches.values_of(VALUE).unwrap(); set(key, values); } else { exit_with_message("A key and value must be provided") } } else if matches.is_present(LIST) { list(); } else if let Some(sub_matches) = matches.subcommand_matc
etadata { Ok(metadata) => { if metadata.is_dir() { go(key); } else { run(key, matches.values_of_lossy(RUN_ARGS)); } } Err(_) => run(key, matches.values_of_lossy(RUN_ARGS)), } } else { exit_with_message(format!("Invalid key {}", key)) } } } fn list() { let entries = get_entries(); let mut commands = Vec::new(); let mut paths = Vec::new(); for entry in entries.iter().collect::<Vec<(&String, &String)>>() { let metadata = metadata(entry.1); match metadata { Ok(_) => paths.push(entry), Err(_) => commands.push(entry), } } let mut table = Table::new(); table.add_row(Row::new(vec![Cell::new("COMMANDS", 2)])); for command in commands { table.add_row(Row::new(vec![Cell::new(command.0, 1), Cell::new(command.1, 1)])); } println!("{}\n", table.as_string()); table = Table::new(); table.add_row(Row::new(vec![Cell::new("PATHS", 2)])); for path in paths { table.add_row(Row::new(vec![Cell::new(path.0, 1), Cell::new(path.1, 1)])); } println!("{}", table.as_string()); } fn run(key: &str, args: Option<Vec<String>>) { match lookup(key) { Some(value) => for command in value.split("&") { let mut out_args: Vec<String> = command .split_whitespace() .map(|s| String::from(s)) .collect::<Vec<String>>(); if let Some(arg_vec) = args.clone() { let mut joined_args = String::new(); for arg in &arg_vec { joined_args.push_str(arg.clone().as_str()); joined_args.push(' '); } joined_args.pop(); for arg in out_args.clone().iter().enumerate() { let mut current = arg.1.clone(); for i in 0..arg_vec.len() { let token = format!("${}", i); current = current.replace(token.as_str(), arg_vec[i].as_str()); } current = current.replace("$*", joined_args.as_str()); out_args[arg.0] = current; } } let mut arg_iter = out_args.iter(); match Command::new(arg_iter.next().unwrap()) .args(arg_iter) .stdout(Stdio::inherit()) .spawn() { Ok(mut child) => { if let Err(e) = child.wait() { exit_with_message(format!( "Failed to wait for command {}. 
Error: {}", command, e )); } } Err(e) => exit_with_message(format!("Failed to execute {}. Error: {}", command, e)), }; }, None => exit_invalid_key(key), } } fn go(key: &str) { match lookup(key) { Some(value) => println!("*{}", value), None => exit_invalid_key(key), } } fn set(key: &str, mut values: clap::Values) { let mut entries = get_entries(); let mut combined = String::from(values.next().unwrap()); for v in values { combined.push_str(" "); combined.push_str(v); } entries.insert(String::from(key), combined); write_entries(entries); } fn delete(key: &str) { let mut entries = get_entries(); entries.remove(&String::from(key)); write_entries(entries); } fn lookup(key: &str) -> Option<String> { let entries = get_entries(); match entries.get(&String::from(key)) { None => None, Some(entry) => Some(entry.clone()), } } fn get_file_contents() -> String { let mut file: File = match File::open(get_file_dir()) { Ok(file) => file, Err(_) => { return String::new(); } }; let mut buf = String::new(); file.read_to_string(&mut buf).unwrap(); return buf; } fn get_file_dir() -> String { let mut exe_path = std::env::current_exe().unwrap(); exe_path.pop(); exe_path.push(STORE_FILE); let path = String::from(exe_path.to_path_buf().to_string_lossy()); return path; } fn get_entries() -> HashMap<String, String> { let mut result: HashMap<String, String> = HashMap::new(); let contents = get_file_contents(); let lines = contents.split('\n'); for line in lines { let mut pair = line.split("\":\""); let key = pair.next(); let val = pair.next(); if key.is_some() && val.is_some() { result.insert(String::from(key.unwrap()), String::from(val.unwrap())); } } return result; } fn write_entries(entries: HashMap<String, String>) { let mut out = String::new(); for entry in entries { out.push_str(format_entry(&entry.0, &entry.1).as_str()); } let mut file = match OpenOptions::new() .write(true) .create(true) .truncate(true) .read(true) .open(get_file_dir()) { Ok(file) => file, Err(e) => { 
exit_with_message(format!( "Failed to create file {}. Error: {}", STORE_FILE, e )); return; } }; if let Err(e) = file.write_all(out.as_bytes()) { exit_with_message(format!( "Failed to write value to {}. Error: {}", STORE_FILE, e )); }; } fn format_entry(key: &String, val: &String) -> String { return format!("{}\":\"{}\n", key, val); } fn exit_invalid_key(key: &str) { exit_with_message(format!("No value was found for key '{}'", key)); } fn exit_with_message<T>(message: T) where T: Display, { println!("{}", message); process::exit(1); }
hes(RUN) { match sub_matches.value_of(KEY) { Some(key) => run(key, sub_matches.values_of_lossy(RUN_ARGS)), None => exit_with_message("go requires a variable key"), } } else if let Some(sub_matches) = matches.subcommand_matches(DELETE) { match sub_matches.value_of(KEY) { Some(key) => delete(key), None => exit_with_message("Delete requires a variable key"), } } else if let Some(key) = matches.value_of(KEY) { let value = lookup(key); if let Some(value) = value { let metadata = metadata(value); match m
function_block-random_span
[ { "content": "# managed-alias\n\n\n\nmanaged-alias is an alternative to the alias command. managed-alias allows you to maintain a list of aliases that you can modify on the fly and is persitent across terminal sessions.\n\n\n\n\n\n| Command | Result ...
Rust
src/bin/taxonate/app.rs
elasticdog/taxonate
6d7a5591778e6d53744ffde3d40ba91972d0840f
use std::{ collections::HashSet, env, io::{self, BufRead}, path::{Path, PathBuf}, }; use clap::{crate_authors, crate_name, crate_version, App, AppSettings, Arg, ArgMatches}; use taxonate::config::{Color, Config, LogLevel}; pub fn build() -> App<'static, 'static> { let color = env::var("TAXONATE_COLOR").unwrap_or_else(|_| "auto".to_owned()); let color_app_setting = match color.as_str() { "always" => AppSettings::ColorAlways, "never" => AppSettings::ColorNever, _ => AppSettings::ColorAuto, }; App::new(crate_name!()) .version(crate_version!()) .author(crate_authors!()) .setting(AppSettings::AllowInvalidUtf8) .setting(AppSettings::ColoredHelp) .setting(color_app_setting) .about( "Identify and filter files based on their programming language.\n\n\ Use '--help' instead of '-h' to see a more detailed version of the \ help text.", ) .long_about("Identify and filter files based on their programming language.") .arg( Arg::with_name("filename_only") .help("Suppresses display of the identified language") .long_help( "Suppresses normal output; only displays the file name and \ not the identified programming language", ) .short("f") .long("filename-only"), ) .arg( Arg::with_name("list_languages") .help("Lists supported programming languages") .long_help( "Displays a list of supported programming languages for \ filtering output", ) .short("L") .long("list-languages"), ) .arg( Arg::with_name("color") .help("Specifies when to use colored output") .short("c") .long("color") .takes_value(true) .value_name("WHEN") .possible_values(&["auto", "always", "never"]) .env("TAXONATE_COLOR") .default_value("auto"), ) .arg( Arg::with_name("debug") .help("Adjusts the log level for debugging") .short("d") .long("debug") .takes_value(true) .value_name("LEVEL") .possible_values(&["error", "warning", "info", "debug", "trace"]) .env("TAXONATE_DEBUG") .default_value("error"), ) .arg( Arg::with_name("language") .help("Filters output by programming language") .long_help( "Filters output to only show 
files identified as the given \ programming language", ) .short("l") .long("language") .takes_value(true) .value_name("LANGUAGE") .env("TAXONATE_LANGUAGE"), ) .arg( Arg::with_name("PATH") .help("File or directory to identify. Use '-' for standard input.") .long_help( "A file or directory to identify. Directories will have \ all files identified recursively. Use a dash ('-') to \ read from standard input.", ) .multiple(true), ) } pub fn config_from(matches: &ArgMatches) -> Config { let color = match matches.value_of("color").unwrap() { "auto" => Color::Auto, "always" => Color::Always, "never" => Color::Never, _ => unreachable!(), }; let filename_only = matches.is_present("filename_only"); let log_level = match matches.value_of("debug").unwrap() { "error" => LogLevel::Error, "warning" => LogLevel::Warning, "info" => LogLevel::Info, "debug" => LogLevel::Debug, "trace" => LogLevel::Trace, _ => unreachable!(), }; let language = matches.value_of("language").map(String::from); let mut paths: HashSet<PathBuf> = matches .values_of_os("PATH") .unwrap_or_default() .map(PathBuf::from) .collect(); if paths.remove(Path::new("-")) { let stdin = io::stdin(); for line in stdin.lock().lines() { paths.insert(PathBuf::from(line.unwrap())); } } if paths.is_empty() { paths.insert(PathBuf::from(".")); } Config::new() .set_color(color) .set_filename_only(filename_only) .set_log_level(log_level) .set_language(language) .set_paths(paths) }
use std::{ collections::HashSet, env, io::{self, BufRead}, path::{Path, PathBuf}, }; use clap::{crate_authors, crate_name, crate_version, App, AppSettings, Arg, ArgMatches}; use taxonate::config::{Color, Config, LogLevel}; pub fn build() -> App<'static, 'static> { let color = env::var("TAXONATE_COLOR").unwrap_or_else(|_| "auto".to_owned()); let color_app_setting = match color.as_str() { "always" => AppSettings::ColorAlways, "never" => AppSettings::ColorNever, _ => AppSettings::ColorAuto, }; App::new(crate_name!()) .version(crate_version!()) .author(crate_authors!()) .setting(AppSettings::AllowInvalidUtf8) .setting(AppSettings::ColoredHelp) .setting(color_app_setting) .about( "Identify and filter files based on their programming language.\n\n\ Use '--help' instead of '-h' to see a more detailed version of the \ help text.", ) .long_about("Identify and filter files based on their programming language.") .arg( Arg::with_name("filename_only") .help("Suppresses display of the identified language") .long_help( "Suppresses normal output; only displays the file name and \ not the identified programming language", ) .short("f") .long("filename-only"), ) .arg( Arg::with_name("list_languages") .help("Lists supported programming languages") .long_help( "Displays a list of supported programming languages for \ filtering output", ) .short("L") .long("list-languages"), ) .arg( Arg::with_name("color") .help("Specifies when to use colored output") .short("c") .long("color") .takes_value(true) .value_name("WHEN") .possible_values(&["auto", "always", "never"]) .env("TAXONATE_COLOR") .default_value("auto"), ) .arg( Arg::with_name("debug") .help("Adjusts the log level for debugging") .short("d") .long("debug") .takes_value(true) .value_name("LEVEL") .possible_values(&["error", "warning", "info", "debug", "trace"]) .env("TAXONATE_DEBUG") .default_value("error"), ) .arg( Arg::with_name("language") .help("Filters output by programming language") .long_help( "Filters output to only show 
files identified as the given \ programming language", ) .short("l") .long("language") .takes_value(true) .value_name("LANGUAGE") .env("TAXONATE_LANGUAGE"), ) .arg( Arg::with_name("PATH") .help("File or directory to identify. Use '-' for standard input.") .long_help( "A file or directory to identify. Directories will have \ all files identified recursively. Use a dash ('-') to \ read from standard input.", ) .multiple(true), ) } pub fn config_from(matches: &ArgMatches) -> Config { let color = match matches.value_of("color").unwrap() { "auto" => Color::Auto, "always" => Color::Always, "never" => Color::N
ove(Path::new("-")) { let stdin = io::stdin(); for line in stdin.lock().lines() { paths.insert(PathBuf::from(line.unwrap())); } } if paths.is_empty() { paths.insert(PathBuf::from(".")); } Config::new() .set_color(color) .set_filename_only(filename_only) .set_log_level(log_level) .set_language(language) .set_paths(paths) }
ever, _ => unreachable!(), }; let filename_only = matches.is_present("filename_only"); let log_level = match matches.value_of("debug").unwrap() { "error" => LogLevel::Error, "warning" => LogLevel::Warning, "info" => LogLevel::Info, "debug" => LogLevel::Debug, "trace" => LogLevel::Trace, _ => unreachable!(), }; let language = matches.value_of("language").map(String::from); let mut paths: HashSet<PathBuf> = matches .values_of_os("PATH") .unwrap_or_default() .map(PathBuf::from) .collect(); if paths.rem
function_block-random_span
[ { "content": "#[must_use]\n\npub fn identify(file: &Path) -> Option<&Language> {\n\n find_lang_by_interpreter(&file).or_else(|| find_lang_by_glob(&file))\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 0, "score": 96360.11204077046 }, { "content": "/// # Errors\n\n///\n\n/// Will return `...
Rust
core/src/reflection/microfacet_reflection.rs
hackmad/pbr-rust
d181621fcde300e88e1063b481fa240a49157627
#![allow(dead_code)] use super::*; use crate::microfacet::*; use bumpalo::Bump; use std::fmt; pub struct MicrofacetReflection<'arena> { bxdf_type: BxDFType, fresnel: &'arena mut Fresnel<'arena>, r: Spectrum, distribution: &'arena mut MicrofacetDistribution<'arena>, } impl<'arena> MicrofacetReflection<'arena> { #[allow(clippy::mut_from_ref)] pub fn alloc( arena: &'arena Bump, r: Spectrum, distribution: &'arena mut MicrofacetDistribution<'arena>, fresnel: &'arena mut Fresnel<'arena>, ) -> &'arena mut BxDF<'arena> { let model = arena.alloc(Self { bxdf_type: BxDFType::BSDF_REFLECTION | BxDFType::BSDF_GLOSSY, r, distribution, fresnel, }); arena.alloc(BxDF::MicrofacetReflection(model)) } #[allow(clippy::mut_from_ref)] pub fn clone_alloc(&self, arena: &'arena Bump) -> &'arena mut BxDF<'arena> { let distribution = self.distribution.clone_alloc(arena); let fresnel = self.fresnel.clone_alloc(arena); let model = arena.alloc(Self { bxdf_type: self.bxdf_type, r: self.r, distribution, fresnel, }); arena.alloc(BxDF::MicrofacetReflection(model)) } pub fn get_type(&self) -> BxDFType { self.bxdf_type } pub fn f(&self, wo: &Vector3f, wi: &Vector3f) -> Spectrum { let cos_theta_o = abs_cos_theta(wo); let cos_theta_i = abs_cos_theta(wi); let wh = wi + wo; if (cos_theta_i == 0.0 || cos_theta_o == 0.0) || (wh.x == 0.0 && wh.y == 0.0 && wh.z == 0.0) { Spectrum::ZERO } else { let wh = wh.normalize(); let f = self .fresnel .evaluate(wi.dot(&wh.face_forward(&Vector3f::new(0.0, 0.0, 1.0)))); self.r * self.distribution.d(&wh) * self.distribution.g(wo, wi) * f / (4.0 * cos_theta_i * cos_theta_o) } } pub fn sample_f(&self, wo: &Vector3f, u: &Point2f) -> BxDFSample { if wo.z == 0.0 { BxDFSample::from(self.bxdf_type) } else { let wh = self.distribution.sample_wh(wo, u); if wo.dot(&wh) < 0.0 { BxDFSample::from(self.bxdf_type) } else { let wi = reflect(wo, &wh); if !same_hemisphere(wo, &wi) { BxDFSample::new(Spectrum::ZERO, 0.0, wi, self.bxdf_type) } else { let pdf = self.distribution.pdf(wo, &wh) / 
(4.0 * wo.dot(&wh)); BxDFSample::new(self.f(wo, &wi), pdf, wi, self.bxdf_type) } } } } pub fn pdf(&self, wo: &Vector3f, wi: &Vector3f) -> Float { if same_hemisphere(wo, wi) { let wh = (wo + wi).normalize(); self.distribution.pdf(wo, &wh) / (4.0 * wo.dot(&wh)) } else { 0.0 } } } impl<'arena> fmt::Display for MicrofacetReflection<'arena> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "MicrofacetReflection {{ bxdf_type: {}, fresnel: {}, r: {}, distribution: {} }}", self.bxdf_type, self.fresnel, self.r, self.distribution ) } }
#![allow(dead_code)] use super::*; use crate::microfacet::*; use bumpalo::Bump; use std::fmt; pub struct MicrofacetReflection<'arena> { bxdf_type: BxDFType, fresnel: &'arena mut Fresnel<'arena>, r: Spectrum, distribution: &'arena mut MicrofacetDistribution<'arena>, } impl<'arena> MicrofacetReflection<'arena> { #[allow(clippy::mut_from_ref)] pub fn alloc( arena: &'arena Bump, r: Spectrum, distribution: &'arena mut MicrofacetDistribution<'arena>, fresnel: &'arena mut Fresnel<'arena>, ) -> &'arena mut BxDF<'arena> { let model = arena.alloc(Self { bxdf_type: BxDFType::BSDF_REFLECTION | BxDFType::BSDF_GLOSSY, r, distribution, fresnel, }); arena.alloc(BxDF::MicrofacetReflection(model)) } #[allow(clippy::mut_from_ref)] pub fn clone_alloc(&self, arena: &'arena Bump) -> &'arena mut BxDF<'arena> { let distribution = self.distribution.clone_alloc(arena); let fresnel = self.fresnel.clone_alloc(arena); let model = arena.alloc(Self { bxdf_type: self.bxdf_type, r: self.r, distribution, fresnel, }); arena.alloc(BxDF::MicrofacetReflection(model)) } pub fn get_type(&self) -> BxDFType { self.bxdf_type } pub fn f(&self, wo: &Vector3f, wi: &Vector3f) -> Spectrum { let cos_theta_o = abs_cos_theta(wo); let cos_theta_i = abs_cos_theta(wi); let wh = wi + wo; if (cos_theta_i == 0.0 || cos_theta_o == 0.0) || (wh.x == 0.0 && wh.y == 0.0 && wh.z == 0.0) { Spectrum::ZERO } else { let wh = wh.normalize(); let f = self .fresnel .evaluate(wi.dot(&wh.face_forward(&Vector3f::new(0.0, 0.0, 1.0)))); self.r * self.distribution.d(&wh) * self.distribution.g(wo, wi) * f / (4.0 * cos_theta_i * cos_theta_o) } } pub fn sample_f(&self, wo: &Vector3f, u: &Point2f) -> BxDFSample { if wo.z == 0.0 { BxDFSample::from(self.bxdf_type) } else { let wh = self.distribution.sample_wh(wo, u); if wo.dot(&wh) < 0.0 { BxDFSample::from(self.bxdf_type) } else { let wi = reflect(wo, &wh); if !same_hemisphere(wo, &wi) { BxDFSample::new(Spectrum::ZERO, 0.0, wi, self.bxdf_type) } else {
pub fn pdf(&self, wo: &Vector3f, wi: &Vector3f) -> Float { if same_hemisphere(wo, wi) { let wh = (wo + wi).normalize(); self.distribution.pdf(wo, &wh) / (4.0 * wo.dot(&wh)) } else { 0.0 } } } impl<'arena> fmt::Display for MicrofacetReflection<'arena> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "MicrofacetReflection {{ bxdf_type: {}, fresnel: {}, r: {}, distribution: {} }}", self.bxdf_type, self.fresnel, self.r, self.distribution ) } }
let pdf = self.distribution.pdf(wo, &wh) / (4.0 * wo.dot(&wh)); BxDFSample::new(self.f(wo, &wi), pdf, wi, self.bxdf_type) } } } }
function_block-function_prefix_line
[ { "content": "#[inline]\n\npub fn reflect(wo: &Vector3f, n: &Vector3f) -> Vector3f {\n\n -wo + 2.0 * wo.dot(n) * n\n\n}\n", "file_path": "core/src/reflection/common.rs", "rank": 0, "score": 258319.93077896046 }, { "content": "/// Uniformly sample a direction from a sphere.\n\n///\n\n/// *...
Rust
src/lib.rs
rkanati/podchamp
84488b1e92532052f1c1176150a8afa95779de65
#[macro_use] extern crate diesel; #[macro_use] extern crate diesel_migrations; use { chrono::{DateTime, Utc, NaiveDateTime}, thiserror::Error, url::Url, }; embed_migrations!(); pub mod models; pub mod schema; pub fn run_migrations(db: &diesel::sqlite::SqliteConnection) -> anyhow::Result<()> { embedded_migrations::run(db)?; Ok(()) } pub struct Database { conn: diesel::sqlite::SqliteConnection, } #[derive(Debug, Error)] pub enum OpenDatabaseError { #[error("invalid database path")] InvalidPath, #[error("creating database directory")] CreateDirectory(std::io::Error), #[error(transparent)] Diesel(#[from] diesel::result::ConnectionError), } impl Database { pub fn open(path: &std::path::Path) -> Result<Database, OpenDatabaseError> { let dir = path.parent().ok_or(OpenDatabaseError::InvalidPath)?; std::fs::create_dir_all(dir).map_err(OpenDatabaseError::CreateDirectory)?; let path = path.to_str().ok_or(OpenDatabaseError::InvalidPath)?; use diesel::prelude::*; let conn = SqliteConnection::establish(path)?; let db = Database{conn}; Ok(db) } } impl std::ops::Deref for Database { type Target = diesel::sqlite::SqliteConnection; fn deref(&self) -> &Self::Target { &self.conn } } #[derive(Debug, Error)] pub enum AddFeedError { #[error("feed named {0} already in database")] NameTaken(String), #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn add_feed(&mut self, name: &str, link: &Url, backlog: std::num::NonZeroU32, ) -> Result<(), AddFeedError> { let feed = models::NewFeed { name, uri: link.as_str(), backlog: backlog.get() as i32, fetch_since: None }; use diesel::{prelude::*, result::{Error, DatabaseErrorKind}}; diesel::insert_into(schema::feeds::table) .values(&feed) .execute(&self.conn) .map_err(|e| match e { Error::DatabaseError(DatabaseErrorKind::UniqueViolation, _) => AddFeedError::NameTaken(name.to_owned()), e => e.into(), })?; Ok(()) } } #[derive(Debug, Error)] pub enum RemoveFeedError { #[error("no feed named {0}")] NoSuchFeed(String), 
#[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn remove_feed(&mut self, name: &str) -> Result<(), RemoveFeedError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n = diesel::delete(dsl::feeds.filter(dsl::name.eq(name))) .execute(&self.conn)?; if n == 0 { return Err(RemoveFeedError::NoSuchFeed(name.into())); } Ok(()) } } #[derive(Debug, Error)] pub enum GetFeedsError { #[error(transparent)] Database(#[from] diesel::result::Error), } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum GetFeeds<'n> { All, One(&'n str), } impl Database { pub fn get_feeds(&self, which: GetFeeds<'_>) -> Result<Vec<models::Feed>, GetFeedsError> { use{diesel::prelude::*, schema::feeds::dsl as feeds}; let query = match which { GetFeeds::All => feeds::feeds.into_boxed(), GetFeeds::One(name) => feeds::feeds.filter(feeds::name.eq(name)).into_boxed() }; query.load::<models::Feed>(&self.conn) .map_err(GetFeedsError::Database) } } #[derive(Debug, Error)] pub enum SetColumnError { #[error("no feed named {0}")] NoSuchFeed(String), #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn set_link(&mut self, feed: &str, link: &Url) -> Result<(), SetColumnError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n = diesel::update(dsl::feeds.filter(dsl::name.eq(feed))) .set(dsl::uri.eq(link.as_str())) .execute(&self.conn)?; if n == 0 { return Err(SetColumnError::NoSuchFeed(feed.into())); } Ok(()) } pub fn set_backlog(&mut self, feed: &str, backlog: std::num::NonZeroU32) -> Result<(), SetColumnError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n = diesel::update(dsl::feeds.filter(dsl::name.eq(feed))) .set(dsl::backlog.eq(backlog.get() as i32)) .execute(&self.conn)?; if n == 0 { return Err(SetColumnError::NoSuchFeed(feed.into())); } Ok(()) } pub fn set_fetch_since(&mut self, feed: &str, since: &DateTime<Utc>) -> Result<(), SetColumnError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n = 
diesel::update(dsl::feeds.filter(dsl::name.eq(feed))) .set(dsl::fetch_since.eq(since.naive_utc())) .execute(&self.conn)?; if n == 0 { return Err(SetColumnError::NoSuchFeed(feed.into())); } Ok(()) } } #[derive(Debug, Error)] pub enum ResetRegisterError { #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn reset_register(&mut self, feed: &str) -> Result<(), ResetRegisterError> { use diesel::prelude::*; use schema::{register::dsl as register, feeds::dsl as feeds}; diesel::delete(register::register.filter(register::feed.eq(feed))) .execute(&self.conn)?; diesel::update(feeds::feeds.filter(feeds::name.eq(feed))) .set(feeds::fetch_since.eq::<Option<NaiveDateTime>>(None)) .execute(&self.conn)?; Ok(()) } } #[derive(Debug, Error)] pub enum IsEpisodeRegisteredError { #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn is_episode_registered(&self, feed: &str, guid: &str) -> Result<bool, IsEpisodeRegisteredError> { use {diesel::prelude::*, schema::register::dsl as register}; let n: i64 = register::register .filter(register::feed.eq(feed)) .filter(register::guid.eq(guid)) .count() .get_result(&self.conn)?; Ok(n != 0) } } #[derive(Debug, Error)] pub enum RegisterEpisodeError { #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn register_episode(&mut self, feed: &str, guid: &str) -> Result<(), RegisterEpisodeError> { let registration = models::NewRegistration{feed, guid}; use diesel::prelude::*; diesel::insert_into(schema::register::table) .values(&registration) .execute(&self.conn)?; Ok(()) } }
#[macro_use] extern crate diesel; #[macro_use] extern crate diesel_migrations; use { chrono::{DateTime, Utc, NaiveDateTime}, thiserror::Error, url::Url, }; embed_migrations!(); pub mod models; pub mod schema; pub fn run_migrations(db: &diesel::sqlite::SqliteConnection) -> anyhow::Result<()> { embedded_migrations::run(db)?; Ok(()) } pub struct Database { conn: diesel::sqlite::SqliteConnection, } #[derive(Debug, Error)] pub enum OpenDatabaseError { #[error("invalid database path")] InvalidPath, #[error("creating database directory")] CreateDirectory(std::io::Error), #[error(transparent)] Diesel(#[from] diesel::result::ConnectionError), } impl Database { pub fn open(path: &std::path::Path) -> Result<Database, OpenDatabaseError> { let dir = path.parent().ok_or(OpenDatabaseError::InvalidPath)?; std::fs::create_dir_all(dir).map_err(OpenDatabaseError::CreateDirectory)?; let path = path.to_str().ok_or(OpenDatabaseError::InvalidPath)?; use diesel::prelude::*; let conn = SqliteConnection::establish(path)?; let db = Database{conn}; Ok(db) } } impl std::ops::Deref for Database { type Target = diesel::sqlite::SqliteConnection; fn deref(&self) -> &Self::Target { &self.conn } } #[derive(Debug, Error)] pub enum AddFeedError { #[error("feed named {0} already in database")] NameTaken(String), #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn add_feed(&mut self, name: &str, link: &Url, backlog: std::num::NonZeroU32, ) -> Result<(), AddFeedError> { let feed = models::NewFeed { name, uri: link.as_str(), backlog: backlog.get() as i32, fetch_since: None }; use diesel::{prelude::*, result::{Error, DatabaseErrorKind}}; diesel::insert_into(schema::feeds::table) .values(&feed) .execute(&self.conn) .map_err(|e| match e { Error::DatabaseError(DatabaseErrorKind::UniqueViolation, _) => AddFeedError::NameTaken(name.to_owned()), e => e.into(), })?; Ok(()) } } #[derive(Debug, Error)] pub enum RemoveFeedError { #[error("no feed named {0}")] NoSuchFeed(String), 
#[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn remove_feed(&mut self, name: &str) -> Result<(), RemoveFeedError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n = diesel::delete(dsl::feeds.filter(dsl::name.eq(name))) .execute(&self.conn)?; if n == 0 { return Err(RemoveFeedError::NoSuchFeed(name.into())); } Ok(()) } } #[derive(Debug, Error)] pub enum GetFeedsError { #[error(transparent)] Database(#[from] diesel::result::Error), } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum GetFeeds<'n> { All, One(&'n str), } impl Database { pub fn get_feeds(&self, which: GetFeeds<'_>) -> Result<Vec<models::Feed>, GetFeedsError> { use{diesel::prelude::*, schema::feeds::dsl as feeds}; let query = match which { GetFeeds::All => feeds::feeds.into_boxed(), GetFeeds::One(name) => feeds::feeds.filter(feeds::name.eq(name)).into_boxed() }; query.load::<models::Feed>(&self.conn) .map_err(GetFeedsError::Database) } } #[derive(Debug, Error)] pub enum SetColumnError { #[error("no feed named {0}")] NoSuchFeed(String), #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn set_link(&mut self, feed: &str, link: &Url) -> Result<(), SetColumnError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n = diesel::update(dsl::feeds.filter(dsl::name.eq(feed))) .set(dsl::uri.eq(link.as_str())) .execute(&self.conn)?; if n == 0 { return Err(SetColumnError::NoSuchFeed(feed.into())); } Ok(()) } pub fn set_backlog(&mut self, feed: &str, backlog: std::num::NonZeroU32) -> Result<(), SetColumnError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n
0) } } #[derive(Debug, Error)] pub enum RegisterEpisodeError { #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn register_episode(&mut self, feed: &str, guid: &str) -> Result<(), RegisterEpisodeError> { let registration = models::NewRegistration{feed, guid}; use diesel::prelude::*; diesel::insert_into(schema::register::table) .values(&registration) .execute(&self.conn)?; Ok(()) } }
= diesel::update(dsl::feeds.filter(dsl::name.eq(feed))) .set(dsl::backlog.eq(backlog.get() as i32)) .execute(&self.conn)?; if n == 0 { return Err(SetColumnError::NoSuchFeed(feed.into())); } Ok(()) } pub fn set_fetch_since(&mut self, feed: &str, since: &DateTime<Utc>) -> Result<(), SetColumnError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n = diesel::update(dsl::feeds.filter(dsl::name.eq(feed))) .set(dsl::fetch_since.eq(since.naive_utc())) .execute(&self.conn)?; if n == 0 { return Err(SetColumnError::NoSuchFeed(feed.into())); } Ok(()) } } #[derive(Debug, Error)] pub enum ResetRegisterError { #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn reset_register(&mut self, feed: &str) -> Result<(), ResetRegisterError> { use diesel::prelude::*; use schema::{register::dsl as register, feeds::dsl as feeds}; diesel::delete(register::register.filter(register::feed.eq(feed))) .execute(&self.conn)?; diesel::update(feeds::feeds.filter(feeds::name.eq(feed))) .set(feeds::fetch_since.eq::<Option<NaiveDateTime>>(None)) .execute(&self.conn)?; Ok(()) } } #[derive(Debug, Error)] pub enum IsEpisodeRegisteredError { #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn is_episode_registered(&self, feed: &str, guid: &str) -> Result<bool, IsEpisodeRegisteredError> { use {diesel::prelude::*, schema::register::dsl as register}; let n: i64 = register::register .filter(register::feed.eq(feed)) .filter(register::guid.eq(guid)) .count() .get_result(&self.conn)?; Ok(n !=
random
[ { "content": "fn collect_recent_episodes<'c> (channel: &'c feed_rs::model::Feed, now: &DateTime<Utc>)\n\n -> Vec<(&'c feed_rs::model::Entry, &'c Url, DateTime<Utc>)>\n\n{\n\n let mut recents: Vec<_> = channel.entries.iter()\n\n // ignore items with no date, or no actual episode to download\n\n ...
Rust
src/from.rs
Eijebong/derive_more
591918e68bb695cb90842ae3a1d70551ade64be2
use std::collections::HashMap; use std::ops::Index; use quote::{ToTokens, Tokens}; use syn::{Data, DataEnum, DeriveInput, Field, Fields}; use utils::{field_idents, get_field_types, named_to_vec, number_idents, unnamed_to_vec}; pub fn expand(input: &DeriveInput, trait_name: &str) -> Tokens { match input.data { Data::Struct(ref data_struct) => match data_struct.fields { Fields::Unnamed(ref fields) => tuple_from(input, &unnamed_to_vec(fields)), Fields::Named(ref fields) => struct_from(input, &named_to_vec(fields)), Fields::Unit => struct_from(input, &[]), }, Data::Enum(ref data_enum) => enum_from(input, data_enum), _ => panic!(format!( "Only structs and enums can use derive({})", trait_name )), } } pub fn from_impl<T: ToTokens>(input: &DeriveInput, fields: &[&Field], body: T) -> Tokens { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let input_type = &input.ident; let original_types = &get_field_types(fields); quote!{ impl#impl_generics ::std::convert::From<(#(#original_types),*)> for #input_type#ty_generics #where_clause { #[allow(unused_variables)] #[inline] fn from(original: (#(#original_types),*)) -> #input_type#ty_generics { #body } } } } fn tuple_from(input: &DeriveInput, fields: &[&Field]) -> Tokens { let input_type = &input.ident; let body = tuple_body(input_type, fields); from_impl(input, fields, body) } fn tuple_body<T: ToTokens>(return_type: T, fields: &[&Field]) -> Tokens { if fields.len() == 1 { quote!(#return_type(original)) } else { let field_names = &number_idents(fields.len()); quote!(#return_type(#(original.#field_names),*)) } } fn struct_from(input: &DeriveInput, fields: &[&Field]) -> Tokens { let input_type = &input.ident; let body = struct_body(input_type, fields); from_impl(input, fields, body) } fn struct_body<T: ToTokens>(return_type: T, fields: &[&Field]) -> Tokens { if fields.len() == 1 { let field_name = &fields[0].ident; quote!(#return_type{#field_name: original}) } else { let argument_field_names = 
&number_idents(fields.len()); let field_names = &field_idents(fields); quote!(#return_type{#(#field_names: original.#argument_field_names),*}) } } fn enum_from(input: &DeriveInput, data_enum: &DataEnum) -> Tokens { let mut type_signature_counts = HashMap::new(); let input_type = &input.ident; for variant in &data_enum.variants { match variant.fields { Fields::Unnamed(ref fields) => { let original_types = unnamed_to_vec(fields).iter().map(|f| &f.ty).collect(); let counter = type_signature_counts.entry(original_types).or_insert(0); *counter += 1; } Fields::Named(ref fields) => { let original_types = named_to_vec(fields).iter().map(|f| &f.ty).collect(); let counter = type_signature_counts.entry(original_types).or_insert(0); *counter += 1; } Fields::Unit => { let counter = type_signature_counts.entry(vec![]).or_insert(0); *counter += 1; } } } let mut tokens = Tokens::new(); for variant in &data_enum.variants { match variant.fields { Fields::Unnamed(ref fields) => { let field_vec = &unnamed_to_vec(fields); let original_types = get_field_types(field_vec); if *type_signature_counts.index(&original_types) == 1 { let variant_ident = &variant.ident; let body = tuple_body(quote!(#input_type::#variant_ident), field_vec); from_impl(input, field_vec, body).to_tokens(&mut tokens) } } Fields::Named(ref fields) => { let field_vec = &named_to_vec(fields); let original_types = get_field_types(field_vec); if *type_signature_counts.index(&original_types) == 1 { let variant_ident = &variant.ident; let body = struct_body(quote!(#input_type::#variant_ident), field_vec); from_impl(input, field_vec, body).to_tokens(&mut tokens) } } Fields::Unit => { if *type_signature_counts.index(&vec![]) == 1 { let variant_ident = &variant.ident; let body = struct_body(quote!(#input_type::#variant_ident), &[]); from_impl(input, &[], body).to_tokens(&mut tokens) } } } } tokens }
use std::collections::HashMap; use std::ops::Index; use quote::{ToTokens, Tokens}; use syn::{Data, DataEnum, DeriveInput, Field, Fields}; use utils::{field_idents, get_field_types, named_to_vec, number_idents, unnamed_to_vec}; pub fn expand(input: &DeriveInput, trait_name: &str) -> Tokens { match input.data { Data::Struct(ref data_struct) => match data_struct.fields { Fields::Unnamed(ref fields) => tuple_from(input, &unnamed_to_vec(fields)), Fields::Named(ref fields) => struct_from(input, &named_to_vec(fields)), Fields::Unit => struct_from(input, &[]), }, Data::Enum(ref data_enum) => enum_from(input, data_enum), _ => panic!(format!( "Only structs and enums can use derive({})", trait_name )), } } pub fn from_impl<T: ToTokens>(input: &DeriveInput, fields: &[&Field], body: T) -> Tokens { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let input_type = &input.ident; let original_types = &get_field_types(fields); quote!{ impl#impl_generics ::std::convert::From<(#(#original_types),*)> for #input_type#ty_generics #where_clause { #[allow(unused_variables)] #[inline] fn from(original: (#(#original_types),*)) -> #input_type#ty_generics { #body } } } } fn tuple_from(input: &DeriveInput, fields: &[&Field]) -> Tokens { let input_type = &input.ident; let body = tuple_body(input_type, fields); from_impl(input, fields, body) } fn tuple_body<T: ToTokens>(return_type: T, fields: &[&Field]) -> Tokens { if fields.len() == 1 { quote!(#return_type(original)) } else { let field_names = &number_idents(fields.len()); quote!(#return_type(#(original.#field_names),*)) } } fn struct_from(input: &DeriveInput, fields: &[&Field]) -> Tokens { let input_type = &input.ident; let body = struct_body(input_type, fields); from_impl(input, fields, body) } fn struct_body<T: ToTokens>(return_type: T, fields: &[&Field]) -> Tokens { if fields.len() == 1 { let field_name = &
*counter += 1; } Fields::Named(ref fields) => { let original_types = named_to_vec(fields).iter().map(|f| &f.ty).collect(); let counter = type_signature_counts.entry(original_types).or_insert(0); *counter += 1; } Fields::Unit => { let counter = type_signature_counts.entry(vec![]).or_insert(0); *counter += 1; } } } let mut tokens = Tokens::new(); for variant in &data_enum.variants { match variant.fields { Fields::Unnamed(ref fields) => { let field_vec = &unnamed_to_vec(fields); let original_types = get_field_types(field_vec); if *type_signature_counts.index(&original_types) == 1 { let variant_ident = &variant.ident; let body = tuple_body(quote!(#input_type::#variant_ident), field_vec); from_impl(input, field_vec, body).to_tokens(&mut tokens) } } Fields::Named(ref fields) => { let field_vec = &named_to_vec(fields); let original_types = get_field_types(field_vec); if *type_signature_counts.index(&original_types) == 1 { let variant_ident = &variant.ident; let body = struct_body(quote!(#input_type::#variant_ident), field_vec); from_impl(input, field_vec, body).to_tokens(&mut tokens) } } Fields::Unit => { if *type_signature_counts.index(&vec![]) == 1 { let variant_ident = &variant.ident; let body = struct_body(quote!(#input_type::#variant_ident), &[]); from_impl(input, &[], body).to_tokens(&mut tokens) } } } } tokens }
fields[0].ident; quote!(#return_type{#field_name: original}) } else { let argument_field_names = &number_idents(fields.len()); let field_names = &field_idents(fields); quote!(#return_type{#(#field_names: original.#argument_field_names),*}) } } fn enum_from(input: &DeriveInput, data_enum: &DataEnum) -> Tokens { let mut type_signature_counts = HashMap::new(); let input_type = &input.ident; for variant in &data_enum.variants { match variant.fields { Fields::Unnamed(ref fields) => { let original_types = unnamed_to_vec(fields).iter().map(|f| &f.ty).collect(); let counter = type_signature_counts.entry(original_types).or_insert(0);
random
[ { "content": "/// Provides the hook to expand `#[derive(Into)]` into an implementation of `Into`\n\npub fn expand(input: &DeriveInput, _: &str) -> Tokens {\n\n let input_type = &input.ident;\n\n let field_vec: Vec<_>;\n\n let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();...
Rust
src/parser.rs
ikanago/qz
3e59c25af2107d72e1cbe776c15c0aadd42525fc
use crate::{ header::{HeaderName, HeaderValue}, method::Method, status::StatusCode, Uri, Version, }; use std::convert::TryFrom; use std::str; #[derive(Debug)] pub struct Parser<'a> { state: &'a [u8], } impl<'a> Parser<'a> { pub fn new(input: &'a [u8]) -> Self { Self { state: input } } pub fn consume(&mut self) -> Option<u8> { self.state.split_first().map(|(&b, tail)| { self.state = tail; b }) } pub fn read_until(&mut self, target: u8) -> Option<&[u8]> { let index = self.state.iter().position(|&b| b == target)?; let (found, tail) = self.state.split_at(index); self.state = tail; self.consume(); Some(found) } pub fn read_until_whitespace(&mut self) -> Option<&[u8]> { self.read_until(b' ') } pub fn expect(&mut self, target: u8, error: StatusCode) -> crate::Result<()> { match self.consume() { Some(b) if b == target => Ok(()), _ => Err(error), } } pub fn parse_request_line(&mut self) -> crate::Result<(Method, Uri, Version)> { let method = self.parse_method()?; let uri = self.parse_uri()?; let version = self.parse_version()?; self.expect(b'\n', StatusCode::BadRequest)?; Ok((method, uri, version)) } fn parse_method(&mut self) -> crate::Result<Method> { match self.read_until_whitespace() { Some(method) => Method::try_from(method), None => Err(StatusCode::BadRequest), } } fn parse_uri(&mut self) -> crate::Result<Uri> { let uri = self.read_until_whitespace().ok_or(StatusCode::BadRequest)?; if uri.starts_with(&[b'/']) { Ok(Uri::new(uri)) } else { Err(StatusCode::BadRequest) } } fn parse_version(&mut self) -> crate::Result<Version> { let protocol = self .read_until(b'/') .ok_or(StatusCode::HttpVersionNotSupported)?; match str::from_utf8(protocol) { Ok("HTTP") => (), _ => return Err(StatusCode::HttpVersionNotSupported), } let version = self .read_until(b'\r') .ok_or(StatusCode::HttpVersionNotSupported)?; Version::try_from(version) } pub fn parse_header(&mut self) -> crate::Result<(HeaderName, HeaderValue)> { let header_name = self .read_until(b':') 
.ok_or(StatusCode::BadRequest)? .to_vec(); let header_name = HeaderName::from(header_name); self.expect(b' ', StatusCode::BadRequest)?; let header_value = self .read_until(b'\r') .ok_or(StatusCode::BadRequest)? .to_vec(); self.expect(b'\n', StatusCode::BadRequest)?; Ok((header_name, header_value)) } pub fn parse_body(&mut self, body_len: usize) -> crate::Result<Vec<u8>> { if body_len > self.state.len() { return Err(StatusCode::LengthRequired); } Ok(self.state[..body_len].to_vec()) } } #[cfg(test)] mod tests { use super::*; #[test] fn read_char() { let mut p = Parser::new(&[42, 43]); assert_eq!(Some(b'*'), p.consume()); assert_eq!(Some(b'+'), p.consume()); assert_eq!(None, p.consume()); } #[test] fn read_until_delim() { let bytes = "GET /index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Some("GET".as_bytes()), p.read_until(b' ')); assert_eq!(Some("/index.html".as_bytes()), p.read_until(b' ')); assert_eq!(Some("HTTP/1.1".as_bytes()), p.read_until(b'\r')); } #[test] fn read_until_empty() { let mut p = Parser::new(&[]); assert_eq!(None, p.read_until(b' ')); } #[test] fn parse_request_line() { let bytes = "GET /index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); let (method, uri, version) = p.parse_request_line().unwrap(); assert_eq!(Method::Get, method); assert_eq!(Uri::new(b"/index.html"), uri); assert_eq!(Version::OneDotOne, version); } #[test] fn parse_method() { let bytes = "GET /index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Ok(Method::Get), p.parse_method()); } #[test] fn parse_uri() { let bytes = "/index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Ok(Uri::new(b"/index.html")), p.parse_uri()); } #[test] fn parse_version() { let bytes = "HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Ok(Version::OneDotOne), p.parse_version()); } #[test] fn parse_header() { let bytes = b"Accept: */*\r\n"; let mut p = Parser::new(bytes); 
assert_eq!(Ok((HeaderName::Accept, b"*/*".to_vec())), p.parse_header()); } #[test] fn parse_body() { let bytes = b"Hello, World!"; let mut p = Parser::new(bytes); assert_eq!(Ok(b"Hello, World!".to_vec()), p.parse_body(13)); } }
use crate::{ header::{HeaderName, HeaderValue}, method::Method, status::StatusCode, Uri, Version, }; use std::convert::TryFrom; use std::str; #[derive(Debug)] pub struct Parser<'a> { state: &'a [u8], } impl<'a> Parser<'a> { pub fn new(input: &'a [u8]) -> Self { Self { state: input } } pub fn consume(&mut self) -> Option<u8> { self.state.split_first().map(|(&b, tail)| { self.state = tail; b }) } pub fn read_until(&mut self, target: u8) -> Option<&[u8]> { let index = self.state.iter().position(|&b| b == target)?; let (found, tail) = self.state.split_at(index); self.state = tail; self.consume(); Some(found) } pub fn read_until_whitespace(&mut self) -> Option<&[u8]> { self.read_until(b' ') }
pub fn parse_request_line(&mut self) -> crate::Result<(Method, Uri, Version)> { let method = self.parse_method()?; let uri = self.parse_uri()?; let version = self.parse_version()?; self.expect(b'\n', StatusCode::BadRequest)?; Ok((method, uri, version)) } fn parse_method(&mut self) -> crate::Result<Method> { match self.read_until_whitespace() { Some(method) => Method::try_from(method), None => Err(StatusCode::BadRequest), } } fn parse_uri(&mut self) -> crate::Result<Uri> { let uri = self.read_until_whitespace().ok_or(StatusCode::BadRequest)?; if uri.starts_with(&[b'/']) { Ok(Uri::new(uri)) } else { Err(StatusCode::BadRequest) } } fn parse_version(&mut self) -> crate::Result<Version> { let protocol = self .read_until(b'/') .ok_or(StatusCode::HttpVersionNotSupported)?; match str::from_utf8(protocol) { Ok("HTTP") => (), _ => return Err(StatusCode::HttpVersionNotSupported), } let version = self .read_until(b'\r') .ok_or(StatusCode::HttpVersionNotSupported)?; Version::try_from(version) } pub fn parse_header(&mut self) -> crate::Result<(HeaderName, HeaderValue)> { let header_name = self .read_until(b':') .ok_or(StatusCode::BadRequest)? .to_vec(); let header_name = HeaderName::from(header_name); self.expect(b' ', StatusCode::BadRequest)?; let header_value = self .read_until(b'\r') .ok_or(StatusCode::BadRequest)? 
.to_vec(); self.expect(b'\n', StatusCode::BadRequest)?; Ok((header_name, header_value)) } pub fn parse_body(&mut self, body_len: usize) -> crate::Result<Vec<u8>> { if body_len > self.state.len() { return Err(StatusCode::LengthRequired); } Ok(self.state[..body_len].to_vec()) } } #[cfg(test)] mod tests { use super::*; #[test] fn read_char() { let mut p = Parser::new(&[42, 43]); assert_eq!(Some(b'*'), p.consume()); assert_eq!(Some(b'+'), p.consume()); assert_eq!(None, p.consume()); } #[test] fn read_until_delim() { let bytes = "GET /index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Some("GET".as_bytes()), p.read_until(b' ')); assert_eq!(Some("/index.html".as_bytes()), p.read_until(b' ')); assert_eq!(Some("HTTP/1.1".as_bytes()), p.read_until(b'\r')); } #[test] fn read_until_empty() { let mut p = Parser::new(&[]); assert_eq!(None, p.read_until(b' ')); } #[test] fn parse_request_line() { let bytes = "GET /index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); let (method, uri, version) = p.parse_request_line().unwrap(); assert_eq!(Method::Get, method); assert_eq!(Uri::new(b"/index.html"), uri); assert_eq!(Version::OneDotOne, version); } #[test] fn parse_method() { let bytes = "GET /index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Ok(Method::Get), p.parse_method()); } #[test] fn parse_uri() { let bytes = "/index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Ok(Uri::new(b"/index.html")), p.parse_uri()); } #[test] fn parse_version() { let bytes = "HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Ok(Version::OneDotOne), p.parse_version()); } #[test] fn parse_header() { let bytes = b"Accept: */*\r\n"; let mut p = Parser::new(bytes); assert_eq!(Ok((HeaderName::Accept, b"*/*".to_vec())), p.parse_header()); } #[test] fn parse_body() { let bytes = b"Hello, World!"; let mut p = Parser::new(bytes); assert_eq!(Ok(b"Hello, World!".to_vec()), p.parse_body(13)); } }
pub fn expect(&mut self, target: u8, error: StatusCode) -> crate::Result<()> { match self.consume() { Some(b) if b == target => Ok(()), _ => Err(error), } }
function_block-full_function
[ { "content": "pub fn filename_to_mime<P: AsRef<Path>>(filename: P) -> &'static [u8] {\n\n match filename.as_ref().extension().and_then(OsStr::to_str) {\n\n Some(\"txt\") => TEXT_PLAIN,\n\n Some(\"html\") => TEXT_HTML,\n\n Some(\"css\") => TEXT_CSS,\n\n Some(\"js\") => TEXT_JAVASCR...
Rust
crates/codec/src/codec/limit.rs
YZITE/futures
9d4300dfaa22d0bc7cb51bf41edd305aa95b2839
#![allow(missing_docs)] use super::{Decoder, Encoder, EncoderError}; use bytes::{Buf, BytesMut}; pub trait SkipAheadHandler: Sized + std::fmt::Debug { fn continue_skipping(self, src: &[u8]) -> Result<(usize, Option<Self>), ()>; } impl SkipAheadHandler for () { fn continue_skipping(self, _: &[u8]) -> Result<(usize, Option<Self>), ()> { Ok((0, None)) } } pub trait DecoderWithSkipAhead: Decoder { type Handler: SkipAheadHandler; fn prepare_skip_ahead(&mut self, src: &mut BytesMut) -> Self::Handler; } #[derive(Debug)] pub struct Limit<C: DecoderWithSkipAhead> { inner: C, max_frame_size: usize, skip_ahead_state: Option<<C as DecoderWithSkipAhead>::Handler>, decoder_defunct: bool, } impl<C> Limit<C> where C: DecoderWithSkipAhead, { pub fn new(inner: C, max_frame_size: usize) -> Self { Self { inner, max_frame_size, skip_ahead_state: None, decoder_defunct: false, } } } #[derive(Debug, thiserror::Error)] pub enum LimitError<E: std::error::Error + 'static> { #[error("frame size limit exceeded (detected at {0} bytes)")] LimitExceeded(usize), #[error("codec couldn't recover from invalid / too big frame")] Defunct, #[error(transparent)] Inner(#[from] E), } impl<C: DecoderWithSkipAhead + EncoderError> EncoderError for Limit<C> { type Error = LimitError<<C as EncoderError>::Error>; } impl<Item, C> Encoder<Item> for Limit<C> where Item: ?Sized, C: Encoder<Item> + DecoderWithSkipAhead, { fn encode(&mut self, src: &Item, dst: &mut BytesMut) -> Result<(), Self::Error> { let mut tmp_dst = dst.split_off(dst.len()); self.inner.encode(src, &mut tmp_dst)?; if tmp_dst.len() > self.max_frame_size { return Err(LimitError::LimitExceeded(tmp_dst.len())); } dst.unsplit(tmp_dst); Ok(()) } } impl<C> Decoder for Limit<C> where C: DecoderWithSkipAhead, { type Item = <C as Decoder>::Item; type Error = LimitError<<C as Decoder>::Error>; fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> { while let Some(sas) = self.skip_ahead_state.take() { match 
sas.continue_skipping(&src) { Ok((amount, next)) => { self.skip_ahead_state = next; debug_assert!(amount <= src.len()); src.advance(amount); debug_assert!(amount != 0 || self.skip_ahead_state.is_none()); if src.len() == 0 { return Ok(None); } } Err(()) => { self.decoder_defunct = true; } } } if self.decoder_defunct { src.clear(); return Err(LimitError::Defunct); } match self.inner.decode(src) { Ok(None) if src.len() > self.max_frame_size => { self.skip_ahead_state = Some(self.inner.prepare_skip_ahead(src)); Err(LimitError::LimitExceeded(src.len())) } Ok(x) => Ok(x), Err(x) => Err(LimitError::Inner(x)), } } } /* #[cfg(test)] mod tests { use super::*; mod decode { use super::*; #[test] fn x() { } } } */
#![allow(missing_docs)] use super::{Decoder, Encoder, EncoderError}; use bytes::{Buf, BytesMut}; pub trait SkipAheadHandler: Sized + std::fmt::Debug { fn continue_skipping(self, src: &[u8]) -> Result<(usize, Option<Self>), ()>; } impl SkipAheadHandler for () { fn continue_skipping(self, _: &[u8]) -> Result<(usize, Option<Self>), ()> { Ok((0, None)) } } pub trait DecoderWithSkipAhead: Decoder { type Handler: SkipAheadHandler; fn prepare_skip_ahead(&mut self, src: &mut BytesMut) -> Self::Handler; } #[derive(Debug)] pub struct Limit<C: DecoderWithSkipAhead> { inner: C, max_frame_size: usize, skip_ahead_state: Option<<C as DecoderWithSkipAhead>::Handler>, decoder_defunct: bool, } impl<C> Limit<C> where C: DecoderWithSkipAhead, { pub fn new(inner: C, max_frame_size: usize) -> Self { Self { inner, max_frame_size, skip_ahead_state: None, decoder_defunct: false, } } } #[derive(Debug, thiserror::Error)] pub enum LimitError<E: std::error::Error + 'static> { #[error("frame size limit exceeded (detected at {0} bytes)")] LimitExceeded(usize), #[error("codec couldn't recover from invalid / too big frame")] Defunct, #[error(transparent)] Inner(#[from] E), } impl<C: DecoderWithSkipAhead + EncoderError> EncoderError for Limit<C> { type Error = LimitError<<C as EncoderError>::Error>; } impl<Item, C> Encoder<Item> for Limit<C> where Item: ?Sized, C: Encoder<Item> + DecoderWithSkipAhead, { fn encode(&mut self, src: &Item, dst: &mut BytesMut) -> Result<(), Self::Error> { let mut tmp_dst = dst.split_off(dst.len()); self.inner.encode(src, &mut tmp_dst)?; if tmp_dst.len() > self.max_frame_size { return Err(LimitError::LimitExceeded(tmp_dst.len())); } dst.unsplit(tmp_dst); Ok(()) } } impl<C> Decoder for Limit<C> where C: DecoderWithSkipAhead, { type Item = <C as Decoder>::Item; type Error = LimitError<<C as Decoder>::Error>; fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> { while let Some(sas) = self.skip_ahead_state.take() { match 
sas.continue_skipping(&src) { Ok((amount, next)) => { self.skip_ahead_state = next; debug_assert!(amount <= src.len()); src.advance(amount); debug_assert!(amount != 0 || self.skip_ahead_state.is_none()); if src.len() == 0 { return Ok(None); } } Err(()) => { self.decoder_defunct = true; } } } if self.decoder_defunct { src.clear(); return Err(LimitError::Defunct); }
} } /* #[cfg(test)] mod tests { use super::*; mod decode { use super::*; #[test] fn x() { } } } */
match self.inner.decode(src) { Ok(None) if src.len() > self.max_frame_size => { self.skip_ahead_state = Some(self.inner.prepare_skip_ahead(src)); Err(LimitError::LimitExceeded(src.len())) } Ok(x) => Ok(x), Err(x) => Err(LimitError::Inner(x)), }
if_condition
[ { "content": "/// Encoding of messages as bytes, for use with [`Framed`](crate::Framed).\n\n///\n\n/// `Item` is the type of items consumed by `encode`\n\npub trait Encoder<Item: ?Sized>: EncoderError {\n\n /// Encodes an item into the `BytesMut` provided by dst.\n\n fn encode(&mut self, item: &Item, dst:...
Rust
src/diagnostics/archivist/tests/v2/src/logs/budget.rs
wwjiang007/fuchsia-1
0db66b52b5bcd3e27c8b8c2163925309e8522f94
use crate::{constants::*, test_topology, utils}; use anyhow::Error; use archivist_lib::configs::parse_config; use component_events::{events::*, matcher::ExitStatusMatcher}; use diagnostics_data::{Data, LogError, Logs, Severity}; use diagnostics_hierarchy::trie::TrieIterableNode; use diagnostics_message::message::{fx_log_packet_t, METADATA_SIZE}; use diagnostics_reader::{ArchiveReader, Inspect, SubscriptionResultsStream}; use fidl_fuchsia_archivist_tests::{ SocketPuppetControllerRequest, SocketPuppetControllerRequestStream, SocketPuppetProxy, }; use fidl_fuchsia_component as fcomponent; use fidl_fuchsia_diagnostics::ArchiveAccessorMarker; use fidl_fuchsia_io::DirectoryMarker; use fidl_fuchsia_sys2::{ChildRef, EventSourceMarker, RealmMarker}; use fuchsia_async::{Task, Timer}; use fuchsia_component::{client, server::ServiceFs}; use fuchsia_component_test::{builder::*, mock, RealmInstance}; use fuchsia_zircon as zx; use futures::{ channel::mpsc::{self, Receiver}, StreamExt, }; use rand::{prelude::SliceRandom, rngs::StdRng, SeedableRng}; use std::{collections::BTreeMap, ops::Deref, time::Duration}; use tracing::{debug, info, trace}; const TEST_PACKET_LEN: usize = 49; const MAX_PUPPETS: usize = 5; #[fuchsia_async::run_singlethreaded(test)] async fn test_budget() { fuchsia_syslog::init().unwrap(); fuchsia_syslog::set_severity(fuchsia_syslog::levels::DEBUG); info!("testing that the archivist's log buffers correctly enforce their budget"); info!("creating nested environment for collecting diagnostics"); let mut env = PuppetEnv::create(MAX_PUPPETS).await; info!("check that archivist log state is clean"); env.assert_archivist_state_matches_expected().await; for i in 0..MAX_PUPPETS { env.launch_puppet(i).await; } env.validate().await; } struct PuppetEnv { max_puppets: usize, instance: RealmInstance, controllers: Receiver<SocketPuppetControllerRequestStream>, messages_allowed_in_cache: usize, messages_sent: Vec<MessageReceipt>, launched_monikers: Vec<String>, running_puppets: 
Vec<Puppet>, inspect_reader: ArchiveReader, log_reader: ArchiveReader, log_subscription: SubscriptionResultsStream<Logs>, rng: StdRng, _log_errors: Task<()>, } impl PuppetEnv { async fn create(max_puppets: usize) -> Self { let (sender, controllers) = mpsc::channel(1); let mut builder = test_topology::create(test_topology::Options { archivist_url: ARCHIVIST_WITH_SMALL_CACHES, }) .await .expect("create base topology"); builder .add_component( "mocks-server", ComponentSource::Mock(mock::Mock::new(move |mock_handles: mock::MockHandles| { Box::pin(run_mocks(mock_handles, sender.clone())) })), ) .await .unwrap(); for i in 0..max_puppets { let name = format!("test/puppet-{}", i); builder .add_component(name.clone(), ComponentSource::url(SOCKET_PUPPET_COMPONENT_URL)) .await .unwrap() .add_route(CapabilityRoute { capability: Capability::protocol( "fuchsia.archivist.tests.SocketPuppetController", ), source: RouteEndpoint::component("mocks-server"), targets: vec![RouteEndpoint::component(name.clone())], }) .unwrap() .add_route(CapabilityRoute { capability: Capability::protocol("fuchsia.logger.LogSink"), source: RouteEndpoint::component("test/archivist"), targets: vec![RouteEndpoint::component(name)], }) .unwrap(); } info!("starting our instance"); let mut realm = builder.build(); test_topology::expose_test_realm_protocol(&mut realm).await; let instance = realm.create().await.expect("create instance"); let config = parse_config("/pkg/data/config/small-caches-config.json").unwrap(); let messages_allowed_in_cache = config.logs.max_cached_original_bytes / TEST_PACKET_LEN; let archive = || instance.root.connect_to_protocol_at_exposed_dir::<ArchiveAccessorMarker>().unwrap(); let mut inspect_reader = ArchiveReader::new(); inspect_reader .with_archive(archive()) .with_minimum_schema_count(1) .add_selector("archivist:root/logs_buffer") .add_selector("archivist:root/sources"); let mut log_reader = ArchiveReader::new(); log_reader .with_archive(archive()) .with_minimum_schema_count(0) 
.retry_if_empty(false); let (log_subscription, mut errors) = log_reader.snapshot_then_subscribe::<Logs>().unwrap().split_streams(); let _log_errors = Task::spawn(async move { if let Some(error) = errors.next().await { panic!("{:#?}", error); } }); Self { max_puppets, controllers, instance, messages_allowed_in_cache, messages_sent: vec![], launched_monikers: vec![], running_puppets: vec![], inspect_reader, log_reader, log_subscription, rng: StdRng::seed_from_u64(0xA455), _log_errors, } } async fn launch_puppet(&mut self, id: usize) { assert!(id < self.max_puppets); let mut child_ref = ChildRef { name: format!("puppet-{}", id), collection: None }; let (exposed_dir, server_end) = fidl::endpoints::create_proxy::<DirectoryMarker>().unwrap(); let realm = self.instance.root.connect_to_protocol_at_exposed_dir::<RealmMarker>().unwrap(); realm.open_exposed_dir(&mut child_ref, server_end).await.unwrap().unwrap(); let _ = client::connect_to_protocol_at_dir_root::<fcomponent::BinderMarker>(&exposed_dir) .unwrap(); debug!("waiting for controller request"); let mut controller = self.controllers.next().await.unwrap(); debug!("waiting for ControlPuppet call"); let proxy = match controller.next().await { Some(Ok(SocketPuppetControllerRequest::ControlPuppet { to_control, control_handle, })) => { control_handle.shutdown(); to_control.into_proxy().unwrap() } _ => panic!("did not expect that"), }; let moniker = format!( "fuchsia_component_test_collection:{}/test/puppet-{}", self.instance.root.child_name(), id ); let puppet = Puppet { id, moniker: moniker.clone(), proxy }; info!("having the puppet connect to LogSink"); puppet.connect_to_log_sink().await.unwrap(); info!("observe the puppet appears in archivist's inspect output"); self.launched_monikers.push(moniker); self.running_puppets.push(puppet); while self.current_expected_sources() != self.current_observed_sources().await { Timer::new(Duration::from_millis(100)).await; } } fn current_expected_sources(&self) -> BTreeMap<String, 
Count> { let mut expected_sources = BTreeMap::new(); for source in &self.launched_monikers { expected_sources.insert(source.clone(), Count { total: 0, dropped: 0 }); } for (prior_messages, receipt) in self.messages_sent.iter().rev().enumerate() { let mut puppet_count = expected_sources.get_mut(&receipt.moniker).unwrap(); puppet_count.total += 1; if prior_messages >= self.messages_allowed_in_cache { puppet_count.dropped += 1; } } expected_sources .into_iter() .filter(|(moniker, count)| { let has_messages = count.total > 0 && count.total != count.dropped; let is_running = self.running_puppets.iter().find(|puppet| moniker == &puppet.moniker).is_some(); is_running || has_messages }) .collect() } async fn current_observed_sources(&self) -> BTreeMap<String, Count> { let results = self.inspect_reader.snapshot::<Inspect>().await.unwrap().into_iter().next().unwrap(); let root = results.payload.as_ref().unwrap(); let mut counts = BTreeMap::new(); let sources = root.get_child("sources").unwrap(); for (moniker, source) in sources.get_children() { if let Some(logs) = source.get_child("logs") { let total = logs.get_child("total").unwrap(); let total_number = *total.get_property("number").unwrap().uint().unwrap() as usize; let total_bytes = *total.get_property("bytes").unwrap().uint().unwrap() as usize; assert_eq!(total_bytes, total_number * TEST_PACKET_LEN); let dropped = logs.get_child("dropped").unwrap(); let dropped_number = *dropped.get_property("number").unwrap().uint().unwrap() as usize; let dropped_bytes = *dropped.get_property("bytes").unwrap().uint().unwrap() as usize; assert_eq!(dropped_bytes, dropped_number * TEST_PACKET_LEN); counts.insert( moniker.clone(), Count { total: total_number, dropped: dropped_number }, ); } } counts } async fn assert_archivist_state_matches_expected(&self) { let expected_sources = self.current_expected_sources(); let observed_sources = self.current_observed_sources().await; assert_eq!(observed_sources, expected_sources); let expected_drops 
= || expected_sources.iter().filter(|(_, c)| c.dropped > 0); let mut expected_logs = self .messages_sent .iter() .rev() .take(self.messages_allowed_in_cache) .rev(); trace!("reading log snapshot"); let observed_logs = self.log_reader.snapshot::<Logs>().await.unwrap().into_iter(); let mut dropped_message_warnings = BTreeMap::new(); for observed in observed_logs { if observed.metadata.errors.is_some() { dropped_message_warnings.insert(observed.moniker.clone(), observed); } else { let expected = expected_logs.next().unwrap(); assert_eq!(expected, &observed); } } for (moniker, Count { dropped, .. }) in expected_drops() { let dropped_logs_warning = dropped_message_warnings.remove(moniker).unwrap(); assert_eq!( dropped_logs_warning.metadata.errors, Some(vec![LogError::DroppedLogs { count: *dropped as u64 }]) ); assert_eq!(dropped_logs_warning.metadata.severity, Severity::Warn); } assert!(dropped_message_warnings.is_empty(), "must have encountered all expected warnings"); } async fn validate(mut self) { let overall_messages_to_log = self.messages_allowed_in_cache * 15; let iteration_for_killing_a_puppet = self.messages_allowed_in_cache; let event_source = EventSource::from_proxy(client::connect_to_protocol::<EventSourceMarker>().unwrap()); let mut event_stream = event_source .subscribe(vec![EventSubscription::new(vec![Stopped::NAME], EventMode::Async)]) .await .unwrap(); info!("having the puppets log packets until overflow"); for i in 0..overall_messages_to_log { trace!(i, "loop ticked"); if i == iteration_for_killing_a_puppet { let to_stop = self.running_puppets.pop().unwrap(); let receipt = to_stop.emit_packet().await; self.check_receipt(receipt).await; let id = to_stop.id; drop(to_stop); utils::wait_for_component_stopped_event( &self.instance.root.child_name(), &format!("puppet-{}", id), ExitStatusMatcher::Clean, &mut event_stream, ) .await; } let puppet = self.running_puppets.choose(&mut self.rng).unwrap(); let receipt = puppet.emit_packet().await; 
self.check_receipt(receipt).await; } assert_eq!( self.current_expected_sources().len(), self.running_puppets.len(), "must have stopped a component and rolled out all of its logs" ); info!("test complete!"); } async fn check_receipt(&mut self, receipt: MessageReceipt) { let next_message = self.log_subscription.next().await.unwrap(); assert_eq!(receipt, next_message); self.messages_sent.push(receipt); self.assert_archivist_state_matches_expected().await; } } struct Puppet { proxy: SocketPuppetProxy, moniker: String, id: usize, } impl std::fmt::Debug for Puppet { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Puppet").field("moniker", &self.moniker).finish() } } impl Puppet { async fn emit_packet(&self) -> MessageReceipt { let timestamp = zx::Time::get_monotonic().into_nanos(); let mut packet: fx_log_packet_t = Default::default(); packet.metadata.severity = fuchsia_syslog::levels::INFO; packet.metadata.time = timestamp; packet.fill_data(1..(TEST_PACKET_LEN - METADATA_SIZE), b'A' as _); self.proxy.emit_packet(packet.as_bytes()).await.unwrap(); MessageReceipt { timestamp, moniker: self.moniker.clone() } } } impl Deref for Puppet { type Target = SocketPuppetProxy; fn deref(&self) -> &Self::Target { &self.proxy } } async fn run_mocks( mock_handles: mock::MockHandles, mut sender: mpsc::Sender<SocketPuppetControllerRequestStream>, ) -> Result<(), Error> { let mut fs = ServiceFs::new(); fs.dir("svc").add_fidl_service(move |stream: SocketPuppetControllerRequestStream| { sender.start_send(stream).unwrap(); }); fs.serve_connection(mock_handles.outgoing_dir.into_channel())?; fs.collect::<()>().await; Ok(()) } #[derive(Clone, Copy, Debug, PartialEq)] struct Count { total: usize, dropped: usize, } #[derive(Clone, Debug, PartialEq)] struct MessageReceipt { moniker: String, timestamp: i64, } impl PartialEq<Data<Logs>> for MessageReceipt { fn eq(&self, other: &Data<Logs>) -> bool { other.moniker.starts_with(&self.moniker) && 
*other.metadata.timestamp as i64 == self.timestamp } }
use crate::{constants::*, test_topology, utils}; use anyhow::Error; use archivist_lib::configs::parse_config; use component_events::{events::*, matcher::ExitStatusMatcher}; use diagnostics_data::{Data, LogError, Logs, Severity}; use diagnostics_hierarchy::trie::TrieIterableNode; use diagnostics_message::message::{fx_log_packet_t, METADATA_SIZE}; use diagnostics_reader::{ArchiveReader, Inspect, SubscriptionResultsStream}; use fidl_fuchsia_archivist_tests::{ SocketPuppetControllerRequest, SocketPuppetControllerRequestStream, SocketPuppetProxy, }; use fidl_fuchsia_component as fcomponent; use fidl_fuchsia_diagnostics::ArchiveAccessorMarker; use fidl_fuchsia_io::DirectoryMarker; use fidl_fuchsia_sys2::{ChildRef, EventSourceMarker, RealmMarker}; use fuchsia_async::{Task, Timer}; use fuchsia_component::{client, server::ServiceFs}; use fuchsia_component_test::{builder::*, mock, RealmInstance}; use fuchsia_zircon as zx; use futures::{ channel::mpsc::{self, Receiver}, StreamExt, }; use rand::{prelude::SliceRandom, rngs::StdRng, SeedableRng}; use std::{collections::BTreeMap, ops::Deref, time::Duration}; use tracing::{debug, info, trace}; const TEST_PACKET_LEN: usize = 49; const MAX_PUPPETS: usize = 5; #[fuchsia_async::run_singlethreaded(test)]
struct PuppetEnv { max_puppets: usize, instance: RealmInstance, controllers: Receiver<SocketPuppetControllerRequestStream>, messages_allowed_in_cache: usize, messages_sent: Vec<MessageReceipt>, launched_monikers: Vec<String>, running_puppets: Vec<Puppet>, inspect_reader: ArchiveReader, log_reader: ArchiveReader, log_subscription: SubscriptionResultsStream<Logs>, rng: StdRng, _log_errors: Task<()>, } impl PuppetEnv { async fn create(max_puppets: usize) -> Self { let (sender, controllers) = mpsc::channel(1); let mut builder = test_topology::create(test_topology::Options { archivist_url: ARCHIVIST_WITH_SMALL_CACHES, }) .await .expect("create base topology"); builder .add_component( "mocks-server", ComponentSource::Mock(mock::Mock::new(move |mock_handles: mock::MockHandles| { Box::pin(run_mocks(mock_handles, sender.clone())) })), ) .await .unwrap(); for i in 0..max_puppets { let name = format!("test/puppet-{}", i); builder .add_component(name.clone(), ComponentSource::url(SOCKET_PUPPET_COMPONENT_URL)) .await .unwrap() .add_route(CapabilityRoute { capability: Capability::protocol( "fuchsia.archivist.tests.SocketPuppetController", ), source: RouteEndpoint::component("mocks-server"), targets: vec![RouteEndpoint::component(name.clone())], }) .unwrap() .add_route(CapabilityRoute { capability: Capability::protocol("fuchsia.logger.LogSink"), source: RouteEndpoint::component("test/archivist"), targets: vec![RouteEndpoint::component(name)], }) .unwrap(); } info!("starting our instance"); let mut realm = builder.build(); test_topology::expose_test_realm_protocol(&mut realm).await; let instance = realm.create().await.expect("create instance"); let config = parse_config("/pkg/data/config/small-caches-config.json").unwrap(); let messages_allowed_in_cache = config.logs.max_cached_original_bytes / TEST_PACKET_LEN; let archive = || instance.root.connect_to_protocol_at_exposed_dir::<ArchiveAccessorMarker>().unwrap(); let mut inspect_reader = ArchiveReader::new(); inspect_reader 
.with_archive(archive()) .with_minimum_schema_count(1) .add_selector("archivist:root/logs_buffer") .add_selector("archivist:root/sources"); let mut log_reader = ArchiveReader::new(); log_reader .with_archive(archive()) .with_minimum_schema_count(0) .retry_if_empty(false); let (log_subscription, mut errors) = log_reader.snapshot_then_subscribe::<Logs>().unwrap().split_streams(); let _log_errors = Task::spawn(async move { if let Some(error) = errors.next().await { panic!("{:#?}", error); } }); Self { max_puppets, controllers, instance, messages_allowed_in_cache, messages_sent: vec![], launched_monikers: vec![], running_puppets: vec![], inspect_reader, log_reader, log_subscription, rng: StdRng::seed_from_u64(0xA455), _log_errors, } } async fn launch_puppet(&mut self, id: usize) { assert!(id < self.max_puppets); let mut child_ref = ChildRef { name: format!("puppet-{}", id), collection: None }; let (exposed_dir, server_end) = fidl::endpoints::create_proxy::<DirectoryMarker>().unwrap(); let realm = self.instance.root.connect_to_protocol_at_exposed_dir::<RealmMarker>().unwrap(); realm.open_exposed_dir(&mut child_ref, server_end).await.unwrap().unwrap(); let _ = client::connect_to_protocol_at_dir_root::<fcomponent::BinderMarker>(&exposed_dir) .unwrap(); debug!("waiting for controller request"); let mut controller = self.controllers.next().await.unwrap(); debug!("waiting for ControlPuppet call"); let proxy = match controller.next().await { Some(Ok(SocketPuppetControllerRequest::ControlPuppet { to_control, control_handle, })) => { control_handle.shutdown(); to_control.into_proxy().unwrap() } _ => panic!("did not expect that"), }; let moniker = format!( "fuchsia_component_test_collection:{}/test/puppet-{}", self.instance.root.child_name(), id ); let puppet = Puppet { id, moniker: moniker.clone(), proxy }; info!("having the puppet connect to LogSink"); puppet.connect_to_log_sink().await.unwrap(); info!("observe the puppet appears in archivist's inspect output"); 
self.launched_monikers.push(moniker); self.running_puppets.push(puppet); while self.current_expected_sources() != self.current_observed_sources().await { Timer::new(Duration::from_millis(100)).await; } } fn current_expected_sources(&self) -> BTreeMap<String, Count> { let mut expected_sources = BTreeMap::new(); for source in &self.launched_monikers { expected_sources.insert(source.clone(), Count { total: 0, dropped: 0 }); } for (prior_messages, receipt) in self.messages_sent.iter().rev().enumerate() { let mut puppet_count = expected_sources.get_mut(&receipt.moniker).unwrap(); puppet_count.total += 1; if prior_messages >= self.messages_allowed_in_cache { puppet_count.dropped += 1; } } expected_sources .into_iter() .filter(|(moniker, count)| { let has_messages = count.total > 0 && count.total != count.dropped; let is_running = self.running_puppets.iter().find(|puppet| moniker == &puppet.moniker).is_some(); is_running || has_messages }) .collect() } async fn current_observed_sources(&self) -> BTreeMap<String, Count> { let results = self.inspect_reader.snapshot::<Inspect>().await.unwrap().into_iter().next().unwrap(); let root = results.payload.as_ref().unwrap(); let mut counts = BTreeMap::new(); let sources = root.get_child("sources").unwrap(); for (moniker, source) in sources.get_children() { if let Some(logs) = source.get_child("logs") { let total = logs.get_child("total").unwrap(); let total_number = *total.get_property("number").unwrap().uint().unwrap() as usize; let total_bytes = *total.get_property("bytes").unwrap().uint().unwrap() as usize; assert_eq!(total_bytes, total_number * TEST_PACKET_LEN); let dropped = logs.get_child("dropped").unwrap(); let dropped_number = *dropped.get_property("number").unwrap().uint().unwrap() as usize; let dropped_bytes = *dropped.get_property("bytes").unwrap().uint().unwrap() as usize; assert_eq!(dropped_bytes, dropped_number * TEST_PACKET_LEN); counts.insert( moniker.clone(), Count { total: total_number, dropped: dropped_number }, 
); } } counts } async fn assert_archivist_state_matches_expected(&self) { let expected_sources = self.current_expected_sources(); let observed_sources = self.current_observed_sources().await; assert_eq!(observed_sources, expected_sources); let expected_drops = || expected_sources.iter().filter(|(_, c)| c.dropped > 0); let mut expected_logs = self .messages_sent .iter() .rev() .take(self.messages_allowed_in_cache) .rev(); trace!("reading log snapshot"); let observed_logs = self.log_reader.snapshot::<Logs>().await.unwrap().into_iter(); let mut dropped_message_warnings = BTreeMap::new(); for observed in observed_logs { if observed.metadata.errors.is_some() { dropped_message_warnings.insert(observed.moniker.clone(), observed); } else { let expected = expected_logs.next().unwrap(); assert_eq!(expected, &observed); } } for (moniker, Count { dropped, .. }) in expected_drops() { let dropped_logs_warning = dropped_message_warnings.remove(moniker).unwrap(); assert_eq!( dropped_logs_warning.metadata.errors, Some(vec![LogError::DroppedLogs { count: *dropped as u64 }]) ); assert_eq!(dropped_logs_warning.metadata.severity, Severity::Warn); } assert!(dropped_message_warnings.is_empty(), "must have encountered all expected warnings"); } async fn validate(mut self) { let overall_messages_to_log = self.messages_allowed_in_cache * 15; let iteration_for_killing_a_puppet = self.messages_allowed_in_cache; let event_source = EventSource::from_proxy(client::connect_to_protocol::<EventSourceMarker>().unwrap()); let mut event_stream = event_source .subscribe(vec![EventSubscription::new(vec![Stopped::NAME], EventMode::Async)]) .await .unwrap(); info!("having the puppets log packets until overflow"); for i in 0..overall_messages_to_log { trace!(i, "loop ticked"); if i == iteration_for_killing_a_puppet { let to_stop = self.running_puppets.pop().unwrap(); let receipt = to_stop.emit_packet().await; self.check_receipt(receipt).await; let id = to_stop.id; drop(to_stop); 
utils::wait_for_component_stopped_event( &self.instance.root.child_name(), &format!("puppet-{}", id), ExitStatusMatcher::Clean, &mut event_stream, ) .await; } let puppet = self.running_puppets.choose(&mut self.rng).unwrap(); let receipt = puppet.emit_packet().await; self.check_receipt(receipt).await; } assert_eq!( self.current_expected_sources().len(), self.running_puppets.len(), "must have stopped a component and rolled out all of its logs" ); info!("test complete!"); } async fn check_receipt(&mut self, receipt: MessageReceipt) { let next_message = self.log_subscription.next().await.unwrap(); assert_eq!(receipt, next_message); self.messages_sent.push(receipt); self.assert_archivist_state_matches_expected().await; } } struct Puppet { proxy: SocketPuppetProxy, moniker: String, id: usize, } impl std::fmt::Debug for Puppet { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Puppet").field("moniker", &self.moniker).finish() } } impl Puppet { async fn emit_packet(&self) -> MessageReceipt { let timestamp = zx::Time::get_monotonic().into_nanos(); let mut packet: fx_log_packet_t = Default::default(); packet.metadata.severity = fuchsia_syslog::levels::INFO; packet.metadata.time = timestamp; packet.fill_data(1..(TEST_PACKET_LEN - METADATA_SIZE), b'A' as _); self.proxy.emit_packet(packet.as_bytes()).await.unwrap(); MessageReceipt { timestamp, moniker: self.moniker.clone() } } } impl Deref for Puppet { type Target = SocketPuppetProxy; fn deref(&self) -> &Self::Target { &self.proxy } } async fn run_mocks( mock_handles: mock::MockHandles, mut sender: mpsc::Sender<SocketPuppetControllerRequestStream>, ) -> Result<(), Error> { let mut fs = ServiceFs::new(); fs.dir("svc").add_fidl_service(move |stream: SocketPuppetControllerRequestStream| { sender.start_send(stream).unwrap(); }); fs.serve_connection(mock_handles.outgoing_dir.into_channel())?; fs.collect::<()>().await; Ok(()) } #[derive(Clone, Copy, Debug, PartialEq)] struct Count { total: usize, 
dropped: usize, } #[derive(Clone, Debug, PartialEq)] struct MessageReceipt { moniker: String, timestamp: i64, } impl PartialEq<Data<Logs>> for MessageReceipt { fn eq(&self, other: &Data<Logs>) -> bool { other.moniker.starts_with(&self.moniker) && *other.metadata.timestamp as i64 == self.timestamp } }
async fn test_budget() { fuchsia_syslog::init().unwrap(); fuchsia_syslog::set_severity(fuchsia_syslog::levels::DEBUG); info!("testing that the archivist's log buffers correctly enforce their budget"); info!("creating nested environment for collecting diagnostics"); let mut env = PuppetEnv::create(MAX_PUPPETS).await; info!("check that archivist log state is clean"); env.assert_archivist_state_matches_expected().await; for i in 0..MAX_PUPPETS { env.launch_puppet(i).await; } env.validate().await; }
function_block-full_function
[]
Rust
cortex-m-rtfm/macros/src/codegen/util.rs
ButtNaked/m4mon8
017dc3e1caed191804e795c0709580f1a58ad73e
use core::sync::atomic::{AtomicUsize, Ordering}; use proc_macro2::{Span, TokenStream as TokenStream2}; use quote::quote; use rtfm_syntax::{ast::App, Context, Core}; use syn::{Attribute, Ident, LitInt, PatType}; use crate::check::Extra; pub fn capacity_literal(capacity: u8) -> LitInt { LitInt::new(&capacity.to_string(), Span::call_site()) } pub fn capacity_typenum(capacity: u8, round_up_to_power_of_two: bool) -> TokenStream2 { let capacity = if round_up_to_power_of_two { capacity.checked_next_power_of_two().expect("UNREACHABLE") } else { capacity }; let ident = Ident::new(&format!("U{}", capacity), Span::call_site()); quote!(rtfm::export::consts::#ident) } pub fn cfg_core(core: Core, cores: u8) -> Option<TokenStream2> { if cores == 1 { None } else if cfg!(feature = "heterogeneous") { let core = core.to_string(); Some(quote!(#[cfg(core = #core)])) } else { None } } pub fn fq_ident(task: &Ident, sender: Core) -> Ident { Ident::new( &format!("{}_S{}_FQ", task.to_string(), sender), Span::call_site(), ) } pub fn impl_mutex( extra: &Extra, cfgs: &[Attribute], cfg_core: Option<&TokenStream2>, resources_prefix: bool, name: &Ident, ty: TokenStream2, ceiling: u8, ptr: TokenStream2, ) -> TokenStream2 { let (path, priority) = if resources_prefix { (quote!(resources::#name), quote!(self.priority())) } else { (quote!(#name), quote!(self.priority)) }; let device = extra.device; quote!( #(#cfgs)* #cfg_core impl<'a> rtfm::Mutex for #path<'a> { type T = #ty; #[inline(always)] fn lock<R>(&mut self, f: impl FnOnce(&mut #ty) -> R) -> R { const CEILING: u8 = #ceiling; unsafe { rtfm::export::lock( #ptr, #priority, CEILING, #device::NVIC_PRIO_BITS, f, ) } } } ) } pub fn init_barrier(initializer: Core) -> Ident { Ident::new(&format!("IB{}", initializer), Span::call_site()) } pub fn inputs_ident(task: &Ident, sender: Core) -> Ident { Ident::new(&format!("{}_S{}_INPUTS", task, sender), Span::call_site()) } pub fn instants_ident(task: &Ident, sender: Core) -> Ident { 
Ident::new(&format!("{}_S{}_INSTANTS", task, sender), Span::call_site()) } pub fn interrupt_ident(core: Core, cores: u8) -> Ident { let span = Span::call_site(); if cores == 1 { Ident::new("Interrupt", span) } else { Ident::new(&format!("Interrupt_{}", core), span) } } pub fn is_exception(name: &Ident) -> bool { let s = name.to_string(); match &*s { "MemoryManagement" | "BusFault" | "UsageFault" | "SecureFault" | "SVCall" | "DebugMonitor" | "PendSV" | "SysTick" => true, _ => false, } } pub fn late_resources_ident(init: &Ident) -> Ident { Ident::new( &format!("{}LateResources", init.to_string()), Span::call_site(), ) } fn link_section_index() -> usize { static INDEX: AtomicUsize = AtomicUsize::new(0); INDEX.fetch_add(1, Ordering::Relaxed) } pub fn link_section(section: &str, core: Core) -> Option<TokenStream2> { if cfg!(feature = "homogeneous") { let section = format!(".{}_{}.rtfm{}", section, core, link_section_index()); Some(quote!(#[link_section = #section])) } else { None } } pub fn link_section_uninit(core: Option<Core>) -> Option<TokenStream2> { let section = if let Some(core) = core { let index = link_section_index(); if cfg!(feature = "homogeneous") { format!(".uninit_{}.rtfm{}", core, index) } else { format!(".uninit.rtfm{}", index) } } else { if cfg!(feature = "heterogeneous") { return None; } format!(".uninit.rtfm{}", link_section_index()) }; Some(quote!(#[link_section = #section])) } pub fn locals_ident(ctxt: Context, app: &App) -> Ident { let mut s = match ctxt { Context::Init(core) => app.inits[&core].name.to_string(), Context::Idle(core) => app.idles[&core].name.to_string(), Context::HardwareTask(ident) | Context::SoftwareTask(ident) => ident.to_string(), }; s.push_str("Locals"); Ident::new(&s, Span::call_site()) } pub fn rendezvous_ident(core: Core) -> Ident { Ident::new(&format!("RV{}", core), Span::call_site()) } pub fn regroup_inputs( inputs: &[PatType], ) -> ( Vec<TokenStream2>, TokenStream2, Vec<TokenStream2>, TokenStream2, ) { if inputs.len() 
== 1 { let ty = &inputs[0].ty; ( vec![quote!(_0: #ty)], quote!(_0), vec![quote!(_0)], quote!(#ty), ) } else { let mut args = vec![]; let mut pats = vec![]; let mut tys = vec![]; for (i, input) in inputs.iter().enumerate() { let i = Ident::new(&format!("_{}", i), Span::call_site()); let ty = &input.ty; args.push(quote!(#i: #ty)); pats.push(quote!(#i)); tys.push(quote!(#ty)); } let tupled = { let pats = pats.clone(); quote!((#(#pats,)*)) }; let ty = quote!((#(#tys,)*)); (args, tupled, pats, ty) } } pub fn resources_ident(ctxt: Context, app: &App) -> Ident { let mut s = match ctxt { Context::Init(core) => app.inits[&core].name.to_string(), Context::Idle(core) => app.idles[&core].name.to_string(), Context::HardwareTask(ident) | Context::SoftwareTask(ident) => ident.to_string(), }; s.push_str("Resources"); Ident::new(&s, Span::call_site()) } pub fn rq_ident(receiver: Core, priority: u8, sender: Core) -> Ident { Ident::new( &format!("R{}_P{}_S{}_RQ", receiver, priority, sender), Span::call_site(), ) } pub fn schedule_ident(name: &Ident, sender: Core) -> Ident { Ident::new( &format!("schedule_{}_S{}", name.to_string(), sender), Span::call_site(), ) } pub fn schedule_t_ident(core: Core) -> Ident { Ident::new(&format!("T{}", core), Span::call_site()) } pub fn spawn_barrier(receiver: Core) -> Ident { Ident::new(&format!("SB{}", receiver), Span::call_site()) } pub fn spawn_ident(name: &Ident, sender: Core) -> Ident { Ident::new( &format!("spawn_{}_S{}", name.to_string(), sender), Span::call_site(), ) } pub fn spawn_t_ident(receiver: Core, priority: u8, sender: Core) -> Ident { Ident::new( &format!("R{}_P{}_S{}_T", receiver, priority, sender), Span::call_site(), ) } pub fn suffixed(name: &str, core: u8) -> Ident { let span = Span::call_site(); if cfg!(feature = "homogeneous") { Ident::new(&format!("{}_{}", name, core), span) } else { Ident::new(name, span) } } pub fn tq_ident(core: Core) -> Ident { Ident::new(&format!("TQ{}", core), Span::call_site()) }
use core::sync::atomic::{AtomicUsize, Ordering}; use proc_macro2::{Span, TokenStream as TokenStream2}; use quote::quote; use rtfm_syntax::{ast::App, Context, Core}; use syn::{Attribute, Ident, LitInt, PatType}; use crate::check::Extra; pub fn capacity_literal(capacity: u8) -> LitInt { LitInt::new(&capacity.to_string(), Span::call_site()) } pub fn capacity_typenum(capacity: u8, round_up_to_power_of_two: bool) -> TokenStream2 { let capacity = if round_up_to_power_of_two { capacity.checked_next_power_of_two().expect("UNREACHABLE") } else { capacity }; let ident = Ident::new(&format!("U{}", capacity), Span::call_site()); quote!(rtfm::export::consts::#ident) } pub fn cfg_core(core: Core, cores: u8) -> Option<TokenStream2> { if cores == 1 { None } else if cfg!(feature = "heterogeneous") { let core = core.to_string(); Some(quote!(#[cfg(core = #core)])) } else { None } } pub fn fq_ident(task: &Ident, sender: Core) -> Ident { Ident::new( &format!("{}_S{}_FQ", task.to_string(), sender), Span::call_site(), ) } pub fn impl_mutex( extra: &Extra, cfgs: &[Attribute], cfg_core: Option<&TokenStream2>, resources_prefix: bool, name: &Ident, ty: TokenStream2, ceiling: u8, ptr: TokenStream2, ) -> TokenStream2 { let (path, priority) = if resources_prefix { (quote!(resources::#name), quote!(self.priority())) } else { (quote!(#name), quote!(self.priority)) }; let device = extra.device; quote!( #(#cfgs)* #cfg_core impl<'a> rtfm::Mutex for #path<'a> { type T = #ty; #[inline(always)] fn lock<R>(&mut self, f: impl FnOnce(&mut #ty) -> R) -> R { const CEILING: u8 = #ceiling; unsafe { rtfm::export::lock( #ptr, #priority, CEILING, #device::NVIC_PRIO_BITS, f, ) } } } ) } pub fn init_barrier(initializer: Core) -> Ident { Ident::new(&format!("IB{}", initializer), Span::call_site()) } pub fn inputs_ident(task: &Ident, sender: Core) -> Ident { Ident::new(&format!("{}_S{}_INPUTS", task, sender), Span::call_site()) } pub fn instants_ident(task: &Ident, sender: Core) -> Ident { 
Ident::new(&format!("{}_S{}_INSTANTS", task, sender), Span::call_site()) } pub fn interrupt_ident(core: Core, cores: u8) -> Ident { let span = Span::call_site(); if cores == 1 { Ident::new("Interrupt", span) } else { Ident::new(&format!("Interrupt_{}", core), span) } } pub fn is_exception(name: &Ident) -> bool { let s = name.to_string(); match &*s { "MemoryManagement" | "BusFault" | "UsageFault" | "SecureFault" | "SVCall" | "DebugMonitor" | "PendSV" | "SysTick" => true, _ => false, } } pub fn late_resources_ident(init: &Ident) -> Ident { Ident::new( &format!("{}LateResources", init.to_string()), Span::call_site(), ) } fn link_section_index() -> usize { static INDEX: AtomicUsize = AtomicUsize::new(0); INDEX.fetch_add(1, Ordering::Relaxed) }
pub fn link_section_uninit(core: Option<Core>) -> Option<TokenStream2> { let section = if let Some(core) = core { let index = link_section_index(); if cfg!(feature = "homogeneous") { format!(".uninit_{}.rtfm{}", core, index) } else { format!(".uninit.rtfm{}", index) } } else { if cfg!(feature = "heterogeneous") { return None; } format!(".uninit.rtfm{}", link_section_index()) }; Some(quote!(#[link_section = #section])) } pub fn locals_ident(ctxt: Context, app: &App) -> Ident { let mut s = match ctxt { Context::Init(core) => app.inits[&core].name.to_string(), Context::Idle(core) => app.idles[&core].name.to_string(), Context::HardwareTask(ident) | Context::SoftwareTask(ident) => ident.to_string(), }; s.push_str("Locals"); Ident::new(&s, Span::call_site()) } pub fn rendezvous_ident(core: Core) -> Ident { Ident::new(&format!("RV{}", core), Span::call_site()) } pub fn regroup_inputs( inputs: &[PatType], ) -> ( Vec<TokenStream2>, TokenStream2, Vec<TokenStream2>, TokenStream2, ) { if inputs.len() == 1 { let ty = &inputs[0].ty; ( vec![quote!(_0: #ty)], quote!(_0), vec![quote!(_0)], quote!(#ty), ) } else { let mut args = vec![]; let mut pats = vec![]; let mut tys = vec![]; for (i, input) in inputs.iter().enumerate() { let i = Ident::new(&format!("_{}", i), Span::call_site()); let ty = &input.ty; args.push(quote!(#i: #ty)); pats.push(quote!(#i)); tys.push(quote!(#ty)); } let tupled = { let pats = pats.clone(); quote!((#(#pats,)*)) }; let ty = quote!((#(#tys,)*)); (args, tupled, pats, ty) } } pub fn resources_ident(ctxt: Context, app: &App) -> Ident { let mut s = match ctxt { Context::Init(core) => app.inits[&core].name.to_string(), Context::Idle(core) => app.idles[&core].name.to_string(), Context::HardwareTask(ident) | Context::SoftwareTask(ident) => ident.to_string(), }; s.push_str("Resources"); Ident::new(&s, Span::call_site()) } pub fn rq_ident(receiver: Core, priority: u8, sender: Core) -> Ident { Ident::new( &format!("R{}_P{}_S{}_RQ", receiver, priority, sender), 
Span::call_site(), ) } pub fn schedule_ident(name: &Ident, sender: Core) -> Ident { Ident::new( &format!("schedule_{}_S{}", name.to_string(), sender), Span::call_site(), ) } pub fn schedule_t_ident(core: Core) -> Ident { Ident::new(&format!("T{}", core), Span::call_site()) } pub fn spawn_barrier(receiver: Core) -> Ident { Ident::new(&format!("SB{}", receiver), Span::call_site()) } pub fn spawn_ident(name: &Ident, sender: Core) -> Ident { Ident::new( &format!("spawn_{}_S{}", name.to_string(), sender), Span::call_site(), ) } pub fn spawn_t_ident(receiver: Core, priority: u8, sender: Core) -> Ident { Ident::new( &format!("R{}_P{}_S{}_T", receiver, priority, sender), Span::call_site(), ) } pub fn suffixed(name: &str, core: u8) -> Ident { let span = Span::call_site(); if cfg!(feature = "homogeneous") { Ident::new(&format!("{}_{}", name, core), span) } else { Ident::new(name, span) } } pub fn tq_ident(core: Core) -> Ident { Ident::new(&format!("TQ{}", core), Span::call_site()) }
pub fn link_section(section: &str, core: Core) -> Option<TokenStream2> { if cfg!(feature = "homogeneous") { let section = format!(".{}_{}.rtfm{}", section, core, link_section_index()); Some(quote!(#[link_section = #section])) } else { None } }
function_block-full_function
[ { "content": "pub fn xpend(_core: u8, _interrupt: impl Nr) {}\n\n\n\n/// Fake monotonic timer\n\npub struct MT;\n\n\n\nimpl Monotonic for MT {\n\n type Instant = Instant;\n\n\n\n fn ratio() -> Fraction {\n\n Fraction {\n\n numerator: 1,\n\n denominator: 1,\n\n }\n\n ...
Rust
src/cache/cache.rs
ELCHILEN0/memcached
f0291bdddd050b2cb46014e1364a435f48ed7c50
use cache::key::Key; use cache::value::Value; use cache::data_entry::DataEntry; use cache::storage_structure::CacheStorageStructure; use cache::replacement_policy::CacheReplacementPolicy; use cache::error::CacheError; pub struct CacheMetrics { pub evictions: u64, pub hit_count_get: u64, pub hit_count_set: u64, pub hit_count_delete: u64, pub miss_count_get: u64, pub miss_count_set: u64, pub miss_count_delete: u64, } pub struct Cache<T, R> { pub capacity: usize, pub item_lifetime: u64, pub max_key_len: usize, pub max_val_len: usize, pub storage_structure: T, pub replacement_policy: R, pub metrics: CacheMetrics, } impl CacheMetrics { pub fn new() -> CacheMetrics { CacheMetrics { evictions: 0, hit_count_get: 0, hit_count_set: 0, hit_count_delete: 0, miss_count_get: 0, miss_count_set: 0, miss_count_delete: 0, } } } impl <T: CacheStorageStructure, R: CacheReplacementPolicy> Cache<T, R> { pub fn new(capacity: usize, storage_structure: T, replacement_policy: R) -> Cache<T, R> { Cache { capacity: capacity, item_lifetime: 60 * 1000, max_key_len: 256, max_val_len: 512, storage_structure: storage_structure, replacement_policy: replacement_policy, metrics: CacheMetrics::new() } } pub fn get(&mut self, key: Key) -> Option<DataEntry> { match self.storage_structure.get(key) { Some((index, entry)) => { self.replacement_policy.update(index); self.metrics.hit_count_get += 1; Some(entry) }, None => { self.metrics.miss_count_get += 1; None } } } pub fn set(&mut self, key: Key, value: Value) -> Result<(), CacheError> { let entry = DataEntry::new(key.clone(), value); let current_elem_size = match self.storage_structure.get(key) { Some((_, curr_entry)) => curr_entry.len(), None => 0, }; if current_elem_size == 0 { self.metrics.miss_count_set += 1; } else { self.metrics.hit_count_set += 1; } loop { if self.storage_structure.size() + entry.len() - current_elem_size <= self.capacity { break; } try!(self.evict_next()); self.metrics.evictions += 1; } let (index, _) = 
self.storage_structure.set(entry); self.replacement_policy.update(index); Ok(()) } pub fn remove(&mut self, key: Key) { match self.storage_structure.remove(key) { Some((index, _)) => { self.replacement_policy.remove(index); self.metrics.hit_count_delete += 1; }, None => { self.metrics.miss_count_delete += 1; }, }; } pub fn contains(&mut self, key: Key) -> bool { self.storage_structure.contains(key) } fn evict_next(&mut self) -> Result<(), CacheError> { match self.replacement_policy.evict_next() { Ok(evict_index) => { match self.storage_structure.remove_index(evict_index) { Some((_, _)) => Ok(()), None => Err(CacheError::EvictionFailure) } }, Err(err) => Err(err) } } }
use cache::key::Key; use cache::value::Value; use cache::data_entry::DataEntry; use cache::storage_structure::CacheStorageStructure; use cache::replacement_policy::CacheReplacementPolicy; use cache::error::CacheError; pub struct CacheMetrics { pub evictions: u64, pub hit_count_get: u64, pub hit_count_set: u64, pub hit_count_delete: u64, pub miss_count_get: u64, pub miss_count_set: u64, pub miss_count_delete: u64, } pub struct Cache<T, R> { pub capacity: usize, pub item_lifetime: u64, pub max_key_len: usize, pub max_val_len: usize, pub storage_structure: T, pub replacement_policy: R, pub metrics: CacheMetrics, } impl CacheMetrics { pub fn new() -> CacheMetrics { CacheMetrics { evictions: 0, hit_count_get: 0, hit_count_set: 0, hit_count_delete: 0, miss_count_get: 0, miss_count_set: 0, miss_count_delete: 0, } } } impl <T: CacheStorageStructure, R: CacheReplacementPolicy> Cache<T, R> { pub fn new(capacity: usize, storage_structure: T, replacement_policy: R) -> Cache<T, R> { Cache { capacity: capacity, item_lifetime: 60 * 1000, max_key_len: 256, max_val_len: 512, storage_structure: storage_structure, replacement_policy: replacement_policy, metrics: CacheMetrics::new() } }
pub fn set(&mut self, key: Key, value: Value) -> Result<(), CacheError> { let entry = DataEntry::new(key.clone(), value); let current_elem_size = match self.storage_structure.get(key) { Some((_, curr_entry)) => curr_entry.len(), None => 0, }; if current_elem_size == 0 { self.metrics.miss_count_set += 1; } else { self.metrics.hit_count_set += 1; } loop { if self.storage_structure.size() + entry.len() - current_elem_size <= self.capacity { break; } try!(self.evict_next()); self.metrics.evictions += 1; } let (index, _) = self.storage_structure.set(entry); self.replacement_policy.update(index); Ok(()) } pub fn remove(&mut self, key: Key) { match self.storage_structure.remove(key) { Some((index, _)) => { self.replacement_policy.remove(index); self.metrics.hit_count_delete += 1; }, None => { self.metrics.miss_count_delete += 1; }, }; } pub fn contains(&mut self, key: Key) -> bool { self.storage_structure.contains(key) } fn evict_next(&mut self) -> Result<(), CacheError> { match self.replacement_policy.evict_next() { Ok(evict_index) => { match self.storage_structure.remove_index(evict_index) { Some((_, _)) => Ok(()), None => Err(CacheError::EvictionFailure) } }, Err(err) => Err(err) } } }
pub fn get(&mut self, key: Key) -> Option<DataEntry> { match self.storage_structure.get(key) { Some((index, entry)) => { self.replacement_policy.update(index); self.metrics.hit_count_get += 1; Some(entry) }, None => { self.metrics.miss_count_get += 1; None } } }
function_block-full_function
[ { "content": "// TODO: This will eventually be removed once a client is implemented, for now this exists for the purposes of telnet\n\npub fn parse_command<T: CacheStorageStructure, R: CacheReplacementPolicy>(command: &str, cache: &mut Cache<T, R>) -> Option<MemPacket> {\n\n let mut iter = command.split_whit...
Rust
generator/src/block_data.rs
Redrield/feather
8614692f8e0a24979853e29d41567b5a82249831
use super::WriteExt; use byteorder::{LittleEndian, WriteBytesExt}; use failure::Error; use indexmap::IndexMap; use std::collections::HashMap; use std::fs::File; use std::io::{BufReader, BufWriter, Write}; pub const DEFAULT_STATE_ID: u16 = 1; #[derive(Clone, Debug, Deserialize, Deref, DerefMut)] pub struct BlockReport { #[serde(flatten)] pub blocks: IndexMap<String, Block>, } #[derive(Clone, Debug, Deserialize)] pub struct Block { pub states: Vec<State>, pub properties: Option<BlockProperties>, } #[derive(Clone, Debug, Deserialize, Deref, DerefMut)] pub struct BlockProperties { #[serde(flatten)] pub props: HashMap<String, Vec<String>>, } #[derive(Clone, Debug, Deserialize)] pub struct State { pub id: u16, #[serde(default)] pub default: bool, pub properties: Option<StateProperties>, } #[derive(Clone, Debug, Deserialize, Deref, DerefMut, Default)] pub struct StateProperties { #[serde(flatten)] pub props: HashMap<String, String>, } pub fn generate_mappings_file( input: &str, output: &str, native_input: &str, proto: u32, version: &str, ) -> Result<(), Error> { info!( "Generating mappings file {} using input report {} and native report {}", output, input, native_input ); let in_file = File::open(input)?; let out_file = File::create(output)?; let native_file = File::open(native_input)?; info!("Parsing data files"); let report: BlockReport = serde_json::from_reader(BufReader::new(&in_file))?; let native_report: BlockReport = serde_json::from_reader(BufReader::new(&native_file))?; info!("Parsing successful"); let mut out = BufWriter::new(&out_file); write_header(&mut out, version, proto, false)?; let mut state_bufs = vec![]; for (string_id, block) in &native_report.blocks { for state in &block.states { let mut state_buf = vec![]; let props = state.properties.clone().unwrap_or_default(); let props = props.props; let state_id = find_state_in_report(&report, string_id.as_str(), &props) .unwrap_or(DEFAULT_STATE_ID); state_buf.write_u16::<LittleEndian>(state.id)?; 
state_buf.write_u16::<LittleEndian>(state_id)?; state_bufs.push(state_buf); } } out.write_u32::<LittleEndian>(state_bufs.len() as u32)?; for buf in state_bufs { out.write_all(&buf)?; } out.flush()?; info!("Mappings file generated successfully"); Ok(()) } pub fn generate_native_mappings_file( input: &str, output: &str, proto: u32, version: &str, ) -> Result<(), Error> { info!( "Generating native mappings file {} using input report {}", output, input ); let in_file = File::open(input)?; let out_file = File::create(output)?; info!("Parsing data file"); let report: BlockReport = serde_json::from_reader(BufReader::new(&in_file))?; info!("Parsing successful"); let mut out = BufWriter::new(&out_file); write_header(&mut out, version, proto, true)?; let mut count = 0; let mut buf = vec![]; for (block_name, block) in &report.blocks { for state in &block.states { buf.write_string(block_name.as_str())?; let len = { if let Some(props) = state.properties.as_ref() { props.props.len() } else { 0 } }; buf.write_u32::<LittleEndian>(len as u32)?; if let Some(props) = state.properties.as_ref() { for (name, value) in &props.props { buf.write_string(name.as_str())?; buf.write_string(value.as_str())?; } } buf.write_u16::<LittleEndian>(state.id)?; count += 1; } } out.write_u32::<LittleEndian>(count)?; out.write_all(&buf)?; info!("Mappings file generated successfully"); Ok(()) } fn find_state_in_report( report: &BlockReport, name: &str, props: &HashMap<String, String>, ) -> Option<u16> { let block = report.blocks.get(name)?; let state = block.states.iter().find(|state| match &state.properties { None => props.is_empty(), Some(state_props) => props == &state_props.props, })?; Some(state.id) } fn write_header<W: Write>( out: &mut W, version: &str, proto: u32, native: bool, ) -> Result<(), Error> { out.write_all(b"FEATHER_BLOCK_DATA_FILE")?; out.write_string(version)?; out.write_u32::<LittleEndian>(proto)?; out.write_u8(native as u8)?; Ok(()) }
use super::WriteExt; use byteorder::{LittleEndian, WriteBytesExt}; use failure::Error; use indexmap::IndexMap; use std::collections::HashMap; use std::fs::File; use std::io::{BufReader, BufWriter, Write}; pub const DEFAULT_STATE_ID: u16 = 1; #[derive(Clone, Debug, Deserialize, Deref, DerefMut)] pub struct BlockReport { #[serde(flatten)] pub blocks: IndexMap<String, Block>, } #[derive(Clone, Debug, Deserialize)] pub struct Block { pub states: Vec<State>, pub properties: Option<BlockProperties>, } #[derive(Clone, Debug, Deserialize, Deref, DerefMut)] pub struct BlockProperties { #[serde(flatten)] pub props: HashMap<String, Vec<String>>, } #[derive(Clone, Debug, Deserialize)] pub struct State { pub id: u16, #[serde(default)] pub default: bool, pub properties: Option<StateProperties>, } #[derive(Clone, Debug, Deserialize, Deref, DerefMut, Default)] pub struct StateProperties { #[serde(flatten)] pub props: HashMap<String, String>, } pub fn generate_mappings_file( input: &str, output: &str, native_input: &str, proto: u32, version: &str, ) -> Result<(), Error> { info!( "Generating mappings file {} using input report {} and native report {}", output, input, native_input ); let in_file = File::open(input)?; let out_file = File::create(output)?; let native_file = File::open(native_input)?; info!("Parsing data files"); let report: BlockReport = serde_json::from_reader(BufReader::new(&in_file))?; let native_report: BlockReport = serde_json::from_reader(BufReader::new(&native_file))?; info!("Parsing successful"); let mut out = BufWriter::new(&out_file); write_header(&mut out, version, proto, false)?; let mut state_bufs = vec![]; for (string_id, block) in &native_report.blocks { for state in &block.states { let mut state_buf = vec![]; let props = state.properties.clone().unwrap_or_default(); let props = props.props; let state_id = find_state_in_report(&report, string_id.as_str(), &props) .unwrap_or(DEFAULT_STATE_ID); state_buf.write_u16::<LittleEndian>(state.id)?; 
state_buf.write_u16::<LittleEndian>(state_id)?; state_bufs.push(state_buf); } } out.write_u32::<LittleEndian>(state_bufs.len() as u32)?; for buf in state_bufs { out.write_all(&buf)?; } out.flush()?; info!("Mappings file generated successfully"); Ok(()) } pub fn generate_native_mappings_file( input: &str, output: &str, proto: u32, version: &str, ) -> Result<(), Error> { info!( "Generating native mappings file {} using input report {}", output, input ); let in_file = File::open(input)?; let out_file = File::create(output)?;
fn find_state_in_report( report: &BlockReport, name: &str, props: &HashMap<String, String>, ) -> Option<u16> { let block = report.blocks.get(name)?; let state = block.states.iter().find(|state| match &state.properties { None => props.is_empty(), Some(state_props) => props == &state_props.props, })?; Some(state.id) } fn write_header<W: Write>( out: &mut W, version: &str, proto: u32, native: bool, ) -> Result<(), Error> { out.write_all(b"FEATHER_BLOCK_DATA_FILE")?; out.write_string(version)?; out.write_u32::<LittleEndian>(proto)?; out.write_u8(native as u8)?; Ok(()) }
info!("Parsing data file"); let report: BlockReport = serde_json::from_reader(BufReader::new(&in_file))?; info!("Parsing successful"); let mut out = BufWriter::new(&out_file); write_header(&mut out, version, proto, true)?; let mut count = 0; let mut buf = vec![]; for (block_name, block) in &report.blocks { for state in &block.states { buf.write_string(block_name.as_str())?; let len = { if let Some(props) = state.properties.as_ref() { props.props.len() } else { 0 } }; buf.write_u32::<LittleEndian>(len as u32)?; if let Some(props) = state.properties.as_ref() { for (name, value) in &props.props { buf.write_string(name.as_str())?; buf.write_string(value.as_str())?; } } buf.write_u16::<LittleEndian>(state.id)?; count += 1; } } out.write_u32::<LittleEndian>(count)?; out.write_all(&buf)?; info!("Mappings file generated successfully"); Ok(()) }
function_block-function_prefix_line
[ { "content": "pub fn generate_mappings_file(input: &str, output: &str) -> Result<(), Error> {\n\n info!(\"Parsing data file\");\n\n let report = load_report(input)?;\n\n info!(\"Data file parsed successfully\");\n\n\n\n info!(\"Generating mappings file {}\", output);\n\n\n\n let buf = mappings::g...
Rust
src/input/wacom.rs
vlisivka/libremarkable
5f87bd1ec152fab94a2ed93b41ee4f1219de2a9d
use atomic::Atomic; use evdev::raw::input_event; use input::{InputDeviceState, InputEvent}; use std; use std::sync::atomic::{AtomicU16, Ordering}; use framebuffer::cgmath; use framebuffer::common::{DISPLAYHEIGHT, DISPLAYWIDTH, WACOMHEIGHT, WACOMWIDTH}; const WACOM_HSCALAR: f32 = (DISPLAYWIDTH as f32) / (WACOMWIDTH as f32); const WACOM_VSCALAR: f32 = (DISPLAYHEIGHT as f32) / (WACOMHEIGHT as f32); const EV_SYNC: u16 = 0; const EV_KEY: u16 = 1; const EV_ABS: u16 = 3; const WACOM_EVCODE_PRESSURE: u16 = 24; const WACOM_EVCODE_DISTANCE: u16 = 25; const WACOM_EVCODE_XTILT: u16 = 26; const WACOM_EVCODE_YTILT: u16 = 27; const WACOM_EVCODE_XPOS: u16 = 0; const WACOM_EVCODE_YPOS: u16 = 1; pub struct WacomState { last_x: AtomicU16, last_y: AtomicU16, last_xtilt: AtomicU16, last_ytilt: AtomicU16, last_dist: AtomicU16, last_pressure: AtomicU16, last_tool: Atomic<Option<WacomPen>>, } impl ::std::default::Default for WacomState { fn default() -> Self { WacomState { last_x: AtomicU16::new(0), last_y: AtomicU16::new(0), last_xtilt: AtomicU16::new(0), last_ytilt: AtomicU16::new(0), last_dist: AtomicU16::new(0), last_pressure: AtomicU16::new(0), last_tool: Atomic::new(None), } } } #[repr(u16)] #[derive(PartialEq, Copy, Clone, Debug)] pub enum WacomPen { ToolPen = 320, ToolRubber = 321, Touch = 330, Stylus = 331, Stylus2 = 332, } #[derive(PartialEq, Copy, Clone)] pub enum WacomEventType { InstrumentChange, Hover, Draw, Unknown, } #[derive(PartialEq, Copy, Clone)] pub enum WacomEvent { InstrumentChange { pen: WacomPen, state: bool, }, Hover { position: cgmath::Point2<f32>, distance: u16, tilt: cgmath::Vector2<u16>, }, Draw { position: cgmath::Point2<f32>, pressure: u16, tilt: cgmath::Vector2<u16>, }, Unknown, } pub fn decode(ev: &input_event, outer_state: &InputDeviceState) -> Option<InputEvent> { let state = match outer_state { InputDeviceState::WacomState(ref state_arc) => state_arc, _ => unreachable!(), }; match ev._type { EV_SYNC => match state.last_tool.load(Ordering::Relaxed) { 
Some(WacomPen::ToolPen) => Some(InputEvent::WacomEvent { event: WacomEvent::Hover { position: cgmath::Point2 { x: (f32::from(state.last_x.load(Ordering::Relaxed)) * WACOM_HSCALAR), y: (f32::from(state.last_y.load(Ordering::Relaxed)) * WACOM_VSCALAR), }, distance: state.last_dist.load(Ordering::Relaxed) as u16, tilt: cgmath::Vector2 { x: state.last_xtilt.load(Ordering::Relaxed), y: state.last_ytilt.load(Ordering::Relaxed), }, }, }), Some(WacomPen::Touch) => Some(InputEvent::WacomEvent { event: WacomEvent::Draw { position: cgmath::Point2 { x: (f32::from(state.last_x.load(Ordering::Relaxed)) * WACOM_HSCALAR), y: (f32::from(state.last_y.load(Ordering::Relaxed)) * WACOM_VSCALAR), }, pressure: state.last_pressure.load(Ordering::Relaxed), tilt: cgmath::Vector2 { x: state.last_xtilt.load(Ordering::Relaxed), y: state.last_ytilt.load(Ordering::Relaxed), }, }, }), _ => None, }, EV_KEY => { /* key (device detected - device out of range etc.) */ if ev.code < WacomPen::ToolPen as u16 || ev.code > WacomPen::Stylus2 as u16 { return None; } let pen: WacomPen = unsafe { std::mem::transmute_copy(&ev.code) }; state.last_tool.store(Some(pen), Ordering::Relaxed); Some(InputEvent::WacomEvent { event: WacomEvent::InstrumentChange { pen, state: ev.value != 0, }, }) } EV_ABS => { match ev.code { WACOM_EVCODE_DISTANCE => { if state.last_pressure.load(Ordering::Relaxed) == 0 { state.last_dist.store(ev.value as u16, Ordering::Relaxed); state .last_tool .store(Some(WacomPen::ToolPen), Ordering::Relaxed); } else { state .last_pressure .fetch_add(ev.value as u16, Ordering::Relaxed); state .last_tool .store(Some(WacomPen::Touch), Ordering::Relaxed); } } WACOM_EVCODE_XTILT => { state.last_xtilt.store(ev.value as u16, Ordering::Relaxed); } WACOM_EVCODE_YTILT => { state.last_ytilt.store(ev.value as u16, Ordering::Relaxed); } WACOM_EVCODE_PRESSURE => { state .last_pressure .store(ev.value as u16, Ordering::Relaxed);; } WACOM_EVCODE_XPOS => { let val = ev.value as u16; state.last_y.store(WACOMHEIGHT - 
val, Ordering::Relaxed); } WACOM_EVCODE_YPOS => { state.last_x.store(ev.value as u16, Ordering::Relaxed); } _ => { debug!( "Unknown absolute event code for Wacom [type: {0} code: {1} value: {2}]", ev._type, ev.code, ev.value ); } } None } _ => { debug!( "Unknown event TYPE for Wacom [type: {0} code: {1} value: {2}]", ev._type, ev.code, ev.value ); None } } }
use atomic::Atomic; use evdev::raw::input_event; use input::{InputDeviceState, InputEvent}; use std; use std::sync::atomic::{AtomicU16, Ordering}; use framebuffer::cgmath; use framebuffer::common::{DISPLAYHEIGHT, DISPLAYWIDTH, WACOMHEIGHT, WACOMWIDTH}; const WACOM_HSCALAR: f32 = (DISPLAYWIDTH as f32) / (WACOMWIDTH as f32); const WACOM_VSCALAR: f32 = (DISPLAYHEIGHT as f32) / (WACOMHEIGHT as f32); const EV_SYNC: u16 = 0; const EV_KEY: u16 = 1; const EV_ABS: u16 = 3; const WACOM_EVCODE_PRESSURE: u16 = 24; const WACOM_EVCODE_DISTANCE: u16 = 25; const WACOM_EVCODE_XTILT: u16 = 26; const WACOM_EVCODE_YTILT: u16 = 27; const WACOM_EVCODE_XPOS: u16 = 0; const WACOM_EVCODE_YPOS: u16 = 1; pub struct WacomState { last_x: AtomicU16, last_y: AtomicU16, last_xtilt: AtomicU16, last_ytilt: AtomicU16, last_dist: AtomicU16, last_pressure: AtomicU16, last_tool: Atomic<Option<WacomPen>>, } impl ::std::default::Default for WacomState {
} #[repr(u16)] #[derive(PartialEq, Copy, Clone, Debug)] pub enum WacomPen { ToolPen = 320, ToolRubber = 321, Touch = 330, Stylus = 331, Stylus2 = 332, } #[derive(PartialEq, Copy, Clone)] pub enum WacomEventType { InstrumentChange, Hover, Draw, Unknown, } #[derive(PartialEq, Copy, Clone)] pub enum WacomEvent { InstrumentChange { pen: WacomPen, state: bool, }, Hover { position: cgmath::Point2<f32>, distance: u16, tilt: cgmath::Vector2<u16>, }, Draw { position: cgmath::Point2<f32>, pressure: u16, tilt: cgmath::Vector2<u16>, }, Unknown, } pub fn decode(ev: &input_event, outer_state: &InputDeviceState) -> Option<InputEvent> { let state = match outer_state { InputDeviceState::WacomState(ref state_arc) => state_arc, _ => unreachable!(), }; match ev._type { EV_SYNC => match state.last_tool.load(Ordering::Relaxed) { Some(WacomPen::ToolPen) => Some(InputEvent::WacomEvent { event: WacomEvent::Hover { position: cgmath::Point2 { x: (f32::from(state.last_x.load(Ordering::Relaxed)) * WACOM_HSCALAR), y: (f32::from(state.last_y.load(Ordering::Relaxed)) * WACOM_VSCALAR), }, distance: state.last_dist.load(Ordering::Relaxed) as u16, tilt: cgmath::Vector2 { x: state.last_xtilt.load(Ordering::Relaxed), y: state.last_ytilt.load(Ordering::Relaxed), }, }, }), Some(WacomPen::Touch) => Some(InputEvent::WacomEvent { event: WacomEvent::Draw { position: cgmath::Point2 { x: (f32::from(state.last_x.load(Ordering::Relaxed)) * WACOM_HSCALAR), y: (f32::from(state.last_y.load(Ordering::Relaxed)) * WACOM_VSCALAR), }, pressure: state.last_pressure.load(Ordering::Relaxed), tilt: cgmath::Vector2 { x: state.last_xtilt.load(Ordering::Relaxed), y: state.last_ytilt.load(Ordering::Relaxed), }, }, }), _ => None, }, EV_KEY => { /* key (device detected - device out of range etc.) 
*/ if ev.code < WacomPen::ToolPen as u16 || ev.code > WacomPen::Stylus2 as u16 { return None; } let pen: WacomPen = unsafe { std::mem::transmute_copy(&ev.code) }; state.last_tool.store(Some(pen), Ordering::Relaxed); Some(InputEvent::WacomEvent { event: WacomEvent::InstrumentChange { pen, state: ev.value != 0, }, }) } EV_ABS => { match ev.code { WACOM_EVCODE_DISTANCE => { if state.last_pressure.load(Ordering::Relaxed) == 0 { state.last_dist.store(ev.value as u16, Ordering::Relaxed); state .last_tool .store(Some(WacomPen::ToolPen), Ordering::Relaxed); } else { state .last_pressure .fetch_add(ev.value as u16, Ordering::Relaxed); state .last_tool .store(Some(WacomPen::Touch), Ordering::Relaxed); } } WACOM_EVCODE_XTILT => { state.last_xtilt.store(ev.value as u16, Ordering::Relaxed); } WACOM_EVCODE_YTILT => { state.last_ytilt.store(ev.value as u16, Ordering::Relaxed); } WACOM_EVCODE_PRESSURE => { state .last_pressure .store(ev.value as u16, Ordering::Relaxed);; } WACOM_EVCODE_XPOS => { let val = ev.value as u16; state.last_y.store(WACOMHEIGHT - val, Ordering::Relaxed); } WACOM_EVCODE_YPOS => { state.last_x.store(ev.value as u16, Ordering::Relaxed); } _ => { debug!( "Unknown absolute event code for Wacom [type: {0} code: {1} value: {2}]", ev._type, ev.code, ev.value ); } } None } _ => { debug!( "Unknown event TYPE for Wacom [type: {0} code: {1} value: {2}]", ev._type, ev.code, ev.value ); None } } }
fn default() -> Self { WacomState { last_x: AtomicU16::new(0), last_y: AtomicU16::new(0), last_xtilt: AtomicU16::new(0), last_ytilt: AtomicU16::new(0), last_dist: AtomicU16::new(0), last_pressure: AtomicU16::new(0), last_tool: Atomic::new(None), } }
function_block-full_function
[ { "content": "struct thrinit {\n\n int sid;\n\n int tid;\n\n int *data;\n\n};\n\n\n\n\n\nextern \"C\" {\n\n #include \"libremarkable/lib.h\"\n\n #include \"libremarkable/bitmap.h\"\n\n #include \"libremarkable/shapes.h\"\n\n}\n\n\n\n#define BITS_PER_LONG (sizeof(long) * 8)\n\n#define NBITS(x) ((((x)...
Rust
crypto/src/verify.rs
AllSafeCybercurity/RClient
88aa5fe784621041b05038ae62139398a34b74bc
pub trait USizeExt { fn constrain_value(&self) -> usize; } pub trait SliceExt { fn constrain_value(&self) -> usize; } impl USizeExt for usize { fn constrain_value(&self) -> usize { *self } } impl<T: AsRef<[u8]>> SliceExt for T { fn constrain_value(&self) -> usize { self.as_ref().len() } } #[macro_export] macro_rules! verify_keygen { ($size:expr => $buf:expr) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $buf.constrain_value() != $size { Err("Invalid buffer size") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! verify_auth { ($key:expr => [$key_size:expr], => [$buf:expr, $tag_size:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $buf.constrain_value() < $tag_size { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! verify_encrypt { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_size:expr], $plain:expr => [$buf:expr, $plain_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_size { Err("Invalid nonce length") } else if $plain.constrain_value() > $plain_limit { Err("Too much data") } else if $plain.constrain_value() > $buf.constrain_value() { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! 
verify_decrypt { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_size:expr], $cipher:expr => [$buf:expr, $cipher_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_size { Err("Invalid nonce length") } else if $cipher.constrain_value() > $cipher_limit { Err("Too much data") } else if $cipher.constrain_value() > $buf.constrain_value() { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! verify_seal { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_const:expr], $plain:expr => [$buf:expr, $plain_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_const { Err("Invalid nonce length") } else if $plain.constrain_value() > $plain_limit { Err("Too much data") } else if $buf.constrain_value() < $plain.constrain_value() + CHACHAPOLY_TAG { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! 
verify_open { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_size:expr], $cipher:expr => [$buf:expr, $tag_size:expr, $cipher_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_size { Err("Invalid nonce length") } else if $cipher.constrain_value() > $cipher_limit { Err("Too much data") } else if $cipher.constrain_value() < $tag_size { return Err($crate::Error::InvalidData.into()); } else if $buf.constrain_value() + $tag_size < $cipher.constrain_value() { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; }
pub trait USizeExt { fn constrain_value(&self) -> usize; } pub trait SliceExt { fn constrain_value(&self) -> usize; } impl USizeExt for usize { fn constrain_value(&self) -> usize { *self } } impl<T: AsRef<[u8]>> SliceExt for T { fn constrain_value(&self) -> usize { self.as_ref().len() } } #[macro_export] macro_rules! verify_keygen { ($size:expr => $buf:expr) => {{ #[allow(unused_imports)] use $crate::v
("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! verify_encrypt { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_size:expr], $plain:expr => [$buf:expr, $plain_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_size { Err("Invalid nonce length") } else if $plain.constrain_value() > $plain_limit { Err("Too much data") } else if $plain.constrain_value() > $buf.constrain_value() { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! verify_decrypt { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_size:expr], $cipher:expr => [$buf:expr, $cipher_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_size { Err("Invalid nonce length") } else if $cipher.constrain_value() > $cipher_limit { Err("Too much data") } else if $cipher.constrain_value() > $buf.constrain_value() { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! 
verify_seal { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_const:expr], $plain:expr => [$buf:expr, $plain_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_const { Err("Invalid nonce length") } else if $plain.constrain_value() > $plain_limit { Err("Too much data") } else if $buf.constrain_value() < $plain.constrain_value() + CHACHAPOLY_TAG { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! verify_open { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_size:expr], $cipher:expr => [$buf:expr, $tag_size:expr, $cipher_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_size { Err("Invalid nonce length") } else if $cipher.constrain_value() > $cipher_limit { Err("Too much data") } else if $cipher.constrain_value() < $tag_size { return Err($crate::Error::InvalidData.into()); } else if $buf.constrain_value() + $tag_size < $cipher.constrain_value() { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; }
erify::{SliceExt, USizeExt}; let error = if $buf.constrain_value() != $size { Err("Invalid buffer size") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! verify_auth { ($key:expr => [$key_size:expr], => [$buf:expr, $tag_size:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $buf.constrain_value() < $tag_size { Err
random
[ { "content": "/// A Hash interface\n\npub trait Hash {\n\n /// Get the information block that describes the hash\n\n fn info(&self) -> HashInfo;\n\n /// hashes data and returns the hash length. `buf` contains the outgoing hashed data. \n\n fn hash(&self, buf: &mut [u8], data: &[u8]) -> Result<usize...
Rust
src/vm/mod.rs
bpandreotti/monkey-rust
970b808530495d94d1bb82a3787c198bcef3b7c7
#[cfg(test)] mod tests; use crate::builtins::{self, BuiltinFn}; use crate::compiler::code::*; use crate::error::{MonkeyError, MonkeyResult, RuntimeError::*}; use crate::lexer::token::Token; use crate::object::*; use std::collections::HashMap; const STACK_SIZE: usize = 2048; pub const GLOBALS_SIZE: usize = 65536; struct Frame { instructions: Instructions, free_vars: Vec<Object>, pc: usize, base_pointer: usize, } struct FrameStack(Vec<Frame>); impl FrameStack { fn top(&self) -> &Frame { self.0.last().expect("No frames in frame stack") } fn top_mut(&mut self) -> &mut Frame { self.0.last_mut().expect("No frames in frame stack") } fn push(&mut self, frame: Frame) { self.0.push(frame); } fn pop(&mut self) { self.0.pop(); } fn read_u16_from_top(&mut self) -> u16 { let value = read_u16(&self.top().instructions.0[self.top().pc + 1..]); self.top_mut().pc += 2; value } fn read_u8_from_top(&mut self) -> u8 { let value = self.top().instructions.0[self.top().pc + 1]; self.top_mut().pc += 1; value } } pub struct VM { stack: Vec<Object>, sp: usize, pub globals: Box<[Object]>, } impl VM { pub fn new() -> VM { let mut globals = Vec::with_capacity(GLOBALS_SIZE); globals.resize(GLOBALS_SIZE, Object::Nil); let globals = globals.into_boxed_slice(); VM { stack: Vec::with_capacity(STACK_SIZE), sp: 0, globals, } } pub fn run(&mut self, bytecode: Bytecode) -> MonkeyResult<()> { let mut frame_stack = FrameStack({ let root_frame = Frame { instructions: bytecode.instructions, free_vars: Vec::new(), pc: 0, base_pointer: 0, }; vec![root_frame] }); let constants = bytecode.constants; loop { if frame_stack.top().pc >= frame_stack.top().instructions.0.len() { if frame_stack.0.len() == 1 { break; } else { panic!("Reached end of instructions in non-root frame") } } use OpCode::*; let op = OpCode::from_byte(frame_stack.top().instructions.0[frame_stack.top().pc]); match op { OpConstant => { let constant_index = frame_stack.read_u16_from_top() as usize; self.push(constants[constant_index].clone())?; } 
OpPop => { self.pop()?; } OpAdd | OpSub | OpMul | OpDiv | OpExponent | OpModulo | OpEquals | OpNotEquals | OpGreaterThan | OpGreaterEq => self.execute_binary_operation(op)?, OpTrue => self.push(Object::Boolean(true))?, OpFalse => self.push(Object::Boolean(false))?, OpPrefixMinus | OpPrefixNot => self.execute_prefix_operation(op)?, OpJumpNotTruthy => { let pos = frame_stack.read_u16_from_top() as usize; if !Object::is_truthy(&self.pop()?) { frame_stack.top_mut().pc = pos - 1; } } OpJump => { let pos = frame_stack.read_u16_from_top() as usize; frame_stack.top_mut().pc = pos - 1; } OpNil => self.push(Object::Nil)?, OpSetGlobal => { let index = frame_stack.read_u16_from_top() as usize; self.globals[index] = self.pop()?.clone(); } OpGetGlobal => { let index = frame_stack.read_u16_from_top() as usize; self.push(self.globals[index].clone())?; } OpSetLocal => { let index = frame_stack.read_u8_from_top() as usize; self.stack[frame_stack.top().base_pointer + index] = self.pop()?; } OpGetLocal => { let index = frame_stack.read_u8_from_top() as usize; self.push(self.stack[frame_stack.top().base_pointer + index].clone())? 
} OpArray => { let num_elements = frame_stack.read_u16_from_top() as usize; let arr = self.take(num_elements); self.push(Object::Array(Box::new(arr)))?; } OpHash => { let num_elements = frame_stack.read_u16_from_top() as usize; let entries = self.take(2 * num_elements); let mut map = HashMap::new(); for i in 0..num_elements { let key = &entries[i * 2]; let value = &entries[i * 2 + 1]; let hashable = HashableObject::from_object(key.clone()) .ok_or_else(|| MonkeyError::Vm(HashKeyTypeError(key.type_str())))?; map.insert(hashable, value.clone()); } self.push(Object::Hash(Box::new(map)))?; } OpIndex => { let index = self.pop()?; let obj = self.pop()?; self.execute_index_operation(obj, index)?; } OpCall => { let num_args = frame_stack.read_u8_from_top() as usize; let func = self.stack.remove(self.sp - 1 - num_args); self.sp -= 1; match func { Object::Closure(c) => { self.execute_closure_call(&mut frame_stack, *c, num_args)?; continue; } Object::Builtin(f) => self.execute_builtin_call(f, num_args)?, _ => return Err(MonkeyError::Vm(NotCallable(func.type_str()))), } } OpReturn => { let returned_value = self.pop()?; self.sp = frame_stack.top().base_pointer; self.stack.truncate(self.sp); frame_stack.pop(); self.push(returned_value)?; continue; } OpGetBuiltin => { let index = frame_stack.read_u8_from_top() as usize; let builtin = builtins::ALL_BUILTINS[index].1.clone(); self.push(Object::Builtin(builtin))?; } OpClosure => { let constant_index = frame_stack.read_u16_from_top() as usize; let num_free_vars = frame_stack.read_u8_from_top() as usize; let func = constants[constant_index].clone(); let free_vars = self.take(num_free_vars); if let Object::CompiledFunc(func) = func { let closure = Closure { func: *func, free_vars, }; self.push(Object::Closure(Box::new(closure)))?; } else { panic!("Trying to build closure with non-function object"); } }, OpGetFree => { let index = frame_stack.read_u8_from_top() as usize; self.push(frame_stack.top().free_vars[index].clone())?; }, } 
frame_stack.top_mut().pc += 1; } Ok(()) } fn push(&mut self, obj: Object) -> MonkeyResult<()> { if self.sp >= STACK_SIZE { Err(MonkeyError::Vm(StackOverflow)) } else { self.stack.push(obj); self.sp += 1; Ok(()) } } pub fn pop(&mut self) -> MonkeyResult<Object> { if self.sp == 0 { Err(MonkeyError::Vm(StackUnderflow)) } else { self.sp -= 1; Ok(self.stack.pop().unwrap()) } } fn take(&mut self, num_items: usize) -> Vec<Object> { let v = self.stack.split_off(self.sp - num_items); self.sp -= num_items; v } fn execute_binary_operation(&mut self, operation: OpCode) -> MonkeyResult<()> { use Object::*; let right = self.pop()?; let left = self.pop()?; match (left, operation, right) { (Integer(l), op, Integer(r)) => self.execute_integer_operation(op, l, r), (Boolean(l), op, Boolean(r)) => self.execute_bool_operation(op, l, r), (Str(l), OpCode::OpAdd, Str(r)) => self.execute_str_concat(&l, &r), (l, op, r) => Err(MonkeyError::Vm(InfixTypeError( l.type_str(), op.equivalent_token().unwrap(), r.type_str(), ))), } } fn execute_integer_operation(&mut self, op: OpCode, left: i64, right: i64) -> MonkeyResult<()> { let result = match op { OpCode::OpAdd => Object::Integer(left + right), OpCode::OpSub => Object::Integer(left - right), OpCode::OpMul => Object::Integer(left * right), OpCode::OpDiv if right == 0 => return Err(MonkeyError::Vm(DivOrModByZero)), OpCode::OpDiv => Object::Integer(left / right), OpCode::OpExponent if right < 0 => return Err(MonkeyError::Vm(NegativeExponent)), OpCode::OpExponent => Object::Integer(left.pow(right as u32)), OpCode::OpModulo if right == 0 => return Err(MonkeyError::Vm(DivOrModByZero)), OpCode::OpModulo => Object::Integer(left % right), OpCode::OpEquals => Object::Boolean(left == right), OpCode::OpNotEquals => Object::Boolean(left != right), OpCode::OpGreaterThan => Object::Boolean(left > right), OpCode::OpGreaterEq => Object::Boolean(left >= right), _ => unreachable!(), }; self.push(result)?; Ok(()) } fn execute_bool_operation(&mut self, op: OpCode, 
left: bool, right: bool) -> MonkeyResult<()> { let result = match op { OpCode::OpEquals => Object::Boolean(left == right), OpCode::OpNotEquals => Object::Boolean(left != right), _ => { return Err(MonkeyError::Vm(InfixTypeError( "bool", op.equivalent_token().unwrap(), "bool", ))) } }; self.push(result)?; Ok(()) } fn execute_str_concat(&mut self, left: &str, right: &str) -> MonkeyResult<()> { self.push(Object::Str(Box::new(left.to_string() + right))) } fn execute_prefix_operation(&mut self, op: OpCode) -> MonkeyResult<()> { let right = self.pop()?; match op { OpCode::OpPrefixMinus => { if let Object::Integer(i) = right { self.push(Object::Integer(-i))?; } else { return Err(MonkeyError::Vm(PrefixTypeError( Token::Minus, right.type_str(), ))); } } OpCode::OpPrefixNot => { let value = !right.is_truthy(); self.push(Object::Boolean(value))?; } _ => unreachable!(), } Ok(()) } fn execute_index_operation(&mut self, obj: Object, index: Object) -> MonkeyResult<()> { let result = match (obj, index) { (Object::Array(vector), Object::Integer(i)) => { if i < 0 || i >= vector.len() as i64 { Err(IndexOutOfBounds(i)) } else { Ok(vector.into_iter().nth(i as usize).unwrap()) } } (Object::Array(_), other) => Err(IndexTypeError(other.type_str())), (Object::Hash(map), key) => { let key_type = key.type_str(); let key = HashableObject::from_object(key.clone()) .ok_or(MonkeyError::Vm(HashKeyTypeError(key_type)))?; let value = map.get(&key).ok_or(MonkeyError::Vm(KeyError(key)))?; Ok(value.clone()) } (Object::Str(s), Object::Integer(i)) => { let chars = s.chars().collect::<Vec<_>>(); if i < 0 || i >= chars.len() as i64 { Err(IndexOutOfBounds(i)) } else { Ok(Object::Str(Box::new(chars[i as usize].to_string()))) } } (Object::Str(_), other) => Err(IndexTypeError(other.type_str())), (other, _) => Err(IndexingWrongType(other.type_str())), }; let result = result.map_err(MonkeyError::Vm)?; self.push(result) } fn execute_closure_call( &mut self, frame_stack: &mut FrameStack, closure: Closure, 
num_args: usize, ) -> MonkeyResult<()> { if closure.func.num_params as usize != num_args { return Err(MonkeyError::Vm(WrongNumberOfArgs( closure.func.num_params as usize, num_args, ))); } frame_stack.top_mut().pc += 1; let new_frame = Frame { instructions: closure.func.instructions, free_vars: closure.free_vars, pc: 0, base_pointer: self.sp - num_args, }; frame_stack.push(new_frame); self.sp += closure.func.num_locals as usize; self.stack.resize(self.sp, Object::Nil); Ok(()) } fn execute_builtin_call(&mut self, func: BuiltinFn, num_args: usize) -> MonkeyResult<()> { let args = self.take(num_args); let result = func.0(args).map_err(MonkeyError::Vm)?; self.push(result) } }
#[cfg(test)] mod tests; use crate::builtins::{self, BuiltinFn}; use crate::compiler::code::*; use crate::error::{MonkeyError, MonkeyResult, RuntimeError::*}; use crate::lexer::token::Token; use crate::object::*; use std::collections::HashMap; const STACK_SIZE: usize = 2048; pub const GLOBALS_SIZE: usize = 65536; struct Frame { instructions: Instructions, free_vars: Vec<Object>, pc: usize, base_pointer: usize, } struct FrameStack(Vec<Frame>); impl FrameStack { fn top(&self) -> &Frame { self.0.last().expect("No frames in frame stack") } fn top_mut(&mut self) -> &mut Frame { self.0.last_mut().expect("No frames in frame stack") } fn push(&mut self, frame: Frame) { self.0.push(frame); } fn pop(&mut self) { self.0.pop(); } fn read_u16_from_top(&mut self) -> u16 { let value = read_u16(&self.top().instructions.0[self.top().pc + 1..]); self.top_mut().pc += 2; value } fn read_u8_from_top(&mut self) -> u8 { let value = self.top().instructions.0[self.top().pc + 1]; self.top_mut().pc += 1; value } } pub struct VM { stack: Vec<Object>, sp: usize, pub globals: Box<[Object]>, } impl VM { pub fn new() -> VM { let mut globals = Vec::with_capacity(GLOBALS_SIZE); globals.resize(GLOBALS_SIZE, Object::Nil); let globals = globals.into_boxed_slice(); VM { stack: Vec::with_capacity(STACK_SIZE), sp: 0, globals, } } pub fn run(&mut self, bytecode: Bytecode) -> MonkeyResult<()> { let mut frame_stack = FrameStack({ let root_frame = Frame { instructions: bytecode.instructions, free_vars: Vec::new(), pc: 0, base_pointer: 0, }; vec![root_frame] }); let constants = bytecode.constants; loop { if frame_stack.top().pc >= frame_stack.top().instructions.0.len() { if frame_stack.0.len() == 1 { break; } else { panic!("Reached end of instructions in non-root frame") } } use OpCode::*; let op = OpCode::from_byte(frame_stack.top().instructions.0[frame_stack.top().pc]); match op { OpConstant => { let constant_index = frame_stack.read_u16_from_top() as usize; self.push(constants[constant_index].clone())?; } 
OpPop => { self.pop()?; } OpAdd | OpSub | OpMul | OpDiv | OpExponent | OpModulo | OpEquals | OpNotEquals | OpGreaterThan | OpGreaterEq => self.execute_binary_operation(op)?, OpTrue => self.push(Object::Boolean(true))?, OpFalse => self.push(Object::Boolean(false))?, OpPrefixMinus | OpPrefixNot => self.execute_prefix_operation(op)?, OpJumpNotTruthy => { let pos = frame_stack.read_u16_from_top() as usize; if !Object::is_truthy(&self.pop()?) { frame_stack.top_mut().pc = pos - 1; } } OpJump => { let pos = frame_stack.read_u16_from_top() as usize; frame_stack.top_mut().pc = pos - 1; } OpNil => self.push(Object::Nil)?, OpSetGlobal => { let index = frame_stack.read_u16_from_top() as usize; self.globals[index] = self.pop()?.clone(); } OpGetGlobal => { let index = frame_stack.read_u16_from_top() as usize; self.push(self.globals[index].clone())?; } OpSetLocal => { let index = frame_stack.read_u8_from_top() as usize; self.stack[frame_stack.top().base_pointer + index] = self.pop()?; } OpGetLocal => { let index = frame_stack.read_u8_from_top() as usize; self.push(self.stack[frame_stack.top().base_pointer + index].clone())? 
} OpArray => { let num_elements = frame_stack.read_u16_from_top() as usize; let arr = self.take(num_elements); self.push(Object::Array(Box::new(arr)))?; } OpHash => { let num_elements = frame_stack.read_u16_from_top() as usize; let entries = self.take(2 * num_elements); let mut map = HashMap::new(); for i in 0..num_elements { let key = &entries[i * 2]; let value = &entries[i * 2 + 1]; let hashable = HashableObject::from_object(key.clone()) .ok_or_else(|| MonkeyError::Vm(HashKeyTypeError(key.type_str())))?; map.insert(hashable, value.clone()); } self.push(Object::Hash(Box::new(map)))?; } OpIndex => { let index = self.pop()?; let obj = self.pop()?; self.execute_index_operation(obj, index)?; } OpCall => { let num_args = frame_stack.read_u8_from_top() as usize; let func = self.stack.remove(self.sp - 1 - num_args); self.sp -= 1; match func { Object::Closure(c) => { self.execute_closure_call(&mut frame_stack, *c, num_args)?; continue; } Object::Builtin(f) => self.execute_builtin_call(f, num_args)?, _ => return Err(MonkeyError::Vm(NotCallable(func.type_str()))), } } OpReturn => { let returned_value = self.pop()?; self.sp = frame_stack.top().base_pointer; self.stack.truncate(self.sp); frame_stack.pop(); self.push(returned_value)?; continue; } OpGetBuiltin => { let index = frame_stack.read_u8_from_top() as usize; let builtin = builtins::ALL_BUILTINS[index].1.clone(); self.push(Object::Builtin(builtin))?; } OpClosure => { let constant_index = frame_stack.read_u16_from_top() as usize; let num_free_vars = frame_stack.read_u8_from_top() as usize; let func = constants[constant_index].clone(); let free_vars = self.take(num_free_vars); if let Object::CompiledFunc(func) = func { let closure = Closure { func: *func, free_vars, }; self.push(Object::Closure(Box::new(closure)))?; } else { panic!("Trying to build closure with non-function object"); } }, OpGetFree => { let index = frame_stack.read_u8_from_top() as usize; self.push(frame_stack.top().free_vars[index].clone())?; }, } 
frame_stack.top_mut().pc += 1; } Ok(()) } fn push(&mut self, obj: Object) -> MonkeyResult<()> { if self.sp >= STACK_SIZE { Err(MonkeyError::Vm(StackOverflow)) } else { self.stack.push(obj); self.sp += 1; Ok(()) } } pub fn pop(&mut self) -> MonkeyResult<Object> { if self.sp == 0 { Err(MonkeyError::Vm(StackUnderflow)) } else { self.sp -= 1; Ok(self.stack.pop().unwrap()) } } fn take(&mut self, num_items: usize) -> Vec<Object> { let v = self.stack.split_off(self.sp - num_items); self.sp -= num_items; v } fn execute_binary_operation(&mut self, operation: OpCode) -> MonkeyResult<()> { use Object::*; let right = self.pop()?; let left = self.pop()?; match (left, operation, right) { (Integer(l), op, Integer(r)) => self.execute_integer_operation(op, l, r), (Boolean(l), op, Boolean(r)) => self.execute_bool_operation(op, l, r), (Str(l), OpCode::OpAdd, Str(r)) => self.execute_str_concat(&l, &r), (l, op, r) => Err(MonkeyError::Vm(InfixTypeError( l.type_str(), op.equivalent_token().unwrap(), r.type_str(), ))), } } fn execute_integer_operation(&mut self, op: OpCode, left: i64, right: i64) -> MonkeyResult<()> { let result = match op { OpCode::OpAdd => Object::Integer(left + right), OpCode::OpSub => Object::Integer(left - right), OpCode::OpMul => Object::Integer(left * right), OpCode::OpDiv if right == 0 => return Err(MonkeyError::Vm(DivOrModByZero)), OpCode::OpDiv => Object::Integer(left / right), OpCode::OpExponent if right < 0 => return Err(MonkeyError::Vm(NegativeExponent)), OpCode::OpExponent => Object::Integer(left.pow(right as u32)), OpCode::OpModulo if right == 0 => return Err(MonkeyError::Vm(DivOrModByZero)), OpCode::OpModulo => Object::Integer(left % right), OpCode::OpEquals => Object::Boolean(left == right), OpCode::OpNotEquals => Object::Boolean(left != right), OpCode::OpGreaterThan => Object::Boolean(left > right), OpCode::OpGreaterEq => Object::Boolean(left >= right), _ => unreachable!(), }; self.push(result)?; Ok(()) } fn execute_bool_operation(&mut self, op: OpCode, 
left: bool, right: bool) -> MonkeyResult<()> { let result = match op { OpCode::OpEquals => Object::Boolean(left == right), OpCode::OpNotEquals => Object::Boolean(left != right), _ => { return Err(MonkeyError::Vm(InfixTypeError( "bool", op.equivalent_token().unwrap(), "bool", ))) } }; self.push(result)?; Ok(()) } fn execute_str_concat(&mut self, left: &str, right: &str) -> MonkeyResult<()> { self.push(Object::Str(Box::new(left.to_string() + right))) } fn execute_prefix_operation(&mut self, op: OpCode) -> MonkeyResult<()> { let right = self.pop()?; match op { OpCode::OpPrefixMinus => { if let Object::Integer(i) = right { self.push(Object::Integer(-i))?; } else { return Err(MonkeyError::Vm(PrefixTypeError( Token::Minus, right.type_str(), ))); } } OpCode::OpPrefixNot => { let value = !right.is_truthy(); self.push(Object::Boolean(value))?; } _ => unreachable!(), } Ok(()) } fn execute_index_operation(&mut self, obj: Object, index: Object) -> MonkeyResult<()> { let result = match (obj, index) { (Object::Array(vector), Object::Integer(i)) => { if i < 0 || i >= vector.len() as i64 { Err(IndexOutOfBounds(i)) } else { Ok(vector.into_iter().nth(i as usize).unwrap()) } } (Object::Array(_), other) => Err(IndexTypeError(other.type_str())), (Object::Hash(map), key) => { let key_type = key.type_str(); let key = HashableObject::from_object(key.clone()) .ok_or(MonkeyError::Vm(HashKeyTypeError(key_type)))?; let value = map.get(&key).ok_or(MonkeyError::Vm(KeyError(key)))?; Ok(value.clone()) } (Object::Str(s), Object::Integer(i)) => { let chars = s.chars().collect::<Vec<_>>(); if i < 0 || i >= chars.len() as i64 { Err(IndexOutOfBounds(i)) } else { Ok(Object::Str(Box::new(chars[i as usize].to_string()))) } } (Object::Str(_), other) => Err(IndexTypeError(other.type_str())), (other, _) => Err(IndexingWrongType(other.type_str())), }; let result = result.map_err(MonkeyError::Vm)?; self.push(result) } fn execute_closure_call( &mut self, frame_stack: &mut FrameStack, closure: Closure, 
num_args: usize, ) -> MonkeyResult<()> { if closure.func.num_params as usize != num_args { return Err(MonkeyError::Vm(WrongNumberOfArgs( closure.func.num_params as usize, num_args, ))); } frame_stack.top_mut().
self.stack.resize(self.sp, Object::Nil); Ok(()) } fn execute_builtin_call(&mut self, func: BuiltinFn, num_args: usize) -> MonkeyResult<()> { let args = self.take(num_args); let result = func.0(args).map_err(MonkeyError::Vm)?; self.push(result) } }
pc += 1; let new_frame = Frame { instructions: closure.func.instructions, free_vars: closure.free_vars, pc: 0, base_pointer: self.sp - num_args, }; frame_stack.push(new_frame); self.sp += closure.func.num_locals as usize;
function_block-random_span
[ { "content": "pub fn eval_index_expression(object: &Object, index: &Object) -> Result<Object, RuntimeError> {\n\n // This function is pub because the \"get\" built-in needs to call it\n\n match (object, index) {\n\n (Object::Array(vector), Object::Integer(i)) => {\n\n if *i < 0 || *i >= ...
Rust
language/vm/src/proptest_types/types.rs
w3f-community/sp-move-vm
1c94891be56ec67eb04a8d1bd21775219d526f48
use crate::{ file_format::{ FieldDefinition, IdentifierIndex, ModuleHandleIndex, SignatureToken, StructDefinition, StructFieldInformation, StructHandle, StructHandleIndex, TableIndex, TypeSignature, }, proptest_types::signature::{KindGen, SignatureTokenGen}, }; use proptest::{ collection::{vec, SizeRange}, option, prelude::*, sample::Index as PropIndex, std_facade::hash_set::HashSet, }; use std::{cmp::max, collections::BTreeSet}; #[derive(Debug)] struct TypeSignatureIndex(u16); #[derive(Debug)] pub struct StDefnMaterializeState { pub identifiers_len: usize, pub struct_handles: Vec<StructHandle>, pub new_handles: BTreeSet<(ModuleHandleIndex, IdentifierIndex)>, } impl StDefnMaterializeState { pub fn new(identifiers_len: usize, struct_handles: Vec<StructHandle>) -> Self { Self { identifiers_len, struct_handles, new_handles: BTreeSet::new(), } } fn add_struct_handle(&mut self, handle: StructHandle) -> Option<StructHandleIndex> { if self.new_handles.insert((handle.module, handle.name)) { self.struct_handles.push(handle); Some(StructHandleIndex((self.struct_handles.len() - 1) as u16)) } else { None } } fn contains_nominal_resource(&self, signature: &SignatureToken) -> bool { use SignatureToken::*; match signature { Signer => true, Struct(struct_handle_index) => { self.struct_handles[struct_handle_index.0 as usize].is_nominal_resource } StructInstantiation(struct_handle_index, type_args) => { self.struct_handles[struct_handle_index.0 as usize].is_nominal_resource || type_args.iter().any(|t| self.contains_nominal_resource(t)) } Vector(targ) => self.contains_nominal_resource(targ), Reference(token) | MutableReference(token) => self.contains_nominal_resource(token), Bool | U8 | U64 | U128 | Address | TypeParameter(_) => false, } } } #[derive(Clone, Debug)] pub struct StructHandleGen { module_idx: PropIndex, name_idx: PropIndex, is_nominal_resource: bool, type_parameters: Vec<KindGen>, } impl StructHandleGen { pub fn strategy(kind_count: impl Into<SizeRange>) -> impl 
Strategy<Value = Self> { ( any::<PropIndex>(), any::<PropIndex>(), any::<bool>(), vec(KindGen::strategy(), kind_count), ) .prop_map( |(module_idx, name_idx, is_nominal_resource, type_parameters)| Self { module_idx, name_idx, is_nominal_resource, type_parameters, }, ) } pub fn materialize(self, module_len: usize, identifiers_len: usize) -> StructHandle { let idx = max(self.module_idx.index(module_len) as TableIndex, 1); let mut type_parameters = vec![]; for type_param in self.type_parameters { type_parameters.push(type_param.materialize()); } StructHandle { module: ModuleHandleIndex(idx as TableIndex), name: IdentifierIndex(self.name_idx.index(identifiers_len) as TableIndex), is_nominal_resource: self.is_nominal_resource, type_parameters, } } } #[derive(Clone, Debug)] pub struct StructDefinitionGen { name_idx: PropIndex, is_nominal_resource: bool, type_parameters: Vec<KindGen>, is_public: bool, field_defs: Option<Vec<FieldDefinitionGen>>, } impl StructDefinitionGen { pub fn strategy( field_count: impl Into<SizeRange>, kind_count: impl Into<SizeRange>, ) -> impl Strategy<Value = Self> { ( any::<PropIndex>(), any::<bool>(), vec(KindGen::strategy(), kind_count), any::<bool>(), option::of(vec(FieldDefinitionGen::strategy(), field_count)), ) .prop_map( |(name_idx, is_nominal_resource, type_parameters, is_public, field_defs)| Self { name_idx, is_nominal_resource, type_parameters, is_public, field_defs, }, ) } pub fn materialize( self, state: &mut StDefnMaterializeState, ) -> (Option<StructDefinition>, usize) { let mut field_names = HashSet::new(); let mut fields = vec![]; match self.field_defs { None => (), Some(field_defs_gen) => { for fd_gen in field_defs_gen { let field = fd_gen.materialize(state); if field_names.insert(field.name) { fields.push(field); } } } }; let is_nominal_resource = if fields.is_empty() { self.is_nominal_resource } else { self.is_nominal_resource || fields.iter().any(|field| { let field_sig = &field.signature.0; 
state.contains_nominal_resource(field_sig) }) }; let handle = StructHandle { module: ModuleHandleIndex(0), name: IdentifierIndex(self.name_idx.index(state.identifiers_len) as TableIndex), is_nominal_resource, type_parameters: self .type_parameters .into_iter() .map(|kind| kind.materialize()) .collect(), }; match state.add_struct_handle(handle) { Some(struct_handle) => { if fields.is_empty() { ( Some(StructDefinition { struct_handle, field_information: StructFieldInformation::Native, }), 0, ) } else { let field_count = fields.len(); let field_information = StructFieldInformation::Declared(fields); ( Some(StructDefinition { struct_handle, field_information, }), field_count, ) } } None => (None, 0), } } } #[derive(Clone, Debug)] struct FieldDefinitionGen { name_idx: PropIndex, signature_gen: SignatureTokenGen, } impl FieldDefinitionGen { fn strategy() -> impl Strategy<Value = Self> { (any::<PropIndex>(), SignatureTokenGen::atom_strategy()).prop_map( |(name_idx, signature_gen)| Self { name_idx, signature_gen, }, ) } fn materialize(self, state: &StDefnMaterializeState) -> FieldDefinition { FieldDefinition { name: IdentifierIndex(self.name_idx.index(state.identifiers_len) as TableIndex), signature: TypeSignature(self.signature_gen.materialize(&state.struct_handles)), } } }
use crate::{ file_format::{ FieldDefinition, IdentifierIndex, ModuleHandleIndex, SignatureToken, StructDefinition, StructFieldInformation, StructHandle, StructHandleIndex, TableIndex, TypeSignature, }, proptest_types::signature::{KindGen, SignatureTokenGen}, }; use proptest::{ collection::{vec, SizeRange}, option, prelude::*, sample::Index as PropIndex, std_facade::hash_set::HashSet, }; use std::{cmp::max, collections::BTreeSet}; #[derive(Debug)] struct TypeSignatureIndex(u16); #[derive(Debug)] pub struct StDefnMaterializeState { pub identifiers_len: usize, pub struct_handles: Vec<StructHandle>, pub new_handles: BTreeSet<(ModuleHandleIndex, IdentifierIndex)>, } impl StDefnMaterializeState { pub fn new(identifiers_len: usize, struct_handles: Vec<StructHandle>) -> Self { Self { identifiers_len, struct_handles, new_handles: BTreeSet::new(), } } fn add_struct_handle(&mut self, handle: StructHandle) -> Option<StructHandleIndex> { if self.new_handles.insert((handle.module, handle.name)) { self.struct_handles.push(handle); Some(StructHandleIndex((self.struct_handles.len() - 1) as u16)) } else { None } } fn contains_nominal_resource(&self, signature: &SignatureToken) -> bool { use SignatureToken::*; match signature { Signer => true, Struct(struct_handle_index) => { self.struct_handles[struct_handle_index.0 as usize].is_nominal_resource } StructInstantiation(struct_handle_index, type_args) => { self.struct_handles[struct_handle_index.0 as usize].is_nominal_resource || type_args.iter().any(|t| self.contains_nominal_resource(t)) }
inal_resource, type_parameters, is_public, field_defs, }, ) } pub fn materialize( self, state: &mut StDefnMaterializeState, ) -> (Option<StructDefinition>, usize) { let mut field_names = HashSet::new(); let mut fields = vec![]; match self.field_defs { None => (), Some(field_defs_gen) => { for fd_gen in field_defs_gen { let field = fd_gen.materialize(state); if field_names.insert(field.name) { fields.push(field); } } } }; let is_nominal_resource = if fields.is_empty() { self.is_nominal_resource } else { self.is_nominal_resource || fields.iter().any(|field| { let field_sig = &field.signature.0; state.contains_nominal_resource(field_sig) }) }; let handle = StructHandle { module: ModuleHandleIndex(0), name: IdentifierIndex(self.name_idx.index(state.identifiers_len) as TableIndex), is_nominal_resource, type_parameters: self .type_parameters .into_iter() .map(|kind| kind.materialize()) .collect(), }; match state.add_struct_handle(handle) { Some(struct_handle) => { if fields.is_empty() { ( Some(StructDefinition { struct_handle, field_information: StructFieldInformation::Native, }), 0, ) } else { let field_count = fields.len(); let field_information = StructFieldInformation::Declared(fields); ( Some(StructDefinition { struct_handle, field_information, }), field_count, ) } } None => (None, 0), } } } #[derive(Clone, Debug)] struct FieldDefinitionGen { name_idx: PropIndex, signature_gen: SignatureTokenGen, } impl FieldDefinitionGen { fn strategy() -> impl Strategy<Value = Self> { (any::<PropIndex>(), SignatureTokenGen::atom_strategy()).prop_map( |(name_idx, signature_gen)| Self { name_idx, signature_gen, }, ) } fn materialize(self, state: &StDefnMaterializeState) -> FieldDefinition { FieldDefinition { name: IdentifierIndex(self.name_idx.index(state.identifiers_len) as TableIndex), signature: TypeSignature(self.signature_gen.materialize(&state.struct_handles)), } } }
Vector(targ) => self.contains_nominal_resource(targ), Reference(token) | MutableReference(token) => self.contains_nominal_resource(token), Bool | U8 | U64 | U128 | Address | TypeParameter(_) => false, } } } #[derive(Clone, Debug)] pub struct StructHandleGen { module_idx: PropIndex, name_idx: PropIndex, is_nominal_resource: bool, type_parameters: Vec<KindGen>, } impl StructHandleGen { pub fn strategy(kind_count: impl Into<SizeRange>) -> impl Strategy<Value = Self> { ( any::<PropIndex>(), any::<PropIndex>(), any::<bool>(), vec(KindGen::strategy(), kind_count), ) .prop_map( |(module_idx, name_idx, is_nominal_resource, type_parameters)| Self { module_idx, name_idx, is_nominal_resource, type_parameters, }, ) } pub fn materialize(self, module_len: usize, identifiers_len: usize) -> StructHandle { let idx = max(self.module_idx.index(module_len) as TableIndex, 1); let mut type_parameters = vec![]; for type_param in self.type_parameters { type_parameters.push(type_param.materialize()); } StructHandle { module: ModuleHandleIndex(idx as TableIndex), name: IdentifierIndex(self.name_idx.index(identifiers_len) as TableIndex), is_nominal_resource: self.is_nominal_resource, type_parameters, } } } #[derive(Clone, Debug)] pub struct StructDefinitionGen { name_idx: PropIndex, is_nominal_resource: bool, type_parameters: Vec<KindGen>, is_public: bool, field_defs: Option<Vec<FieldDefinitionGen>>, } impl StructDefinitionGen { pub fn strategy( field_count: impl Into<SizeRange>, kind_count: impl Into<SizeRange>, ) -> impl Strategy<Value = Self> { ( any::<PropIndex>(), any::<bool>(), vec(KindGen::strategy(), kind_count), any::<bool>(), option::of(vec(FieldDefinitionGen::strategy(), field_count)), ) .prop_map( |(name_idx, is_nominal_resource, type_parameters, is_public, field_defs)| Self { name_idx, is_nom
random
[ { "content": "fn struct_handle(token: &SignatureToken) -> Option<StructHandleIndex> {\n\n use SignatureToken::*;\n\n\n\n match token {\n\n Struct(sh_idx) => Some(*sh_idx),\n\n StructInstantiation(sh_idx, _) => Some(*sh_idx),\n\n Reference(token) | MutableReference(token) => struct_han...
Rust
platform/nutekt-digital/demos/raves/src/lib.rs
atomb/logue-sdk
3ba795ece7e90871e171c483c652040186800f5b
#![no_std] use panic_halt as _; use core::f32; use core::ptr; use micromath::F32Ext; pub mod dsp; pub mod mathutil; pub mod nts1; use dsp::biquad; use mathutil::*; use nts1::*; use nts1::clipsat::osc_softclipf; use nts1::platform::*; use nts1::random::osc_white; use nts1::userosc::*; use nts1::wavebank::*; #[repr(u8)] pub enum RavesFlags { None = 0, Wave0 = 1 << 1, Wave1 = 1 << 2, SubWave = 1 << 3, RingMix = 1 << 4, BitCrush = 1 << 5, Reset = 1 << 6, } #[repr(C)] pub struct RavesState { wave0: *const WaveLUT, wave1: *const WaveLUT, subwave: *const WaveLUT, phi0: f32, phi1: f32, phisub: f32, w00: f32, w01: f32, w0sub: f32, lfo: f32, lfoz: f32, dither: f32, bitres: f32, bitresrcp: f32, imperfection: f32, flags: u8, } impl RavesState { pub const fn new() -> Self { RavesState { wave0: ptr::null(), wave1: ptr::null(), subwave: ptr::null(), phi0: 0.0, phi1: 0.0, phisub: 0.0, w00: K_SR440, w01: K_SR440, w0sub: K_SR220, lfo: 0.0, lfoz: 0.0, dither: 0.0, bitres: 1.0, bitresrcp: 1.0, imperfection: 0.0, flags: RavesFlags::None as u8, } } pub fn init(&mut self) { self.wave0 = get_waves_a_elt(0); self.wave1 = get_waves_d_elt(0); self.subwave = get_waves_a_elt(0); self.imperfection = osc_white() * 1.0417e-006; } pub fn reset(&mut self) { self.phi0 = 0.0; self.phi1 = 0.0; self.phisub = 0.0; self.lfo = self.lfoz; } } #[repr(C)] pub struct RavesParams { submix: f32, ringmix: f32, bitcrush: f32, shape: f32, shiftshape: f32, wave0: u8, wave1: u8, subwave: u8, padding: u8, } impl RavesParams { pub const fn new() -> Self { RavesParams { submix: 0.05, ringmix: 0.0, bitcrush: 0.0, shape: 0.0, shiftshape: 0.0, wave0: 0, wave1: 0, subwave: 0, padding: 0, } } } #[repr(C)] pub struct Raves { state: RavesState, params: RavesParams, prelpf: biquad::BiQuad, postlpf: biquad::BiQuad, } impl Raves { pub const fn new() -> Self { Raves { params: RavesParams::new(), state: RavesState::new(), prelpf: biquad::BiQuad::new(), postlpf: biquad::BiQuad::new(), } } pub fn init(&mut self) { self.params = 
RavesParams::new(); self.state = RavesState::new(); self.state.init(); self.prelpf.coeffs.set_pole_lp(0.8); self.postlpf.coeffs.set_folp(osc_tanpif(0.45)); } pub fn update_pitch(&mut self, w0: f32) { let w0new = w0 + self.state.imperfection; let drift = self.params.shiftshape; self.state.w00 = w0new; self.state.w01 = w0new + drift * 5.20833333333333e-006; self.state.w0sub = 0.5 * w0new + drift * 3.125e-006; } pub fn update_waves(&mut self, flags: u16) { if (flags & RavesFlags::Wave0 as u16) != 0 { let k_a_thr = K_WAVES_A_CNT; let k_b_thr = k_a_thr + K_WAVES_B_CNT; let k_c_thr = k_b_thr + K_WAVES_C_CNT; let mut idx = self.params.wave0 as usize; if idx < k_a_thr { self.state.wave0 = get_waves_a_elt(idx); } else if idx < k_b_thr { idx -= k_a_thr; self.state.wave0 = get_waves_b_elt(idx); } else if idx < k_c_thr { idx -= k_b_thr; self.state.wave0 = get_waves_c_elt(idx); } else { } } if (flags & RavesFlags::Wave1 as u16) != 0 { let k_d_thr = K_WAVES_D_CNT; let k_e_thr = k_d_thr + K_WAVES_E_CNT; let k_f_thr = k_e_thr + K_WAVES_F_CNT; let mut idx = self.params.wave1 as usize; if idx < k_d_thr { self.state.wave1 = get_waves_d_elt(idx); } else if idx < k_e_thr { idx -= k_d_thr; self.state.wave1 = get_waves_e_elt(idx); } else if idx < k_f_thr { idx -= k_e_thr; self.state.wave1 = get_waves_f_elt(idx); } else { } } if (flags & RavesFlags::SubWave as u16) != 0 { self.state.subwave = get_waves_a_elt(self.params.subwave as usize); } } } pub fn osc_init(raves: &mut Raves, _platform: u32, _api: u32) { raves.init(); } pub fn osc_cycle(raves: &mut Raves, params: &UserOscParams, yn: &mut [i32]) { let phi = (params.pitch >> 8) as u8; let plo = (params.pitch & 0xFF) as u8; let flags = raves.state.flags; raves.update_pitch(osc_w0f_for_note(phi, plo)); raves.update_waves(flags as u16); let p : &RavesParams = &raves.params; { let sm : &mut RavesState = &mut raves.state; if (flags as u8) & (RavesFlags::Reset as u8) != 0 { sm.reset(); } if (flags as u8) & (RavesFlags::BitCrush as u8) != 0 { 
sm.dither = p.bitcrush * 2e-008; sm.bitres = osc_bitresf(p.bitcrush); sm.bitresrcp = 1.0 / sm.bitres; } sm.lfo = q31_to_f32(params.shape_lfo); sm.flags = RavesFlags::None as u8; } let s : &RavesState = &raves.state; let mut phi0 = s.phi0; let mut phi1 = s.phi1; let mut phisub = s.phisub; let mut lfoz = s.lfoz; let lfo_inc = (s.lfo - lfoz) / yn.len() as f32; let submix = p.submix; let ringmix = p.ringmix; let prelpf = &mut raves.prelpf; let postlpf = &mut raves.postlpf; for y in yn.iter_mut() { let wavemix = clipminmaxf(0.005, p.shape + lfoz, 0.995); let mut sig = (1.0 - wavemix) * osc_wave_scanf(wave_table_ref(s.wave0), phi0); sig += wavemix * osc_wave_scanf(wave_table_ref(s.wave1), phi1); let subsig = osc_wave_scanf(wave_table_ref(s.subwave), phisub); sig = (1.0 - submix) * sig + submix * subsig; sig = (1.0 - ringmix) * sig + ringmix * (subsig * sig); sig = clip1m1f(sig); sig = prelpf.process_fo(sig); sig += s.dither * osc_white(); sig = (sig * s.bitres).round() * s.bitresrcp; sig = postlpf.process_fo(sig); sig = osc_softclipf(0.125, sig); *y = f32_to_q31(sig); phi0 += s.w00; phi0 -= (phi0 as u32) as f32; phi1 += s.w01; phi1 -= (phi1 as u32) as f32; phisub += s.w0sub; phisub -= (phisub as u32) as f32; lfoz += lfo_inc; } { let sm : &mut RavesState = &mut raves.state; sm.phi0 = phi0; sm.phi1 = phi1; sm.phisub = phisub; sm.lfoz = lfoz; } } pub fn osc_noteon(raves: &mut Raves, _params: &UserOscParams) { raves.state.flags |= RavesFlags::Reset as u8; } pub fn osc_param(raves: &mut Raves, index: UserOscParamId, value: u16) { let p : &mut RavesParams = &mut raves.params; let s : &mut RavesState = &mut raves.state; match index { UserOscParamId::Id1 => { let cnt : usize = K_WAVES_A_CNT + K_WAVES_B_CNT + K_WAVES_C_CNT; p.wave0 = (value % cnt as u16) as u8; s.flags |= RavesFlags::Wave0 as u8; }, UserOscParamId::Id2 => { let cnt : usize = K_WAVES_D_CNT + K_WAVES_E_CNT + K_WAVES_F_CNT; p.wave1 = (value % cnt as u16) as u8; s.flags |= RavesFlags::Wave1 as u8; }, 
UserOscParamId::Id3 => { p.subwave = (value % K_WAVES_A_CNT as u16) as u8; s.flags |= RavesFlags::SubWave as u8; }, UserOscParamId::Id4 => { p.submix = clip01f(0.05 + (value as f32 * 0.01 * 0.90)); }, UserOscParamId::Id5 => { p.ringmix = clip01f(value as f32 * 0.01); }, UserOscParamId::Id6 => { p.bitcrush = clip01f(value as f32 * 0.01); s.flags |= RavesFlags::BitCrush as u8; }, UserOscParamId::Shape => { p.shape = param_val_to_f32(value); }, UserOscParamId::ShiftShape => { p.shiftshape = 1.0 + param_val_to_f32(value); }, } }
#![no_std] use panic_halt as _; use core::f32; use core::ptr; use micromath::F32Ext; pub mod dsp; pub mod mathutil; pub mod nts1; use dsp::biquad; use mathutil::*; use nts1::*; use nts1::clipsat::osc_softclipf; use nts1::platform::*; use nts1::random::osc_white; use nts1::userosc::*; use nts1::wavebank::*; #[repr(u8)] pub enum RavesFlags { None = 0, Wave0 = 1 << 1, Wave1 = 1 << 2, SubWave = 1 << 3, RingMix = 1 << 4, BitCrush = 1 << 5, Reset = 1 << 6, } #[repr(C)] pub struct RavesState { wave0: *const WaveLUT, wave1: *const WaveLUT, subwave: *const WaveLUT, phi0: f32, phi1: f32, phisub: f32, w00: f32, w01: f32, w0sub: f32, lfo: f32, lfoz: f32, dither: f32, bitres: f32, bitresrcp: f32, imperfection: f32, flags: u8, } impl RavesState { pub const fn new() -> Self { RavesState { wave0: ptr::null(), wave1: ptr::null(), subwave: ptr::null(), phi0: 0.0, phi1: 0.0, phisub: 0.0, w00: K_SR440, w01: K_SR440, w0sub: K_SR220, lfo: 0.0, lfoz: 0.0, dither: 0.0, bitres: 1.0, bitresrcp: 1.0, imperfection: 0.0, flags: RavesFlags::None as u8, } } pub fn init(&mut self) { self.wave0 = get_waves_a_elt(0); self.wave1 = get_waves_d_elt(0); self.subwave = get_waves_a_elt(0); self.imperfection = osc_white() * 1.0417e-006; } pub fn reset(&mut self) { self.phi0 = 0.0; self.phi1 = 0.0; self.phisub = 0.0; self.lfo = self.lfoz; } } #[repr(C)] pub struct RavesParams { submix: f32, ringmix: f32, bitcrush: f32, shape: f32, shiftshape: f32, wave0: u8, wave1: u8, subwave: u8, padding: u8, } impl RavesParams { pub const fn new() -> Self { RavesParams { submix: 0.05, ringmix: 0.0, bitcrush: 0.0, shape: 0.0, shiftshape: 0.0, wave0: 0, wave1: 0, subwave: 0, padding: 0, } } } #[repr(C)] pub struct Raves { state: RavesState, params: RavesParams, prelpf: biquad::BiQuad, postlpf: biquad::BiQuad, } impl Raves { pub const fn new() -> Self { Raves { params: RavesParams::new(), state: RavesState::new(), prelpf: biquad::BiQuad::new(), postlpf: biquad::BiQuad::new(), } } pub fn init(&mut self) { self.params = 
RavesParams::new(); self.state = RavesState::new(); self.state.init(); self.prelpf.coeffs.set_pole_lp(0.8); self.postlpf.coeffs.set_folp(osc_tanpif(0.45)); } pub fn update_pitch(&mut self, w0: f32) { let w0new = w0 + self.state.imperfection; let drift = self.params.shiftshape; self.state.w00 = w0new; self.state.w01 = w0new + drift * 5.20833333333333e-006; self.state.w0sub = 0.5 * w0new + drift * 3.125e-006; } pub fn update_waves(&mut self, flags: u16) { if (flags & RavesFlags::Wave0 as u16) != 0 { let k_a_thr = K_WAVES_A_CNT; let k_b_thr = k_a_thr + K_WAVES_B_CNT; let k_c_thr = k_b_thr + K_WAVES_C_CNT; let mut idx = self.params.wave0 as usize; if idx < k_a_thr { self.state.wave0 = get_waves_a_elt(idx); } else if idx < k_b_thr { idx -= k_a_thr; self.state.wave0 = get_waves_b_elt(idx); } else if idx < k_c_thr { idx -= k_b_thr; self.state.wave0 = get_waves_c_elt(idx); } else { } } if (flags & RavesFlags::Wave1 as u16) != 0 { let k_d_thr = K_WAVES_D_CNT; let k_e_thr = k_d_thr + K_WAVES_E_CNT; let k_f_thr = k_e_thr + K_WAVES_F_CNT; let mut idx = self.params.wave1 as usize; if idx < k_d_thr { self.state.wave1 = get_waves_d_elt(idx); } else if idx < k_e_thr { idx -= k_d_thr; self.state.wave1 = get_waves_e_elt(idx); } else if idx < k_f_thr { idx -= k_e_thr; self.state.wave1 = get_waves_f_elt(idx); } else { } } if (flags & RavesFlags::SubWave as u16) != 0 { self.state.subwave = get_waves_a_elt(self.params.subwave as usize); } } } pub fn osc_init(raves: &mut Raves, _platform: u32, _api: u32) { raves.init(); } pub fn osc_cycle(raves: &mut Raves, params: &UserOscParams, yn: &mut [i32]) { let phi = (params.pitch >> 8) as u8; let plo = (params.pitch & 0xFF) as u8; let flags = raves.state.flags; raves.update_pitch(osc_w0f_for_note(phi, plo)); raves.update_waves(flags as u16); let p : &RavesParams = &raves.params; { let sm : &mut RavesState = &mut raves.state; if (flags as u8) & (RavesFlags::Reset as u8) != 0 { sm.reset(); } if (flags as u8) & (RavesFlags::BitCrush as u8) != 0 { 
sm.dither = p.bitcrush * 2e-008; sm.bitres = osc_bitresf(p.bitcrush); sm.bitresrcp = 1.0 / sm.bitres; } sm.lfo = q31_to_f32(params.shape_lfo); sm.flags = RavesFlags::None as u8; } let s : &RavesState = &raves.state; let mut phi0 = s.phi0; let mut phi1 = s.phi1; let mut phisub = s.phisub; let mut lfoz = s.lfoz; let lfo_inc = (s.lfo - lfoz) / yn.len() as f32; let submix = p.submix; let ringmix = p.ringmix; let prelpf = &mut raves.prelpf; let postlpf = &mut raves.postlpf; for y in yn.iter_mut() { let wavemix = clipminmaxf(0.005, p.shape + lfoz, 0.995); let mut sig = (1.0 - wavemix) * osc_wave_scanf(wave_table_ref(s.wave0), phi0); sig += wavemix * osc_wave_scanf(wave_table_ref(s.wave1), phi1); let subsig = osc_wave_scanf(wave_table_ref(s.subwave), phisub); sig = (1.0 - submix) * sig + submix * subsig; sig = (1.0 - ringmix) * sig + ringmix * (subsig * sig); sig = clip1m1f(sig); sig = prelpf.process_fo(sig); sig += s.dither * osc_white(); sig = (sig * s.bitres).round() * s.bitresrcp; sig = postlpf.process_fo(sig); sig = osc_softclipf(0.125, sig); *y = f32_to_q31(sig); phi0 += s.w00; phi0 -= (phi0 as u32) as f32; phi1 += s.w01; phi1 -= (phi1 as u32) as f32; phisub += s.w0sub; phisub -= (phisub as u32) as f32; lfoz += lfo_inc; } { let sm : &mut RavesState = &mut raves.state; sm.phi0 = phi0; sm.phi1 = phi1; sm.phisub = phisub; sm.lfoz = lfoz; } } pub fn osc_noteon(raves: &mut Raves, _params: &UserOscParams) { raves.state.flags |= RavesFlags::Reset as u8; } pub fn osc_param(raves: &mut Raves, index: UserOscParamId, value: u16) { let p : &mut RavesParams = &mut raves.params; let s : &mut RavesState = &mut raves.state; match index { UserOscParamId::Id1 => { let cnt : usize = K_WAVES_A_CNT + K_WAVES_B_C
NT + K_WAVES_C_CNT; p.wave0 = (value % cnt as u16) as u8; s.flags |= RavesFlags::Wave0 as u8; }, UserOscParamId::Id2 => { let cnt : usize = K_WAVES_D_CNT + K_WAVES_E_CNT + K_WAVES_F_CNT; p.wave1 = (value % cnt as u16) as u8; s.flags |= RavesFlags::Wave1 as u8; }, UserOscParamId::Id3 => { p.subwave = (value % K_WAVES_A_CNT as u16) as u8; s.flags |= RavesFlags::SubWave as u8; }, UserOscParamId::Id4 => { p.submix = clip01f(0.05 + (value as f32 * 0.01 * 0.90)); }, UserOscParamId::Id5 => { p.ringmix = clip01f(value as f32 * 0.01); }, UserOscParamId::Id6 => { p.bitcrush = clip01f(value as f32 * 0.01); s.flags |= RavesFlags::BitCrush as u8; }, UserOscParamId::Shape => { p.shape = param_val_to_f32(value); }, UserOscParamId::ShiftShape => { p.shiftshape = 1.0 + param_val_to_f32(value); }, } }
function_block-function_prefixed
[ { "content": "/// Convert 10-bit parameter value to f32\n\npub fn param_val_to_f32(x: u16) -> f32 {\n\n x as f32 * 9.77517106549365e-004f32\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/userosc.rs", "rank": 1, "score": 362597.1218847061 }, { "content": "pub fn ge...
Rust
gstreamer/src/subclass/pad.rs
snapview/gstreamer-rs
a1da562e960208572f124d5a8b878dc0e11b03f5
use gst_sys; use glib; use glib::translate::*; use glib::subclass::prelude::*; use Pad; use PadClass; pub trait PadImpl: PadImplExt + ObjectImpl + Send + Sync { fn linked(&self, pad: &Pad, peer: &Pad) { self.parent_linked(pad, peer) } fn unlinked(&self, pad: &Pad, peer: &Pad) { self.parent_unlinked(pad, peer) } } pub trait PadImplExt { fn parent_linked(&self, pad: &Pad, peer: &Pad); fn parent_unlinked(&self, pad: &Pad, peer: &Pad); } impl<T: PadImpl> PadImplExt for T { fn parent_linked(&self, pad: &Pad, peer: &Pad) { unsafe { let data = T::type_data(); let parent_class = data.as_ref().get_parent_class() as *mut gst_sys::GstPadClass; (*parent_class) .linked .map(|f| f(pad.to_glib_none().0, peer.to_glib_none().0)) .unwrap_or(()) } } fn parent_unlinked(&self, pad: &Pad, peer: &Pad) { unsafe { let data = T::type_data(); let parent_class = data.as_ref().get_parent_class() as *mut gst_sys::GstPadClass; (*parent_class) .unlinked .map(|f| f(pad.to_glib_none().0, peer.to_glib_none().0)) .unwrap_or(()) } } } unsafe impl<T: PadImpl> IsSubclassable<T> for PadClass { fn override_vfuncs(&mut self) { <glib::ObjectClass as IsSubclassable<T>>::override_vfuncs(self); unsafe { let klass = &mut *(self as *mut Self as *mut gst_sys::GstPadClass); klass.linked = Some(pad_linked::<T>); klass.unlinked = Some(pad_unlinked::<T>); } } } unsafe extern "C" fn pad_linked<T: PadImpl>(ptr: *mut gst_sys::GstPad, peer: *mut gst_sys::GstPad) { let instance = &*(ptr as *mut T::Instance); let imp = instance.get_impl(); let wrap: Borrowed<Pad> = from_glib_borrow(ptr); imp.linked(&wrap, &from_glib_borrow(peer)) } unsafe extern "C" fn pad_unlinked<T: PadImpl>( ptr: *mut gst_sys::GstPad, peer: *mut gst_sys::GstPad, ) { let instance = &*(ptr as *mut T::Instance); let imp = instance.get_impl(); let wrap: Borrowed<Pad> = from_glib_borrow(ptr); imp.unlinked(&wrap, &from_glib_borrow(peer)) } #[cfg(test)] mod tests { use super::*; use crate::prelude::*; use glib; use glib::subclass; use std::sync::atomic; struct 
TestPad { linked: atomic::AtomicBool, unlinked: atomic::AtomicBool, } impl ObjectSubclass for TestPad { const NAME: &'static str = "TestPad"; type ParentType = ::Pad; type Instance = subclass::simple::InstanceStruct<Self>; type Class = subclass::simple::ClassStruct<Self>; glib_object_subclass!(); fn new() -> Self { Self { linked: atomic::AtomicBool::new(false), unlinked: atomic::AtomicBool::new(false), } } } impl ObjectImpl for TestPad {} impl PadImpl for TestPad { fn linked(&self, pad: &Pad, peer: &Pad) { self.linked.store(true, atomic::Ordering::SeqCst); self.parent_linked(pad, peer) } fn unlinked(&self, pad: &Pad, peer: &Pad) { self.unlinked.store(true, atomic::Ordering::SeqCst); self.parent_unlinked(pad, peer) } } #[test] fn test_pad_subclass() { ::init().unwrap(); let pad = glib::Object::new( TestPad::get_type(), &[("name", &"test"), ("direction", &::PadDirection::Src)], ) .unwrap() .downcast::<::Pad>() .unwrap(); assert_eq!(pad.get_name(), "test"); let otherpad = ::Pad::new(Some("other-test"), ::PadDirection::Sink); pad.link(&otherpad).unwrap(); pad.unlink(&otherpad).unwrap(); let imp = TestPad::from_instance(&pad); assert!(imp.linked.load(atomic::Ordering::SeqCst)); assert!(imp.unlinked.load(atomic::Ordering::SeqCst)); } }
use gst_sys; use glib; use glib::translate::*; use glib::subclass::prelude::*; use Pad; use PadClass; pub trait PadImpl: PadImplExt + ObjectImpl + Send + Sync { fn linked(&self, pad: &Pad, peer: &Pad) { self.parent_linked(pad, peer) } fn unlinked(&self, pad: &Pad, peer: &Pad) { self.parent_unlinked(pad, peer) } } pub trait PadImplExt { fn parent_linked(&self, pad: &Pad, peer: &Pad); fn parent_unlinked(&self, pad: &Pad, peer: &Pad); } impl<T: PadImpl> PadImplExt for T { fn parent_linked(&self, pad: &Pad, peer: &Pad) { unsafe { let data = T::type_data(); let parent_class = data.as_ref().get_parent_class() as *mut gst_sys::GstPadClass; (*parent_class) .linked .map(|f| f(pad.to_glib_none().0, peer.to_glib_none().0)) .unwrap_or(()) } } fn parent_unlinked(&self, pad: &Pad, peer: &Pad) { unsafe { let data = T::type_data(); let paren
.unwrap_or(()) } } } unsafe impl<T: PadImpl> IsSubclassable<T> for PadClass { fn override_vfuncs(&mut self) { <glib::ObjectClass as IsSubclassable<T>>::override_vfuncs(self); unsafe { let klass = &mut *(self as *mut Self as *mut gst_sys::GstPadClass); klass.linked = Some(pad_linked::<T>); klass.unlinked = Some(pad_unlinked::<T>); } } } unsafe extern "C" fn pad_linked<T: PadImpl>(ptr: *mut gst_sys::GstPad, peer: *mut gst_sys::GstPad) { let instance = &*(ptr as *mut T::Instance); let imp = instance.get_impl(); let wrap: Borrowed<Pad> = from_glib_borrow(ptr); imp.linked(&wrap, &from_glib_borrow(peer)) } unsafe extern "C" fn pad_unlinked<T: PadImpl>( ptr: *mut gst_sys::GstPad, peer: *mut gst_sys::GstPad, ) { let instance = &*(ptr as *mut T::Instance); let imp = instance.get_impl(); let wrap: Borrowed<Pad> = from_glib_borrow(ptr); imp.unlinked(&wrap, &from_glib_borrow(peer)) } #[cfg(test)] mod tests { use super::*; use crate::prelude::*; use glib; use glib::subclass; use std::sync::atomic; struct TestPad { linked: atomic::AtomicBool, unlinked: atomic::AtomicBool, } impl ObjectSubclass for TestPad { const NAME: &'static str = "TestPad"; type ParentType = ::Pad; type Instance = subclass::simple::InstanceStruct<Self>; type Class = subclass::simple::ClassStruct<Self>; glib_object_subclass!(); fn new() -> Self { Self { linked: atomic::AtomicBool::new(false), unlinked: atomic::AtomicBool::new(false), } } } impl ObjectImpl for TestPad {} impl PadImpl for TestPad { fn linked(&self, pad: &Pad, peer: &Pad) { self.linked.store(true, atomic::Ordering::SeqCst); self.parent_linked(pad, peer) } fn unlinked(&self, pad: &Pad, peer: &Pad) { self.unlinked.store(true, atomic::Ordering::SeqCst); self.parent_unlinked(pad, peer) } } #[test] fn test_pad_subclass() { ::init().unwrap(); let pad = glib::Object::new( TestPad::get_type(), &[("name", &"test"), ("direction", &::PadDirection::Src)], ) .unwrap() .downcast::<::Pad>() .unwrap(); assert_eq!(pad.get_name(), "test"); let otherpad = 
::Pad::new(Some("other-test"), ::PadDirection::Sink); pad.link(&otherpad).unwrap(); pad.unlink(&otherpad).unwrap(); let imp = TestPad::from_instance(&pad); assert!(imp.linked.load(atomic::Ordering::SeqCst)); assert!(imp.unlinked.load(atomic::Ordering::SeqCst)); } }
t_class = data.as_ref().get_parent_class() as *mut gst_sys::GstPadClass; (*parent_class) .unlinked .map(|f| f(pad.to_glib_none().0, peer.to_glib_none().0))
function_block-random_span
[ { "content": "pub trait ChildProxyImpl: ObjectImpl + Send + Sync {\n\n fn get_child_by_name(&self, object: &ChildProxy, name: &str) -> Option<glib::Object> {\n\n unsafe {\n\n let type_ = gst_sys::gst_child_proxy_get_type();\n\n let iface = gobject_sys::g_type_default_interface_re...
Rust
src/tests/conformance/invalid_message/reject.rs
ljedrz/ziggurat
718b5f090c0c2642dcebd0636de7fc7b3c8b844d
use std::{io, time::Duration}; use crate::{ protocol::{ message::Message, payload::{block::Block, reject::CCode, FilterAdd, FilterLoad, Inv, Version}, }, setup::node::{Action, Node}, tools::synthetic_node::{PingPongError, SyntheticNode}, }; #[tokio::test] async fn version_post_handshake() { let version = Message::Version(Version::new( "0.0.0.0:0".parse().unwrap(), "0.0.0.0:0".parse().unwrap(), )); run_test_case(version, CCode::Duplicate).await.unwrap(); } #[tokio::test] async fn verack_post_handshake() { run_test_case(Message::Verack, CCode::Duplicate) .await .unwrap(); } #[tokio::test] async fn mixed_inventory() { let genesis_block = Block::testnet_genesis(); let mixed_inv = vec![genesis_block.inv_hash(), genesis_block.txs[0].inv_hash()]; run_test_case(Message::Inv(Inv::new(mixed_inv)), CCode::Invalid) .await .unwrap(); } #[tokio::test] async fn multi_block_inventory() { let multi_block_inv = vec![ Block::testnet_genesis().inv_hash(), Block::testnet_1().inv_hash(), Block::testnet_2().inv_hash(), ]; run_test_case(Message::Inv(Inv::new(multi_block_inv)), CCode::Invalid) .await .unwrap(); } #[tokio::test] async fn bloom_filter_add() { run_test_case(Message::FilterAdd(FilterAdd::default()), CCode::Obsolete) .await .unwrap(); } #[tokio::test] async fn bloom_filter_load() { run_test_case(Message::FilterLoad(FilterLoad::default()), CCode::Obsolete) .await .unwrap(); } #[tokio::test] async fn bloom_filter_clear() { run_test_case(Message::FilterClear, CCode::Obsolete) .await .unwrap(); } async fn run_test_case(message: Message, expected_code: CCode) -> io::Result<()> { const RECV_TIMEOUT: Duration = Duration::from_secs(1); let mut node = Node::new()?; node.initial_action(Action::WaitForConnection) .start() .await?; let mut synthetic_node = SyntheticNode::builder() .with_full_handshake() .with_all_auto_reply() .build() .await?; synthetic_node.connect(node.addr()).await?; synthetic_node.send_direct_message(node.addr(), message)?; let result = match synthetic_node 
.ping_pong_timeout(node.addr(), RECV_TIMEOUT) .await { Ok(_) => Err(io::Error::new(io::ErrorKind::Other, "Message was ignored")), Err(PingPongError::Unexpected(msg)) => match *msg { Message::Reject(reject) if reject.ccode == expected_code => Ok(()), Message::Reject(reject) => { return Err(io::Error::new( io::ErrorKind::Other, format!( "Incorrect rejection ccode: {:?} instead of {:?}", reject.ccode, expected_code ), )) } unexpected => { return Err(io::Error::new( io::ErrorKind::Other, format!("Unexpected message received: {:?}", unexpected), )) } }, Err(err) => Err(err.into()), }; synthetic_node.shut_down(); node.stop()?; result }
use std::{io, time::Duration}; use crate::{ protocol::{ message::Message, payload::{block::Block, reject::CCode, FilterAdd, FilterLoad, Inv, Version}, }, setup::node::{Action, Node}, tools::synthetic_node::{PingPongError, SyntheticNode}, }; #[tokio::test] async fn version_post_handshake() { let version = Message::Version(Version::new( "0.0.0.0:0".parse().unwrap(), "0.0.0.0
T) .await { Ok(_) => Err(io::Error::new(io::ErrorKind::Other, "Message was ignored")), Err(PingPongError::Unexpected(msg)) => match *msg { Message::Reject(reject) if reject.ccode == expected_code => Ok(()), Message::Reject(reject) => { return Err(io::Error::new( io::ErrorKind::Other, format!( "Incorrect rejection ccode: {:?} instead of {:?}", reject.ccode, expected_code ), )) } unexpected => { return Err(io::Error::new( io::ErrorKind::Other, format!("Unexpected message received: {:?}", unexpected), )) } }, Err(err) => Err(err.into()), }; synthetic_node.shut_down(); node.stop()?; result }
:0".parse().unwrap(), )); run_test_case(version, CCode::Duplicate).await.unwrap(); } #[tokio::test] async fn verack_post_handshake() { run_test_case(Message::Verack, CCode::Duplicate) .await .unwrap(); } #[tokio::test] async fn mixed_inventory() { let genesis_block = Block::testnet_genesis(); let mixed_inv = vec![genesis_block.inv_hash(), genesis_block.txs[0].inv_hash()]; run_test_case(Message::Inv(Inv::new(mixed_inv)), CCode::Invalid) .await .unwrap(); } #[tokio::test] async fn multi_block_inventory() { let multi_block_inv = vec![ Block::testnet_genesis().inv_hash(), Block::testnet_1().inv_hash(), Block::testnet_2().inv_hash(), ]; run_test_case(Message::Inv(Inv::new(multi_block_inv)), CCode::Invalid) .await .unwrap(); } #[tokio::test] async fn bloom_filter_add() { run_test_case(Message::FilterAdd(FilterAdd::default()), CCode::Obsolete) .await .unwrap(); } #[tokio::test] async fn bloom_filter_load() { run_test_case(Message::FilterLoad(FilterLoad::default()), CCode::Obsolete) .await .unwrap(); } #[tokio::test] async fn bloom_filter_clear() { run_test_case(Message::FilterClear, CCode::Obsolete) .await .unwrap(); } async fn run_test_case(message: Message, expected_code: CCode) -> io::Result<()> { const RECV_TIMEOUT: Duration = Duration::from_secs(1); let mut node = Node::new()?; node.initial_action(Action::WaitForConnection) .start() .await?; let mut synthetic_node = SyntheticNode::builder() .with_full_handshake() .with_all_auto_reply() .build() .await?; synthetic_node.connect(node.addr()).await?; synthetic_node.send_direct_message(node.addr(), message)?; let result = match synthetic_node .ping_pong_timeout(node.addr(), RECV_TIMEOU
random
[ { "content": "/// Enables tracing for all [`SyntheticNode`] instances (usually scoped by test).\n\npub fn enable_tracing() {\n\n use tracing_subscriber::{fmt, EnvFilter};\n\n\n\n fmt()\n\n .with_test_writer()\n\n .with_env_filter(EnvFilter::from_default_env())\n\n .init();\n\n}\n\n\n\...
Rust
crates/sentry-actix/src/lib.rs
sharingcloud/github-scbot
953ba1ae7f3bb06c37084756458a1ddb53c8fa65
#![doc(html_favicon_url = "https://sentry-brand.storage.googleapis.com/favicon.ico")] #![doc(html_logo_url = "https://sentry-brand.storage.googleapis.com/sentry-glyph-black.png")] #![warn(missing_docs)] #![allow(deprecated)] #![allow(clippy::type_complexity)] use std::{borrow::Cow, pin::Pin, sync::Arc}; use actix_web::{ dev::{Service, ServiceRequest, ServiceResponse, Transform}, Error, }; use futures_util::{ future::{ok, Future, Ready}, FutureExt, }; use sentry_core::{ protocol::{ClientSdkPackage, Event, Request}, types::Uuid, Hub, SentryFutureExt, }; #[cfg(feature = "eyre")] mod eyre; #[cfg(feature = "eyre")] pub use eyre::WrapEyre; pub struct SentryBuilder { middleware: Sentry, } impl SentryBuilder { pub fn finish(self) -> Sentry { self.middleware } pub fn with_hub(mut self, hub: Arc<Hub>) -> Self { self.middleware.hub = Some(hub); self } pub fn with_default_hub(mut self) -> Self { self.middleware.hub = None; self } pub fn emit_header(mut self, val: bool) -> Self { self.middleware.emit_header = val; self } pub fn capture_server_errors(mut self, val: bool) -> Self { self.middleware.capture_server_errors = val; self } } #[derive(Clone)] pub struct Sentry { hub: Option<Arc<Hub>>, emit_header: bool, capture_server_errors: bool, } impl Sentry { pub fn new() -> Self { Sentry { hub: None, emit_header: false, capture_server_errors: true, } } pub fn builder() -> SentryBuilder { Sentry::new().into_builder() } pub fn into_builder(self) -> SentryBuilder { SentryBuilder { middleware: self } } } impl Default for Sentry { fn default() -> Self { Sentry::new() } } impl<S, B> Transform<S> for Sentry where S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>, S::Future: 'static, { type Error = Error; type Future = Ready<Result<Self::Transform, Self::InitError>>; type InitError = (); type Request = ServiceRequest; type Response = ServiceResponse<B>; type Transform = SentryMiddleware<S>; fn new_transform(&self, service: S) -> Self::Future { 
ok(SentryMiddleware { service, inner: self.clone(), }) } } pub struct SentryMiddleware<S> { service: S, inner: Sentry, } impl<S, B> Service for SentryMiddleware<S> where S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>, S::Future: 'static, { type Error = Error; type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>>>>; type Request = ServiceRequest; type Response = ServiceResponse<B>; fn poll_ready( &mut self, cx: &mut std::task::Context<'_>, ) -> std::task::Poll<Result<(), Self::Error>> { self.service.poll_ready(cx) } fn call(&mut self, req: ServiceRequest) -> Self::Future { let inner = self.inner.clone(); let hub = Arc::new(Hub::new_from_top( inner.hub.clone().unwrap_or_else(Hub::main), )); let client = hub.client(); let track_sessions = client.as_ref().map_or(false, |client| { let options = client.options(); options.auto_session_tracking && options.session_mode == sentry_core::SessionMode::Request }); if track_sessions { hub.start_session(); } let with_pii = client .as_ref() .map_or(false, |client| client.options().send_default_pii); let (tx, sentry_req) = sentry_request_from_http(&req, with_pii); hub.configure_scope(|scope| { scope.set_transaction(tx.as_deref()); scope.add_event_processor(Box::new(move |event| { let evt = process_event(event, &sentry_req); Some(evt) })); }); let fut = self.service.call(req).bind_hub(hub.clone()); async move { let mut res: Self::Response = match fut.await { Ok(res) => res, Err(e) => { if inner.capture_server_errors { process_error(hub, &e); } return Err(e); } }; if inner.capture_server_errors && res.response().status().is_server_error() { if let Some(e) = res.response().error() { let event_id = process_error(hub, e); if inner.emit_header { res.response_mut().headers_mut().insert( "x-sentry-event".parse().unwrap(), event_id.to_simple_ref().to_string().parse().unwrap(), ); } } } Ok(res) } .boxed_local() } } #[cfg(feature = "eyre")] fn process_eyre_report(hub: Arc<Hub>, e: 
&actix_web::Error) -> Option<Uuid> { use sentry_eyre::EyreHubExt; e.as_error::<WrapEyre>() .map(|report| hub.capture_eyre(report)) } #[cfg(not(feature = "eyre"))] fn process_eyre_report(_hub: Arc<Hub>, _e: &actix_web::Error) -> Option<Uuid> { None } fn process_error(hub: Arc<Hub>, e: &actix_web::Error) -> Uuid { process_eyre_report(hub.clone(), e).unwrap_or_else(|| hub.capture_error(e)) } fn sentry_request_from_http(request: &ServiceRequest, with_pii: bool) -> (Option<String>, Request) { let transaction = if let Some(name) = request.match_name() { Some(String::from(name)) } else { request.match_pattern() }; let mut sentry_req = Request { url: format!( "{}://{}{}", request.connection_info().scheme(), request.connection_info().host(), request.uri() ) .parse() .ok(), method: Some(request.method().to_string()), headers: request .headers() .iter() .map(|(k, v)| (k.to_string(), v.to_str().unwrap_or_default().to_string())) .collect(), ..Default::default() }; if with_pii { if let Some(remote) = request.connection_info().remote_addr() { sentry_req.env.insert("REMOTE_ADDR".into(), remote.into()); } }; (transaction, sentry_req) } fn process_event(mut event: Event<'static>, request: &Request) -> Event<'static> { if event.request.is_none() { event.request = Some(request.clone()); } if let Some(sdk) = event.sdk.take() { let mut sdk = sdk.into_owned(); sdk.packages.push(ClientSdkPackage { name: "sentry-actix".into(), version: env!("CARGO_PKG_VERSION").into(), }); event.sdk = Some(Cow::Owned(sdk)); } event }
#![doc(html_favicon_url = "https://sentry-brand.storage.googleapis.com/favicon.ico")] #![doc(html_logo_url = "https://sentry-brand.storage.googleapis.com/sentry-glyph-black.png")] #![warn(missing_docs)] #![allow(deprecated)] #![allow(clippy::type_complexity)] use std::{borrow::Cow, pin::Pin, sync::Arc}; use actix_web::{ dev::{Service, ServiceRequest, ServiceResponse, Transform}, Error, }; use futures_util::{ future::{ok, Future, Ready}, FutureExt, }; use sentry_core::{ protocol::{ClientSdkPackage, Event, Request}, types::Uuid, Hub, SentryFutureExt, }; #[cfg(feature = "eyre")] mod eyre; #[cfg(feature = "eyre")] pub use eyre::WrapEyre; pub struct SentryBuilder { middleware: Sentry, } impl SentryBuilder { pub fn finish(self) -> Sentry { self.middleware } pub fn with_hub(mut self, hub: Arc<Hub>) -> Self { self.middleware.hub = Some(hub); self } pub fn with_default_hub(mut self) -> Self { self.middleware.hub = None; self } pub fn emit_header(mut self, val: bool) -> Self { self.middleware.emit_header = val; self } pub fn capture_server_errors(mut self, val: bool) -> Self { self.middleware.capture_server_errors = val; self } } #[derive(Clone)] pub struct Sentry { hub: Option<Arc<Hub>>, emit_header: bool, capture_server_errors: bool, } impl Sentry { pub fn ne
pub fn builder() -> SentryBuilder { Sentry::new().into_builder() } pub fn into_builder(self) -> SentryBuilder { SentryBuilder { middleware: self } } } impl Default for Sentry { fn default() -> Self { Sentry::new() } } impl<S, B> Transform<S> for Sentry where S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>, S::Future: 'static, { type Error = Error; type Future = Ready<Result<Self::Transform, Self::InitError>>; type InitError = (); type Request = ServiceRequest; type Response = ServiceResponse<B>; type Transform = SentryMiddleware<S>; fn new_transform(&self, service: S) -> Self::Future { ok(SentryMiddleware { service, inner: self.clone(), }) } } pub struct SentryMiddleware<S> { service: S, inner: Sentry, } impl<S, B> Service for SentryMiddleware<S> where S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>, S::Future: 'static, { type Error = Error; type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>>>>; type Request = ServiceRequest; type Response = ServiceResponse<B>; fn poll_ready( &mut self, cx: &mut std::task::Context<'_>, ) -> std::task::Poll<Result<(), Self::Error>> { self.service.poll_ready(cx) } fn call(&mut self, req: ServiceRequest) -> Self::Future { let inner = self.inner.clone(); let hub = Arc::new(Hub::new_from_top( inner.hub.clone().unwrap_or_else(Hub::main), )); let client = hub.client(); let track_sessions = client.as_ref().map_or(false, |client| { let options = client.options(); options.auto_session_tracking && options.session_mode == sentry_core::SessionMode::Request }); if track_sessions { hub.start_session(); } let with_pii = client .as_ref() .map_or(false, |client| client.options().send_default_pii); let (tx, sentry_req) = sentry_request_from_http(&req, with_pii); hub.configure_scope(|scope| { scope.set_transaction(tx.as_deref()); scope.add_event_processor(Box::new(move |event| { let evt = process_event(event, &sentry_req); Some(evt) })); }); let fut = 
self.service.call(req).bind_hub(hub.clone()); async move { let mut res: Self::Response = match fut.await { Ok(res) => res, Err(e) => { if inner.capture_server_errors { process_error(hub, &e); } return Err(e); } }; if inner.capture_server_errors && res.response().status().is_server_error() { if let Some(e) = res.response().error() { let event_id = process_error(hub, e); if inner.emit_header { res.response_mut().headers_mut().insert( "x-sentry-event".parse().unwrap(), event_id.to_simple_ref().to_string().parse().unwrap(), ); } } } Ok(res) } .boxed_local() } } #[cfg(feature = "eyre")] fn process_eyre_report(hub: Arc<Hub>, e: &actix_web::Error) -> Option<Uuid> { use sentry_eyre::EyreHubExt; e.as_error::<WrapEyre>() .map(|report| hub.capture_eyre(report)) } #[cfg(not(feature = "eyre"))] fn process_eyre_report(_hub: Arc<Hub>, _e: &actix_web::Error) -> Option<Uuid> { None } fn process_error(hub: Arc<Hub>, e: &actix_web::Error) -> Uuid { process_eyre_report(hub.clone(), e).unwrap_or_else(|| hub.capture_error(e)) } fn sentry_request_from_http(request: &ServiceRequest, with_pii: bool) -> (Option<String>, Request) { let transaction = if let Some(name) = request.match_name() { Some(String::from(name)) } else { request.match_pattern() }; let mut sentry_req = Request { url: format!( "{}://{}{}", request.connection_info().scheme(), request.connection_info().host(), request.uri() ) .parse() .ok(), method: Some(request.method().to_string()), headers: request .headers() .iter() .map(|(k, v)| (k.to_string(), v.to_str().unwrap_or_default().to_string())) .collect(), ..Default::default() }; if with_pii { if let Some(remote) = request.connection_info().remote_addr() { sentry_req.env.insert("REMOTE_ADDR".into(), remote.into()); } }; (transaction, sentry_req) } fn process_event(mut event: Event<'static>, request: &Request) -> Event<'static> { if event.request.is_none() { event.request = Some(request.clone()); } if let Some(sdk) = event.sdk.take() { let mut sdk = sdk.into_owned(); 
sdk.packages.push(ClientSdkPackage { name: "sentry-actix".into(), version: env!("CARGO_PKG_VERSION").into(), }); event.sdk = Some(Cow::Owned(sdk)); } event }
w() -> Self { Sentry { hub: None, emit_header: false, capture_server_errors: true, } }
function_block-function_prefixed
[ { "content": "/// Captures an [`eyre::Report`].\n\n///\n\n/// This will capture an eyre report as a sentry event if a\n\n/// [`sentry::Client`](../../struct.Client.html) is initialised, otherwise it will be a\n\n/// no-op. The event is dispatched to the thread-local hub, with semantics as described in\n\n/// [...
Rust
src/address.rs
jkilpatr/clarity
e75e1419095f02cd52e7e3213b1e2226a312ed02
use serde::Serialize; use serde::Serializer; use std::str; use std::str::FromStr; use utils::bytes_to_hex_str; use utils::{hex_str_to_bytes, ByteDecodeError}; #[derive(PartialEq, Debug, Clone, Eq, PartialOrd, Hash, Deserialize)] pub struct Address { data: Vec<u8>, } impl Serialize for Address { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if *self == Address::default() { serializer.serialize_bytes(&[]) } else { serializer.serialize_bytes(&self.data) } } } impl Address { pub fn new() -> Address { Address { data: Vec::new() } } pub fn as_bytes(&self) -> &[u8] { &self.data } } impl Default for Address { fn default() -> Address { Address { data: Vec::new() } } } impl From<[u8; 20]> for Address { fn from(val: [u8; 20]) -> Address { Address { data: val.to_vec() } } } impl<'a> From<&'a [u8]> for Address { fn from(val: &'a [u8]) -> Address { Address { data: val.to_vec() } } } #[derive(Fail, Debug, PartialEq)] pub enum AddressError { #[fail(display = "Address should be exactly 40 bytes")] InvalidLengthError, #[fail(display = "Unable to decode bytes: {}", _0)] DecodeError(ByteDecodeError), #[fail(display = "Checksum error")] ChecksumError, #[fail(display = "Invalid checksum")] InvalidChecksum, } impl From<ByteDecodeError> for AddressError { fn from(e: ByteDecodeError) -> AddressError { AddressError::DecodeError(e) } } impl FromStr for Address { type Err = AddressError; fn from_str(s: &str) -> Result<Self, Self::Err> { if s.len() == 0 { return Ok(Address::default()); } let s = if s.starts_with("0x") { &s[2..] 
} else { &s }; if s.len() == 40 || s.len() == 48 { Ok(Address { data: hex_str_to_bytes(&s)?, }) } else { Err(AddressError::InvalidLengthError) } } } impl ToString for Address { fn to_string(&self) -> String { bytes_to_hex_str(&self.data) } } #[test] #[should_panic] fn decode_invalid_length() { "123".parse::<Address>().unwrap(); } #[test] #[should_panic] fn decode_invalid_character() { "\u{012345}123456789012345678901234567890123456" .parse::<Address>() .unwrap(); } #[test] fn decode() { let address: Address = "1234567890123456789012345678901234567890" .parse::<Address>() .unwrap(); assert_eq!( address, Address::from([ 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90 ]) ); } #[test] fn serialize_null_address() { use serde_rlp::ser::to_bytes; let address = Address::new(); assert_eq!(to_bytes(&address).unwrap(), [128]); } #[test] fn serialize_padded_address() { use serde_rlp::ser::to_bytes; let address: Address = "00000000000000000000000000000000000000c0".parse().unwrap(); assert_eq!( to_bytes(&address).unwrap(), [148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xc0] ); } #[test] #[should_panic] fn address_less_than_20_filler() { let address: Address = "0b9331677e6ebf".parse().unwrap(); } #[test] fn handle_prefixed() { let address: Address = "0x000000000000000000000000000b9331677e6ebf" .parse() .unwrap(); assert_eq!( address, Address::from([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0b, 0x93, 0x31, 0x67, 0x7e, 0x6e, 0xbf ]) ); } #[test] fn hashed() { use std::collections::HashMap; let a = Address::from_str("0x000000000000000000000000000b9331677e6ebf").unwrap(); let b = Address::from_str("0x00000000000000000000000000000000deadbeef").unwrap(); let mut map = HashMap::new(); map.insert(a.clone(), "Foo"); map.insert(b.clone(), "Bar"); assert_eq!(map.get(&a).unwrap(), &"Foo"); assert_eq!(map.get(&b).unwrap(), &"Bar"); } #[test] fn ordered() { let a = 
Address::from_str("0x000000000000000000000000000000000000000a").unwrap(); let b = Address::from_str("0x000000000000000000000000000000000000000b").unwrap(); let c = Address::from_str("0x000000000000000000000000000000000000000c").unwrap(); assert!(c > b); assert!(b > a); assert!(b < c); assert!(a < c); assert_ne!(a, b); assert_ne!(b, c); assert_ne!(a, c); }
use serde::Serialize; use serde::Serializer; use std::str; use std::str::FromStr; use utils::bytes_to_hex_str; use utils::{hex_str_to_bytes, ByteDecodeError}; #[derive(PartialEq, Debug, Clone, Eq, PartialOrd, Hash, Deserialize)] pub struct Address { data: Vec<u8>, } impl Serialize for Address { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if *self == Address::default() { serializer.serialize_bytes(&[]) } else { serializer.serialize_bytes(&self.data) } } } impl Address { pub fn new() -> Address { Address { data: Vec::new() } } pub fn as_bytes(&self) -> &[u8] { &self.data } } impl Default for Address { fn default() -> Address { Address { data: Vec::new() } } } impl From<[u8; 20]> for Address { fn from(val: [u8; 20]) -> Address { Address { data: val.to_vec() } } } impl<'a> From<&'a [u8]> for Address { fn from(val: &'a [u8]) -> Address { Address { data: val.to_vec() } } } #[derive(Fail, Debug, PartialEq)] pub enum AddressError { #[fail(display = "Address should be exactly 40 bytes")] InvalidLengthError, #[fail(display = "Unable to decode bytes: {}", _0)] DecodeError(ByteDecodeError), #[fail(display = "Checksum error")] ChecksumError, #[fail(display = "Invalid checksum")] InvalidChecksum, } impl From<ByteDecodeError> for AddressError { fn from(e: ByteDecodeError) -> AddressError { AddressError::DecodeError(e) } } impl FromStr for Address { type Err = AddressError; fn from_str(s: &str) -> Result<Self, Self::Err> { if s.len() == 0 { return Ok(Address::default()); } let s = if s.starts_with("0x") { &s[2..] 
} else { &s }; if s.len() == 40 || s.len() == 48 { Ok(Address { data: hex_str_to_bytes(&s)?, }) } else { Err(AddressError::InvalidLengthError) } } } impl ToString for Address { fn to_string(&self) -> String { bytes_to_hex_str(&self.data) } } #[test] #[should_panic] fn decode_invalid_length() { "123".parse::<Address>().unwrap(); } #[test] #[should_panic] fn decode_invalid_character() { "\u{012345}123456789012345678901234567890123456" .parse::<Address>() .unwrap(); } #[test] fn decode() { let address: Address = "1234567890123456789012345678901234567890" .parse::<Address>() .unwrap(); assert_eq!( address, Address::from([ 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90 ]) ); } #[test] fn serialize_null_address() { use serde_rlp::ser::to_bytes; let address = Address::new(); assert_eq!(to_bytes(&address).unwrap(), [128]); } #[test] fn serialize_padded_address() { use serde_rlp::ser::to_bytes; let address: Address = "00000000000000000000000000000000000000c0".parse().unwrap(); assert_eq!( to_bytes(&address).unwrap(), [148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xc0] ); } #[test] #[should_panic] fn address_less_than_20_filler() { let address: Address = "0b9331677e6ebf".parse().unwrap(); } #[test] fn handle_prefixed() { let address: Address = "0x000000000000000000000000000b9331677e6ebf" .parse() .unwrap(); assert_eq!( address, Address::from([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0b, 0x93, 0x31, 0x67, 0x7e, 0x6e, 0xbf ]) ); } #[test] fn hashed() { use std::collections::HashMap; let a = Address::from_str("0x000000000000000000000000000b9331677e6ebf").unw
e(), "Bar"); assert_eq!(map.get(&a).unwrap(), &"Foo"); assert_eq!(map.get(&b).unwrap(), &"Bar"); } #[test] fn ordered() { let a = Address::from_str("0x000000000000000000000000000000000000000a").unwrap(); let b = Address::from_str("0x000000000000000000000000000000000000000b").unwrap(); let c = Address::from_str("0x000000000000000000000000000000000000000c").unwrap(); assert!(c > b); assert!(b > a); assert!(b < c); assert!(a < c); assert_ne!(a, b); assert_ne!(b, c); assert_ne!(a, c); }
rap(); let b = Address::from_str("0x00000000000000000000000000000000deadbeef").unwrap(); let mut map = HashMap::new(); map.insert(a.clone(), "Foo"); map.insert(b.clon
function_block-random_span
[ { "content": "/// A function that takes a hexadecimal representation of bytes\n\n/// back into a stream of bytes.\n\npub fn hex_str_to_bytes(s: &str) -> Result<Vec<u8>, ByteDecodeError> {\n\n let s = if s.starts_with(\"0x\") { &s[2..] } else { s };\n\n s.as_bytes()\n\n .chunks(2)\n\n .map(|c...
Rust
bencher/src/main.rs
dimforge/dimforge-bench
b662de5580095e3a15ffc65a4abc5b40bc8db622
#[macro_use] extern crate log; use amiquip::{ Connection, ConsumerMessage, ConsumerOptions, Exchange, Publish, QueueDeclareOptions, }; use bson::DateTime; use clap::{App, Arg, SubCommand}; use dimforge_bench_common::{ BenchCSVEntry, BenchConfig, BenchContext, BenchKey, BenchMessage, BenchPlatform, }; use log::LevelFilter; use mongodb::sync::Database; use simple_logger::SimpleLogger; use std::fs::File; use std::io::Write; use std::path::Path; use std::process::Command; use std::str::FromStr; fn main() -> mongodb::error::Result<()> { SimpleLogger::new() .with_level(LevelFilter::Info) .init() .unwrap(); let matches = App::new("Dimforge benchmark tool") .arg( Arg::with_name("config") .short("f") .required(false) .takes_value(true) .help("path to the JSON configuration file"), ) .subcommand(SubCommand::with_name("configure").about("Configure credentials")) .subcommand( SubCommand::with_name("send") .about("Send a message to start a benchmark") .arg( Arg::with_name("repository") .short("r") .required(true) .takes_value(true) .help("the repository to clone"), ) .arg( Arg::with_name("branch") .short("b") .required(true) .takes_value(true) .help("the branch of the commit to compile"), ) .arg( Arg::with_name("commit") .short("c") .required(true) .takes_value(true) .help("the commit to compile"), ), ) .subcommand(SubCommand::with_name("listen").about("Listen to incoming benchmark messages")) .get_matches(); let config = matches.value_of("config"); let config = BenchConfig::from_json_file(config); if let Some(matches) = matches.subcommand_matches("send") { let repository = matches.value_of("repository").unwrap().to_string(); let branch = matches.value_of("branch").unwrap().to_string(); let commit = matches.value_of("commit").unwrap().to_string(); let message = BenchMessage { repository, branch, commit, }; send_bench_message(&config, &message); info!("Bench message sent."); } if let Some(_) = matches.subcommand_matches("listen") { listen_bench_messages(&config)?; } if let 
Some(_) = matches.subcommand_matches("configure") { configure(); } Ok(()) } fn configure() { println!("MongoDB bencher uri: "); let mongodb_bencher_uri = text_io::read!("{}\n"); println!("MongoDB server uri: "); let mongodb_server_uri = text_io::read!("{}\n"); println!("MongoDB database: "); let mongodb_db = text_io::read!("{}\n"); println!("Rabbitmq uri: "); let rabbitmq_uri = text_io::read!("{}\n"); println!("Save configuration to folder [$HOME/.dimforge]: "); let mut output_dir: String = text_io::read!("{}\n"); if output_dir.is_empty() { let home = std::env::var("HOME").unwrap_or(String::new()); output_dir = format!("{}/.dimforge", home); } let config = BenchConfig { mongodb_db, mongodb_bencher_uri, mongodb_server_uri, rabbitmq_uri, }; let config_json = serde_json::to_string(&config).unwrap(); std::fs::create_dir_all(&output_dir).unwrap(); let output_file = format!("{}/benchbot.json", output_dir); let mut out = File::create(&output_file).expect( "Could not open target configuration file. 
Did you run the `configure` subcommand yet?", ); out.write_all(config_json.as_bytes()).unwrap(); info!("Configuration successfully saved to '{}'.", output_file); } fn send_bench_message(config: &BenchConfig, message: &BenchMessage) { let mut connection = Connection::open(&config.rabbitmq_uri).unwrap(); let channel = connection.open_channel(None).unwrap(); let exchange = Exchange::direct(&channel); let message = serde_json::to_string(message).unwrap(); exchange .publish(Publish::new(message.as_bytes(), "benchmark")) .unwrap(); let _ = connection.close(); } fn listen_bench_messages(config: &BenchConfig) -> mongodb::error::Result<()> { let mut connection = Connection::open(&config.rabbitmq_uri).unwrap(); let channel = connection.open_channel(None).unwrap(); let queue = channel .queue_declare("benchmark", QueueDeclareOptions::default()) .unwrap(); let consumer = queue.consume(ConsumerOptions::default()).unwrap(); for message in consumer.receiver().iter() { match message { ConsumerMessage::Delivery(delivery) => { let body = String::from_utf8_lossy(&delivery.body); let message = serde_json::from_str::<BenchMessage>(&body); if delivery.redelivered { warn!("Dropping redelivered message: {:?}", message); let _ = delivery.ack(&channel); continue; } let message = message.unwrap(); info!("Received bench message: {:?}", message); let tempdir = tempfile::tempdir().unwrap(); let target_dir = tempdir.path(); let bench_subdir = "benchmarks3d"; let bench_names = clone_and_build_benches( target_dir, bench_subdir, &message.repository, &message.commit, ); info!("About to run benchmarks: {:?}", bench_names); let version = rustc_version::version() .map(|v| format!("{}", v)) .unwrap_or("unknown".to_string()); let platform = BenchPlatform { compiler: version.clone(), }; let key = BenchKey { commit: message.commit, branch: message.branch, date: DateTime(chrono::Utc::now()), }; for bench_name in bench_names { let context = BenchContext { name: bench_name, backend: String::new(), }; 
run_bench(config, target_dir, bench_subdir, &key, &context, &platform)?; } delivery.ack(&channel).unwrap(); } other => { error!("consumer ended: {:?}", other); break; } } } let _ = connection.close(); Ok(()) } fn clone_and_build_benches( repo_dir: &Path, bench_subdir: &str, repo_url: &str, commit: &str, ) -> Vec<String> { info!("Cloning {} in {:?}", repo_url, repo_dir); Command::new("git") .arg("clone") .arg(repo_url) .arg(repo_dir) .status() .unwrap(); Command::new("git") .arg("checkout") .arg(commit) .current_dir(repo_dir) .status() .unwrap(); let build_path = format!("{}/{}", repo_dir.to_string_lossy(), bench_subdir); info!("Building {}", build_path); let status = Command::new("cargo") .arg("build") .arg("--release") .args(&["--features", "simd-nightly"]) .args(&["--features", "other-backends"]) .current_dir(&build_path) .status() .unwrap(); info!("Build ended with status: {}", status); let exec_path = format!("{}/target/release", repo_dir.to_string_lossy()); let output = Command::new("./all_benchmarks3") .arg("--list") .current_dir(exec_path) .output() .unwrap(); String::from_utf8_lossy(&output.stdout) .split_whitespace() .map(|s| s.to_string()) .collect() } fn run_bench( config: &BenchConfig, bench_dir: &Path, bench_subdir: &str, key: &BenchKey, context: &BenchContext, platform: &BenchPlatform, ) -> mongodb::error::Result<()> { let build_path = format!("{}/{}", bench_dir.to_string_lossy(), bench_subdir); let status = Command::new("cargo") .arg("run") .arg("--release") .args(&["--features", "simd-nightly"]) .args(&["--features", "other-backends"]) .args(&["--", "--bench", "--example", &context.name]) .current_dir(build_path) .status() .unwrap(); info!("Exit status for '{}' benchmark: {}", context.name, status); let entries = parse_results(bench_dir, bench_subdir, key, context, platform); upload_results(&config, &entries) } fn parse_results( repo_dir: &Path, bench_subdir: &str, key: &BenchKey, context: &BenchContext, platform: &BenchPlatform, ) -> 
Vec<BenchCSVEntry> { let bench_result_path = format!( "{}/{}/{}.csv", repo_dir.to_string_lossy(), bench_subdir, context.name ); info!("Parting bench file: {}", bench_result_path); let csv = parse_csv(bench_result_path).unwrap(); let mut entries = Vec::new(); for (backend, timings) in csv.0.into_iter().zip(csv.1.into_iter()) { let mut context = context.clone(); context.backend = backend; let entry = BenchCSVEntry { key: key.clone(), context, platform: platform.clone(), timings, }; entries.push(entry); } entries } fn upload_results(config: &BenchConfig, entries: &[BenchCSVEntry]) -> mongodb::error::Result<()> { let db = connect_to_mongodb(&config.mongodb_bencher_uri, &config.mongodb_db)?; let coll = db.collection("rapier3d"); for entry in entries { let doc = bson::to_document(entry).unwrap(); coll.insert_one(doc, None)?; } Ok(()) } fn parse_csv(path: impl AsRef<Path>) -> csv::Result<(Vec<String>, Vec<Vec<f32>>)> { let mut reader = csv::ReaderBuilder::new() .has_headers(true) .from_path(path) .unwrap(); let headers: Vec<_> = reader.headers()?.iter().map(|h| h.to_string()).collect(); let mut values = vec![Vec::new(); headers.len()]; for record in reader.records() { for (i, value) in record?.iter().enumerate() { let val = f32::from_str(value).unwrap(); values[i].push(val); } } Ok((headers, values)) } fn connect_to_mongodb(uri: &str, db: &str) -> mongodb::error::Result<Database> { use mongodb::sync::Client; let client = Client::with_uri_str(uri)?; Ok(client.database(db)) }
#[macro_use] extern crate log; use amiquip::{ Connection, ConsumerMessage, ConsumerOptions, Exchange, Publish, QueueDeclareOptions, }; use bson::DateTime; use clap::{App, Arg, SubCommand}; use dimforge_bench_common::{ BenchCSVEntry, BenchConfig, BenchContext, BenchKey, BenchMessage, BenchPlatform, }; use log::LevelFilter; use mongodb::sync::Database; use simple_logger::SimpleLogger; use std::fs::File; use std::io::Write; use std::path::Path; use std::process::Command; use std::str::FromStr; fn main() -> mongodb::error::Result<()> { SimpleLogger::new() .with_level(LevelFilter::Info) .init() .unwrap(); let matches = App::new("Dimforge benchmark tool") .arg( Arg::with_name("config") .short("f") .required(false) .takes_value(true) .help("path to the JSON configuration file"), ) .subcommand(SubCommand::with_name("configure").about("Configure credentials")) .subcommand( SubCommand::with_name("send") .about("Send a message to start a benchmark") .arg( Arg::with_name("repository") .short("r") .required(true) .takes_value(true) .help("the repository to clone"), ) .arg( Arg::with_name("branch") .short("b") .required(true) .takes_value(true) .help("the branch of the commit to compile"), ) .arg( Arg::with_name("commit") .short("c") .required(true) .takes_value(true) .help("the commit to compile"), ), ) .subcommand(SubCommand::with_name("listen").about("Listen to incoming benchmark messages")) .get_matches(); let config = matches.value_of("config"); let config = BenchConfig::from_json_file(config); if let Some(matches) = matches.subcommand_matches("send") { let repository = matches.value_of("repository").unwrap().to_string(); let branch = matches.value_of("branch").unwrap().to_string(); let commit = matches.value_of("commit").unwrap().to_string(); let message = BenchMessage { repository, branch, commit, }; send_bench_message(&config, &message); info!("Bench message sent."); } if let Some(_) = matches.subcommand_matches("listen") { listen_bench_messages(&config)?; } if let 
Some(_) = matches.subcommand_matches("configure") { configure(); } Ok(()) } fn configure() { println!("MongoDB bencher uri: "); let mongodb_bencher_uri = text_io::read!("{}\n"); println!("MongoDB server uri: "); let mongodb_server_uri = text_io::read!("{}\n"); println!("MongoDB database: "); let mongodb_db = text_io::read!("{}\n"); println!("Rabbitmq uri: "); let rabbitmq_uri = text_io::read!("{}\n"); println!("Save configuration to folder [$HOME/.dimforge]: "); let mut output_dir: String = text_io::read!("{}\n"); if output_dir.is_empty() { let home = std::env::var("HOME").unwrap_or(String::new()); output_dir = format!("{}/.dimforge", home); } let config = BenchConfig { mongodb_db, mongodb_bencher_uri, mongodb_server_uri, rabbitmq_uri, }; let config_json = serde_json::to_string(&config).unwrap(); std::fs::create_dir_all(&output_dir).unwrap(); let output_file = format!("{}/benchbot.json", output_dir); let mut out = File::create(&output_file).expect( "Could not open target configuration file. 
Did you run the `configure` subcommand yet?", ); out.write_all(config_json.as_bytes()).unwrap(); info!("Configuration successfully saved to '{}'.", output_file); } fn send_bench_message(config: &BenchConfig, message: &BenchMessage) { let mut connection = Connection::open(&config.rabbitmq_uri).unwrap(); let channel = connection.open_channel(None).unwrap(); let exchange = Exchange::direct(&channel); let message = serde_json::to_string(message).unwrap(); exchange .publish(Publish::new(message.as_bytes(), "benchmark")) .unwrap(); let _ = connection.close(); } fn listen_bench_messages(config: &BenchConfig) -> mongodb::error::Result<()> { let mut connection = Connection::open(&config.rabbitmq_uri).unwrap(); let channel = connection.open_channel(None).unwrap(); let queue = channel .queue_declare("benchmark", QueueDeclareOptions::default()) .unwrap(); let consumer = queue.consume(ConsumerOptions::default()).unwrap(); for message in consumer.receiver().iter() { match message { ConsumerMessage::Delivery(delivery) => { let body = String::from_utf8_lossy(&delivery.body); let message = serde_json::from_str::<BenchMessage>(&body); if delivery.redelivered { warn!("Dropping redelivered message: {:?}", message); let _ = delivery.ack(&channel); continue; } let message = message.unwrap(); info!("Received bench message: {:?}", message); let tempdir = tempfile::tempdir().unwrap(); let target_dir = tempdir.path(); let bench_subdir = "benchmarks3d"; let bench_names = clone_and_build_benches( target_dir, bench_subdir, &message.repository, &message.commit, ); info!("About to run benchmarks: {:?}", bench_names); let version = rustc_version::version() .map(|v| format!("{}", v)) .unwrap_or("unknown".to_string()); let platform = BenchPlatform { compiler: version.clone(), }; let key = BenchKey { commit: message.commit, branch: message.branch, date: DateTime(chrono::Utc::now()), }; for bench_name in bench_names { let context = BenchContext { name: bench_name, backend: String::new(), }; 
run_bench(config, target_dir, bench_subdir, &key, &context, &platform)?; } delivery.ack(&channel).unwrap(); } other => { error!("consumer ended: {:?}", other); break; } } } let _ = connection.close(); Ok(()) } fn clone_and_build_benches( repo_dir: &Path, bench_subdir: &str, repo_url: &str, commit: &str, ) -> Vec<String> { info!("Cloning {} in {:?}", repo_url, repo_dir); Command::new("git") .arg("clone") .arg(repo_url) .arg(repo_dir) .status() .unwrap(); Command::new("git") .arg("checkout") .arg(commit) .current_dir(repo_dir) .status() .unwrap(); let build_path = format!("{}/{}", repo_dir.to_string_lossy(), bench_subdir); info!("Building {}", build_path); let status = Command::new("cargo") .arg("build") .arg("--release") .args(&["--features", "simd-nightly"]) .args(&["--features", "other-backends"]) .current_dir(&build_path) .status() .unwrap(); info!("Build ended with status: {}", status); let exec_path = format!("{}/target/release", repo_dir.to_string_lossy()); let output = Command::new("./all_benchmarks3") .arg("--list") .current_dir(exec_path) .output() .unwrap(); String::from_utf8_lossy(&output.stdout) .split_whitespace() .map(|s| s.to_string()) .collect() } fn run_bench( config: &BenchConfig, bench_dir: &Path, bench_subdir: &str, key: &BenchKey, context: &BenchContext, platform: &BenchPlatform, ) -> mongodb::error::Result<()> { let build_path = format!("{}/{}", bench_dir.to_string_lossy(), bench_subdir); let status = Command::new("cargo") .arg("run") .arg("--release") .args(&["--features", "simd-nightly"]) .args(&["--features", "other-backends"]) .args(&["--", "--bench", "--example", &context.name]) .current_dir(build_path) .status() .unwrap(); info!("Exit status for '{}' benchmark: {}", context.name, status); let entries = parse_results(bench_dir, bench_subdir, key, context, platform); upload_results(&config, &entries) } fn parse_results( repo_dir: &Path, bench_subdir: &str, key: &BenchKey, context: &BenchContext, platform: &BenchPlatform, ) -> 
Vec<BenchCSVEntry> { let bench_result_path = format!( "{}/{}/{}.csv", repo_dir.to_string_lossy(), bench_subdir, context.name ); info!("Parting bench file: {}", bench_result_path); let csv = parse_csv(bench_result_path).unwrap(); let mut entries = Vec::new(); for (backend, timings) in csv.0.into_iter().zip(csv.1.into_iter()) { let mut context = context.clone(); context.backend = backend; let entry = BenchCSVEntry { key: key.clone(), context, platform: platform.clone(), timings, }; entries.push(entry); } entries } fn upload_results(config: &BenchConfig, entries: &[BenchCSVEntry]) -> mongodb::error::Result<()> { let db = connect_to_mongodb(&config.mongodb_bencher_u
fn parse_csv(path: impl AsRef<Path>) -> csv::Result<(Vec<String>, Vec<Vec<f32>>)> { let mut reader = csv::ReaderBuilder::new() .has_headers(true) .from_path(path) .unwrap(); let headers: Vec<_> = reader.headers()?.iter().map(|h| h.to_string()).collect(); let mut values = vec![Vec::new(); headers.len()]; for record in reader.records() { for (i, value) in record?.iter().enumerate() { let val = f32::from_str(value).unwrap(); values[i].push(val); } } Ok((headers, values)) } fn connect_to_mongodb(uri: &str, db: &str) -> mongodb::error::Result<Database> { use mongodb::sync::Client; let client = Client::with_uri_str(uri)?; Ok(client.database(db)) }
ri, &config.mongodb_db)?; let coll = db.collection("rapier3d"); for entry in entries { let doc = bson::to_document(entry).unwrap(); coll.insert_one(doc, None)?; } Ok(()) }
function_block-function_prefixed
[ { "content": "fn connect_to_mongodb(uri: &str, db: &str) -> mongodb::error::Result<Database> {\n\n use mongodb::sync::Client;\n\n let client = Client::with_uri_str(&uri)?;\n\n Ok(client.database(db))\n\n}\n", "file_path": "server/src/main.rs", "rank": 0, "score": 139675.0472944086 }, { ...
Rust
server/src/main.rs
smokku/soldank
9aa7d307121faf7d482bf102c76db34411910a5e
#[macro_use] extern crate clap; use color_eyre::eyre::Result; use hecs::World; use smol::future; use std::{ collections::VecDeque, net::SocketAddr, sync::{Arc, RwLock}, time::{Duration, Instant}, }; use crate::{ constants::*, cvars::{set_cli_cvars, Config, NetConfig}, networking::Networking, }; use soldank_shared::{messages::NetworkMessage, networking::GameWorld}; mod cheat; mod cli; mod constants; mod cvars; mod networking; mod state; mod systems; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum GameState { Lobby, InGame, } fn main() -> Result<()> { color_eyre::install()?; env_logger::init(); smol::block_on(async { let cmd = cli::parse_cli_args(); let mut map_name = cmd.value_of("map").unwrap_or(DEFAULT_MAP).to_owned(); map_name.push_str(".pms"); log::info!("Using map: {}", map_name); let mut config = Config { net: NetConfig { orb: Arc::new(RwLock::new(orb::Config { timestep_seconds: TIMESTEP_RATE, ..Default::default() })), ..Default::default() }, ..Default::default() }; set_cli_cvars(&mut config, &cmd); let mut networking = Networking::new(cmd.value_of("bind")).await; if let Some(key) = cmd.value_of("key") { networking.connection_key = key.to_string(); } let mut messages: VecDeque<(SocketAddr, NetworkMessage)> = VecDeque::new(); let mut world = World::new(); let mut game_state = GameState::Lobby; let mut server = orb::server::Server::<GameWorld>::new(config.net.orb.clone(), 0.0); let startup_time = Instant::now(); let mut previous_time = Instant::now(); let mut running = true; while running { let timeout = Duration::from_millis( (config.net.orb.read().unwrap().snapshot_send_period * 1000.) 
as _, ); future::race( networking.process(&mut world, &mut config, &mut messages), async { smol::Timer::after(timeout).await; }, ) .await; let current_time = Instant::now(); let delta_seconds = current_time.duration_since(previous_time).as_secs_f64(); let seconds_since_startup = current_time.duration_since(startup_time).as_secs_f64(); systems::process_network_messages( &mut world, &mut messages, &mut networking.connections, ); systems::message_dump(&mut messages); match game_state { GameState::Lobby => { systems::lobby(&mut world, &mut game_state, &networking); } GameState::InGame => { server.update(delta_seconds, seconds_since_startup); let server_display_state = server.display_state(); log::trace!( "server_display_state: {}", server_display_state.inner().len() ); networking.process_simulation(&mut server); if networking.connections.is_empty() { log::info!("No connections left - exiting"); running = false; } } } previous_time = current_time; networking.post_process(&config); } log::info!("Exiting server"); Ok(()) }) }
#[macro_use] extern crate clap; use color_eyre::eyre::Result; use hecs::World; use smol::future; use std::{ collections::VecDeque, net::SocketAddr, sync::{Arc, RwLock}, time::{Duration, Instant}, }; use crate::{ constants::*, cvars::{set_cli_cvars, Config, NetConfig}, networking::Networking, }; use soldank_shared::{messages::NetworkMessage, networking::GameWorld}; mod cheat; mod cli; mod constants; mod cvars; mod networking; mod state; mod systems; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum GameState { Lobby, InGame, } fn main() -> Result<()> { color_eyre::install()?; env_logger::init(); smol::block_on(async { let cmd = cli::parse_cli_args(); let mut map_name = cmd.value_of("map").unwrap_or(DEFAULT_MAP).to_owned(); map_name.push_str(".pms"); log::info!("Using map: {}", map_name); let mut config = Config { net: NetConfig { orb: Arc::new(RwLock::new(orb::Config { timestep_seconds: TIMESTEP_RATE, ..Default::default() })), ..Default::default() }, ..Default::default() }; set_cli_cvars(&mut config, &cmd); let mut networking = Networking::new(cmd.value_of("bind")).await; if let Some(key) = cmd.value_of("key") { networking.connection_key = key.to_string(); } let mut messages: VecDeque<(SocketAddr, NetworkMessage)> = VecDeque::new(); let mut world = World::new(); let mut game_state = GameState::Lobby; let mut server = orb::server::Server::<GameWorld>::new(config.net.orb.clone(), 0.0); let startup_time = Instant::now(); let mut previous_time = Instant::now(); let mut running = true; while running { let timeout = Duration::from_millis( (config.net.orb.read().unwrap().snapshot_send_period * 1000.) 
as _, ); future::race( networking.process(&mut world, &mut config, &mut messages), async { smol::Timer::after(timeout).await; }, ) .await; let current_time = Instant::now(); let delta_seconds = current_time.duration_since(previous_time).as_secs_f64(); let seconds_since_startup = current_time.duration_since(startup_time).as_secs_f64(); systems::process_network_messages( &mut world, &mut messages, &mut networking.connections, ); systems::message_dump(&mut messages); match game_state { GameState::Lobby => { systems::lobby(&mut world, &mut game_state, &networking); } GameState::InGame => { server.update(delta_seconds, seconds_since_startup); let server_display_state = server.display_state(); log::trace!( "server_display_state: {}", server_display_state.inner().len() ); networking.process_simula
networking.post_process(&config); } log::info!("Exiting server"); Ok(()) }) }
tion(&mut server); if networking.connections.is_empty() { log::info!("No connections left - exiting"); running = false; } } } previous_time = current_time;
function_block-random_span
[ { "content": "pub fn lobby(world: &mut World, game_state: &mut GameState, networking: &Networking) {\n\n if *game_state != GameState::Lobby {\n\n log::error!(\"Running lobby system outside Lobby GameState\");\n\n }\n\n\n\n let ready = !networking.connections.is_empty()\n\n && networking\n...
Rust
lib/src/settings.rs
orhun/pueue
8b033231e8b95a987d284c621b45ed20203700d3
use std::collections::HashMap; use std::fs::{create_dir_all, File}; use std::io::{prelude::*, BufReader}; use std::path::{Path, PathBuf}; use log::info; use serde_derive::{Deserialize, Serialize}; use shellexpand::tilde; use crate::error::Error; use crate::platform::directories::*; use crate::setting_defaults::*; #[derive(PartialEq, Clone, Debug, Default, Deserialize, Serialize)] pub struct Shared { pub pueue_directory: Option<PathBuf>, pub runtime_directory: Option<PathBuf>, #[cfg(not(target_os = "windows"))] #[serde(default = "default_true")] pub use_unix_socket: bool, pub pid_path: Option<PathBuf>, #[cfg(not(target_os = "windows"))] pub unix_socket_path: Option<PathBuf>, #[serde(default = "default_host")] pub host: String, #[serde(default = "default_port")] pub port: String, pub daemon_cert: Option<PathBuf>, pub daemon_key: Option<PathBuf>, pub shared_secret_path: Option<PathBuf>, } #[derive(PartialEq, Clone, Debug, Default, Deserialize, Serialize)] pub struct Client { #[serde(default = "Default::default")] pub restart_in_place: bool, #[serde(default = "default_true")] pub read_local_logs: bool, #[serde(default = "Default::default")] pub show_confirmation_questions: bool, #[serde(default = "Default::default")] pub show_expanded_aliases: bool, #[serde(default = "Default::default")] pub dark_mode: bool, pub max_status_lines: Option<usize>, #[serde(default = "default_status_time_format")] pub status_time_format: String, #[serde(default = "default_status_datetime_format")] pub status_datetime_format: String, } #[derive(PartialEq, Clone, Debug, Default, Deserialize, Serialize)] pub struct Daemon { #[serde(default = "Default::default")] pub pause_group_on_failure: bool, #[serde(default = "Default::default")] pub pause_all_on_failure: bool, pub callback: Option<String>, #[serde(default = "default_callback_log_lines")] pub callback_log_lines: usize, #[serde(skip_serializing)] #[deprecated( since = "1.1.0", note = "The configuration for groups is now stored in the 
state." )] pub groups: Option<HashMap<String, i64>>, } impl Default for Settings { fn default() -> Self { Settings { client: Client { read_local_logs: true, status_time_format: default_status_time_format(), status_datetime_format: default_status_datetime_format(), ..Default::default() }, daemon: Daemon { callback_log_lines: default_callback_log_lines(), ..Default::default() }, shared: Shared { #[cfg(not(target_os = "windows"))] use_unix_socket: true, host: default_host(), port: default_port(), ..Default::default() }, profiles: HashMap::new(), } } } #[derive(PartialEq, Clone, Debug, Deserialize, Serialize)] pub struct Settings { #[serde(default = "Default::default")] pub client: Client, #[serde(default = "Default::default")] pub daemon: Daemon, pub shared: Shared, #[serde(default = "HashMap::new")] pub profiles: HashMap<String, NestedSettings>, } #[derive(PartialEq, Clone, Debug, Deserialize, Serialize)] pub struct NestedSettings { #[serde(default = "Default::default")] pub client: Client, #[serde(default = "Default::default")] pub daemon: Daemon, #[serde(default = "Default::default")] pub shared: Shared, } pub fn expand_home(old_path: &Path) -> PathBuf { PathBuf::from(tilde(&old_path.to_string_lossy()).into_owned()) } impl Shared { pub fn pueue_directory(&self) -> PathBuf { if let Some(path) = &self.pueue_directory { expand_home(path) } else { default_pueue_path() } } pub fn runtime_directory(&self) -> PathBuf { if let Some(path) = &self.runtime_directory { expand_home(path) } else if let Some(path) = default_runtime_directory() { path } else { default_pueue_path() } } #[cfg(not(target_os = "windows"))] pub fn unix_socket_path(&self) -> PathBuf { if let Some(path) = &self.unix_socket_path { expand_home(path) } else { self.runtime_directory() .join(format!("pueue_{}.socket", whoami::username())) } } pub fn pid_path(&self) -> PathBuf { if let Some(path) = &self.pid_path { expand_home(path) } else { self.runtime_directory().join("pueue.pid") } } pub fn 
daemon_cert(&self) -> PathBuf { if let Some(path) = &self.daemon_cert { expand_home(path) } else { self.pueue_directory().join("certs").join("daemon.cert") } } pub fn daemon_key(&self) -> PathBuf { if let Some(path) = &self.daemon_key { expand_home(path) } else { self.pueue_directory().join("certs").join("daemon.key") } } pub fn shared_secret_path(&self) -> PathBuf { if let Some(path) = &self.shared_secret_path { expand_home(path) } else { self.pueue_directory().join("shared_secret") } } } impl Settings { pub fn read(from_file: &Option<PathBuf>) -> Result<(Settings, bool), Error> { if let Some(path) = from_file { let file = File::open(path) .map_err(|err| Error::IoPathError(path.clone(), "opening config file", err))?; let reader = BufReader::new(file); let settings = serde_yaml::from_reader(reader) .map_err(|err| Error::ConfigDeserialization(err.to_string()))?; return Ok((settings, true)); }; info!("Parsing config files"); for directory in get_config_directories().into_iter() { let path = directory.join("pueue.yml"); info!("Checking path: {path:?}"); if path.exists() && path.is_file() { info!("Found config file at: {path:?}"); let file = File::open(&path) .map_err(|err| Error::IoPathError(path, "opening config file.", err))?; let reader = BufReader::new(file); let settings = serde_yaml::from_reader(reader) .map_err(|err| Error::ConfigDeserialization(err.to_string()))?; return Ok((settings, true)); } } info!("No config file found. 
Use default config."); Ok((Settings::default(), false)) } pub fn save(&self, path: &Option<PathBuf>) -> Result<(), Error> { let config_path = if let Some(path) = path { path.clone() } else { default_config_directory().join("pueue.yml") }; let config_dir = config_path .parent() .ok_or_else(|| Error::InvalidPath("Couldn't resolve config directory".into()))?; if !config_dir.exists() { create_dir_all(&config_dir).map_err(|err| { Error::IoPathError(config_dir.to_path_buf(), "creating config dir", err) })?; } let content = match serde_yaml::to_string(self) { Ok(content) => content, Err(error) => { return Err(Error::Generic(format!( "Configuration file serialization failed:\n{error}" ))) } }; let mut file = File::create(&config_path).map_err(|err| { Error::IoPathError(config_dir.to_path_buf(), "creating settings file", err) })?; file.write_all(content.as_bytes()).map_err(|err| { Error::IoPathError(config_dir.to_path_buf(), "writing settings file", err) })?; Ok(()) } pub fn load_profile(&mut self, profile: &str) -> Result<(), Error> { let profile = self.profiles.remove(profile).ok_or_else(|| { Error::ConfigDeserialization(format!("Couldn't find profile with name \"{profile}\"")) })?; self.client = profile.client; self.daemon = profile.daemon; self.shared = profile.shared; Ok(()) } } #[cfg(test)] mod test { use super::*; #[test] fn test_load_profile() { let mut settings = Settings::default(); assert_eq!( settings.client.status_time_format, default_status_time_format() ); assert_eq!( settings.daemon.callback_log_lines, default_callback_log_lines() ); assert_eq!(settings.shared.host, default_host()); let mut profile = Settings::default(); profile.client.status_time_format = "test".to_string(); profile.daemon.callback_log_lines = 100_000; profile.shared.host = "quatschhost".to_string(); let profile = NestedSettings { client: profile.client, daemon: profile.daemon, shared: profile.shared, }; settings.profiles.insert("testprofile".to_string(), profile); settings 
.load_profile("testprofile") .expect("We just added the profile"); assert_eq!(settings.client.status_time_format, "test"); assert_eq!(settings.daemon.callback_log_lines, 100_000); assert_eq!(settings.shared.host, "quatschhost"); } #[test] fn test_error_on_missing_profile() { let mut settings = Settings::default(); let result = settings.load_profile("doesn't exist"); let expected_error_message = "Couldn't find profile with name \"doesn't exist\""; if let Err(Error::ConfigDeserialization(error_message)) = result { assert_eq!(error_message, expected_error_message); return; } panic!("Got unexpected result when expecting missing profile error: {result:?}"); } }
use std::collections::HashMap; use std::fs::{create_dir_all, File}; use std::io::{prelude::*, BufReader}; use std::path::{Path, PathBuf}; use log::info; use serde_derive::{Deserialize, Serialize}; use shellexpand::tilde; use crate::error::Error; use crate::platform::directories::*; use crate::setting_defaults::*; #[derive(PartialEq, Clone, Debug, Default, Deserialize, Serialize)] pub struct Shared { pub pueue_directory: Option<PathBuf>, pub runtime_directory: Option<PathBuf>, #[cfg(not(target_os = "windows"))] #[serde(default = "default_true")] pub use_unix_socket: bool, pub pid_path: Option<PathBuf>, #[cfg(not(target_os = "windows"))] pub unix_socket_path: Option<PathBuf>, #[serde(default = "default_host")] pub host: String, #[serde(default = "default_port")] pub port: String, pub daemon_cert: Option<PathBuf>, pub daemon_key: Option<PathBuf>, pub shared_secret_path: Option<PathBuf>, } #[derive(PartialEq, Clone, Debug, Default, Deserialize, Serialize)] pub struct Client { #[serde(default = "Default::default")] pub restart_in_place: bool, #[serde(default = "default_true")] pub read_local_logs: bool, #[serde(default = "Default::default")] pub show_confirmation_questions: bool, #[serde(default = "Default::default")] pub show_expanded_aliases: bool, #[serde(default = "Default::default")] pub dark_mode: bool, pub max_status_lines: Option<usize>, #[serde(default = "default_status_time_format")] pub status_time_format: String, #[serde(default = "default_status_datetime_format")] pub status_datetime_format: String, } #[derive(PartialEq, Clone, Debug, Default, Deserialize, Serialize)] pub struct Daemon { #[serde(default = "Default::default")] pub pause_group_on_failure: bool, #[serde(default = "Default::default")] pub pause_all_on_failure: bool, pub callback: Option<String>, #[serde(default = "default_callback_log_lines")] pub callback_log_lines: usize, #[serde(skip_serializing)] #[deprecated( since = "1.1.0", note = "The configuration for groups is now stored in the 
state." )] pub groups: Option<HashMap<String, i64>>, } impl Default for Settings { fn default() -> Self { Settings { client: Client { read_local_logs: true, status_time_format: default_status_time_format(), status_datetime_format: default_status_datetime_format(), ..Default::default() }, daemon: Daemon { callback_log_lines: default_callback_log_lines(), ..Default::default() }, shared: Shared { #[cfg(not(target_os = "windows"))] use_unix_socket: true, host: default_host(), port: default_port(), ..Default::default() }, profiles: HashMap::new(), } } } #[derive(PartialEq, Clone, Debug, Deserialize, Serialize)] pub struct Settings { #[serde(default = "Default::default")] pub client: Client, #[serde(default = "Default::default")] pub daemon: Daemon, pub shared: Shared, #[serde(default = "HashMap::new")] pub profiles: HashMap<String, NestedSettings>, } #[derive(PartialEq, Clone, Debug, Deserialize, Serialize)] pub struct NestedSettings { #[serde(default = "Default::default")] pub client: Client, #[serde(default = "Default::default")] pub daemon: Daemon, #[serde(default = "Default::default")] pub shared: Shared, } pub fn expand_home(old_path: &Path) -> PathBuf { PathBuf::from(tilde(&old_path.to_string_lossy()).into_owned()) } impl Shared { pub fn pueue_directory(&self) -> PathBuf { if let Some(path) = &self.pueue_directory { expand_home(path) } else { default_pueue_path() } } pub fn runtime_directory(&self) -> PathBuf { if let Some(path) = &self.runtime_directory { expand_home(path) } else if let Some(path) = default_runtime_directory() { path } else { default_pueue_path() } } #[cfg(not(target_os = "windows"))] pub fn unix_socket_path(&self) -> PathBuf { if let Some(path) = &self.unix_socket_path { expand_home(path) } else { self.runtime_directory() .join(format!("pueue_{}.socket", whoami::username())) } } pub fn pid_path(&self) -> PathBuf { if let Some(path) = &self.pid_path { expand_home(path) } else { self.runtime_directory().join("pueue.pid") } } pub fn 
daemon_cert(&self) -> PathBuf { if let Some(path) = &self.daemon_cert { expand_home(path) } else { self.pueue_directory().join("certs").join("daemon.cert") } } pub fn daemon_key(&self) -> PathBuf { if let Some(path) = &self.daemon_key { expand_home(path) } else { self.pueue_directory().join("certs").join("daemon.key") } }
} impl Settings { pub fn read(from_file: &Option<PathBuf>) -> Result<(Settings, bool), Error> { if let Some(path) = from_file { let file = File::open(path) .map_err(|err| Error::IoPathError(path.clone(), "opening config file", err))?; let reader = BufReader::new(file); let settings = serde_yaml::from_reader(reader) .map_err(|err| Error::ConfigDeserialization(err.to_string()))?; return Ok((settings, true)); }; info!("Parsing config files"); for directory in get_config_directories().into_iter() { let path = directory.join("pueue.yml"); info!("Checking path: {path:?}"); if path.exists() && path.is_file() { info!("Found config file at: {path:?}"); let file = File::open(&path) .map_err(|err| Error::IoPathError(path, "opening config file.", err))?; let reader = BufReader::new(file); let settings = serde_yaml::from_reader(reader) .map_err(|err| Error::ConfigDeserialization(err.to_string()))?; return Ok((settings, true)); } } info!("No config file found. Use default config."); Ok((Settings::default(), false)) } pub fn save(&self, path: &Option<PathBuf>) -> Result<(), Error> { let config_path = if let Some(path) = path { path.clone() } else { default_config_directory().join("pueue.yml") }; let config_dir = config_path .parent() .ok_or_else(|| Error::InvalidPath("Couldn't resolve config directory".into()))?; if !config_dir.exists() { create_dir_all(&config_dir).map_err(|err| { Error::IoPathError(config_dir.to_path_buf(), "creating config dir", err) })?; } let content = match serde_yaml::to_string(self) { Ok(content) => content, Err(error) => { return Err(Error::Generic(format!( "Configuration file serialization failed:\n{error}" ))) } }; let mut file = File::create(&config_path).map_err(|err| { Error::IoPathError(config_dir.to_path_buf(), "creating settings file", err) })?; file.write_all(content.as_bytes()).map_err(|err| { Error::IoPathError(config_dir.to_path_buf(), "writing settings file", err) })?; Ok(()) } pub fn load_profile(&mut self, profile: &str) -> Result<(), 
Error> { let profile = self.profiles.remove(profile).ok_or_else(|| { Error::ConfigDeserialization(format!("Couldn't find profile with name \"{profile}\"")) })?; self.client = profile.client; self.daemon = profile.daemon; self.shared = profile.shared; Ok(()) } } #[cfg(test)] mod test { use super::*; #[test] fn test_load_profile() { let mut settings = Settings::default(); assert_eq!( settings.client.status_time_format, default_status_time_format() ); assert_eq!( settings.daemon.callback_log_lines, default_callback_log_lines() ); assert_eq!(settings.shared.host, default_host()); let mut profile = Settings::default(); profile.client.status_time_format = "test".to_string(); profile.daemon.callback_log_lines = 100_000; profile.shared.host = "quatschhost".to_string(); let profile = NestedSettings { client: profile.client, daemon: profile.daemon, shared: profile.shared, }; settings.profiles.insert("testprofile".to_string(), profile); settings .load_profile("testprofile") .expect("We just added the profile"); assert_eq!(settings.client.status_time_format, "test"); assert_eq!(settings.daemon.callback_log_lines, 100_000); assert_eq!(settings.shared.host, "quatschhost"); } #[test] fn test_error_on_missing_profile() { let mut settings = Settings::default(); let result = settings.load_profile("doesn't exist"); let expected_error_message = "Couldn't find profile with name \"doesn't exist\""; if let Err(Error::ConfigDeserialization(error_message)) = result { assert_eq!(error_message, expected_error_message); return; } panic!("Got unexpected result when expecting missing profile error: {result:?}"); } }
pub fn shared_secret_path(&self) -> PathBuf { if let Some(path) = &self.shared_secret_path { expand_home(path) } else { self.pueue_directory().join("shared_secret") } }
function_block-full_function
[ { "content": "/// This is a small helper which either returns a given group or the default group.\n\npub fn group_or_default(group: &Option<String>) -> String {\n\n group\n\n .clone()\n\n .unwrap_or_else(|| PUEUE_DEFAULT_GROUP.to_string())\n\n}\n\n\n", "file_path": "client/client.rs", "...
Rust
capi/src/optimizer.rs
shinolab/acoustic-field-calculator
620c363c5003cdff085fe41db10ebfbddd25153d
/* * File: optimizer.rs * Project: src * Created Date: 22/09/2020 * Author: Shun Suzuki * ----- * Last Modified: 17/06/2021 * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp) * ----- * Copyright (c) 2020 Hapis Lab. All rights reserved. * */ use libc::c_char; use std::ffi::c_void; use std::ffi::CStr; use std::mem::forget; use super::type_inference_aux::*; use acoustic_field_calculator::prelude::*; use acoustic_field_optimizer::multiple_foci::*; use acoustic_field_optimizer::*; macro_rules! gen_match_src_type { ([$($src_type:ident),*], $st: ident, $handle: ident, $expr: expr) => { match SourceType::from_i32($st) { $(SourceType::$src_type => { let mut system: Box<UniformSystem<$src_type>> = Box::from_raw($handle as *mut _); $expr.optimize(&mut system); forget(system); },)* } }; ($st: ident, $handle: ident, $expr: expr) => { sources!(gen_match_src_type; $st, $handle, $expr) } } #[no_mangle] #[allow(improper_ctypes_definitions)] pub unsafe extern "C" fn AFO_FocalPoint(handle: *mut c_void, point: Vector3, source_type: i32) { gen_match_src_type!(source_type, handle, FocalPoint::new(point)); } #[no_mangle] #[allow(improper_ctypes_definitions)] pub unsafe extern "C" fn AFO_BesselBeam( handle: *mut c_void, point: Vector3, dir: Vector3, theta: f32, source_type: i32, ) { gen_match_src_type!(source_type, handle, BesselBeam::new(point, dir, theta)); } #[no_mangle] #[allow(improper_ctypes_definitions)] pub unsafe extern "C" fn AFO_IFFT( handle: *mut c_void, path: *const c_char, bottom_left: Vector3, top_left: Vector3, bottom_right: Vector3, spacing: f32, z: f32, source_type: i32, ) { let path = CStr::from_ptr(path).to_str().unwrap(); gen_match_src_type!( source_type, handle, Ifft::new(path, bottom_left, top_left, bottom_right, spacing, z) ); } #[no_mangle] pub unsafe extern "C" fn AFO_GSPAT( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, 
len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Gspat::new(foci.to_vec(), amps.to_vec()) ); } #[no_mangle] pub unsafe extern "C" fn AFO_GS( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, GS::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_Naive( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Naive::new(foci.to_vec(), amps.to_vec()) ); } #[no_mangle] pub unsafe extern "C" fn AFO_Horn( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, Horn::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_Long( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, Long::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_APO( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, lambda: f32, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Apo::new(foci.to_vec(), amps.to_vec(), 
lambda) ); } #[no_mangle] pub unsafe extern "C" fn AFO_GaussNewton( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, GaussNewton::new(foci.to_vec(), amps.to_vec()) ); } #[no_mangle] pub unsafe extern "C" fn AFO_GradientDescent( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, GradientDescent::new(foci.to_vec(), amps.to_vec()) ); } #[no_mangle] pub unsafe extern "C" fn AFO_LM( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, LM::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_Greedy( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, phase_div: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Greedy::new(foci.to_vec(), amps.to_vec(), phase_div as usize) ); }
/* * File: optimizer.rs * Project: src * Created Date: 22/09/2020 * Author: Shun Suzuki * ----- * Last Modified: 17/06/2021 * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp) * ----- * Copyright (c) 2020 Hapis Lab. All rights reserved. * */ use libc::c_char; use std::ffi::c_void; use std::ffi::CStr; use std::mem::forget; use super::type_inference_aux::*; use acoustic_field_calculator::prelude::*; use acoustic_field_optimizer::multiple_foci::*; use acoustic_field_optimizer::*; macro_rules! gen_match_src_type { ([$($src_type:ident),*], $st: ident, $handle: ident, $expr: expr) => { match SourceType::from_i32($st) { $(SourceType::$src_type => { let mut system: Box<UniformSystem<$src_type>> = Box::from_raw($handle as *mut _); $expr.optimize(&mut system); forget(system); },)* } }; ($st: ident, $handle: ident, $expr: expr) => { sources!(gen_match_src_type; $st, $handle, $expr) } } #[no_mangle] #[allow(improper_ctypes_definitions)] pub unsafe extern "C" fn AFO_FocalPoint(handle: *mut c_void, point: Vector3, source_type: i32) { gen_match_src_type!(source_type, handle, FocalPoint::new(point)); } #[no_mangle] #[allow(improper_ctypes_definitions)] pub unsafe extern "C" fn AFO_BesselBeam( handle: *mut c_void, point: Vector3, dir: Vector3, theta: f32, source_type: i32, ) { gen_match_src_type!(source_type, handle, BesselBeam::new(point, dir, theta)); } #[no_mangle] #[allow(improper_ctypes_definitions)] pub unsafe extern "C" fn AFO_IFFT( handle: *mut c_void, path: *const c_char, bottom_left: Vector3, top_left: Vector3, bottom_right: Vector3, spacing: f32, z: f32, source_type: i32, ) { let path = CStr::from_ptr(path).to_str().unwrap(); gen_match_src_type!( source_type, handle, Ifft::new(path, bottom_left, top_left, bottom_right, spacing, z) ); } #[no_mangle] pub unsafe extern "C" fn AFO_GSPAT( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foc
#[no_mangle] pub unsafe extern "C" fn AFO_GS( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, GS::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_Naive( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Naive::new(foci.to_vec(), amps.to_vec()) ); } #[no_mangle] pub unsafe extern "C" fn AFO_Horn( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, Horn::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_Long( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, Long::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_APO( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, lambda: f32, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Apo::new(foci.to_vec(), amps.to_vec(), lambda) ); } #[no_mangle] pub unsafe extern "C" fn AFO_GaussNewton( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, 
source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, GaussNewton::new(foci.to_vec(), amps.to_vec()) ); } #[no_mangle] pub unsafe extern "C" fn AFO_GradientDescent( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, GradientDescent::new(foci.to_vec(), amps.to_vec()) ); } #[no_mangle] pub unsafe extern "C" fn AFO_LM( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, LM::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_Greedy( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, phase_div: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Greedy::new(foci.to_vec(), amps.to_vec(), phase_div as usize) ); }
i = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Gspat::new(foci.to_vec(), amps.to_vec()) ); }
function_block-function_prefixed
[ { "content": "pub fn to_vec4(v: Vector3) -> [f32; 4] {\n\n [v[0] as f32, v[1] as f32, v[2] as f32, 0.]\n\n}\n", "file_path": "acoustic-field-calculator/src/gpu/gpu_prelude.rs", "rank": 0, "score": 210100.9783508046 }, { "content": "#[inline(always)]\n\npub fn sub(a: Vector3, b: Vector3) -...
Rust
src/test/util/matchable.rs
PhotonQuantum/mongo-rust-driver
fd0f75c1af911d02d55659876456838d5f2b8bf0
use std::{any::Any, fmt::Debug, time::Duration}; use crate::{ bson::{Bson, Document}, bson_util, options::{AuthMechanism, Credential}, }; pub trait Matchable: Sized + 'static { fn is_placeholder(&self) -> bool { false } fn content_matches(&self, expected: &Self) -> Result<(), String>; fn matches<T: Matchable + Any>(&self, expected: &T) -> Result<(), String> { if expected.is_placeholder() { return Ok(()); } if let Some(expected) = <dyn Any>::downcast_ref::<Self>(expected) { self.content_matches(expected) } else { Err(format!( "Couldn't down downcast expected ({:?}) to self ({:?})", expected.type_id(), self.type_id() )) } } } pub trait MatchErrExt { fn prefix(self, name: &str) -> Self; } impl MatchErrExt for Result<(), String> { fn prefix(self, name: &str) -> Self { self.map_err(|s| format!("{}: {}", name, s)) } } pub fn eq_matches<T: PartialEq + Debug>( name: &str, actual: &T, expected: &T, ) -> Result<(), String> { if actual != expected { return Err(format!( "expected {} {:?}, got {:?}", name, expected, actual )); } Ok(()) } impl Matchable for Bson { fn is_placeholder(&self) -> bool { if let Bson::String(string) = self { string.as_str() == "42" || string.as_str() == "" } else { get_int(self) == Some(42) } } fn content_matches(&self, expected: &Bson) -> Result<(), String> { match (self, expected) { (Bson::Document(actual_doc), Bson::Document(expected_doc)) => { actual_doc.matches(expected_doc) } (Bson::Array(actual_array), Bson::Array(expected_array)) => { if actual_array.len() < expected_array.len() { return Err(format!( "expected {} array elements, got {}", expected_array.len(), actual_array.len() )); } for (actual, expected) in actual_array.iter().zip(expected_array.iter()) { actual.matches(expected)?; } Ok(()) } _ => { match (bson_util::get_int(self), get_int(expected)) { (Some(actual_int), Some(expected_int)) => { eq_matches("int", &actual_int, &expected_int)? 
} (None, Some(expected_int)) => { return Err(format!("expected int {}, got none", expected_int)) } _ => eq_matches("bson", self, expected)?, } Ok(()) } } } } impl Matchable for Document { fn content_matches(&self, expected: &Document) -> Result<(), String> { for (k, v) in expected.iter() { if k == "upsertedCount" { continue; } if k == "afterClusterTime" { continue; } if k == "recoveryToken" && v.is_placeholder() && self.get_document(k).is_ok() { continue; } if k == "readConcern" { if let Some(doc) = v.as_document() { if doc.len() == 1 && doc.get_i32("afterClusterTime") == Ok(42) { continue; } } } match self.get(k) { Some(actual_v) => actual_v.matches(v).prefix(k)?, None => { if v != &Bson::Null { return Err(format!("{:?}: expected value {:?}, got null", k, v)); } } } } Ok(()) } } impl Matchable for Credential { fn content_matches(&self, expected: &Credential) -> Result<(), String> { self.username .content_matches(&expected.username) .prefix("username")?; self.source .content_matches(&expected.source) .prefix("source")?; self.password .content_matches(&expected.password) .prefix("password")?; self.mechanism .content_matches(&expected.mechanism) .prefix("mechanism")?; self.mechanism_properties .content_matches(&expected.mechanism_properties) .prefix("mechanism_properties")?; Ok(()) } } impl Matchable for AuthMechanism { fn content_matches(&self, expected: &AuthMechanism) -> Result<(), String> { eq_matches("AuthMechanism", self, expected) } } impl Matchable for bool { fn content_matches(&self, expected: &bool) -> Result<(), String> { eq_matches("bool", self, expected) } } impl Matchable for u32 { fn is_placeholder(&self) -> bool { self == &42 } fn content_matches(&self, expected: &u32) -> Result<(), String> { eq_matches("u32", self, expected) } } impl Matchable for String { fn is_placeholder(&self) -> bool { self.as_str() == "42" } fn content_matches(&self, expected: &String) -> Result<(), String> { eq_matches("String", self, expected) } } impl Matchable for Duration 
{ fn content_matches(&self, expected: &Duration) -> Result<(), String> { eq_matches("Duration", self, expected) } } impl<T: Matchable> Matchable for Option<T> { fn is_placeholder(&self) -> bool { match self { Some(ref v) => v.is_placeholder(), None => true, } } fn content_matches(&self, expected: &Option<T>) -> Result<(), String> { if let Some(expected_value) = expected { return match self { Some(actual_value) => actual_value.content_matches(expected_value), None => Err("expected Some(_), got None".to_string()), }; } Ok(()) } } pub fn assert_matches<A: Matchable + Debug, E: Matchable + Debug>( actual: &A, expected: &E, description: Option<&str>, ) { let result = actual.matches(expected); assert!( result.is_ok(), "{}\n{:#?}\n did not MATCH \n{:#?}\n MATCH failure: {}", description.unwrap_or(""), actual, expected, result.unwrap_err(), ); } fn parse_i64_ext_json(doc: &Document) -> Option<i64> { let number_string = doc.get("$numberLong").and_then(Bson::as_str)?; number_string.parse::<i64>().ok() } fn get_int(value: &Bson) -> Option<i64> { bson_util::get_int(value).or_else(|| value.as_document().and_then(parse_i64_ext_json)) }
use std::{any::Any, fmt::Debug, time::Duration}; use crate::{ bson::{Bson, Document}, bson_util, options::{AuthMechanism, Credential}, }; pub trait Matchable: Sized + 'static { fn is_placeholder(&self) -> bool { false } fn content_matches(&self, expected: &Self) -> Result<(), String>; fn matches<T: Matchable + Any>(&self, expected: &T) -> Result<(), String> { if expected.is_placeholder() { return Ok(()); } if let Some(expected) = <dyn Any>::downcast_ref::<Self>(expected) { self.content_matches(expected) } else { Err(format!( "Couldn't down downcast expected ({:?}) to self ({:?})", expected.type_id(), self.type_id() )) } } } pub trait MatchErrExt { fn prefix(self, name: &str) -> Self; } impl MatchErrExt for Result<(), String> { fn prefix(self, name: &str) -> Self { self.map_err(|s| format!("{}: {}", name, s)) } } pub fn eq_matches<T: PartialEq + Debug>( name: &str, actual: &T, expected: &T, ) -> Result<(), String> { if actual != expected { return Err(format!( "expected {} {:?}, got {:?}", name, expected, actual )); } Ok(()) } impl Matchable for Bson { fn is_placeholder(&self) -> bool { if let Bson::String(string) = self { string.as_str() == "42" || string.as_str() == "" } else { get_int(self) == Some(42) } } fn content_matches(&self, expected: &Bson) -> Result<(), String> { match (self, expected) { (Bson::Document(actual_doc), Bson::Document(expected_doc)) => { actual_doc.matches(expected_doc) } (Bson::Array(actual_array), Bson::Array(expected_array)) => { if actual_array.len() < expected_array.len() { return Err(format!( "expected {} array elements, got {}", expected_array.len(), actual_array.len() )); } for (actual, expected) in actual_array.iter().zip(expected_array.iter()) { actual.matches(expected)?; } Ok(()) } _ => { match (bson_util::get_int(self), get_int(expected)) { (Some(actual_int), Some(expected_int)) => { eq_matches("int", &actual_int, &expected_int)? 
} (None, Some(expected_int)) => { return Err(format!("expected int {}, got none", expected_int)) } _ => eq_matches("bson", self, expected)?, } Ok(()) } } } } impl Matchable for Document { fn content_matches(&self, expected: &Document) -> Result<(), String> { for (k, v) in expected.iter() { if k == "upsertedCount" { continue; } if k == "afterClusterTime" { continue; } if k == "recoveryToken" && v.is_placeholder() && self.get_document(k).is_ok() { continue; } if k == "readConcern" { if let Some(doc) = v.as_document() { if doc.len() == 1 && doc.get_i32("afterClusterTime") == Ok(42) { continue; } } } match self.get(k) { Some(actual_v) => actual_v.matches(v).prefix(
.prefix("username")?; self.source .content_matches(&expected.source) .prefix("source")?; self.password .content_matches(&expected.password) .prefix("password")?; self.mechanism .content_matches(&expected.mechanism) .prefix("mechanism")?; self.mechanism_properties .content_matches(&expected.mechanism_properties) .prefix("mechanism_properties")?; Ok(()) } } impl Matchable for AuthMechanism { fn content_matches(&self, expected: &AuthMechanism) -> Result<(), String> { eq_matches("AuthMechanism", self, expected) } } impl Matchable for bool { fn content_matches(&self, expected: &bool) -> Result<(), String> { eq_matches("bool", self, expected) } } impl Matchable for u32 { fn is_placeholder(&self) -> bool { self == &42 } fn content_matches(&self, expected: &u32) -> Result<(), String> { eq_matches("u32", self, expected) } } impl Matchable for String { fn is_placeholder(&self) -> bool { self.as_str() == "42" } fn content_matches(&self, expected: &String) -> Result<(), String> { eq_matches("String", self, expected) } } impl Matchable for Duration { fn content_matches(&self, expected: &Duration) -> Result<(), String> { eq_matches("Duration", self, expected) } } impl<T: Matchable> Matchable for Option<T> { fn is_placeholder(&self) -> bool { match self { Some(ref v) => v.is_placeholder(), None => true, } } fn content_matches(&self, expected: &Option<T>) -> Result<(), String> { if let Some(expected_value) = expected { return match self { Some(actual_value) => actual_value.content_matches(expected_value), None => Err("expected Some(_), got None".to_string()), }; } Ok(()) } } pub fn assert_matches<A: Matchable + Debug, E: Matchable + Debug>( actual: &A, expected: &E, description: Option<&str>, ) { let result = actual.matches(expected); assert!( result.is_ok(), "{}\n{:#?}\n did not MATCH \n{:#?}\n MATCH failure: {}", description.unwrap_or(""), actual, expected, result.unwrap_err(), ); } fn parse_i64_ext_json(doc: &Document) -> Option<i64> { let number_string = 
doc.get("$numberLong").and_then(Bson::as_str)?; number_string.parse::<i64>().ok() } fn get_int(value: &Bson) -> Option<i64> { bson_util::get_int(value).or_else(|| value.as_document().and_then(parse_i64_ext_json)) }
k)?, None => { if v != &Bson::Null { return Err(format!("{:?}: expected value {:?}, got null", k, v)); } } } } Ok(()) } } impl Matchable for Credential { fn content_matches(&self, expected: &Credential) -> Result<(), String> { self.username .content_matches(&expected.username)
random
[ { "content": "fn numbers_match(actual: &Bson, expected: &Bson) -> bool {\n\n if actual.element_type() == expected.element_type() {\n\n return actual == expected;\n\n }\n\n\n\n match (get_int(actual), get_int(expected)) {\n\n (Some(actual), Some(expected)) => actual == expected,\n\n ...
Rust
src/json_dsl/param.rs
swarkentin/valico
f6aed770ef3b0f1215636b29c696c2a69e29f92b
use serde::Serialize; use serde_json::{to_value, Value}; use super::super::json_schema; use super::builder; use super::coercers; use super::validators; pub struct Param { pub name: String, pub coercer: Option<Box<dyn coercers::Coercer + Send + Sync>>, pub nest: Option<builder::Builder>, pub description: Option<String>, pub allow_null: bool, pub validators: validators::Validators, pub default: Option<Value>, pub schema_builder: Option<Box<dyn Fn(&mut json_schema::Builder) + Send + Sync>>, pub schema_id: Option<url::Url>, } unsafe impl Send for Param {} impl Param { pub fn new(name: &str) -> Param { Param { name: name.to_string(), description: None, coercer: None, nest: None, allow_null: false, validators: vec![], default: None, schema_builder: None, schema_id: None, } } pub fn new_with_coercer( name: &str, coercer: Box<dyn coercers::Coercer + Send + Sync>, ) -> Param { Param { name: name.to_string(), description: None, coercer: Some(coercer), nest: None, allow_null: false, validators: vec![], default: None, schema_builder: None, schema_id: None, } } pub fn new_with_nest( name: &str, coercer: Box<dyn coercers::Coercer + Send + Sync>, nest: builder::Builder, ) -> Param { Param { name: name.to_string(), description: None, coercer: Some(coercer), nest: Some(nest), allow_null: false, validators: vec![], default: None, schema_builder: None, schema_id: None, } } pub fn build<F>(name: &str, build_def: F) -> Param where F: FnOnce(&mut Param), { let mut param = Param::new(name); build_def(&mut param); param } pub fn desc(&mut self, description: &str) { self.description = Some(description.to_string()); } pub fn schema_id(&mut self, id: url::Url) { self.schema_id = Some(id); } pub fn schema<F>(&mut self, build: F) where F: Fn(&mut json_schema::Builder) + 'static + Send + Sync, { self.schema_builder = Some(Box::new(build)); } pub fn coerce(&mut self, coercer: Box<dyn coercers::Coercer + Send + Sync>) { self.coercer = Some(coercer); } pub fn nest<F>(&mut self, nest_def: F) where 
F: FnOnce(&mut builder::Builder) -> (), { self.nest = Some(builder::Builder::build(nest_def)); } pub fn allow_null(&mut self) { self.allow_null = true; } pub fn regex(&mut self, regex: regex::Regex) { self.validators.push(Box::new(regex)); } pub fn validate(&mut self, validator: Box<dyn validators::Validator + 'static + Send + Sync>) { self.validators.push(validator); } pub fn validate_with<F>(&mut self, validator: F) where F: Fn(&Value, &str) -> super::validators::ValidatorResult + 'static + Send + Sync, { self.validators.push(Box::new(validator)); } fn process_validators(&self, val: &Value, path: &str) -> super::super::ValicoErrors { let mut errors = vec![]; for validator in self.validators.iter() { match validator.validate(val, path) { Ok(()) => (), Err(validation_errors) => errors.extend(validation_errors), } } errors } pub fn process( &self, val: &mut Value, path: &str, scope: Option<&json_schema::Scope>, ) -> super::ExtendedResult<Option<Value>> { if val.is_null() && self.allow_null { return super::ExtendedResult::new(None); } let mut result = super::ExtendedResult::new(None); let mut return_value = None; { let val = if self.coercer.is_some() { match self.coercer.as_ref().unwrap().coerce(val, path) { Ok(None) => val, Ok(Some(new_value)) => { return_value = Some(new_value); return_value.as_mut().unwrap() } Err(errors) => { result.state.errors.extend(errors); return result; } } } else { val }; if self.nest.is_some() { let process_state = self.nest.as_ref().unwrap().process_nest(val, path, scope); result.append(process_state); } let validation_errors = self.process_validators(val, path); result.state.errors.extend(validation_errors); if let Some(ref id) = self.schema_id { if let Some(scope) = scope { let schema = scope.resolve(id); match schema { Some(schema) => result.append(schema.validate_in(val, path)), None => result.state.missing.push(id.clone()), } } } } if return_value.is_some() { result.value = return_value; } result } } impl Param { pub fn 
allow_values<T: Serialize>(&mut self, values: &[T]) { self.validators .push(Box::new(validators::AllowedValues::new( values.iter().map(|v| to_value(v).unwrap()).collect(), ))); } pub fn reject_values<T: Serialize>(&mut self, values: &[T]) { self.validators .push(Box::new(validators::RejectedValues::new( values.iter().map(|v| to_value(v).unwrap()).collect(), ))); } pub fn default<T: Serialize>(&mut self, default: T) { self.default = Some(to_value(&default).unwrap()); } }
use serde::Serialize; use serde_json::{to_value, Value}; use super::super::json_schema; use super::builder; use super::coercers; use super::validators; pub struct Param { pub name: String, pub coercer: Option<Box<dyn coercers::Coercer + Send + Sync>>, pub nest: Option<builder::Builder>, pub description: Option<String>, pub allow_null: bool, pub validators: validators::Validators, pub default: Option<Value>, pub schema_builder: Option<Box<dyn Fn(&mut json_schema::Builder) + Send + Sync>>, pub schema_id: Option<url::Url>, } unsafe impl Send for Param {} impl Param {
pub fn new_with_coercer( name: &str, coercer: Box<dyn coercers::Coercer + Send + Sync>, ) -> Param { Param { name: name.to_string(), description: None, coercer: Some(coercer), nest: None, allow_null: false, validators: vec![], default: None, schema_builder: None, schema_id: None, } } pub fn new_with_nest( name: &str, coercer: Box<dyn coercers::Coercer + Send + Sync>, nest: builder::Builder, ) -> Param { Param { name: name.to_string(), description: None, coercer: Some(coercer), nest: Some(nest), allow_null: false, validators: vec![], default: None, schema_builder: None, schema_id: None, } } pub fn build<F>(name: &str, build_def: F) -> Param where F: FnOnce(&mut Param), { let mut param = Param::new(name); build_def(&mut param); param } pub fn desc(&mut self, description: &str) { self.description = Some(description.to_string()); } pub fn schema_id(&mut self, id: url::Url) { self.schema_id = Some(id); } pub fn schema<F>(&mut self, build: F) where F: Fn(&mut json_schema::Builder) + 'static + Send + Sync, { self.schema_builder = Some(Box::new(build)); } pub fn coerce(&mut self, coercer: Box<dyn coercers::Coercer + Send + Sync>) { self.coercer = Some(coercer); } pub fn nest<F>(&mut self, nest_def: F) where F: FnOnce(&mut builder::Builder) -> (), { self.nest = Some(builder::Builder::build(nest_def)); } pub fn allow_null(&mut self) { self.allow_null = true; } pub fn regex(&mut self, regex: regex::Regex) { self.validators.push(Box::new(regex)); } pub fn validate(&mut self, validator: Box<dyn validators::Validator + 'static + Send + Sync>) { self.validators.push(validator); } pub fn validate_with<F>(&mut self, validator: F) where F: Fn(&Value, &str) -> super::validators::ValidatorResult + 'static + Send + Sync, { self.validators.push(Box::new(validator)); } fn process_validators(&self, val: &Value, path: &str) -> super::super::ValicoErrors { let mut errors = vec![]; for validator in self.validators.iter() { match validator.validate(val, path) { Ok(()) => (), 
Err(validation_errors) => errors.extend(validation_errors), } } errors } pub fn process( &self, val: &mut Value, path: &str, scope: Option<&json_schema::Scope>, ) -> super::ExtendedResult<Option<Value>> { if val.is_null() && self.allow_null { return super::ExtendedResult::new(None); } let mut result = super::ExtendedResult::new(None); let mut return_value = None; { let val = if self.coercer.is_some() { match self.coercer.as_ref().unwrap().coerce(val, path) { Ok(None) => val, Ok(Some(new_value)) => { return_value = Some(new_value); return_value.as_mut().unwrap() } Err(errors) => { result.state.errors.extend(errors); return result; } } } else { val }; if self.nest.is_some() { let process_state = self.nest.as_ref().unwrap().process_nest(val, path, scope); result.append(process_state); } let validation_errors = self.process_validators(val, path); result.state.errors.extend(validation_errors); if let Some(ref id) = self.schema_id { if let Some(scope) = scope { let schema = scope.resolve(id); match schema { Some(schema) => result.append(schema.validate_in(val, path)), None => result.state.missing.push(id.clone()), } } } } if return_value.is_some() { result.value = return_value; } result } } impl Param { pub fn allow_values<T: Serialize>(&mut self, values: &[T]) { self.validators .push(Box::new(validators::AllowedValues::new( values.iter().map(|v| to_value(v).unwrap()).collect(), ))); } pub fn reject_values<T: Serialize>(&mut self, values: &[T]) { self.validators .push(Box::new(validators::RejectedValues::new( values.iter().map(|v| to_value(v).unwrap()).collect(), ))); } pub fn default<T: Serialize>(&mut self, default: T) { self.default = Some(to_value(&default).unwrap()); } }
pub fn new(name: &str) -> Param { Param { name: name.to_string(), description: None, coercer: None, nest: None, allow_null: false, validators: vec![], default: None, schema_builder: None, schema_id: None, } }
function_block-full_function
[ { "content": "pub fn string() -> Box<dyn coercers::Coercer + Send + Sync> {\n\n Box::new(coercers::StringCoercer)\n\n}\n", "file_path": "src/json_dsl/mod.rs", "rank": 0, "score": 279433.43672192114 }, { "content": "pub trait Coercer: Send + Sync {\n\n fn get_primitive_type(&self) -> Pr...
Rust
day7/src/main.rs
monkeydom/adventofcode-2020-rust
9b9105fabc51b9c793fae59528ac593f54524e11
#[allow(dead_code)] mod aoc; #[allow(dead_code)] mod file; use std::collections::HashMap; use std::collections::HashSet; use std::fmt; fn main() { aoc::preamble(); part2(); } #[derive(Debug)] struct BagContents { bt: BagType, contents: Vec<BagType>, } impl BagContents { fn from_strings(strings: &[&str]) -> Self { let bt = BagType::from_strings(&strings[..2]); let mut contents: Vec<BagType> = vec![]; for index in (4..strings.len()).step_by(4) { if let Ok(count) = strings[index].parse() { let bagtype = BagType::from_strings(&strings[index + 1..index + 3]); contents.extend((0..count).map(|_| bagtype.clone())); } } BagContents { bt, contents } } } #[derive(Debug, PartialEq, Eq, Clone, Hash)] struct BagType { attribute: String, color: String, } impl fmt::Display for BagType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[{} {}]", self.attribute, self.color) } } impl BagType { fn from_strings(strings: &[&str]) -> Self { BagType { attribute: strings[0].to_string(), color: strings[1].to_string(), } } } fn parse_rule(line: String) -> BagContents { let tokens: Vec<&str> = line.split(" ").collect(); let bag_contents = BagContents::from_strings(&tokens); println!("{}\n{:?}\n", line, &bag_contents); bag_contents } fn part1() { let contents: Vec<BagContents> = file::lines().map(|l| parse_rule(l)).collect(); let innermost_bag = BagType::from_strings(&["shiny", "gold"]); let mut hash_set: HashSet<BagType> = HashSet::new(); hash_set.insert(innermost_bag); loop { let mut collect_set: HashSet<BagType> = HashSet::new(); for bc in contents.iter() { for inner in bc.contents.iter() { if hash_set.contains(inner) { collect_set.insert(bc.bt.clone()); } } } for bt in &hash_set { collect_set.insert(bt.clone()); } if collect_set.len() > hash_set.len() { hash_set = collect_set; } else { break; } } let result = hash_set.len() - 1; aoc::print_solution1(format!("{}", result).as_str()); } fn part2() { let result = "None Yet"; let contents: Vec<BagContents> = 
file::lines().map(|l| parse_rule(l)).collect(); let mut containment_count = HashMap::new(); let gold_bag = BagType::from_strings(&["shiny", "gold"]); let mut cc = &mut containment_count; let mut update = |mcc: &mut HashMap<BagType, i64>, bt: &BagType, count: i64| { mcc.insert(bt.clone(), mcc.get(bt).unwrap_or(&0) + count); }; for bc in contents.iter() { if bc.contents.len() == 0 { println!("LEN 0 ! {:?}", bc); update(&mut cc, &bc.bt, 0); } } let mut iteration = 1; loop { let mut unknown_count = 0; let mut updated_some = false; for bc in contents.iter() { if let None = cc.get(&bc.bt) { if bc.contents.iter().all(|bt| cc.get(&bt).is_some()) { updated_some = true; let value = bc .contents .iter() .fold(0, |acc, v| acc + cc.get(v).unwrap() + 1); println!("{:?} -> updating with value {}", &bc.bt, value); update(cc, &bc.bt, value); } else { println!("unknown {:?}", &bc.bt); unknown_count += 1; } } } if !updated_some { break; } println!( "====== {} iteration [uk: {}] ======", iteration, unknown_count ); iteration += 1; } let result = containment_count.get(&gold_bag); aoc::print_solution2(format!("{:?} ", result).as_str()); } #[cfg(test)] mod tests { use super::*; const TEST_INPUT_1: &str = "light red bags contain 1 bright white bag, 2 muted yellow bags. dark orange bags contain 3 bright white bags, 4 muted yellow bags. bright white bags contain 1 shiny gold bag. muted yellow bags contain 2 shiny gold bags, 9 faded blue bags. shiny gold bags contain 1 dark olive bag, 2 vibrant plum bags. dark olive bags contain 3 faded blue bags, 4 dotted black bags. vibrant plum bags contain 5 faded blue bags, 6 dotted black bags. faded blue bags contain no other bags. dotted black bags contain no other bags." #[test] fn test_parse_lines() { } }
#[allow(dead_code)] mod aoc; #[allow(dead_code)] mod file; use std::collections::HashMap; use std::collections::HashSet; use std::fmt; fn main() { aoc::preamble(); part2(); } #[derive(Debug)] struct BagContents { bt: BagType, contents: Vec<BagType>, } impl BagContents { fn from_strings(strings: &[&str]) -> Self { let bt = BagType::from_strings(&strings[..2]); let mut contents: Vec<BagType> = vec![]; for index in (4..strings.len()).step_by(4) { if let Ok(count) = strings[index].parse() { let bagtype = BagType::from_strings(&strings[index + 1..index + 3]); contents.extend((0..count).map(|_| bagtype.clone())); } } BagContents { bt, contents } } } #[derive(Debug, PartialEq, Eq, Clone, Hash)] struct BagType { attribute: String, color: String, } impl fmt::Display for BagType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[{} {}]", self.attribute, s
(|bt| cc.get(&bt).is_some()) { updated_some = true; let value = bc .contents .iter() .fold(0, |acc, v| acc + cc.get(v).unwrap() + 1); println!("{:?} -> updating with value {}", &bc.bt, value); update(cc, &bc.bt, value); } else { println!("unknown {:?}", &bc.bt); unknown_count += 1; } } } if !updated_some { break; } println!( "====== {} iteration [uk: {}] ======", iteration, unknown_count ); iteration += 1; } let result = containment_count.get(&gold_bag); aoc::print_solution2(format!("{:?} ", result).as_str()); } #[cfg(test)] mod tests { use super::*; const TEST_INPUT_1: &str = "light red bags contain 1 bright white bag, 2 muted yellow bags. dark orange bags contain 3 bright white bags, 4 muted yellow bags. bright white bags contain 1 shiny gold bag. muted yellow bags contain 2 shiny gold bags, 9 faded blue bags. shiny gold bags contain 1 dark olive bag, 2 vibrant plum bags. dark olive bags contain 3 faded blue bags, 4 dotted black bags. vibrant plum bags contain 5 faded blue bags, 6 dotted black bags. faded blue bags contain no other bags. dotted black bags contain no other bags." #[test] fn test_parse_lines() { } }
elf.color) } } impl BagType { fn from_strings(strings: &[&str]) -> Self { BagType { attribute: strings[0].to_string(), color: strings[1].to_string(), } } } fn parse_rule(line: String) -> BagContents { let tokens: Vec<&str> = line.split(" ").collect(); let bag_contents = BagContents::from_strings(&tokens); println!("{}\n{:?}\n", line, &bag_contents); bag_contents } fn part1() { let contents: Vec<BagContents> = file::lines().map(|l| parse_rule(l)).collect(); let innermost_bag = BagType::from_strings(&["shiny", "gold"]); let mut hash_set: HashSet<BagType> = HashSet::new(); hash_set.insert(innermost_bag); loop { let mut collect_set: HashSet<BagType> = HashSet::new(); for bc in contents.iter() { for inner in bc.contents.iter() { if hash_set.contains(inner) { collect_set.insert(bc.bt.clone()); } } } for bt in &hash_set { collect_set.insert(bt.clone()); } if collect_set.len() > hash_set.len() { hash_set = collect_set; } else { break; } } let result = hash_set.len() - 1; aoc::print_solution1(format!("{}", result).as_str()); } fn part2() { let result = "None Yet"; let contents: Vec<BagContents> = file::lines().map(|l| parse_rule(l)).collect(); let mut containment_count = HashMap::new(); let gold_bag = BagType::from_strings(&["shiny", "gold"]); let mut cc = &mut containment_count; let mut update = |mcc: &mut HashMap<BagType, i64>, bt: &BagType, count: i64| { mcc.insert(bt.clone(), mcc.get(bt).unwrap_or(&0) + count); }; for bc in contents.iter() { if bc.contents.len() == 0 { println!("LEN 0 ! {:?}", bc); update(&mut cc, &bc.bt, 0); } } let mut iteration = 1; loop { let mut unknown_count = 0; let mut updated_some = false; for bc in contents.iter() { if let None = cc.get(&bc.bt) { if bc.contents.iter().all
random
[ { "content": "fn process_lines(lines: impl Iterator<Item = String>) -> (Vec<Field>, Vec<i64>, Vec<Vec<i64>>) {\n\n let mut my_ticket: Vec<i64> = vec![];\n\n let mut nearby_tickets: Vec<Vec<i64>> = vec![];\n\n let mut fields: Vec<Field> = vec![];\n\n let mut state = 0;\n\n for line in lines {\n\n ...
Rust
fortress/src/lib/enemies/state/enemy_state_machine.rs
j-rock/fortress
23b71bbd75afe75370b59e2117893f1023142c17
use crate::{ audio::{ AudioPlayer, Sound, }, dimensions::{ Attack, Reverse, time::{ DeltaTime, Microseconds, } }, enemies::{ DamageTextWriter, EnemySystemConfig, EnemyConfig, EnemyState, state::EnemyBody, }, items::{ ItemPickup, ItemSystem, ItemType, types::SkullType, }, particles::{ ParticleEvent, ParticleSystem, }, physics::PhysicsSimulation, render::{ LightDependentSpriteData, LightDependentSpriteRenderer, NamedSpriteSheet, SpriteSheetFrameId, }, }; use glm; use nalgebra::{ Point2, Vector2, }; pub enum EnemyStateMachine { Base(EnemyBody, Microseconds), Dying(Option<Point2<f64>>, Microseconds), Dead } impl EnemyStateMachine { pub fn new(body: EnemyBody) -> Self { Self::Base(body, 0) } pub fn pre_update(&mut self, config: &EnemyConfig, dt: DeltaTime, player_locs: &Vec<Point2<f64>>, enemy_state: &mut EnemyState) -> Option<Self> { match self { Self::Base(body, time_elapsed) => { *time_elapsed += dt.as_microseconds(); body.move_to_target(config, player_locs); if let Some(direction) = body.velocity() { enemy_state.set_facing_dir(direction); } }, Self::Dying(_, time_elapsed) => { *time_elapsed += dt.as_microseconds(); }, _ => {}, } None } pub fn take_attack(&self, config: &EnemySystemConfig, attack: Attack, bullet_direction: Option<Vector2<f64>>, enemy_state: &mut EnemyState, particles: &mut ParticleSystem, damage_text: &mut DamageTextWriter) { if let Self::Base(body, _) = self { let damage = attack.damage; enemy_state.take_attack(attack); if let Some(position) = body.position() { let blood_color = glm::vec3(config.enemy.blood_color.0, config.enemy.blood_color.1, config.enemy.blood_color.2); let blood_event = ParticleEvent::blood(position.clone(), blood_color, config.enemy.num_blood_particles_per_hit); particles.queue_event(blood_event); damage_text.add_damage(&config.damage_text, damage, position, bullet_direction); } } } pub fn post_update(&mut self, config: &EnemyConfig, audio: &AudioPlayer, enemy_state: &EnemyState, items: &mut ItemSystem, physics_sim: &mut 
PhysicsSimulation) -> Option<Self> { match self { Self::Base(body, _) if !enemy_state.health().alive() => { audio.play_sound(Sound::EnemyKilled); let position = body.position(); Some(Self::Dying(position, 0)) }, Self::Dying(position, time_elapsed) if *time_elapsed >= config.dying_duration_micros => { if let Some(position) = position { let item_pickup = ItemPickup::new(ItemType::Skull(SkullType::Regular), enemy_state.facing_dir()); items.spawn_item(item_pickup, position.clone(), physics_sim); } Some(Self::Dead) }, _ => None } } pub fn queue_draw(&self, config: &EnemyConfig, enemy_state: &EnemyState, sprite_renderer: &mut LightDependentSpriteRenderer) { let image_name = match self { Self::Dying(_, _) => String::from("enemy1_dying.png"), _ => String::from("enemy1.png") }; let frame = match self { Self::Base(_, time_elapsed) => (*time_elapsed / config.walk_frame_duration_micros) as usize, Self::Dying(_, time_elapsed) => (*time_elapsed / config.dying_frame_duration_micros) as usize, _ => 0, }; let reverse = if enemy_state.facing_dir().is_left() { Reverse::none() } else { Reverse::horizontally() }; if let Some(position) = self.position() { let world_half_size = glm::vec2(config.physical_radius as f32, config.physical_radius as f32) * config.render_scale; let world_center_position = glm::vec3(position.x as f32, world_half_size.y, -position.y as f32); sprite_renderer.queue(LightDependentSpriteData { world_center_position, world_half_size, sprite_frame_id: SpriteSheetFrameId::new(image_name, NamedSpriteSheet::SpriteSheet1), frame, unit_world_rotation: Vector2::new(0.0, 0.0), reverse, }); } } pub fn dead(&self) -> bool { match self { Self::Dead => true, _ => false, } } fn position(&self) -> Option<Point2<f64>> { match self { Self::Base(body, _) => body.position(), Self::Dying(position, _) => *position, _ => None } } }
use crate::{ audio::{ AudioPlayer, Sound, }, dimensions::{ Attack, Reverse, time::{ DeltaTime, Microseconds, } }, enemies::{ DamageTextWriter, EnemySystemConfig, EnemyConfig, EnemyState, state::EnemyBody, }, items::{ ItemPickup, ItemSystem, ItemType, types::SkullType, }, particles::{ ParticleEvent, ParticleSystem, }, physics::PhysicsSimulation, render::{ LightDependentSpriteData, LightDependentSpriteRenderer, NamedSpriteSheet, SpriteSheetFrameId, }, }; use glm; use nalgebra::{ Point2, Vector2, }; pub enum EnemyStateMachine { Base(EnemyBody, Microseconds), Dying(Option<Point2<f64>>, Microseconds), Dead } impl EnemyStateMachine { pub fn new(body: EnemyBody) -> Self { Self::Base(body, 0) } pub fn pre_update(&mut self, config: &EnemyConfig, dt: DeltaTime, player_locs: &Vec<Point2<f64>>, enemy_state: &mut EnemyState) -> Option<Self> { match self { Self::Base(body, time_elapsed) => { *time_elapsed += dt.as_microseconds(); body.move_to_target(config, player_locs); if let Some(direction) = body.velocity() { enemy_state.set_facing_dir(direction); } }, Self::Dying(_, time_elapsed) => { *time_elapsed += dt.as_microseconds(); }, _ => {}, } None } pub fn take_attack(&self, config: &EnemySystemConfig, attack: Attack, bullet_direction: Option<Vector2<f64>>, enemy_state: &mut EnemyState, particles: &mut ParticleSystem, damage_text: &mut DamageTextWriter) { if let Self::Base(body, _) = self { let damage = attack.damage; enemy_state.take_attack(attack); if let Some(position) = body.position() { let blood_color = glm::vec3(config.enemy.blood_color.0, config.enemy.blood_color.1, config.enemy.blood_color.2); let blood_event = ParticleEvent::blood(position.clone(), blood_color, config.enemy.num_blood_particles_per_hit); particles.queue_event(blood_event); damage_text.add_damage(&config.damage_text, damage, position, bullet_direction); } } }
let image_name = match self { Self::Dying(_, _) => String::from("enemy1_dying.png"), _ => String::from("enemy1.png") }; let frame = match self { Self::Base(_, time_elapsed) => (*time_elapsed / config.walk_frame_duration_micros) as usize, Self::Dying(_, time_elapsed) => (*time_elapsed / config.dying_frame_duration_micros) as usize, _ => 0, }; let reverse = if enemy_state.facing_dir().is_left() { Reverse::none() } else { Reverse::horizontally() }; if let Some(position) = self.position() { let world_half_size = glm::vec2(config.physical_radius as f32, config.physical_radius as f32) * config.render_scale; let world_center_position = glm::vec3(position.x as f32, world_half_size.y, -position.y as f32); sprite_renderer.queue(LightDependentSpriteData { world_center_position, world_half_size, sprite_frame_id: SpriteSheetFrameId::new(image_name, NamedSpriteSheet::SpriteSheet1), frame, unit_world_rotation: Vector2::new(0.0, 0.0), reverse, }); } } pub fn dead(&self) -> bool { match self { Self::Dead => true, _ => false, } } fn position(&self) -> Option<Point2<f64>> { match self { Self::Base(body, _) => body.position(), Self::Dying(position, _) => *position, _ => None } } }
pub fn post_update(&mut self, config: &EnemyConfig, audio: &AudioPlayer, enemy_state: &EnemyState, items: &mut ItemSystem, physics_sim: &mut PhysicsSimulation) -> Option<Self> { match self { Self::Base(body, _) if !enemy_state.health().alive() => { audio.play_sound(Sound::EnemyKilled); let position = body.position(); Some(Self::Dying(position, 0)) }, Self::Dying(position, time_elapsed) if *time_elapsed >= config.dying_duration_micros => { if let Some(position) = position { let item_pickup = ItemPickup::new(ItemType::Skull(SkullType::Regular), enemy_state.facing_dir()); items.spawn_item(item_pickup, position.clone(), physics_sim); } Some(Self::Dead) }, _ => None } } pub fn queue_draw(&self, config: &EnemyConfig, enemy_state: &EnemyState, sprite_renderer: &mut LightDependentSpriteRenderer) {
random
[ { "content": "pub fn milliseconds(t: i64) -> Microseconds {\n\n t * 1000\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct DeltaTime {\n\n microseconds_elapsed: Microseconds\n\n}\n\n\n\nimpl DeltaTime {\n\n fn duration_to_microseconds(duration: Duration) -> Microseconds {\n\n let nanos = Microsec...
Rust
examples/box_game/box_game_p2p.rs
johanhelsing/ggrs
6d1b7d6112692619a8638646cf5229a085bc3986
extern crate freetype as ft; use ggrs::{GGRSEvent, PlayerType, SessionState}; use glutin_window::GlutinWindow as Window; use opengl_graphics::{GlGraphics, OpenGL}; use piston::event_loop::{EventSettings, Events}; use piston::input::{RenderEvent, UpdateEvent}; use piston::window::WindowSettings; use piston::{Button, EventLoop, IdleEvent, Key, PressEvent, ReleaseEvent}; use std::net::SocketAddr; use structopt::StructOpt; const FPS: u64 = 60; const INPUT_SIZE: usize = std::mem::size_of::<u8>(); const WINDOW_HEIGHT: u32 = 800; const WINDOW_WIDTH: u32 = 600; mod box_game; #[derive(StructOpt)] struct Opt { #[structopt(short, long)] local_port: u16, #[structopt(short, long)] players: Vec<String>, #[structopt(short, long)] spectators: Vec<SocketAddr>, } fn main() -> Result<(), Box<dyn std::error::Error>> { let opt = Opt::from_args(); let mut local_handle = 0; let num_players = opt.players.len(); assert!(num_players > 0); let mut sess = ggrs::start_p2p_session(num_players as u32, INPUT_SIZE, opt.local_port)?; sess.set_sparse_saving(true)?; for (i, player_addr) in opt.players.iter().enumerate() { if player_addr == "localhost" { sess.add_player(PlayerType::Local, i)?; local_handle = i; } else { let remote_addr: SocketAddr = player_addr.parse()?; sess.add_player(PlayerType::Remote(remote_addr), i)?; } } for (i, spec_addr) in opt.spectators.iter().enumerate() { sess.add_player(PlayerType::Spectator(*spec_addr), num_players + i)?; } sess.set_frame_delay(4, local_handle)?; sess.set_fps(FPS as u32)?; sess.start_session()?; let opengl = OpenGL::V3_2; let mut window: Window = WindowSettings::new("Box Game", [WINDOW_WIDTH, WINDOW_HEIGHT]) .graphics_api(opengl) .exit_on_esc(true) .build() .unwrap(); let mut game = box_game::BoxGame::new(num_players); let mut gl = GlGraphics::new(opengl); let mut event_settings = EventSettings::new(); event_settings.set_ups(FPS); event_settings.set_max_fps(FPS); let mut events = Events::new(event_settings); let mut frames_to_skip = 0; while let Some(e) 
= events.next(&mut window) { if let Some(args) = e.render_args() { game.render(&mut gl, &args); } if let Some(_) = e.update_args() { if frames_to_skip > 0 { frames_to_skip -= 1; println!("Skipping a frame: WaitRecommendation"); continue; } if sess.current_state() == SessionState::Running { let local_input = game.local_input(0); match sess.advance_frame(local_handle, &local_input) { Ok(requests) => game.handle_requests(requests), Err(ggrs::GGRSError::PredictionThreshold) => { } Err(e) => return Err(Box::new(e)), } if game.current_frame() % 120 == 0 { for i in 0..num_players { if let Ok(stats) = sess.network_stats(i) { println!("NetworkStats to player {}: {:?}", i, stats); } } } } } if let Some(_args) = e.idle_args() { sess.poll_remote_clients(); for event in sess.events() { if let GGRSEvent::WaitRecommendation { skip_frames } = event { frames_to_skip += skip_frames } println!("Event: {:?}", event); } } if let Some(Button::Keyboard(key)) = e.press_args() { match key { Key::W => game.key_states[0] = true, Key::A => game.key_states[1] = true, Key::S => game.key_states[2] = true, Key::D => game.key_states[3] = true, _ => (), } } if let Some(Button::Keyboard(key)) = e.release_args() { match key { Key::W => game.key_states[0] = false, Key::A => game.key_states[1] = false, Key::S => game.key_states[2] = false, Key::D => game.key_states[3] = false, _ => (), } } } Ok(()) }
extern crate freetype as ft; use ggrs::{GGRSEvent, PlayerType, SessionState}; use glutin_window::GlutinWindow as Window; use opengl_graphics::{GlGraphics, OpenGL}; use piston::event_loop::{EventSettings, Events}; use piston::input::{RenderEvent, UpdateEvent}; use piston::window::WindowSettings; use piston::{Button, EventLoop, IdleEvent, Key, PressEvent, ReleaseEvent}; use std::net::SocketAddr; use structopt::StructOpt; const FPS: u64 = 60; const INPUT_SIZE: usize = std::mem::size_of::<u8>(); const WINDOW_HEIGHT: u32 = 800; const WINDOW_WIDTH: u32 = 600; mod box_game; #[derive(StructOpt)] struct Opt { #[structopt(short, long)] local_port: u16, #[structopt(short, long)] players: Vec<String>, #[structopt(short, long)] spectators: Vec<SocketAddr>, } fn main() -> Result<(), Box<dyn std::error::Error>> { let opt = Opt::from_args(); let mut local_handle = 0; let num_players = opt.players.len(); assert!(num_players > 0); let mut sess = ggrs::start_p2p_session(num_players as u32, INPUT_SIZE, opt.local_port)?; sess.set_sparse_saving(true)?; for (i, player_addr) in opt.players.iter().enumerate() { if player_addr == "localhost" { sess.add_player(PlayerType::Local, i)?; local_handle = i; } else { let remote_addr: SocketAddr = player_addr.parse()?; sess.add_player(PlayerType::Remote(remote_addr), i)?; } } for (i, spec_addr) in opt.spectators.iter().enumerate() { sess.add_player(PlayerType::Spectator(*spec_addr), num_players + i)?; } sess.set_frame_delay(4, local_handle)?; sess.set_fps(FPS as u32)?; sess.start_session()?; let opengl = OpenGL::V3_2; let mut window: Window = WindowSettings::new("Box Game", [WINDOW_WIDTH, WINDOW_HEIGHT]) .graphics_api(opengl) .exit_on_esc(true) .build() .unwrap(); let mut game = box_game::BoxGame::new(num_players); let mut gl = GlGraphics::new(opengl);
let mut event_settings = EventSettings::new(); event_settings.set_ups(FPS); event_settings.set_max_fps(FPS); let mut events = Events::new(event_settings); let mut frames_to_skip = 0; while let Some(e) = events.next(&mut window) { if let Some(args) = e.render_args() { game.render(&mut gl, &args); } if let Some(_) = e.update_args() { if frames_to_skip > 0 { frames_to_skip -= 1; println!("Skipping a frame: WaitRecommendation"); continue; } if sess.current_state() == SessionState::Running { let local_input = game.local_input(0); match sess.advance_frame(local_handle, &local_input) { Ok(requests) => game.handle_requests(requests), Err(ggrs::GGRSError::PredictionThreshold) => { } Err(e) => return Err(Box::new(e)), } if game.current_frame() % 120 == 0 { for i in 0..num_players { if let Ok(stats) = sess.network_stats(i) { println!("NetworkStats to player {}: {:?}", i, stats); } } } } } if let Some(_args) = e.idle_args() { sess.poll_remote_clients(); for event in sess.events() { if let GGRSEvent::WaitRecommendation { skip_frames } = event { frames_to_skip += skip_frames } println!("Event: {:?}", event); } } if let Some(Button::Keyboard(key)) = e.press_args() { match key { Key::W => game.key_states[0] = true, Key::A => game.key_states[1] = true, Key::S => game.key_states[2] = true, Key::D => game.key_states[3] = true, _ => (), } } if let Some(Button::Keyboard(key)) = e.release_args() { match key { Key::W => game.key_states[0] = false, Key::A => game.key_states[1] = false, Key::S => game.key_states[2] = false, Key::D => game.key_states[3] = false, _ => (), } } } Ok(()) }
function_block-function_prefix_line
[ { "content": "#[derive(StructOpt)]\n\nstruct Opt {\n\n #[structopt(short, long)]\n\n local_port: u16,\n\n #[structopt(short, long)]\n\n num_players: usize,\n\n #[structopt(short, long)]\n\n host: SocketAddr,\n\n}\n\n\n", "file_path": "examples/box_game/box_game_spectator.rs", "rank": 0...
Rust
src/tcp.rs
tearust/natsclient
d41211cb35e0a4fcec5fa0e45a07fbce494655e4
use crate::protocol::{ProtocolHandler, ProtocolMessage, ServerInfo}; use crate::ClientOptions; use crate::Result; use crossbeam_channel::{Receiver, Sender}; use nats_types::DeliveredMessage; use std::io::Read; use std::sync::{Arc, RwLock}; use std::thread; use std::{ io::{BufRead, BufReader, Write}, net::TcpStream, str::FromStr, }; #[derive(Clone)] pub(crate) struct TcpClient { connect_urls: Arc<RwLock<Vec<ServerInfo>>>, delivery_sender: Sender<DeliveredMessage>, write_sender: Sender<Vec<u8>>, write_receiver: Receiver<Vec<u8>>, opts: ClientOptions, connlatch: Sender<bool>, } impl TcpClient { pub fn new( connect_urls: Vec<ServerInfo>, delivery_sender: Sender<DeliveredMessage>, write_sender: Sender<Vec<u8>>, write_receiver: Receiver<Vec<u8>>, opts: ClientOptions, connlatch: Sender<bool>, ) -> TcpClient { TcpClient { connect_urls: Arc::new(RwLock::new(connect_urls)), delivery_sender, write_sender, write_receiver, opts, connlatch, } } pub fn connect(&self) -> Result<()> { let stream_reader = { let urls = self.connect_urls.read().unwrap(); Self::connect_to_host(&urls)? 
}; let mut stream_writer = stream_reader.try_clone()?; let mut buf_reader = BufReader::new(stream_reader); let ph = ProtocolHandler::new(self.opts.clone(), self.delivery_sender.clone()); let write_sender = self.write_sender.clone(); let write_receiver = self.write_receiver.clone(); let connlatch = self.connlatch.clone(); thread::spawn(move || { let mut line = String::new(); loop { match buf_reader.read_line(&mut line) { Ok(line_len) if line_len > 0 => { let pm = if line.starts_with("MSG") { let msgheader = nats_types::parse_msg_header(&line).unwrap(); let mut buffer = vec![0; msgheader.message_len]; buf_reader.read_exact(&mut buffer).unwrap(); buf_reader.read_line(&mut line).unwrap(); ProtocolMessage::Message(DeliveredMessage { reply_to: msgheader.reply_to, payload_size: msgheader.message_len, payload: buffer, subject: msgheader.subject, subscription_id: msgheader.sid, }) } else { ProtocolMessage::from_str(&line).unwrap() }; line.clear(); ph.handle_protocol_message(&pm, &write_sender).unwrap(); } Ok(_) => {} Err(e) => { error!("Error receiving data: {}", e); } } } }); thread::spawn(move || { loop { let vec = write_receiver.recv().unwrap(); match stream_writer.write_all(&vec) { Ok(_) => { trace!("SEND {} bytes", vec.len()); if starts_with(&vec, b"CONNECT") { connlatch.send(true).unwrap(); } } Err(e) => error!("Failed to write buffer: {}", e), }; } }); Ok(()) } fn connect_to_host(servers: &[ServerInfo]) -> Result<TcpStream> { for si in servers { debug!("Attempting to connect to {}:{}", si.host, si.port); let stream = TcpStream::connect((si.host.as_ref(), si.port)); match stream { Ok(s) => return Ok(s), Err(e) => { error!("Failed to establish TCP connection: {}", e); continue; } } } Err(err!(IOError, "Failed to establish TCP connection")) } } fn starts_with(haystack: &[u8], needle: &[u8]) -> bool { let pos = haystack .windows(needle.len()) .position(|window| window == needle); if let Some(p) = pos { p == 0 } else { false } }
use crate::protocol::{ProtocolHandler, ProtocolMessage, ServerInfo}; use crate::ClientOptions; use crate::Result; use crossbeam_channel::{Receiver, Sender}; use nats_types::DeliveredMessage; use std::io::Read; use std::sync::{Arc, RwLock}; use std::thread; use std::{ io::{BufRead, BufReader, Write}, net::TcpStream, str::FromStr, }; #[derive(Clone)] pub(crate) struct TcpClient { connect_urls: Arc<RwLock<Vec<ServerInfo>>>, delivery_sender: Sender<DeliveredMessage>, write_sender: Sender<Vec<u8>>, write_receiver: Receiver<Vec<u8>>, opts: ClientOptions, connlatch: Sender<bool>, } impl TcpClient { pub fn new( connect_urls: Vec<ServerInfo>, delivery_sender: Sender<DeliveredMessage>, write_sender: Sender<Vec<u8>>, write_receiver: Receiver<Vec<u8>>, opts: ClientOptions, connlatch: Sender<bool>, ) -> TcpClient { TcpClient { connect_urls: Arc::new(RwLock::new(connect_urls)), delivery_sender, write_sender, write_receiver, opts, connlatch, } } pub fn connect(&self) -> Result<()> { let stream_reader = { let urls = self.connect_urls.read().unwrap(); Self::connect_to_host(&urls)? }; let mut stream_writer = stream_reader.try_clone()?; let mut buf_reader = BufReader::new(stream_reader); let ph = ProtocolHandler::new(self.opts.clone(), self.delivery_sender.clone()); let write_sender = self.write_sender.clone(); let write_receiver = self.write_receiver.clone();
fn connect_to_host(servers: &[ServerInfo]) -> Result<TcpStream> { for si in servers { debug!("Attempting to connect to {}:{}", si.host, si.port); let stream = TcpStream::connect((si.host.as_ref(), si.port)); match stream { Ok(s) => return Ok(s), Err(e) => { error!("Failed to establish TCP connection: {}", e); continue; } } } Err(err!(IOError, "Failed to establish TCP connection")) } } fn starts_with(haystack: &[u8], needle: &[u8]) -> bool { let pos = haystack .windows(needle.len()) .position(|window| window == needle); if let Some(p) = pos { p == 0 } else { false } }
let connlatch = self.connlatch.clone(); thread::spawn(move || { let mut line = String::new(); loop { match buf_reader.read_line(&mut line) { Ok(line_len) if line_len > 0 => { let pm = if line.starts_with("MSG") { let msgheader = nats_types::parse_msg_header(&line).unwrap(); let mut buffer = vec![0; msgheader.message_len]; buf_reader.read_exact(&mut buffer).unwrap(); buf_reader.read_line(&mut line).unwrap(); ProtocolMessage::Message(DeliveredMessage { reply_to: msgheader.reply_to, payload_size: msgheader.message_len, payload: buffer, subject: msgheader.subject, subscription_id: msgheader.sid, }) } else { ProtocolMessage::from_str(&line).unwrap() }; line.clear(); ph.handle_protocol_message(&pm, &write_sender).unwrap(); } Ok(_) => {} Err(e) => { error!("Error receiving data: {}", e); } } } }); thread::spawn(move || { loop { let vec = write_receiver.recv().unwrap(); match stream_writer.write_all(&vec) { Ok(_) => { trace!("SEND {} bytes", vec.len()); if starts_with(&vec, b"CONNECT") { connlatch.send(true).unwrap(); } } Err(e) => error!("Failed to write buffer: {}", e), }; } }); Ok(()) }
function_block-function_prefix_line
[ { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n pretty_env_logger::init();\n\n\n\n info!(\"Starting market service...\");\n\n let jwt = \"eyJ0eXAiOiJqd3QiLCJhbGciOiJlZDI1NTE5In0.eyJqdGkiOiJBNDNRN1NLT0tCT0tYUDc1WVhMWjcyVDZKNDVIVzJKR0ZRWUJFQ1I2VE1FWEZFN1RKSjVBIiwiaWF0IjoxNTU0ODk2O...
Rust
src/mesh/mod.rs
LukasKalbertodt/cantucci
dea982b39d849de4a34082c4864b46d806eadd72
use cgmath::{prelude::*, Point3, Vector3}; use num_cpus; use std::{array::IntoIter, sync::mpsc::{channel, Receiver, Sender}}; use std::sync::Arc; use threadpool::ThreadPool; use crate::{ prelude::*, camera::Camera, octree::{Octree, SpanExt}, shape::Shape, util::iter, wgpu::DrawContext, }; mod buffer; mod view; use self::buffer::{MeshBuffer, Timings}; use self::view::MeshView; pub struct ShapeMesh { tree: Octree<MeshStatus, ()>, pipeline: wgpu::RenderPipeline, shape: Arc<dyn Shape>, thread_pool: ThreadPool, new_meshes: Receiver<(Point3<f32>, (MeshView, Timings))>, mesh_tx: Sender<(Point3<f32>, (MeshView, Timings))>, active_jobs: u64, batch_timings: Timings, finished_jobs: u64, } impl ShapeMesh { pub fn new( device: &wgpu::Device, out_format: wgpu::TextureFormat, shape: Arc<dyn Shape>, ) -> Result<Self> { let mut tree = Octree::spanning(shape.bounding_box()); let _ = tree.root_mut().split(None); for mut child in IntoIter::new(tree.root_mut().into_children().unwrap()) { child.split(None); } let (tx, rx) = channel(); let num_threads = num_cpus::get(); let pool = ThreadPool::new(num_threads); info!("Using {} threads to generate mesh", num_threads); let pipeline = view::create_pipeline(device, out_format); Ok(ShapeMesh { tree, pipeline, shape, thread_pool: pool, new_meshes: rx, mesh_tx: tx, active_jobs: 0, batch_timings: Timings::default(), finished_jobs: 0, }) } pub fn update(&mut self, device: Arc<wgpu::Device>, camera: &Camera) { const FOCUS_POINTS: u8 = 5; let focii = self.get_focii(camera, FOCUS_POINTS); for focus in focii { if let Some(mut leaf) = self.tree.leaf_around_mut(focus) { if let Some(MeshStatus::Ready(_)) = leaf.leaf_data().unwrap() { let dist = camera.position.distance(focus); let span = leaf.span(); let threshold = 2.0 * (span.end.x - span.start.x).abs(); if dist < threshold { leaf.split(None); } } } } let jobs_before = self.active_jobs; let finished_jobs_before = self.finished_jobs; for (center, (view, timings)) in self.new_meshes.try_iter() { 
self.active_jobs -= 1; self.finished_jobs += 1; self.batch_timings = self.batch_timings + timings; *self.tree .leaf_around_mut(center) .unwrap() .leaf_data_mut() .unwrap() = Some(MeshStatus::Ready(view)); } let empty_leaves = self.tree.iter_mut() .filter_map(|n| n.into_leaf()) .filter(|&(_, ref leaf_data)| leaf_data.is_none()); for (span, leaf_data) in empty_leaves { const RESOLUTION: u32 = 64; let tx = self.mesh_tx.clone(); let shape = self.shape.clone(); let device = device.clone(); self.thread_pool.execute(move || { let (buf, timings) = MeshBuffer::generate_for_box(&span, &*shape, RESOLUTION); let view = MeshView::new(&device, &buf.vertices, &buf.indices); let _ = tx.send((span.center(), (view, timings))); }); self.active_jobs += 1; let old_view = match leaf_data.take() { Some(MeshStatus::Ready(view)) => Some(view), _ => None, }; *leaf_data = Some(MeshStatus::Requested { old_view }); } if jobs_before != self.active_jobs { trace!("Currently active sample jobs: {}", self.active_jobs); } const PRINT_EVERY_FINISHED_JOBS: u64 = 64; if self.finished_jobs % PRINT_EVERY_FINISHED_JOBS == 0 && self.finished_jobs > 0 && finished_jobs_before != self.finished_jobs { debug!( "Finished {} new jobs in: {}", PRINT_EVERY_FINISHED_JOBS, self.batch_timings, ); self.batch_timings = Timings::default(); } } pub(crate) fn draw( &self, draw_ctx: DrawContext<'_>, camera: &Camera, ) { let it = self.tree.iter() .filter_map(|n| n.leaf_data().map(|data| (data, n.span()))); for (leaf_data, _span) in it { match leaf_data { &MeshStatus::Ready(ref view) | &MeshStatus::Requested { old_view: Some(ref view) } => { view.draw(draw_ctx, camera, &self.pipeline); } _ => (), } } } pub fn get_focii(&self, camera: &Camera, focus_points: u8) -> Vec<Point3<f32>> { const EPSILON: f32 = 0.000_001; const MAX_ITERS: u64 = 100; let (top_left, bottom_right) = camera.near_plane_bb(); let (frustum_width, frustum_height) = camera.projection.near_plane_dimension(); let size_horizontal = frustum_width / focus_points as 
f32; let size_vertical = frustum_height / focus_points as f32; let center_diff = (bottom_right - top_left) / (2.0 * focus_points as f32); let inv_view_trans = camera.inv_view_transform(); iter::square(focus_points as u32) .map(|(x, y)| { let center = top_left + Vector3::new( x as f32 * size_horizontal, y as f32 * size_vertical, 0.0, ) + center_diff; Point3::from_homogeneous( inv_view_trans * center.to_homogeneous() ) }) .filter_map(|p| { let mut pos = camera.position; let dir = (p - camera.position).normalize(); for _ in 0..MAX_ITERS { let distance = self.shape.min_distance_from(pos); pos += dir * distance; if distance < EPSILON { return Some(pos); } } None }) .collect() } } enum MeshStatus { Requested { old_view: Option<MeshView>, }, Ready(MeshView), } #[derive(Copy, Clone)] #[repr(C)] pub struct Vertex { position: [f32; 3], normal: [f32; 3], distance_from_surface: f32, } unsafe impl bytemuck::Pod for Vertex {} unsafe impl bytemuck::Zeroable for Vertex {}
use cgmath::{prelude::*, Point3, Vector3}; use num_cpus; use std::{array::IntoIter, sync::mpsc::{channel, Receiver, Sender}}; use std::sync::Arc; use threadpool::ThreadPool; use crate::{ prelude::*, camera::Camera, octree::{Octree, SpanExt}, shape::Shape, util::iter, wgpu::DrawContext, }; mod buffer; mod view; use self::buffer::{MeshBuffer, Timings}; use self::view::MeshView; pub struct ShapeMesh { tree: Octree<MeshStatus, ()>, pipeline: wgpu::RenderPipeline, shape: Arc<dyn Shape>, thread_pool: ThreadPool, new_meshes: Receiver<(Point3<f32>, (MeshView, Timings))>, mesh_tx: Sender<(Point3<f32>, (MeshView, Timings))>, active_jobs: u64, batch_timings: Timings, finished_jobs: u64, } impl ShapeMesh { pub fn new( device: &wgpu::Device, out_format: wgpu::TextureFormat, shape: Arc<dyn Shape>, ) -> Result<Self> { let mut tree = Octree::spanning(shape.bounding_box()); let _ = tree.root_mut().split(None); for mut child in IntoIter::new(tree.root_mut().into_children().unwrap()) { child.split(None); } let (tx, rx) = channel(); let num_threads = num_cpus::get(); let pool = ThreadPool::new(num_threads); info!("Using {} threads to generate mesh", num_threads); let pipeline = view::create_pipeline(device, out_format); Ok(ShapeMesh { tree, pipeline, shape, thread_pool: pool, new_meshes: rx, mesh_tx: tx, active_jobs: 0, batch_timings: Timings::default(), finished_jobs: 0, }) }
pub(crate) fn draw( &self, draw_ctx: DrawContext<'_>, camera: &Camera, ) { let it = self.tree.iter() .filter_map(|n| n.leaf_data().map(|data| (data, n.span()))); for (leaf_data, _span) in it { match leaf_data { &MeshStatus::Ready(ref view) | &MeshStatus::Requested { old_view: Some(ref view) } => { view.draw(draw_ctx, camera, &self.pipeline); } _ => (), } } } pub fn get_focii(&self, camera: &Camera, focus_points: u8) -> Vec<Point3<f32>> { const EPSILON: f32 = 0.000_001; const MAX_ITERS: u64 = 100; let (top_left, bottom_right) = camera.near_plane_bb(); let (frustum_width, frustum_height) = camera.projection.near_plane_dimension(); let size_horizontal = frustum_width / focus_points as f32; let size_vertical = frustum_height / focus_points as f32; let center_diff = (bottom_right - top_left) / (2.0 * focus_points as f32); let inv_view_trans = camera.inv_view_transform(); iter::square(focus_points as u32) .map(|(x, y)| { let center = top_left + Vector3::new( x as f32 * size_horizontal, y as f32 * size_vertical, 0.0, ) + center_diff; Point3::from_homogeneous( inv_view_trans * center.to_homogeneous() ) }) .filter_map(|p| { let mut pos = camera.position; let dir = (p - camera.position).normalize(); for _ in 0..MAX_ITERS { let distance = self.shape.min_distance_from(pos); pos += dir * distance; if distance < EPSILON { return Some(pos); } } None }) .collect() } } enum MeshStatus { Requested { old_view: Option<MeshView>, }, Ready(MeshView), } #[derive(Copy, Clone)] #[repr(C)] pub struct Vertex { position: [f32; 3], normal: [f32; 3], distance_from_surface: f32, } unsafe impl bytemuck::Pod for Vertex {} unsafe impl bytemuck::Zeroable for Vertex {}
pub fn update(&mut self, device: Arc<wgpu::Device>, camera: &Camera) { const FOCUS_POINTS: u8 = 5; let focii = self.get_focii(camera, FOCUS_POINTS); for focus in focii { if let Some(mut leaf) = self.tree.leaf_around_mut(focus) { if let Some(MeshStatus::Ready(_)) = leaf.leaf_data().unwrap() { let dist = camera.position.distance(focus); let span = leaf.span(); let threshold = 2.0 * (span.end.x - span.start.x).abs(); if dist < threshold { leaf.split(None); } } } } let jobs_before = self.active_jobs; let finished_jobs_before = self.finished_jobs; for (center, (view, timings)) in self.new_meshes.try_iter() { self.active_jobs -= 1; self.finished_jobs += 1; self.batch_timings = self.batch_timings + timings; *self.tree .leaf_around_mut(center) .unwrap() .leaf_data_mut() .unwrap() = Some(MeshStatus::Ready(view)); } let empty_leaves = self.tree.iter_mut() .filter_map(|n| n.into_leaf()) .filter(|&(_, ref leaf_data)| leaf_data.is_none()); for (span, leaf_data) in empty_leaves { const RESOLUTION: u32 = 64; let tx = self.mesh_tx.clone(); let shape = self.shape.clone(); let device = device.clone(); self.thread_pool.execute(move || { let (buf, timings) = MeshBuffer::generate_for_box(&span, &*shape, RESOLUTION); let view = MeshView::new(&device, &buf.vertices, &buf.indices); let _ = tx.send((span.center(), (view, timings))); }); self.active_jobs += 1; let old_view = match leaf_data.take() { Some(MeshStatus::Ready(view)) => Some(view), _ => None, }; *leaf_data = Some(MeshStatus::Requested { old_view }); } if jobs_before != self.active_jobs { trace!("Currently active sample jobs: {}", self.active_jobs); } const PRINT_EVERY_FINISHED_JOBS: u64 = 64; if self.finished_jobs % PRINT_EVERY_FINISHED_JOBS == 0 && self.finished_jobs > 0 && finished_jobs_before != self.finished_jobs { debug!( "Finished {} new jobs in: {}", PRINT_EVERY_FINISHED_JOBS, self.batch_timings, ); self.batch_timings = Timings::default(); } }
function_block-full_function
[ { "content": "/// Creates 8 equally sized children spans of a passed parent span. The spans are defined\n\n/// in a way that will be no gaps between them due to floating point precision errors.\n\npub fn create_spans(parent_span: Range<Point3<f32>>) -> [Range<Point3<f32>>; 8] {\n\n let start = parent_span.st...
Rust
crates/dkg-core/src/node.rs
kafeikui/BLS-DKG-Demo
7e3e46b10715d76e6dfcdf48b8628ccb2c1eb305
use super::{ board::BoardPublisher, primitives::{ phases::{Phase0, Phase1, Phase2, Phase3}, types::{BundledJustification, BundledResponses, BundledShares, DKGOutput}, DKGError, }, }; use async_trait::async_trait; use rand::RngCore; use thiserror::Error; use threshold_bls::group::Curve; #[derive(Debug, Error)] pub enum NodeError { #[error("Could not publish to board")] PublisherError, #[error("DKG Error: {0}")] DKGError(#[from] DKGError), } #[derive(Clone, Debug)] pub enum Phase2Result<C: Curve, P: Phase3<C>> { Output(DKGOutput<C>), GoToPhase3(P), } type NodeResult<T> = std::result::Result<T, NodeError>; #[async_trait(?Send)] pub trait DKGPhase<C: Curve, B: BoardPublisher<C>, T> { type Next; async fn run(self, board: &mut B, arg: T) -> NodeResult<Self::Next> where C: 'async_trait, T: 'async_trait; } #[async_trait(?Send)] impl<C, B, R, P> DKGPhase<C, B, &mut R> for P where C: Curve, B: BoardPublisher<C>, R: RngCore, P: Phase0<C>, { type Next = P::Next; async fn run(self, board: &mut B, rng: &'async_trait mut R) -> NodeResult<Self::Next> where C: 'async_trait, { let (next, shares) = self.encrypt_shares(rng)?; if let Some(sh) = shares { board .publish_shares(sh) .await .map_err(|_| NodeError::PublisherError)?; } Ok(next) } } #[async_trait(?Send)] impl<C, B, P> DKGPhase<C, B, &[BundledShares<C>]> for P where C: Curve, B: BoardPublisher<C>, P: Phase1<C>, { type Next = P::Next; async fn run( self, board: &mut B, shares: &'async_trait [BundledShares<C>], ) -> NodeResult<Self::Next> where C: 'async_trait, { let (next, bundle) = self.process_shares(shares, false)?; if let Some(bundle) = bundle { board .publish_responses(bundle) .await .map_err(|_| NodeError::PublisherError)?; } Ok(next) } } #[async_trait(?Send)] impl<C, B, P> DKGPhase<C, B, &[BundledResponses]> for P where C: Curve, B: BoardPublisher<C>, P: Phase2<C>, { type Next = Phase2Result<C, P::Next>; async fn run( self, board: &mut B, responses: &'async_trait [BundledResponses], ) -> NodeResult<Self::Next> where C: 
'async_trait, { match self.process_responses(responses) { Ok(output) => Ok(Phase2Result::Output(output)), Err(next) => { match next { Ok((next, justifications)) => { if let Some(justifications) = justifications { board .publish_justifications(justifications) .await .map_err(|_| NodeError::PublisherError)?; } Ok(Phase2Result::GoToPhase3(next)) } Err(e) => Err(NodeError::DKGError(e)), } } } } } #[async_trait(?Send)] impl<C, B, P> DKGPhase<C, B, &[BundledJustification<C>]> for P where C: Curve, B: BoardPublisher<C>, P: Phase3<C>, { type Next = DKGOutput<C>; async fn run( self, _: &mut B, responses: &'async_trait [BundledJustification<C>], ) -> NodeResult<Self::Next> where C: 'async_trait, { Ok(self.process_justifications(responses)?) } } #[cfg(test)] mod tests { use super::*; use crate::{ primitives::{ group::{Group, Node}, joint_feldman, }, test_helpers::InMemoryBoard, }; use threshold_bls::{ curve::bls12381::{self, PairingCurve as BLS12_381}, curve::zexe::{self as bls12_377, PairingCurve as BLS12_377}, poly::Idx, sig::{BlindThresholdScheme, G1Scheme, G2Scheme, Scheme, SignatureScheme, ThresholdScheme}, }; fn bad_phase0<C: Curve, R: RngCore, P: Phase0<C>>(phase0: P, rng: &mut R) -> P::Next { let (next, _) = phase0.encrypt_shares(rng).unwrap(); next } #[tokio::test] async fn dkg_sign_e2e() { let (t, n) = (3, 5); dkg_sign_e2e_curve::<bls12381::Curve, G1Scheme<BLS12_381>>(n, t).await; dkg_sign_e2e_curve::<bls12381::G2Curve, G2Scheme<BLS12_381>>(n, t).await; dkg_sign_e2e_curve::<bls12_377::G1Curve, G1Scheme<BLS12_377>>(n, t).await; dkg_sign_e2e_curve::<bls12_377::G2Curve, G2Scheme<BLS12_377>>(n, t).await; } async fn dkg_sign_e2e_curve<C, S>(n: usize, t: usize) where C: Curve, S: Scheme<Public = <C as Curve>::Point, Private = <C as Curve>::Scalar> + BlindThresholdScheme + ThresholdScheme + SignatureScheme, { let msg = rand::random::<[u8; 32]>().to_vec(); let outputs = run_dkg::<C, S>(n, t).await; let (token, blinded_msg) = S::blind_msg(&msg[..], &mut rand::thread_rng()); 
let partial_sigs = outputs .iter() .map(|output| S::sign_blind_partial(&output.share, &blinded_msg[..]).unwrap()) .collect::<Vec<_>>(); let blinded_sig = S::aggregate(t, &partial_sigs).unwrap(); let unblinded_sig = S::unblind_sig(&token, &blinded_sig).unwrap(); let pubkey = outputs[0].public.public_key(); S::verify(&pubkey, &msg, &unblinded_sig).unwrap(); } async fn run_dkg<C, S>(n: usize, t: usize) -> Vec<DKGOutput<C>> where C: Curve, S: Scheme<Public = <C as Curve>::Point, Private = <C as Curve>::Scalar>, { let rng = &mut rand::thread_rng(); let (mut board, phase0s) = setup::<C, S, _>(n, t, rng); let mut phase1s = Vec::new(); for phase0 in phase0s { phase1s.push(phase0.run(&mut board, rng).await.unwrap()); } let shares = board.shares.clone(); let mut phase2s = Vec::new(); for phase1 in phase1s { phase2s.push(phase1.run(&mut board, &shares).await.unwrap()); } let responses = board.responses.clone(); let mut results = Vec::new(); for phase2 in phase2s { results.push(phase2.run(&mut board, &responses).await.unwrap()); } let outputs = results .into_iter() .map(|res| match res { Phase2Result::Output(out) => out, Phase2Result::GoToPhase3(_) => unreachable!("should not get here"), }) .collect::<Vec<_>>(); assert!(is_all_same(outputs.iter().map(|output| &output.public))); outputs } #[tokio::test] async fn not_enough_validator_shares() { let (t, n) = (6, 10); let bad = t + 1; let honest = n - bad; let rng = &mut rand::thread_rng(); let (mut board, phase0s) = setup::<bls12_377::G1Curve, G1Scheme<BLS12_377>, _>(n, t, rng); let mut phase1s = Vec::new(); for (i, phase0) in phase0s.into_iter().enumerate() { let phase1 = if i < bad { bad_phase0(phase0, rng) } else { phase0.run(&mut board, rng).await.unwrap() }; phase1s.push(phase1); } let shares = board.shares.clone(); let mut errs = Vec::new(); for phase1 in phase1s { let err = match phase1.run(&mut board, &shares).await.unwrap_err() { NodeError::DKGError(err) => err, _ => panic!("should get dkg error"), }; errs.push(err); } 
for err in &errs[..bad] { match err { DKGError::NotEnoughValidShares(got, required) => { assert_eq!(*got, honest); assert_eq!(*required, t); } _ => panic!("should not get here"), }; } for err in &errs[bad..] { match err { DKGError::NotEnoughValidShares(got, required) => { assert_eq!(*got, honest - 1); assert_eq!(*required, t); } _ => panic!("should not get here"), }; } } #[tokio::test] async fn dkg_phase3() { let (t, n) = (5, 8); let bad = 2; let rng = &mut rand::thread_rng(); let (mut board, phase0s) = setup::<bls12_377::G1Curve, G1Scheme<BLS12_377>, _>(n, t, rng); let mut phase1s = Vec::new(); for (i, phase0) in phase0s.into_iter().enumerate() { let phase1 = if i < bad { bad_phase0(phase0, rng) } else { phase0.run(&mut board, rng).await.unwrap() }; phase1s.push(phase1); } let shares = board.shares.clone(); let mut phase2s = Vec::new(); for phase1 in phase1s { phase2s.push(phase1.run(&mut board, &shares).await.unwrap()); } let responses = board.responses.clone(); let mut results = Vec::new(); for phase2 in phase2s { results.push(phase2.run(&mut board, &responses).await.unwrap()); } let phase3s = results .into_iter() .map(|res| match res { Phase2Result::GoToPhase3(p3) => p3, _ => unreachable!("should not get here"), }) .collect::<Vec<_>>(); let justifications = board.justifs.clone(); let mut outputs = Vec::new(); for phase3 in phase3s { outputs.push(phase3.run(&mut board, &justifications).await.unwrap()); } assert!(is_all_same(outputs.iter().map(|output| &output.qual))); assert!(is_all_same( outputs[bad..].iter().map(|output| &output.public) )); let pubkey = &outputs[bad].public; for output in &outputs[..bad] { assert_ne!(&output.public, pubkey); } } fn setup<C, S, R: rand::RngCore>( n: usize, t: usize, rng: &mut R, ) -> (InMemoryBoard<C>, Vec<joint_feldman::DKG<C>>) where C: Curve, S: Scheme<Public = <C as Curve>::Point, Private = <C as Curve>::Scalar>, { let keypairs = (0..n).map(|_| S::keypair(rng)).collect::<Vec<_>>(); let nodes = keypairs .iter() .enumerate() 
.map(|(i, (_, public))| Node::<C>::new(i as Idx, public.clone())) .collect::<Vec<_>>(); let group = Group::new(nodes, t).unwrap(); let phase0s = keypairs .iter() .map(|(private, _)| joint_feldman::DKG::new(private.clone(), group.clone()).unwrap()) .collect::<Vec<_>>(); let board = InMemoryBoard::<C>::new(); (board, phase0s) } fn is_all_same<T: PartialEq>(mut arr: impl Iterator<Item = T>) -> bool { let first = arr.next().unwrap(); arr.all(|item| item == first) } }
use super::{ board::BoardPublisher, primitives::{ phases::{Phase0, Phase1, Phase2, Phase3}, types::{BundledJustification, BundledResponses, BundledShares, DKGOutput}, DKGError, }, }; use async_trait::async_trait; use rand::RngCore; use thiserror::Error; use threshold_bls::group::Curve; #[derive(Debug, Error)] pub enum NodeError { #[error("Could not publish to board")] PublisherError, #[error("DKG Error: {0}")] DKGError(#[from] DKGError), } #[derive(Clone, Debug)] pub enum Phase2Result<C: Curve, P: Phase3<C>> { Output(DKGOutput<C>), GoToPhase3(P), } type NodeResult<T> = std::result::Result<T, NodeError>; #[async_trait(?Send)] pub trait DKGPhase<C: Curve, B: BoardPublisher<C>, T> { type Next; async fn run(self, board: &mut B, arg: T) -> NodeResult<Self::Next> where C: 'async_trait, T: 'async_trait; } #[async_trait(?Send)] impl<C, B, R, P> DKGPhase<C, B, &mut R> for P where C: Curve, B: BoardPublisher<C>, R: RngCore, P: Phase0<C>, { type Next = P::Next; async fn run(self, board: &mut B, rng: &'async_trait mut R) -> NodeResult<Self::Next> where C: 'async_trait, { let (next, shares) = self.encrypt_shares(rng)?; if let Some(sh) = shares { board .publish_shares(sh) .await .map_err(|_| NodeError::PublisherError)?; } Ok(next) } } #[async_trait(?Send)] impl<C, B, P> DKGPhase<C, B, &[BundledShares<C>]> for P where C: Curve, B: BoardPublisher<C>, P: Phase1<C>, { type Next = P::Next; async fn run( self, board: &mut B, shares: &'async_trait [BundledShares<C>], ) -> NodeResult<Self::Next> where C: 'async_trait, { let (next, bundle) = self.process_shares(shares, false)?; if let Some(bundle) = bundle { board .publish_responses(bundle) .await .map_err(|_| NodeError::PublisherError)?; } Ok(next) } } #[async_trait(?Send)] impl<C, B, P> DKGPhase<C, B, &[BundledResponses]> for P where C: Curve, B: BoardPublisher<C>, P: Phase2<C>, { type Next = Phase2Result<C, P::Next>; async fn run( self, board: &mut B, responses: &'async_trait [BundledResponses], ) -> NodeResult<Self::Next> where C: 
'async_trait, { match self.process_responses(responses) { Ok(output) => Ok(Phase2Result::Output(output)), Err(next) => { match next { Ok((next, justifications)) => { if let Some(justifications) = justifications { board .publish_justifications(justifications) .await .map_err(|_| NodeError::PublisherError)?; } Ok(Phase2Result::GoToPhase3(next)) } Err(e) => Err(NodeError::DKGError(e)), } } } } } #[async_trait(?Send)] impl<C, B, P> DKGPhase<C, B, &[BundledJustification<C>]> for P where C: Curve, B: BoardPublisher<C>, P: Phase3<C>, { type Next = DKGOutput<C>; async fn run( self, _: &mut B, responses: &'async_trait [BundledJustification<C>], ) -> NodeResult<Self::Next> where C: 'async_trait, { Ok(self.process_justifications(responses)?) } } #[cfg(test)] mod tests { use super::*; use crate::{ primitives::{ group::{Group, Node}, joint_feldman, }, test_helpers::InMemoryBoard, }; use threshold_bls::{ curve::bls12381::{self, PairingCurve as BLS12_381}, curve::zexe::{self as bls12_377, PairingCurve as BLS12_377}, poly::Idx, sig::{BlindThresholdScheme, G1Scheme, G2Scheme, Scheme, SignatureScheme, ThresholdScheme}, }; fn bad_phase0<C: Curve, R: RngCore, P: Phase0<C>>(phase0: P, rng: &mut R) -> P::Next { let (next, _) = phase0.encrypt_shares(rng).unwrap(); next } #[tokio::test] async fn dkg_sign_e2e() { let (t, n) = (3, 5); dkg_sign_e2e_curve::<bls12381::Curve, G1Scheme<BLS12_381>>(n, t).await; dkg_sign_e2e_curve::<bls12381::G2Curve, G2Scheme<BLS12_381>>(n, t).await; dkg_sign_e2e_curve::<bls12_377::G1Curve, G1Scheme<BLS12_377>>(n, t).await; dkg_sign_e2e_curve::<bls12_377::G2Curve, G2Scheme<BLS12_377>>(n, t).await; } async fn dkg_sign_e2e_curve<C, S>(n: usize, t: usize) where C: Curve, S: Scheme<Public = <C as Curve>::Point, Private = <C as Curve>::Scalar> + BlindThresholdScheme + ThresholdScheme + SignatureScheme, { let msg = rand::random::<[u8; 32]>().to_vec(); let outputs = run_dkg::<C, S>(n, t).await; let (token, blinded_msg) = S::blind_msg(&msg[..], &mut rand::thread_rng()); 
let partial_sigs = outputs .iter() .map(|output| S::sign_blind_partial(&output.share, &blinded_msg[..]).unwrap()) .collect::<Vec<_>>(); let blinded_sig = S::aggregate(t, &partial_sigs).unwrap(); let unblinded_sig = S::unblind_sig(&token, &blinded_sig).unwrap(); let pubkey = outputs[0].public.public_key(); S::verify(&pubkey, &msg, &unblinded_sig).unwrap(); } async fn run_dkg<C, S>(n: usize, t: usize) -> Vec<DKGOutput<C>> where C: Curve, S: Scheme<Public = <C as Curve>::Point, Private = <C as Curve>::Scalar>, { let rng = &mut rand::thread_rng();
#[tokio::test] async fn not_enough_validator_shares() { let (t, n) = (6, 10); let bad = t + 1; let honest = n - bad; let rng = &mut rand::thread_rng(); let (mut board, phase0s) = setup::<bls12_377::G1Curve, G1Scheme<BLS12_377>, _>(n, t, rng); let mut phase1s = Vec::new(); for (i, phase0) in phase0s.into_iter().enumerate() { let phase1 = if i < bad { bad_phase0(phase0, rng) } else { phase0.run(&mut board, rng).await.unwrap() }; phase1s.push(phase1); } let shares = board.shares.clone(); let mut errs = Vec::new(); for phase1 in phase1s { let err = match phase1.run(&mut board, &shares).await.unwrap_err() { NodeError::DKGError(err) => err, _ => panic!("should get dkg error"), }; errs.push(err); } for err in &errs[..bad] { match err { DKGError::NotEnoughValidShares(got, required) => { assert_eq!(*got, honest); assert_eq!(*required, t); } _ => panic!("should not get here"), }; } for err in &errs[bad..] { match err { DKGError::NotEnoughValidShares(got, required) => { assert_eq!(*got, honest - 1); assert_eq!(*required, t); } _ => panic!("should not get here"), }; } } #[tokio::test] async fn dkg_phase3() { let (t, n) = (5, 8); let bad = 2; let rng = &mut rand::thread_rng(); let (mut board, phase0s) = setup::<bls12_377::G1Curve, G1Scheme<BLS12_377>, _>(n, t, rng); let mut phase1s = Vec::new(); for (i, phase0) in phase0s.into_iter().enumerate() { let phase1 = if i < bad { bad_phase0(phase0, rng) } else { phase0.run(&mut board, rng).await.unwrap() }; phase1s.push(phase1); } let shares = board.shares.clone(); let mut phase2s = Vec::new(); for phase1 in phase1s { phase2s.push(phase1.run(&mut board, &shares).await.unwrap()); } let responses = board.responses.clone(); let mut results = Vec::new(); for phase2 in phase2s { results.push(phase2.run(&mut board, &responses).await.unwrap()); } let phase3s = results .into_iter() .map(|res| match res { Phase2Result::GoToPhase3(p3) => p3, _ => unreachable!("should not get here"), }) .collect::<Vec<_>>(); let justifications = 
board.justifs.clone(); let mut outputs = Vec::new(); for phase3 in phase3s { outputs.push(phase3.run(&mut board, &justifications).await.unwrap()); } assert!(is_all_same(outputs.iter().map(|output| &output.qual))); assert!(is_all_same( outputs[bad..].iter().map(|output| &output.public) )); let pubkey = &outputs[bad].public; for output in &outputs[..bad] { assert_ne!(&output.public, pubkey); } } fn setup<C, S, R: rand::RngCore>( n: usize, t: usize, rng: &mut R, ) -> (InMemoryBoard<C>, Vec<joint_feldman::DKG<C>>) where C: Curve, S: Scheme<Public = <C as Curve>::Point, Private = <C as Curve>::Scalar>, { let keypairs = (0..n).map(|_| S::keypair(rng)).collect::<Vec<_>>(); let nodes = keypairs .iter() .enumerate() .map(|(i, (_, public))| Node::<C>::new(i as Idx, public.clone())) .collect::<Vec<_>>(); let group = Group::new(nodes, t).unwrap(); let phase0s = keypairs .iter() .map(|(private, _)| joint_feldman::DKG::new(private.clone(), group.clone()).unwrap()) .collect::<Vec<_>>(); let board = InMemoryBoard::<C>::new(); (board, phase0s) } fn is_all_same<T: PartialEq>(mut arr: impl Iterator<Item = T>) -> bool { let first = arr.next().unwrap(); arr.all(|item| item == first) } }
let (mut board, phase0s) = setup::<C, S, _>(n, t, rng); let mut phase1s = Vec::new(); for phase0 in phase0s { phase1s.push(phase0.run(&mut board, rng).await.unwrap()); } let shares = board.shares.clone(); let mut phase2s = Vec::new(); for phase1 in phase1s { phase2s.push(phase1.run(&mut board, &shares).await.unwrap()); } let responses = board.responses.clone(); let mut results = Vec::new(); for phase2 in phase2s { results.push(phase2.run(&mut board, &responses).await.unwrap()); } let outputs = results .into_iter() .map(|res| match res { Phase2Result::Output(out) => out, Phase2Result::GoToPhase3(_) => unreachable!("should not get here"), }) .collect::<Vec<_>>(); assert!(is_all_same(outputs.iter().map(|output| &output.public))); outputs }
function_block-function_prefix_line
[ { "content": "/// Creates the encrypted shares with the given secret polynomial to the given\n\n/// group.\n\npub fn create_share_bundle<C: Curve, R: RngCore>(\n\n dealer_idx: Idx,\n\n secret: &PrivatePoly<C>,\n\n public: &PublicPoly<C>,\n\n group: &Group<C>,\n\n rng: &mut R,\n\n) -> DKGResult<Bu...
Rust
pageserver/src/layered_repository/filename.rs
libzenith/zenith
4b3b19f4448f650b918230d972e2ec68815dcbdb
use crate::config::PageServerConf; use crate::layered_repository::storage_layer::SegmentTag; use crate::relish::*; use std::fmt; use std::path::PathBuf; use zenith_utils::lsn::Lsn; #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] pub struct DeltaFileName { pub seg: SegmentTag, pub start_lsn: Lsn, pub end_lsn: Lsn, pub dropped: bool, } impl DeltaFileName { pub fn parse_str(fname: &str) -> Option<Self> { let rel; let mut parts; if let Some(rest) = fname.strip_prefix("rel_") { parts = rest.split('_'); rel = RelishTag::Relation(RelTag { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, relnode: parts.next()?.parse::<u32>().ok()?, forknum: parts.next()?.parse::<u8>().ok()?, }); } else if let Some(rest) = fname.strip_prefix("pg_xact_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::Clog, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_members_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_offsets_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_filenodemap_") { parts = rest.split('_'); rel = RelishTag::FileNodeMap { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_twophase_") { parts = rest.split('_'); rel = RelishTag::TwoPhase { xid: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_control_checkpoint_") { parts = rest.split('_'); rel = RelishTag::Checkpoint; } else if let Some(rest) = fname.strip_prefix("pg_control_") { parts = rest.split('_'); rel = RelishTag::ControlFile; } else { return None; } let segno = 
parts.next()?.parse::<u32>().ok()?; let seg = SegmentTag { rel, segno }; let start_lsn = Lsn::from_hex(parts.next()?).ok()?; let end_lsn = Lsn::from_hex(parts.next()?).ok()?; let mut dropped = false; if let Some(suffix) = parts.next() { if suffix == "DROPPED" { dropped = true; } else { return None; } } if parts.next().is_some() { return None; } Some(DeltaFileName { seg, start_lsn, end_lsn, dropped, }) } } impl fmt::Display for DeltaFileName { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let basename = match self.seg.rel { RelishTag::Relation(reltag) => format!( "rel_{}_{}_{}_{}", reltag.spcnode, reltag.dbnode, reltag.relnode, reltag.forknum ), RelishTag::Slru { slru: SlruKind::Clog, segno, } => format!("pg_xact_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno, } => format!("pg_multixact_members_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno, } => format!("pg_multixact_offsets_{:04X}", segno), RelishTag::FileNodeMap { spcnode, dbnode } => { format!("pg_filenodemap_{}_{}", spcnode, dbnode) } RelishTag::TwoPhase { xid } => format!("pg_twophase_{}", xid), RelishTag::Checkpoint => "pg_control_checkpoint".to_string(), RelishTag::ControlFile => "pg_control".to_string(), }; write!( f, "{}_{}_{:016X}_{:016X}{}", basename, self.seg.segno, u64::from(self.start_lsn), u64::from(self.end_lsn), if self.dropped { "_DROPPED" } else { "" } ) } } #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] pub struct ImageFileName { pub seg: SegmentTag, pub lsn: Lsn, } impl ImageFileName { pub fn parse_str(fname: &str) -> Option<Self> { let rel; let mut parts; if let Some(rest) = fname.strip_prefix("rel_") { parts = rest.split('_'); rel = RelishTag::Relation(RelTag { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, relnode: parts.next()?.parse::<u32>().ok()?, forknum: parts.next()?.parse::<u8>().ok()?, }); } else if let Some(rest) = fname.strip_prefix("pg_xact_") { parts = 
rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::Clog, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_members_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_offsets_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_filenodemap_") { parts = rest.split('_'); rel = RelishTag::FileNodeMap { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_twophase_") { parts = rest.split('_'); rel = RelishTag::TwoPhase { xid: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_control_checkpoint_") { parts = rest.split('_'); rel = RelishTag::Checkpoint; } else if let Some(rest) = fname.strip_prefix("pg_control_") { parts = rest.split('_'); rel = RelishTag::ControlFile; } else { return None; } let segno = parts.next()?.parse::<u32>().ok()?; let seg = SegmentTag { rel, segno }; let lsn = Lsn::from_hex(parts.next()?).ok()?; if parts.next().is_some() { return None; } Some(ImageFileName { seg, lsn }) } } impl fmt::Display for ImageFileName { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let basename = match self.seg.rel { RelishTag::Relation(reltag) => format!( "rel_{}_{}_{}_{}", reltag.spcnode, reltag.dbnode, reltag.relnode, reltag.forknum ), RelishTag::Slru { slru: SlruKind::Clog, segno, } => format!("pg_xact_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno, } => format!("pg_multixact_members_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno, } => format!("pg_multixact_offsets_{:04X}", segno), RelishTag::FileNodeMap { spcnode, 
dbnode } => { format!("pg_filenodemap_{}_{}", spcnode, dbnode) } RelishTag::TwoPhase { xid } => format!("pg_twophase_{}", xid), RelishTag::Checkpoint => "pg_control_checkpoint".to_string(), RelishTag::ControlFile => "pg_control".to_string(), }; write!( f, "{}_{}_{:016X}", basename, self.seg.segno, u64::from(self.lsn), ) } } pub enum PathOrConf { Path(PathBuf), Conf(&'static PageServerConf), }
use crate::config::PageServerConf; use crate::layered_repository::storage_layer::SegmentTag; use crate::relish::*; use std::fmt; use std::path::PathBuf; use zenith_utils::lsn::Lsn; #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] pub struct DeltaFileName { pub seg: SegmentTag, pub start_lsn: Lsn, pub end_lsn: Lsn, pub dropped: bool, } impl DeltaFileName { pub fn parse_str(fname: &str) -> Option<Self> { let rel; let mut parts; if let Some(rest) = fname.strip_prefix("rel_") { parts = rest.split('_'); rel = RelishTag::Relation(RelTag { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, relnode: parts.next()?.parse::<u32>().ok()?, forknum: parts.next()?.parse::<u8>().ok()?, }); } else if let Some(rest) = fname.strip_prefix("pg_xact_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::Clog, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_members_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_offsets_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_filenodemap_") { parts = rest.split('_'); rel = RelishTag::FileNodeMap { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_twophase_") { parts = rest.split('_'); rel = RelishTag::TwoPhase { xid: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_control_checkpoint_") { parts = rest.split('_'); rel = RelishTag::Checkpoint; } else if let Some(rest) = fname.strip_prefix("pg_control_") { parts = rest.split('_'); rel = RelishTag::ControlFile; } else { return None; } let segno = 
parts.next()?.parse::<u32>().ok()?; let seg = SegmentTag { rel, segno }; let start_lsn = Lsn::from_hex(parts.next()?).ok()?; let end_lsn = Lsn::from_hex(parts.next()?).ok()?; let mut dropped = false; if let Some(suffix) = parts.next() { if suffix == "DROPPED" { dropped = true; } else { return None; } } if parts.next().is_some() { return None; } Some(DeltaFileName { seg, start_lsn, end_lsn, dropped, }) } } impl fmt::Display for DeltaFileName { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let basename = match self.seg.rel { RelishTag::Relation(reltag) => format!( "rel_{}_{}_{}_{}", reltag.spcnode, reltag.dbnode, reltag.relnode, reltag.forknum ), RelishTag::Slru { slru: SlruKind::Clog, segno, } => format!("pg_xact_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno, } => format!("pg_multixact_members_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno, } => format!("pg_multixact_offsets_{:04X}", segno), RelishTag::FileNodeMap { spcnode, dbnode } => { format!("pg_filenodemap_{}_{}", spcnode, dbnode) } RelishTag::TwoPhase { xid } => format!("pg_twophase_{}", xid), RelishTag::Checkpoint => "pg_control_checkpoint".to_string(), RelishTag::ControlFile => "pg_control".to_string(), }; write!( f, "{}_{}_{:016X}_{:016X}{}", basename, self.seg.segno, u64::from(self.start_lsn), u64::from(self.end_lsn), if self.dropped { "_DROPPED" } else { "" } ) } } #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] pub struct ImageFileName { pub seg: SegmentTag, pub lsn: Lsn, } impl ImageFileName { pub fn parse_str(fname: &str) -> Option<Self> { let rel; let mut parts; if let Some(rest) = fname.strip_prefix("rel_") { parts = rest.split('_'); rel = RelishTag::Relation(RelTag { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, relnode: parts.next()?.parse::<u32>().ok()?, forknum: parts.next()?.parse::<u8>().ok()?, }); } else if let Some(rest) = fname.strip_prefix("pg_xact_") { parts = 
rest.split('_'); re
} impl fmt::Display for ImageFileName { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let basename = match self.seg.rel { RelishTag::Relation(reltag) => format!( "rel_{}_{}_{}_{}", reltag.spcnode, reltag.dbnode, reltag.relnode, reltag.forknum ), RelishTag::Slru { slru: SlruKind::Clog, segno, } => format!("pg_xact_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno, } => format!("pg_multixact_members_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno, } => format!("pg_multixact_offsets_{:04X}", segno), RelishTag::FileNodeMap { spcnode, dbnode } => { format!("pg_filenodemap_{}_{}", spcnode, dbnode) } RelishTag::TwoPhase { xid } => format!("pg_twophase_{}", xid), RelishTag::Checkpoint => "pg_control_checkpoint".to_string(), RelishTag::ControlFile => "pg_control".to_string(), }; write!( f, "{}_{}_{:016X}", basename, self.seg.segno, u64::from(self.lsn), ) } } pub enum PathOrConf { Path(PathBuf), Conf(&'static PageServerConf), }
l = RelishTag::Slru { slru: SlruKind::Clog, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_members_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_offsets_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_filenodemap_") { parts = rest.split('_'); rel = RelishTag::FileNodeMap { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_twophase_") { parts = rest.split('_'); rel = RelishTag::TwoPhase { xid: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_control_checkpoint_") { parts = rest.split('_'); rel = RelishTag::Checkpoint; } else if let Some(rest) = fname.strip_prefix("pg_control_") { parts = rest.split('_'); rel = RelishTag::ControlFile; } else { return None; } let segno = parts.next()?.parse::<u32>().ok()?; let seg = SegmentTag { rel, segno }; let lsn = Lsn::from_hex(parts.next()?).ok()?; if parts.next().is_some() { return None; } Some(ImageFileName { seg, lsn }) }
function_block-function_prefixed
[ { "content": "fn check_slru_segno(rel: &RelishTag, expected_slru: SlruKind, expected_segno: u32) -> bool {\n\n if let RelishTag::Slru { slru, segno } = rel {\n\n *slru == expected_slru && *segno == expected_segno\n\n } else {\n\n false\n\n }\n\n}\n\n\n\n/// An error happened in WAL redo\n...
Rust
src/main.rs
dbrgn/galerio
1fe5984f36f8362aca36aa03541f73322fb943c8
use std::{ fs, io::{self, Write}, path::{Path, PathBuf}, time::Instant, }; use anyhow::{anyhow, Result}; use exif::{In as IdfNum, Reader as ExifReader, Tag as ExifTag, Value as ExifValue}; use image::{self, imageops::FilterType, GenericImageView, ImageFormat}; use lazy_static::lazy_static; use serde::Serialize; use structopt::StructOpt; use tera::Tera; const NAME: &str = "galerio"; const VERSION: &str = env!("CARGO_PKG_VERSION"); lazy_static! { static ref START_TIME: Instant = Instant::now(); } fn log(msg: &str) { let start_time = *START_TIME; let elapsed = Instant::now().duration_since(start_time).as_millis(); println!("[+{:>4}ms] {}", elapsed, msg); } macro_rules! log { ($($arg:tt)*) => { log(&format!($($arg)*)); } } #[derive(Debug, StructOpt)] #[structopt(name = NAME)] struct Args { #[structopt(parse(from_os_str))] input_dir: PathBuf, #[structopt(parse(from_os_str))] output_dir: PathBuf, title: String, #[structopt(short = "h", long = "height", default_value = "300")] thumbnail_height: u32, #[structopt(short = "l", long = "max-large-size")] max_large_size: Option<u32>, #[structopt(long = "no-download")] no_download: bool, #[structopt(long)] skip_processing: bool, } #[derive(Serialize)] struct Image { filename_full: String, filename_thumb: String, } #[derive(Serialize)] struct Context { title: String, galerio_version: &'static str, isodate: String, download_filename: Option<String>, images: Vec<Image>, } fn get_dimensions(image_path: impl AsRef<Path>) -> Result<(u32, u32)> { let img = image::open(image_path)?; Ok(img.dimensions()) } fn resize_image( image_path: impl AsRef<Path>, max_width: u32, max_height: u32, orientation: &Orientation, ) -> Result<Vec<u8>> { let img = image::open(image_path)?; let resized = match orientation { Orientation::Deg0 => img, Orientation::Deg90 => img.rotate270(), Orientation::Deg180 => img.rotate180(), Orientation::Deg270 => img.rotate90(), } .resize(max_width, max_height, FilterType::CatmullRom); let mut buf = Vec::new(); 
resized.write_to(&mut buf, ImageFormat::Jpeg)?; Ok(buf) } #[derive(Copy, Clone, Debug, PartialEq, Eq)] enum Orientation { Deg0, Deg90, Deg180, Deg270, } fn get_orientation(image_path: impl AsRef<Path>) -> Result<Orientation> { let file = fs::File::open(&image_path)?; let orientation = ExifReader::new() .read_from_container(&mut std::io::BufReader::new(&file))? .get_field(ExifTag::Orientation, IdfNum::PRIMARY) .map(|field| field.value.clone()) .and_then(|val: ExifValue| { if let ExifValue::Short(data) = val { data.get(0).cloned() } else { None } }) .map(|orientation| match orientation { 1 => Orientation::Deg0, 8 => Orientation::Deg90, 3 => Orientation::Deg180, 6 => Orientation::Deg270, _ => Orientation::Deg0, }); Ok(orientation.unwrap_or(Orientation::Deg0)) } fn main() -> Result<()> { let args = Args::from_args(); log!("Starting..."); if !args.input_dir.exists() { return Err(anyhow!("Input directory does not exist")); } if !args.input_dir.is_dir() { return Err(anyhow!("Input directory path is not a directory")); } if !args.output_dir.exists() { log!("Creating output directory {:?}", args.output_dir); fs::create_dir_all(&args.output_dir)?; } log!("Input dir: {:?}", args.input_dir); log!("Output dir: {:?}", args.output_dir); let mut image_files = fs::read_dir(&args.input_dir)? 
.filter_map(|res| res.ok()) .filter(|dir_entry| { dir_entry .file_type() .map(|ft| ft.is_file()) .unwrap_or(false) }) .filter(|dir_entry| { dir_entry .file_name() .to_str() .map(|s| s.ends_with(".jpg") || s.ends_with(".JPG")) .unwrap_or(false) }) .map(|dir_entry| dir_entry.path()) .collect::<Vec<_>>(); image_files.sort(); let download_filename = if args.no_download { None } else { let name: String = args .title .chars() .map(|c| if c == ' ' { '_' } else { c }) .filter(|c| c.is_ascii_alphanumeric() || *c == '-' || *c == '_' || *c == '.') .collect(); Some(format!("{}.zip", name)) }; let mut images = Vec::with_capacity(image_files.len()); let mut zipfile = download_filename .as_ref() .and_then(|filename| Some(fs::File::create(args.output_dir.join(filename)).unwrap())) .map(zip::ZipWriter::new); for f in &image_files { let filename_full = f.file_name().unwrap().to_str().unwrap().to_string(); let filename_thumb = format!( "{}.thumb.jpg", f.file_stem() .and_then(|stem| stem.to_str()) .ok_or_else(|| anyhow!("Could not determine file stem for file {:?}", f))?, ); if !args.skip_processing { log!("Processing {:?}", filename_full); let orientation = get_orientation(&f)?; let thumbnail_bytes = resize_image( &f, args.thumbnail_height * 4, args.thumbnail_height, &orientation, )?; let thumbnail_path = args.output_dir.join(&filename_thumb); fs::write(thumbnail_path, thumbnail_bytes)?; let full_path = args.output_dir.join(&filename_full); if let Some(max_size) = args.max_large_size { let (w, h) = get_dimensions(&f)?; if w > max_size || h > max_size { let large_bytes = resize_image(&f, max_size, max_size, &orientation)?; fs::write(&full_path, large_bytes)?; } else { fs::copy(&f, &full_path)?; } } else { fs::copy(&f, &full_path)?; } let options = zip::write::FileOptions::default() .compression_method(zip::CompressionMethod::Stored); if let Some(ref mut zipwriter) = zipfile { zipwriter.start_file(&filename_full, options)?; zipwriter.write(&fs::read(&full_path)?)?; } } 
images.push(Image { filename_full, filename_thumb, }); } let context = Context { title: args.title.clone(), galerio_version: VERSION, images, download_filename, isodate: chrono::Utc::now().to_rfc3339(), }; let tera = Tera::new("templates/**/*.html")?; let rendered = tera.render("index.html", &tera::Context::from_serialize(&context)?)?; log!("Writing index.html"); fs::write(args.output_dir.join("index.html"), rendered)?; log!("Writing static files"); fs::create_dir(args.output_dir.join("static")).or_else(|e| { if e.kind() == io::ErrorKind::AlreadyExists { Ok(()) } else { Err(e) } })?; fs::write( args.output_dir.join("static/simple-lightbox.min.js"), include_bytes!("../static/simple-lightbox.min.js"), )?; fs::write( args.output_dir.join("static/simple-lightbox.min.css"), include_bytes!("../static/simple-lightbox.min.css"), )?; log!("Done!"); Ok(()) }
use std::{ fs, io::{self, Write}, path::{Path, PathBuf}, time::Instant, }; use anyhow::{anyhow, Result}; use exif::{In as IdfNum, Reader as ExifReader, Tag as ExifTag, Value as ExifValue}; use image::{self, imageops::FilterType, GenericImageView, ImageFormat}; use lazy_static::lazy_static; use serde::Serialize; use structopt::StructOpt; use tera::Tera; const NAME: &str = "galerio"; const VERSION: &str = env!("CARGO_PKG_VERSION"); lazy_static! { static ref START_TIME: Instant = Instant::now(); } fn log(msg: &str) { let start_time = *START_TIME; let elapsed = Instant::now().duration_since(start_time).as_millis(); println!("[+{:>4}ms] {}", elapsed, msg); } macro_rules! log { ($($arg:tt)*) => { log(&format!($($arg)*)); } } #[derive(Debug, StructOpt)] #[structopt(name = NAME)] struct Args { #[structopt(parse(from_os_str))] input_dir: PathBuf, #[structopt(parse(from_os_str))] output_dir: PathBuf, title: String, #[structopt(short = "h", long = "height", default_value = "300")] thumbnail_height: u32, #[structopt(short = "l", long = "max-large-size")] max_large_size: Option<u32>, #[structopt(long = "no-download")] no_download: bool, #[structopt(long)] skip_processing: bool, } #[derive(Serialize)] struct Image { filename_full: String, filename_thumb: String, } #[derive(Serialize)] struct Context { title: String, galerio_version: &'static str, isodate: String, download_filename: Option<String>, images: Vec<Image>, } fn get_dimensions(image_path: impl AsRef<Path>) -> Result<(u32, u32)> { let img = image::open(image_path)?; Ok(img.dimensions()) } fn resize_image( image_path: impl AsRef<Path>, max_width: u32, max_height: u32, orientation: &Orientation, ) -> Result<Vec<u8>> { let img = image::open(image_path)?; let resized = match orientation { Orientation::Deg0 => img, Orientation::Deg90 => img.rotate270(), Orientation::Deg180 => img.rotate180(), Orientation::Deg270 => img.rotate90(), } .resize(max_width, max_height, FilterType::CatmullRom); let mut buf = Vec::new(); 
resized.write_to(&mut buf, ImageFormat::Jpeg)?; Ok(buf) } #[derive(Copy, Clone, Debug, PartialEq, Eq)] enum Orientation { Deg0, Deg90, Deg180, Deg270, } fn get_orientation(image_path: impl AsRef<Path>) -> Result<Orientation> { let file = fs::File::open(&image_path)?; let orientation = ExifReader::new() .read_from_container(&mut std::io::BufReader::new(&file))? .get_field(ExifTag::Orientation, IdfNum::PRIMARY) .map(|field| field.value.clone()) .and_then(|val: ExifValue| { if let ExifValue::Short(data) = val { data.get(0).cloned() } else { None } }) .map(|
let full_path = args.output_dir.join(&filename_full); if let Some(max_size) = args.max_large_size { let (w, h) = get_dimensions(&f)?; if w > max_size || h > max_size { let large_bytes = resize_image(&f, max_size, max_size, &orientation)?; fs::write(&full_path, large_bytes)?; } else { fs::copy(&f, &full_path)?; } } else { fs::copy(&f, &full_path)?; } let options = zip::write::FileOptions::default() .compression_method(zip::CompressionMethod::Stored); if let Some(ref mut zipwriter) = zipfile { zipwriter.start_file(&filename_full, options)?; zipwriter.write(&fs::read(&full_path)?)?; } } images.push(Image { filename_full, filename_thumb, }); } let context = Context { title: args.title.clone(), galerio_version: VERSION, images, download_filename, isodate: chrono::Utc::now().to_rfc3339(), }; let tera = Tera::new("templates/**/*.html")?; let rendered = tera.render("index.html", &tera::Context::from_serialize(&context)?)?; log!("Writing index.html"); fs::write(args.output_dir.join("index.html"), rendered)?; log!("Writing static files"); fs::create_dir(args.output_dir.join("static")).or_else(|e| { if e.kind() == io::ErrorKind::AlreadyExists { Ok(()) } else { Err(e) } })?; fs::write( args.output_dir.join("static/simple-lightbox.min.js"), include_bytes!("../static/simple-lightbox.min.js"), )?; fs::write( args.output_dir.join("static/simple-lightbox.min.css"), include_bytes!("../static/simple-lightbox.min.css"), )?; log!("Done!"); Ok(()) }
orientation| match orientation { 1 => Orientation::Deg0, 8 => Orientation::Deg90, 3 => Orientation::Deg180, 6 => Orientation::Deg270, _ => Orientation::Deg0, }); Ok(orientation.unwrap_or(Orientation::Deg0)) } fn main() -> Result<()> { let args = Args::from_args(); log!("Starting..."); if !args.input_dir.exists() { return Err(anyhow!("Input directory does not exist")); } if !args.input_dir.is_dir() { return Err(anyhow!("Input directory path is not a directory")); } if !args.output_dir.exists() { log!("Creating output directory {:?}", args.output_dir); fs::create_dir_all(&args.output_dir)?; } log!("Input dir: {:?}", args.input_dir); log!("Output dir: {:?}", args.output_dir); let mut image_files = fs::read_dir(&args.input_dir)? .filter_map(|res| res.ok()) .filter(|dir_entry| { dir_entry .file_type() .map(|ft| ft.is_file()) .unwrap_or(false) }) .filter(|dir_entry| { dir_entry .file_name() .to_str() .map(|s| s.ends_with(".jpg") || s.ends_with(".JPG")) .unwrap_or(false) }) .map(|dir_entry| dir_entry.path()) .collect::<Vec<_>>(); image_files.sort(); let download_filename = if args.no_download { None } else { let name: String = args .title .chars() .map(|c| if c == ' ' { '_' } else { c }) .filter(|c| c.is_ascii_alphanumeric() || *c == '-' || *c == '_' || *c == '.') .collect(); Some(format!("{}.zip", name)) }; let mut images = Vec::with_capacity(image_files.len()); let mut zipfile = download_filename .as_ref() .and_then(|filename| Some(fs::File::create(args.output_dir.join(filename)).unwrap())) .map(zip::ZipWriter::new); for f in &image_files { let filename_full = f.file_name().unwrap().to_str().unwrap().to_string(); let filename_thumb = format!( "{}.thumb.jpg", f.file_stem() .and_then(|stem| stem.to_str()) .ok_or_else(|| anyhow!("Could not determine file stem for file {:?}", f))?, ); if !args.skip_processing { log!("Processing {:?}", filename_full); let orientation = get_orientation(&f)?; let thumbnail_bytes = resize_image( &f, args.thumbnail_height * 4, 
args.thumbnail_height, &orientation, )?; let thumbnail_path = args.output_dir.join(&filename_thumb); fs::write(thumbnail_path, thumbnail_bytes)?;
random
[ { "content": "# Galerio\n\n\n\nGalerio is a simple generator for HTML flexbox galleries written in Rust. From\n\na directory with JPEG files, it generates a self-contained gallery without\n\nexternal dependencies.\n\n\n\n## Features\n\n\n\n- Simple CSS3/Flexbox based gallery\n\n- Touch friendly lightbox for vie...
Rust
oxidizer-entity-macro/src/utils.rs
TylerLafayette/oxidizer
c59ce9a50243f0eb35203d2d72fbc9ff36cb5afd
use proc_macro2::TokenStream; use quote::{quote, quote_spanned}; use syn::{ spanned::Spanned, AngleBracketedGenericArguments, Field, GenericArgument, Meta, Path, PathArguments, PathSegment, Type, TypePath, }; pub fn iterate_angle_bracketed( ab: &AngleBracketedGenericArguments, expected: &Vec<String>, index: usize, ) -> bool { let index = index; if expected.len() == index { return true; } for arg in &ab.args { let res = match arg { GenericArgument::Type(Type::Path(tp)) => check_type_order(tp, expected, index), _ => unimplemented!(), }; if res { return true; } } false } pub fn iterate_path_arguments(seg: &PathSegment, expected: &Vec<String>, index: usize) -> bool { let mut index = index; if expected.len() == index { return true; } if seg.ident.to_string() == expected[index] { index += 1; } if expected.len() == index { return true; } match &seg.arguments { PathArguments::AngleBracketed(angle) => iterate_angle_bracketed(angle, expected, index), PathArguments::Parenthesized(_paren) => unimplemented!(), PathArguments::None => expected.len() == index, } } pub fn iterate_path_segments(p: &Path, expected: &Vec<String>, index: usize) -> bool { let index = index; if expected.len() == index { return true; } for seg in p.segments.iter() { if iterate_path_arguments(seg, &expected, index) { return true; } } expected.len() == index } pub fn check_type_order(p: &TypePath, expected: &Vec<String>, index: usize) -> bool { let mut index = index; if expected.len() == index { return true; } if let Some(ident) = p.path.get_ident() { if ident.to_string() == expected[0] { index += 1; } } iterate_path_segments(&p.path, expected, index) } pub fn is_typed_with(segment: &PathSegment, expected: Vec<&str>) -> bool { let expected = expected.iter().map(|v| v.to_string()).collect(); iterate_path_arguments(segment, &expected, 0) } pub fn is_chrono_option(segment: &PathSegment) -> bool { let expected: Vec<&str> = vec!["Option", "DateTime", "Utc"]; let no_option_expected: Vec<&str> = vec!["DateTime", 
"Utc"]; is_typed_with(segment, expected) || is_typed_with(segment, no_option_expected) } pub fn search_attr_in_field(field: &Field, attr: &str) -> bool { for option in (&field.attrs).into_iter() { let option = option.parse_meta().unwrap(); match option { Meta::Path(path) if path.get_ident().unwrap().to_string() == attr => { return true; } _ => {} } } return false; } pub fn type_to_db_type(ty: &Type) -> TokenStream { let segments = match ty { syn::Type::Path(TypePath { path: Path { segments, .. }, .. }) => segments, _ => unimplemented!(), }; match segments.first().unwrap() { PathSegment { ident, .. } if ident.to_string() == "String" => { quote! { oxidizer::types::text() } } segment if is_typed_with(segment, vec!["Option", "String"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "i8" => { quote! { oxidizer::types::custom("char") } } segment if is_typed_with(segment, vec!["Option", "i8"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "i16" => { quote! { oxidizer::types::custom("SMALLINT") } } segment if is_typed_with(segment, vec!["Option", "i16"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "i32" => { quote! { oxidizer::types::integer() } } segment if is_typed_with(segment, vec!["Option", "i32"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "u32" => { quote! { oxidizer::types::custom("OID") } } segment if is_typed_with(segment, vec!["Option", "u32"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "i64" => { quote! { oxidizer::types::custom("BIGINT") } } segment if is_typed_with(segment, vec!["Option", "i64"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "f32" => { quote! { oxidizer::types::custom("REAL") } } segment if is_typed_with(segment, vec!["Option", "f32"]) => { quote! 
{ oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "f64" => { quote! { oxidizer::types::custom("DOUBLE PRECISION") } } segment if is_typed_with(segment, vec!["Option", "f64"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "bool" => { quote! { oxidizer::types::boolean() } } segment if is_typed_with(segment, vec!["Option", "bool"]) => { quote! { oxidizer::types::text() } } segment if is_chrono_option(segment) => { quote! { oxidizer::types::custom("timestamp with time zone") } } _ => quote_spanned! { ty.span() => compile_error!("Invalid type") }, } }
use proc_macro2::TokenStream; use quote::{quote, quote_spanned}; use syn::{ spanned::Spanned, AngleBracketedGenericArguments, Field, GenericArgument, Meta, Path, PathArguments, PathSegment, Type, TypePath, }; pub fn iterate_angle_bracketed( ab: &AngleBracketedGenericArguments, expected: &Vec<String>, index: usize, ) -> bool { let index = index; if expected.len() == index { return true; } for arg in &ab.args { let res = match arg { GenericArgument::Type(Type::Path(tp
egments.iter() { if iterate_path_arguments(seg, &expected, index) { return true; } } expected.len() == index } pub fn check_type_order(p: &TypePath, expected: &Vec<String>, index: usize) -> bool { let mut index = index; if expected.len() == index { return true; } if let Some(ident) = p.path.get_ident() { if ident.to_string() == expected[0] { index += 1; } } iterate_path_segments(&p.path, expected, index) } pub fn is_typed_with(segment: &PathSegment, expected: Vec<&str>) -> bool { let expected = expected.iter().map(|v| v.to_string()).collect(); iterate_path_arguments(segment, &expected, 0) } pub fn is_chrono_option(segment: &PathSegment) -> bool { let expected: Vec<&str> = vec!["Option", "DateTime", "Utc"]; let no_option_expected: Vec<&str> = vec!["DateTime", "Utc"]; is_typed_with(segment, expected) || is_typed_with(segment, no_option_expected) } pub fn search_attr_in_field(field: &Field, attr: &str) -> bool { for option in (&field.attrs).into_iter() { let option = option.parse_meta().unwrap(); match option { Meta::Path(path) if path.get_ident().unwrap().to_string() == attr => { return true; } _ => {} } } return false; } pub fn type_to_db_type(ty: &Type) -> TokenStream { let segments = match ty { syn::Type::Path(TypePath { path: Path { segments, .. }, .. }) => segments, _ => unimplemented!(), }; match segments.first().unwrap() { PathSegment { ident, .. } if ident.to_string() == "String" => { quote! { oxidizer::types::text() } } segment if is_typed_with(segment, vec!["Option", "String"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "i8" => { quote! { oxidizer::types::custom("char") } } segment if is_typed_with(segment, vec!["Option", "i8"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "i16" => { quote! { oxidizer::types::custom("SMALLINT") } } segment if is_typed_with(segment, vec!["Option", "i16"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. 
} if ident.to_string() == "i32" => { quote! { oxidizer::types::integer() } } segment if is_typed_with(segment, vec!["Option", "i32"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "u32" => { quote! { oxidizer::types::custom("OID") } } segment if is_typed_with(segment, vec!["Option", "u32"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "i64" => { quote! { oxidizer::types::custom("BIGINT") } } segment if is_typed_with(segment, vec!["Option", "i64"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "f32" => { quote! { oxidizer::types::custom("REAL") } } segment if is_typed_with(segment, vec!["Option", "f32"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "f64" => { quote! { oxidizer::types::custom("DOUBLE PRECISION") } } segment if is_typed_with(segment, vec!["Option", "f64"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "bool" => { quote! { oxidizer::types::boolean() } } segment if is_typed_with(segment, vec!["Option", "bool"]) => { quote! { oxidizer::types::text() } } segment if is_chrono_option(segment) => { quote! { oxidizer::types::custom("timestamp with time zone") } } _ => quote_spanned! { ty.span() => compile_error!("Invalid type") }, } }
)) => check_type_order(tp, expected, index), _ => unimplemented!(), }; if res { return true; } } false } pub fn iterate_path_arguments(seg: &PathSegment, expected: &Vec<String>, index: usize) -> bool { let mut index = index; if expected.len() == index { return true; } if seg.ident.to_string() == expected[index] { index += 1; } if expected.len() == index { return true; } match &seg.arguments { PathArguments::AngleBracketed(angle) => iterate_angle_bracketed(angle, expected, index), PathArguments::Parenthesized(_paren) => unimplemented!(), PathArguments::None => expected.len() == index, } } pub fn iterate_path_segments(p: &Path, expected: &Vec<String>, index: usize) -> bool { let index = index; if expected.len() == index { return true; } for seg in p.s
random
[ { "content": "type GetFieldsIter<'a> = std::iter::Filter<syn::punctuated::Iter<'a, Field>, fn(&&Field) -> bool>;\n\n\n\nimpl Props {\n\n pub fn new(\n\n input: DeriveInput,\n\n attrs: Option<EntityAttr>,\n\n indexes: Vec<IndexAttr>,\n\n has_many_attrs: Vec<HasManyAttr>,\n\n ) -...
Rust
src/day5.rs
mathstar/adventOfCode2021
19e843ebf1f0e2abbee5c4502b39bfdab5755a1e
use std::cmp::{max, min}; use std::collections::HashMap; use crate::day::Day; pub struct Day5 {} #[derive(Debug)] struct Line { start: (i32, i32), end: (i32, i32) } enum AxialClassification { X, Y, NonAxial } impl Line { fn axial_classification(&self) -> AxialClassification { if self.start.0 == self.end.0 { AxialClassification::X } else if self.start.1 == self.end.1 { AxialClassification::Y } else { AxialClassification::NonAxial } } } fn parse_input(input: &str) -> Vec<Line> { let mut lines = Vec::new(); for line in input.lines() { let mut split = line.split(" -> "); let mut p_split = split.next().unwrap().split(","); let start = (p_split.next().unwrap().parse().unwrap(), p_split.next().unwrap().parse().unwrap()); p_split = split.next().unwrap().split(","); let end = (p_split.next().unwrap().parse().unwrap(), p_split.next().unwrap().parse().unwrap()); lines.push(Line {start, end}); } lines } fn increment_grid_position(grid: &mut HashMap<i32, HashMap<i32, i32>>, overlap: &mut i32, x: i32, y: i32) { match grid.get_mut(&x) { Some(r) => { match r.get_mut(&y) { Some(existing) if *existing == 1 => { *overlap += 1; *existing += 1; } Some(existing) => { *existing += 1; } None => { r.insert(y, 1); } } }, None => { let mut m = HashMap::new(); m.insert(y, 1); grid.insert(x, m); } } } fn compute_overlap(lines: &Vec<Line>, consider_diagonal: bool) -> i32 { let mut grid : HashMap<i32, HashMap<i32, i32>> = HashMap::new(); let mut overlap = 0; for line in lines { match line.axial_classification() { AxialClassification::X => { let y_start = min(line.start.1, line.end.1); let y_end = max(line.start.1, line.end.1) + 1; for y in y_start .. 
y_end { increment_grid_position(&mut grid, &mut overlap, line.start.0, y); } }, AxialClassification::Y => { let x_start = min(line.start.0, line.end.0); let x_end = max(line.start.0, line.end.0) + 1; for x in x_start..x_end { increment_grid_position(&mut grid, &mut overlap, x, line.start.1); } }, AxialClassification::NonAxial => { if consider_diagonal { let x_start = line.start.0; let y_start = line.start.1; let x_sign = if line.end.0 - line.start.0 > 0 {1} else {-1}; let y_sign = if line.end.1 - line.start.1 > 0 {1} else {-1}; let length = (line.start.0 - line.end.0).abs() + 1; for inc in 0..length { let x = x_start + inc * x_sign; let y = y_start + inc * y_sign; increment_grid_position(&mut grid, &mut overlap, x, y); } } } } } overlap } impl Day for Day5 { fn part1(&self, input: &str) -> String { let lines = parse_input(input); compute_overlap(&lines, false).to_string() } fn part2(&self, input: &str) -> String { let lines = parse_input(input); compute_overlap(&lines, true).to_string() } } #[cfg(test)] mod tests { use super::*; #[test] fn part1_test1() { assert_eq!(Day5{}.part1("0,9 -> 5,9 8,0 -> 0,8 9,4 -> 3,4 2,2 -> 2,1 7,0 -> 7,4 6,4 -> 2,0 0,9 -> 2,9 3,4 -> 1,4 0,0 -> 8,8 5,5 -> 8,2"), "5"); } #[test] fn part2_test1() { assert_eq!(Day5{}.part2("0,9 -> 5,9 8,0 -> 0,8 9,4 -> 3,4 2,2 -> 2,1 7,0 -> 7,4 6,4 -> 2,0 0,9 -> 2,9 3,4 -> 1,4 0,0 -> 8,8 5,5 -> 8,2"), "12"); } }
use std::cmp::{max, min}; use std::collections::HashMap; use crate::day::Day; pub struct Day5 {} #[derive(Debug)] struct Line { start: (i32, i32), end: (i32, i32) } enum AxialClassification { X, Y, NonAxial } impl Line {
} fn parse_input(input: &str) -> Vec<Line> { let mut lines = Vec::new(); for line in input.lines() { let mut split = line.split(" -> "); let mut p_split = split.next().unwrap().split(","); let start = (p_split.next().unwrap().parse().unwrap(), p_split.next().unwrap().parse().unwrap()); p_split = split.next().unwrap().split(","); let end = (p_split.next().unwrap().parse().unwrap(), p_split.next().unwrap().parse().unwrap()); lines.push(Line {start, end}); } lines } fn increment_grid_position(grid: &mut HashMap<i32, HashMap<i32, i32>>, overlap: &mut i32, x: i32, y: i32) { match grid.get_mut(&x) { Some(r) => { match r.get_mut(&y) { Some(existing) if *existing == 1 => { *overlap += 1; *existing += 1; } Some(existing) => { *existing += 1; } None => { r.insert(y, 1); } } }, None => { let mut m = HashMap::new(); m.insert(y, 1); grid.insert(x, m); } } } fn compute_overlap(lines: &Vec<Line>, consider_diagonal: bool) -> i32 { let mut grid : HashMap<i32, HashMap<i32, i32>> = HashMap::new(); let mut overlap = 0; for line in lines { match line.axial_classification() { AxialClassification::X => { let y_start = min(line.start.1, line.end.1); let y_end = max(line.start.1, line.end.1) + 1; for y in y_start .. 
y_end { increment_grid_position(&mut grid, &mut overlap, line.start.0, y); } }, AxialClassification::Y => { let x_start = min(line.start.0, line.end.0); let x_end = max(line.start.0, line.end.0) + 1; for x in x_start..x_end { increment_grid_position(&mut grid, &mut overlap, x, line.start.1); } }, AxialClassification::NonAxial => { if consider_diagonal { let x_start = line.start.0; let y_start = line.start.1; let x_sign = if line.end.0 - line.start.0 > 0 {1} else {-1}; let y_sign = if line.end.1 - line.start.1 > 0 {1} else {-1}; let length = (line.start.0 - line.end.0).abs() + 1; for inc in 0..length { let x = x_start + inc * x_sign; let y = y_start + inc * y_sign; increment_grid_position(&mut grid, &mut overlap, x, y); } } } } } overlap } impl Day for Day5 { fn part1(&self, input: &str) -> String { let lines = parse_input(input); compute_overlap(&lines, false).to_string() } fn part2(&self, input: &str) -> String { let lines = parse_input(input); compute_overlap(&lines, true).to_string() } } #[cfg(test)] mod tests { use super::*; #[test] fn part1_test1() { assert_eq!(Day5{}.part1("0,9 -> 5,9 8,0 -> 0,8 9,4 -> 3,4 2,2 -> 2,1 7,0 -> 7,4 6,4 -> 2,0 0,9 -> 2,9 3,4 -> 1,4 0,0 -> 8,8 5,5 -> 8,2"), "5"); } #[test] fn part2_test1() { assert_eq!(Day5{}.part2("0,9 -> 5,9 8,0 -> 0,8 9,4 -> 3,4 2,2 -> 2,1 7,0 -> 7,4 6,4 -> 2,0 0,9 -> 2,9 3,4 -> 1,4 0,0 -> 8,8 5,5 -> 8,2"), "12"); } }
fn axial_classification(&self) -> AxialClassification { if self.start.0 == self.end.0 { AxialClassification::X } else if self.start.1 == self.end.1 { AxialClassification::Y } else { AxialClassification::NonAxial } }
function_block-full_function
[ { "content": "struct BingoBoard {\n\n values: Vec<Vec<i32>>,\n\n marked: Vec<Vec<bool>>\n\n}\n\n\n\nimpl BingoBoard {\n\n fn new(values: Vec<Vec<i32>>) -> BingoBoard {\n\n let mut marked = Vec::new();\n\n for a in &values {\n\n let mut row = Vec::new();\n\n for _ in ...
Rust
pongo-rs-derive/src/raw_index_options.rs
simoneromano96/pongo-rs
615e776990e4c0435efc1ff7b87aa0d39e3b9024
use darling::FromMeta; #[derive(Clone, Debug)] pub(crate) struct Document(mongodb::bson::Document); impl FromMeta for Document { fn from_string(value: &str) -> darling::Result<Self> { println!("{value:#?}"); let value = serde_json::from_str(value); match value { Ok(document) => Ok(Self(document)), Err(error) => Err(darling::Error::unsupported_shape(&format!("{error}"))), } } } #[derive(Clone, Debug)] pub(crate) struct Collation(mongodb::options::Collation); impl FromMeta for Collation { fn from_string(value: &str) -> darling::Result<Self> { println!("{value:#?}"); let value = serde_json::from_str(value); match value { Ok(document) => Ok(Self(document)), Err(error) => Err(darling::Error::unsupported_shape(&format!("{error}"))), } } } #[derive(Clone, Debug)] pub(crate) struct IndexOptions(mongodb::options::IndexOptions); impl FromMeta for IndexOptions { fn from_string(value: &str) -> darling::Result<Self> { println!("{value:#?}"); let value = serde_json::from_str(value); match value { Ok(document) => Ok(Self(document)), Err(error) => Err(darling::Error::unsupported_shape(&format!("{error}"))), } } } #[derive(Debug, Default, FromMeta)] pub(crate) struct RawIndexOptions { #[darling(default)] pub(crate) background: Option<bool>, #[darling(default)] pub(crate) expire_after: Option<u64>, #[darling(default)] pub(crate) name: Option<String>, #[darling(default)] pub(crate) sparse: Option<bool>, #[darling(default)] pub(crate) storage_engine: Option<Document>, #[darling(default)] pub(crate) unique: Option<bool>, #[darling(default)] pub(crate) version: Option<u32>, #[darling(default)] pub(crate) default_language: Option<String>, #[darling(default)] pub(crate) language_override: Option<String>, #[darling(default)] pub(crate) text_index_version: Option<u32>, #[darling(default)] pub(crate) weights: Option<Document>, #[darling(default)] pub(crate) sphere_2d_index_version: Option<u32>, #[darling(default)] pub(crate) bits: Option<u32>, #[darling(default)] pub(crate) max: Option<f64>, 
#[darling(default)] pub(crate) min: Option<f64>, #[darling(default)] pub(crate) bucket_size: Option<u32>, #[darling(default)] pub(crate) partial_filter_expression: Option<Document>, #[darling(default)] pub(crate) collation: Option<Collation>, #[darling(default)] pub(crate) wildcard_projection: Option<Document>, #[darling(default)] pub(crate) hidden: Option<bool>, } impl From<&RawIndexOptions> for mongodb::options::IndexOptions { fn from(raw_options: &RawIndexOptions) -> Self { let builder = mongodb::options::IndexOptions::builder(); builder .background(raw_options.background) .expire_after(raw_options.expire_after.map(std::time::Duration::from_secs)) .name(raw_options.name.clone()) .sparse(raw_options.sparse) .storage_engine( raw_options .storage_engine .clone() .map(|storage_engine| storage_engine.0), ) .unique(raw_options.unique) .version(raw_options.version.map(|version| match version { 0 => mongodb::options::IndexVersion::V0, 1 => mongodb::options::IndexVersion::V1, 2 => mongodb::options::IndexVersion::V2, _custom => mongodb::options::IndexVersion::Custom(_custom), })) .default_language(raw_options.default_language.clone()) .language_override(raw_options.language_override.clone()) .text_index_version(raw_options.text_index_version.map(|version| match version { 1 => mongodb::options::TextIndexVersion::V1, 2 => mongodb::options::TextIndexVersion::V2, 3 => mongodb::options::TextIndexVersion::V3, _custom => mongodb::options::TextIndexVersion::Custom(_custom), })) .weights(raw_options.weights.clone().map(|weights| weights.0)) .sphere_2d_index_version(raw_options.sphere_2d_index_version.map( |version| match version { 2 => mongodb::options::Sphere2DIndexVersion::V2, 3 => mongodb::options::Sphere2DIndexVersion::V3, _custom => mongodb::options::Sphere2DIndexVersion::Custom(_custom), }, )) .bits(raw_options.bits) .max(raw_options.max) .min(raw_options.min) .bucket_size(raw_options.bucket_size) .partial_filter_expression( raw_options .partial_filter_expression .clone() 
.map(|partial_filter_expression| partial_filter_expression.0), ) .collation(raw_options.collation.clone().map(|collation| collation.0)) .wildcard_projection( raw_options .wildcard_projection .clone() .map(|wildcard_projection| wildcard_projection.0), ) .hidden(raw_options.hidden) .build() } }
use darling::FromMeta; #[derive(Clone, Debug)] pub(crate) struct Document(mongodb::bson::Document); impl FromMeta for Document { fn from_string(value: &str) -> darling::Result<Self> { println!("{value:#?}"); let value = serde_json::from_str(value); match value { Ok(document) => Ok(Self(document)), Err(error) => Err(darling::Error::unsupported_shape(&format!("{error}"))), } } } #[derive(Clone, Debug)] pub(crate) struct Collation(mongodb::options::Collation); impl FromMeta for Collation { fn from_string(value: &str) -> darling::Result<Self> { println!("{value:#?}"); let value = serde_json::from_str(value);
t)] pub(crate) bits: Option<u32>, #[darling(default)] pub(crate) max: Option<f64>, #[darling(default)] pub(crate) min: Option<f64>, #[darling(default)] pub(crate) bucket_size: Option<u32>, #[darling(default)] pub(crate) partial_filter_expression: Option<Document>, #[darling(default)] pub(crate) collation: Option<Collation>, #[darling(default)] pub(crate) wildcard_projection: Option<Document>, #[darling(default)] pub(crate) hidden: Option<bool>, } impl From<&RawIndexOptions> for mongodb::options::IndexOptions { fn from(raw_options: &RawIndexOptions) -> Self { let builder = mongodb::options::IndexOptions::builder(); builder .background(raw_options.background) .expire_after(raw_options.expire_after.map(std::time::Duration::from_secs)) .name(raw_options.name.clone()) .sparse(raw_options.sparse) .storage_engine( raw_options .storage_engine .clone() .map(|storage_engine| storage_engine.0), ) .unique(raw_options.unique) .version(raw_options.version.map(|version| match version { 0 => mongodb::options::IndexVersion::V0, 1 => mongodb::options::IndexVersion::V1, 2 => mongodb::options::IndexVersion::V2, _custom => mongodb::options::IndexVersion::Custom(_custom), })) .default_language(raw_options.default_language.clone()) .language_override(raw_options.language_override.clone()) .text_index_version(raw_options.text_index_version.map(|version| match version { 1 => mongodb::options::TextIndexVersion::V1, 2 => mongodb::options::TextIndexVersion::V2, 3 => mongodb::options::TextIndexVersion::V3, _custom => mongodb::options::TextIndexVersion::Custom(_custom), })) .weights(raw_options.weights.clone().map(|weights| weights.0)) .sphere_2d_index_version(raw_options.sphere_2d_index_version.map( |version| match version { 2 => mongodb::options::Sphere2DIndexVersion::V2, 3 => mongodb::options::Sphere2DIndexVersion::V3, _custom => mongodb::options::Sphere2DIndexVersion::Custom(_custom), }, )) .bits(raw_options.bits) .max(raw_options.max) .min(raw_options.min) 
.bucket_size(raw_options.bucket_size) .partial_filter_expression( raw_options .partial_filter_expression .clone() .map(|partial_filter_expression| partial_filter_expression.0), ) .collation(raw_options.collation.clone().map(|collation| collation.0)) .wildcard_projection( raw_options .wildcard_projection .clone() .map(|wildcard_projection| wildcard_projection.0), ) .hidden(raw_options.hidden) .build() } }
match value { Ok(document) => Ok(Self(document)), Err(error) => Err(darling::Error::unsupported_shape(&format!("{error}"))), } } } #[derive(Clone, Debug)] pub(crate) struct IndexOptions(mongodb::options::IndexOptions); impl FromMeta for IndexOptions { fn from_string(value: &str) -> darling::Result<Self> { println!("{value:#?}"); let value = serde_json::from_str(value); match value { Ok(document) => Ok(Self(document)), Err(error) => Err(darling::Error::unsupported_shape(&format!("{error}"))), } } } #[derive(Debug, Default, FromMeta)] pub(crate) struct RawIndexOptions { #[darling(default)] pub(crate) background: Option<bool>, #[darling(default)] pub(crate) expire_after: Option<u64>, #[darling(default)] pub(crate) name: Option<String>, #[darling(default)] pub(crate) sparse: Option<bool>, #[darling(default)] pub(crate) storage_engine: Option<Document>, #[darling(default)] pub(crate) unique: Option<bool>, #[darling(default)] pub(crate) version: Option<u32>, #[darling(default)] pub(crate) default_language: Option<String>, #[darling(default)] pub(crate) language_override: Option<String>, #[darling(default)] pub(crate) text_index_version: Option<u32>, #[darling(default)] pub(crate) weights: Option<Document>, #[darling(default)] pub(crate) sphere_2d_index_version: Option<u32>, #[darling(defaul
random
[ { "content": "fn impl_model_derive_macro(ast: &syn::DeriveInput) -> TokenStream {\n\n let parsed: Model = FromDeriveInput::from_derive_input(ast).unwrap();\n\n println!(\"{parsed:#?}\");\n\n let name = &parsed.ident;\n\n\n\n let collection_name = match parsed.collection_options {\n\n Some(col...
Rust
src/python_module.rs
gchers/fbleau
fe66cc859efaba47526cb864fbe8438bfd6bdc35
use numpy::*; use pyo3::prelude::*; use pyo3::types::PyDict; use crate::estimates::*; use crate::fbleau_estimation::{run_fbleau, Logger}; use crate::Label; #[pymodule(fbleau)] fn pyfbleau(_py: Python, m: &PyModule) -> PyResult<()> { #[pyfn(m, "run_fbleau")] fn run_fbleau_py( py: Python, train_x: &PyArray2<f64>, train_y: &PyArray1<Label>, test_x: &PyArray2<f64>, test_y: &PyArray1<Label>, estimate: &str, knn_strategy: Option<&str>, distance: Option<String>, log_errors: bool, log_individual_errors: bool, delta: Option<f64>, qstop: Option<usize>, absolute: bool, scale: bool, ) -> PyResult<PyObject> { let train_x = unsafe { train_x.as_array().to_owned() }; let train_y = unsafe { train_y.as_array().to_owned() }; let test_x = unsafe { test_x.as_array().to_owned() }; let test_y = unsafe { test_y.as_array().to_owned() }; let estimate = match estimate { "nn" => Estimate::NN, "knn" => Estimate::KNN, "frequentist" => Estimate::Frequentist, "nn-bound" => Estimate::NNBound, _ => { unimplemented!() } }; let knn_strategy = if let Some(strategy) = knn_strategy { match strategy { "ln" => Some(KNNStrategy::Ln), "log10" => Some(KNNStrategy::Log10), _ => { unimplemented!() } } } else { None }; let mut error_logger = if log_errors { Some(Logger::LogVec(vec![])) } else { None }; let mut individual_error_logger = if log_individual_errors { Some(Logger::LogVec(vec![])) } else { None }; let (min_error, last_error, random_guessing) = run_fbleau( train_x, train_y, test_x, test_y, estimate, knn_strategy, distance, &mut error_logger, &mut individual_error_logger, delta, qstop, absolute, scale, ); let res = PyDict::new(py); res.set_item("min-estimate", min_error)?; res.set_item("last-estimate", last_error)?; res.set_item("random-guessing", random_guessing)?; res.set_item( "estimates", if let Some(Logger::LogVec(v)) = error_logger { v } else { vec![] }, )?; res.set_item( "min-individual-errors", if let Some(Logger::LogVec(v)) = individual_error_logger { v } else { vec![] }, )?; Ok(res.into()) } 
Ok(()) }
use numpy::*; use pyo3::prelude::*; use pyo3::types::PyDict; use crate::estimates::*; use crate::fbleau_estimation::{run_fbleau, Logger}; use crate::Label; #[pymodule(fbleau)] fn pyfbleau(_py: Python, m: &PyModule) -> PyResult<()> { #[pyfn(m, "run_fbleau")] fn run_fbleau_py( py: Python, train_x: &PyArray2<f64>, train_y: &PyArray1<Label>, test_x: &PyArray2<f64>, test_y: &PyArray1<Label>, estimate: &str, knn_strategy: Option<&str>, distance: Option<String>, log_errors: bool, log_individual_errors: bool, delta: Option<f64>, qstop: Option<usize>, absolute: bool, scale: bool, ) -> PyResult<PyObject> { let train_x = unsafe { train_x.as_array().to_owned() }; let train_y = unsafe { train_y.as_array().to_owned() }; let test_x = unsafe { test_x.as_array().to_owned() }; let test_y = unsafe { test_y.as_array().to_owned() }; let estimate = match estimate { "nn" => Estimate::NN, "knn" => Estimate::KNN, "frequentist" => Estimate::Frequentist, "nn-bound" => Estimate::NNBound, _ => { unimplemented!() } }; let knn_strategy = if let Some(strategy) = knn_strategy { match strategy { "ln" => Some(KNNStrategy::Ln), "log10" => Some(KNNStrategy::Log10), _ => { unimplemented!() } } } else { None }; let mut error_logger = if log_errors { Some(Logger::LogVec(vec![])) } else { None }; let mut individual_error_logger = if log_individual_errors { Some(Logger::LogVec(vec![])) } else { None }; let (min_error, last_error, random_guessing) = run_fbleau( train_x, train_y, test_x, test_y, estimate, knn_strategy, distance, &mut error_logger, &mut individual_error_logger, delta, qstop, absolute, scale, ); let res = PyDict::new(py); res.set_item("min-estimate", min_error)?; res.set_item("last-estimate", last_error)?; res.set_item("random-guessing", random_guessing)?; res.set_item( "
estimates", if let Some(Logger::LogVec(v)) = error_logger { v } else { vec![] }, )?; res.set_item( "min-individual-errors", if let Some(Logger::LogVec(v)) = individual_error_logger { v } else { vec![] }, )?; Ok(res.into()) } Ok(()) }
function_block-function_prefixed
[ { "content": "pub fn knn_strategy(strategy: KNNStrategy) -> Box<dyn Fn(usize) -> usize> {\n\n match strategy {\n\n KNNStrategy::NN => Box::new(move |_| 1),\n\n KNNStrategy::FixedK(k) => Box::new(move |_| k),\n\n KNNStrategy::Ln => Box::new(move |n| {\n\n next_odd(if n != 0 {\n...
Rust
src/emit/flatbin.rs
kubasz/rvasm
f28c2e857bd41c5f511bd5494ae89d8425882535
use crate::arch; use crate::parser::Node; use smallvec::SmallVec; use std::collections::HashMap; #[derive(Clone, Debug)] pub enum EmitError { UnexpectedNodeType(String), InvalidInstruction(String), InvalidArgumentCount(String), InvalidArgumentType(String, usize), InvalidEncoding(String), DuplicateLabel(String), DuplicateConstant(String), } pub fn emit_flat_binary(spec: &arch::RiscVSpec, ast: &Node) -> Result<Vec<u8>, EmitError> { let mut state = BinaryEmitState { out_buf: Vec::new(), out_pos: 0, deferred: Vec::new(), label_set: HashMap::new(), local_label_set: HashMap::new(), const_set: HashMap::new(), }; emit_binary_recurse(spec, &mut state, ast).map(move |_| state.out_buf) } #[derive(Debug)] struct BinaryEmitState { out_buf: Vec<u8>, out_pos: usize, deferred: Vec<(usize, Node)>, label_set: HashMap<String, u64>, local_label_set: HashMap<String, u64>, const_set: HashMap<String, u64>, } impl BinaryEmitState { fn accomodate_bytes(&mut self, byte_count: usize) -> &mut [u8] { let start_pos = self.out_pos; let end_pos = start_pos + byte_count; if self.out_buf.len() < end_pos { self.out_buf.resize(end_pos, 0); } self.out_pos = end_pos; &mut self.out_buf[start_pos..end_pos] } fn find_const(&self, key: &str, spec: &arch::RiscVSpec) -> Option<u64> { self.label_set .get(key) .or_else(|| self.local_label_set.get(key)) .or_else(|| self.const_set.get(key)) .copied() .or_else(|| spec.get_const(key)) } } fn emit_deferred(spec: &arch::RiscVSpec, state: &mut BinaryEmitState) -> Result<(), EmitError> { let mut to_remove = Vec::new(); let mut to_emit = Vec::new(); for (i, (pos, insn)) in state.deferred.iter().enumerate() { let pc = *pos as u64; let simp = insn.emitter_simplify(&|cname| state.find_const(cname, spec), pc); if !simp.1 { continue; } to_emit.push((*pos, simp.0)); to_remove.push(i); } for i in to_remove.iter().rev() { state.deferred.swap_remove(*i); } for (pos, insn) in to_emit.into_iter() { let saved_pos = state.out_pos; state.out_pos = pos; emit_binary_recurse(&spec, 
state, &insn)?; state.out_pos = saved_pos; } Ok(()) } fn emit_binary_recurse( spec: &arch::RiscVSpec, state: &mut BinaryEmitState, node: &Node, ) -> Result<(), EmitError> { use Node::*; let ialign_bytes = (spec.get_const("IALIGN").unwrap_or(32) as usize + 7) / 8; let max_ilen_bytes = (spec.get_const("ILEN").unwrap_or(32) as usize + 7) / 8; match node { Root(nodes) => { for node in nodes.iter() { emit_binary_recurse(spec, state, node)?; } emit_deferred(spec, state)?; if let Some(defnode) = state.deferred.first() { return Err(EmitError::UnexpectedNodeType(format!("{:?}", defnode))); } Ok(()) } Label(lname) => { if lname.starts_with('.') { if state .local_label_set .insert(lname.to_owned(), state.out_pos as u64) .is_some() { return Err(EmitError::DuplicateLabel(lname.to_owned())); } } else { emit_deferred(spec, state)?; state.local_label_set.clear(); if state .label_set .insert(lname.to_owned(), state.out_pos as u64) .is_some() { return Err(EmitError::DuplicateLabel(lname.to_owned())); } } Ok(()) } Instruction(iname, args) => { match iname.as_ref() { ".org" | ".ORG" => { if args.len() != 1 { return Err(EmitError::InvalidArgumentCount(iname.clone())); } if let (Node::Argument(box Node::Integer(adr)), _) = args[0].emitter_simplify( &|cname| state.find_const(cname, spec), state.out_pos as u64, ) { let new_out_pos = adr as usize; if new_out_pos > state.out_buf.len() { state .out_buf .reserve(new_out_pos - state.out_buf.len() + 32 * 32); state.out_buf.resize(new_out_pos, 0); } state.out_pos = new_out_pos; Ok(()) } else { Err(EmitError::InvalidArgumentType(iname.clone(), 0)) } } ".equ" | ".EQU" | ".define" | ".DEFINE" => { if args.len() != 2 { return Err(EmitError::InvalidArgumentCount(iname.clone())); } if let Node::Argument(box Node::Identifier(defname)) = &args[0] { if let (Node::Argument(box Node::Integer(val)), _) = args[1] .emitter_simplify( &|cname| state.find_const(cname, spec), state.out_pos as u64, ) { if state.const_set.insert(defname.to_owned(), val).is_none() { 
Ok(()) } else { Err(EmitError::DuplicateConstant(defname.to_owned())) } } else { Err(EmitError::InvalidArgumentType(iname.clone(), 1)) } } else { Err(EmitError::InvalidArgumentType(iname.clone(), 0)) } } _ => { let specinsn = spec .get_instruction_by_name(iname) .ok_or_else(|| EmitError::InvalidInstruction(iname.clone()))?; let fmt = specinsn.get_format(&spec); if args.len() != specinsn.args.len() { return Err(EmitError::InvalidArgumentCount(iname.clone())); } let ilen_bytes = (fmt.ilen + 7) / 8; if ilen_bytes > max_ilen_bytes { return Err(EmitError::InvalidEncoding(iname.clone())); } let aligned_pos = (state.out_pos + ialign_bytes - 1) / ialign_bytes * ialign_bytes; if state.out_pos != aligned_pos { state.accomodate_bytes(aligned_pos - state.out_pos); } let simpinsn = node.emitter_simplify( &|cname| state.find_const(cname, spec), state.out_pos as u64, ); if !simpinsn.1 { state.deferred.push((state.out_pos, simpinsn.0)); state.accomodate_bytes(ilen_bytes); return Ok(()); } let args; if let Node::Instruction(_, sargs) = simpinsn.0 { args = sargs; } else { panic!("Simplified instruction is now a {:?}", simpinsn.0); } let mut argv: SmallVec<[u64; 4]> = SmallVec::new(); for (i, arg) in args.iter().enumerate() { match fmt.fields[specinsn.args[i]].vtype { arch::FieldType::Value => { if let Node::Argument(box Node::Integer(val)) = arg { argv.push(*val); } else { return Err(EmitError::InvalidArgumentType(iname.clone(), i)); } } arch::FieldType::Register => { if let Node::Argument(box Node::Register(rid)) = arg { argv.push(*rid as u64); } else { return Err(EmitError::InvalidArgumentType(iname.clone(), i)); } } } } assert_eq!(argv.len(), specinsn.args.len()); let bytes = state.accomodate_bytes(ilen_bytes); specinsn .encode_into(bytes, spec, argv.as_slice()) .map_err(|_| EmitError::InvalidEncoding(iname.clone())) } } } _ => Err(EmitError::UnexpectedNodeType(format!("{:?}", node))), } }
use crate::arch; use crate::parser::Node; use smallvec::SmallVec; use std::collections::HashMap; #[derive(Clone, Debug)] pub enum EmitError { UnexpectedNodeType(String), InvalidInstruction(String), InvalidArgumentCount(String), InvalidArgumentType(String, usize), InvalidEncoding(String), DuplicateLabel(String), DuplicateConstant(String), } pub fn emit_flat_binary(spec: &arch::RiscVSpec, ast: &Node) -> Result<Vec<u8>, EmitError> { let mut state = BinaryEmitState { out_buf: Vec::new(), out_pos: 0, deferred: Vec::new(), label_set: HashMap::new(), local_label_set: HashMap::new(), const_set: HashMap::new(), }; emit_binary_recurse(spec, &mut state, ast).map(move |_| state.out_buf) } #[derive(Debug)] struct BinaryEmitState { out_buf: Vec<u8>, out_pos: usize, deferred: Vec<(usize, Node)>, label_set: HashMap<String, u64>, local_label_set: HashMap<String, u64>, const_set: HashMap<String, u64>, } impl BinaryEmitState { fn accomodate_bytes(&mut self, byte_count: usize) -> &mut [u8] { let start_pos = self.out_pos; let end_pos = start_pos + byte_count; if self.out_buf.len() < end_pos { self.out_buf.resize(end_pos, 0); } self.out_pos = end_pos; &mut self.out_buf[start_pos..end_pos] } fn find_const(&self, key: &str, spec: &arch::RiscVSpec) -> Option<u64> { self.label_set .get(key) .or_else(|| self.local_label_set.get(key)) .or_else(|| self.const_set.get(key)) .copied() .or_else(|| spec.get_const(key)) } } fn emit_deferred(spec: &arch::RiscVSpec, state: &mut BinaryEmitState) -> Result<(), EmitError> { let mut to_remove = Vec::new(); let mut to_emit = Vec::new(); for (i, (pos, insn)) in state.deferred.iter().enumerate() { let pc = *pos as u64; let simp = insn.emitter_simplify(&|cname| state.find_const(cname, spec), pc); if !simp.1 { continue; } to_emit.push((*pos, simp.0)); to_remove.push(i); } for i in to_remove.iter().rev() { state.deferred.swap_remove(*i); } for (pos, insn) in to_emit.into_iter() { let saved_pos = state.out_pos; state.out_pos = pos; emit_binary_recurse(&spec, 
state, &insn)?; state.out_pos = saved_pos; } Ok(()) } fn emit_binary_recurse( spec: &arch::RiscVSpec, state: &mut BinaryEmitState, node: &Node, ) -> Result<(), EmitError> { use Node::*; let ialign_bytes = (spec.get_const("IALIGN").unwrap_or(32) as usize + 7) / 8; let max_ilen_bytes = (spec.get_const("ILEN").unwrap_or(32) as usize + 7) / 8; match node { Root(nodes) => { for node in nodes.iter() { emit_binary_recurse(spec, state, node)?; } emit_deferred(spec, state)?; if let Some(defnode) = state.deferred.first() { return Err(EmitError::UnexpectedNodeType(format!("{:?}", defnode))); } Ok(()) } Label(lname) => { if lname.starts_with('.') { if state .local_label_set .insert(lname.to_owned(), state.out_pos as u64) .is_some() { return Err(EmitError::DuplicateLabel(lname.to_owned())); } } else { emit_deferred(spec, state)?; state.local_label_set.clear(); if state .label_set .insert(lname.to_owned(), state.out_pos as u64) .is_some() { return Err(EmitError::DuplicateLabel(lname.to_owned())); } } Ok(()) } Instruction(iname, args) => { match iname.as_ref() { ".org" | ".ORG" => { if args.len() != 1 { return Err(EmitError::InvalidArgumentCount(iname.clone())); } if let (Node::Argument(box Node::Integer(adr)), _) = args[0].emitter_simplify( &|cname| state.find_const(cname, spec), state.out_pos as u64, ) { let new_out_pos = adr as usize; if new_out_pos > state.out_buf.len() { state .out_buf .reserve(new_out_pos - state.out_buf.len() + 32 * 32); state.out_buf.resize(new_out_pos, 0); } state.out_pos = new_out_pos; Ok(()) } else { Err(EmitError::InvalidArgumentType(iname.clone(), 0)) } } ".equ" | ".EQU" | ".define" | ".DEFINE" => { if args.len() != 2 { return Err(EmitError::InvalidArgumentCount(iname.clone())); } if let Node::Argument(box Node::Identifier(defname)) = &args[0] { if let (Node::Argument(box Node::Integer(val)), _) = args[1] .emitter_simplify( &|cname| state.find_const(cname, spec), state.out_pos as u64, ) { if state.const_set.insert(defname.to_owned(), val).is_none() { 
Ok(()) } else { Err(EmitError::DuplicateConstant(defname.to_owned())) } } else { Err(EmitError::InvalidArgumentType(iname.clone(), 1)) } } else { Err(EmitError::InvalidArgumentType(iname.clone(), 0)) } } _ => { let specinsn = spec .get_instruction_by_name(iname) .ok_or_else(|| EmitError::InvalidInstruction(iname.clone()))?; let fmt = specinsn.get_format(&spec); if args.len() != specinsn.args.len() { return Err(EmitError::InvalidArgumentCount(iname.clone())); } let ilen_bytes = (fmt.ilen + 7) / 8; if ilen_bytes > max_ilen_bytes { return Err(EmitError::InvalidEncoding(iname.clone())); } let aligned_pos = (state.out_pos + ialign_bytes - 1) / ialign_bytes * ialign_bytes; if state.out_pos != aligned_pos { state.accomodate_bytes(aligned_pos - state.out_pos); } let simpinsn = node.emitter_simplify( &|cname| state.find_const(cname, spec), state.out_pos as u64, ); if !simpinsn.1 { state.deferred.push((state.out_pos, simpinsn.0)); state.accomodate_bytes(ilen_bytes); return Ok(()); } let args;
let mut argv: SmallVec<[u64; 4]> = SmallVec::new(); for (i, arg) in args.iter().enumerate() { match fmt.fields[specinsn.args[i]].vtype { arch::FieldType::Value => { if let Node::Argument(box Node::Integer(val)) = arg { argv.push(*val); } else { return Err(EmitError::InvalidArgumentType(iname.clone(), i)); } } arch::FieldType::Register => { if let Node::Argument(box Node::Register(rid)) = arg { argv.push(*rid as u64); } else { return Err(EmitError::InvalidArgumentType(iname.clone(), i)); } } } } assert_eq!(argv.len(), specinsn.args.len()); let bytes = state.accomodate_bytes(ilen_bytes); specinsn .encode_into(bytes, spec, argv.as_slice()) .map_err(|_| EmitError::InvalidEncoding(iname.clone())) } } } _ => Err(EmitError::UnexpectedNodeType(format!("{:?}", node))), } }
if let Node::Instruction(_, sargs) = simpinsn.0 { args = sargs; } else { panic!("Simplified instruction is now a {:?}", simpinsn.0); }
if_condition
[ { "content": "pub fn ast_from_str(s: &str, spec: &arch::RiscVSpec) -> Result<Node, ParseError> {\n\n grammar::top_level(s, spec)\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 0, "score": 137818.47554548547 }, { "content": "pub fn ast_from_file(path: &str, spec: &arch::RiscVSpec) -> R...
Rust
anvil/src/eth/backend/executor.rs
Rjected/dapptools-rs
b11b776934cce2a0e70ce4879e7a05c9a34ac008
use crate::eth::{ backend::{db::Db, validate::TransactionValidator}, error::InvalidTransactionError, pool::transactions::PoolTransaction, }; use anvil_core::eth::{ block::{Block, BlockInfo, Header, PartialHeader}, receipt::{EIP1559Receipt, EIP2930Receipt, EIP658Receipt, Log, TypedReceipt}, transaction::{PendingTransaction, TransactionInfo, TypedTransaction}, trie, }; use ethers::{ abi::ethereum_types::BloomInput, types::{Bloom, H256, U256}, utils::rlp, }; use foundry_evm::{ executor::inspector::Tracer, revm, revm::{BlockEnv, CfgEnv, Env, Return, TransactOut}, trace::node::CallTraceNode, }; use std::sync::Arc; use tracing::{trace, warn}; pub struct ExecutedTransaction { transaction: Arc<PoolTransaction>, exit: Return, out: TransactOut, gas: u64, logs: Vec<Log>, traces: Vec<CallTraceNode>, } impl ExecutedTransaction { fn create_receipt(&self) -> TypedReceipt { let used_gas: U256 = self.gas.into(); let mut bloom = Bloom::default(); logs_bloom(self.logs.clone(), &mut bloom); let logs = self.logs.clone(); let status_code: u8 = if self.exit as u8 <= Return::SelfDestruct as u8 { 1 } else { 0 }; match &self.transaction.pending_transaction.transaction { TypedTransaction::Legacy(_) => TypedReceipt::Legacy(EIP658Receipt { status_code, gas_used: used_gas, logs_bloom: bloom, logs, }), TypedTransaction::EIP2930(_) => TypedReceipt::EIP2930(EIP2930Receipt { status_code, gas_used: used_gas, logs_bloom: bloom, logs, }), TypedTransaction::EIP1559(_) => TypedReceipt::EIP1559(EIP1559Receipt { status_code, gas_used: used_gas, logs_bloom: bloom, logs, }), } } } #[derive(Debug, Clone)] pub struct ExecutedTransactions { pub block: BlockInfo, pub included: Vec<Arc<PoolTransaction>>, pub invalid: Vec<Arc<PoolTransaction>>, } pub struct TransactionExecutor<'a, Db: ?Sized, Validator: TransactionValidator> { pub db: &'a mut Db, pub validator: Validator, pub pending: std::vec::IntoIter<Arc<PoolTransaction>>, pub block_env: BlockEnv, pub cfg_env: CfgEnv, pub parent_hash: H256, pub gas_used: U256, 
} impl<'a, DB: Db + ?Sized, Validator: TransactionValidator> TransactionExecutor<'a, DB, Validator> { pub fn execute(mut self, timestamp: u64) -> ExecutedTransactions { let mut transactions = Vec::new(); let mut transaction_infos = Vec::new(); let mut receipts = Vec::new(); let mut bloom = Bloom::default(); let mut cumulative_gas_used = U256::zero(); let mut invalid = Vec::new(); let mut included = Vec::new(); let gas_limit = self.block_env.gas_limit; let parent_hash = self.parent_hash; let block_number = self.block_env.number; let difficulty = self.block_env.difficulty; let beneficiary = self.block_env.coinbase; for (idx, tx) in self.enumerate() { let tx = match tx { TransactionExecutionOutcome::Executed(tx) => { included.push(tx.transaction.clone()); tx } TransactionExecutionOutcome::Exhausted(_) => continue, TransactionExecutionOutcome::Invalid(tx, _) => { invalid.push(tx); continue } }; let receipt = tx.create_receipt(); cumulative_gas_used = cumulative_gas_used.saturating_add(receipt.gas_used()); let ExecutedTransaction { transaction, logs, out, traces, .. 
} = tx; logs_bloom(logs.clone(), &mut bloom); let contract_address = if let TransactOut::Create(_, contract_address) = out { trace!(target: "backend", "New contract deployed: at {:?}", contract_address); contract_address } else { None }; let info = TransactionInfo { transaction_hash: *transaction.hash(), transaction_index: idx as u32, from: *transaction.pending_transaction.sender(), to: transaction.pending_transaction.transaction.to().copied(), contract_address, logs, logs_bloom: *receipt.logs_bloom(), traces, }; transaction_infos.push(info); receipts.push(receipt); transactions.push(transaction.pending_transaction.transaction.clone()); } let ommers: Vec<Header> = Vec::new(); let receipts_root = trie::ordered_trie_root(receipts.iter().map(rlp::encode)); let partial_header = PartialHeader { parent_hash, beneficiary, state_root: self.db.maybe_state_root().unwrap_or_default(), receipts_root, logs_bloom: bloom, difficulty, number: block_number, gas_limit, gas_used: cumulative_gas_used, timestamp, extra_data: Default::default(), mix_hash: Default::default(), nonce: Default::default(), }; let block = Block::new(partial_header, transactions.clone(), ommers); let block = BlockInfo { block, transactions: transaction_infos, receipts }; ExecutedTransactions { block, included, invalid } } fn env_for(&self, tx: &PendingTransaction) -> Env { Env { cfg: self.cfg_env.clone(), block: self.block_env.clone(), tx: tx.to_revm_tx_env() } } } pub enum TransactionExecutionOutcome { Executed(ExecutedTransaction), Invalid(Arc<PoolTransaction>, InvalidTransactionError), Exhausted(Arc<PoolTransaction>), } impl<'a, 'b, DB: Db + ?Sized, Validator: TransactionValidator> Iterator for &'b mut TransactionExecutor<'a, DB, Validator> { type Item = TransactionExecutionOutcome; fn next(&mut self) -> Option<Self::Item> { let transaction = self.pending.next()?; let account = self.db.basic(*transaction.pending_transaction.sender()); let env = self.env_for(&transaction.pending_transaction); let max_gas = 
self.gas_used.saturating_add(U256::from(env.tx.gas_limit)); if max_gas > env.block.gas_limit { return Some(TransactionExecutionOutcome::Exhausted(transaction)) } if let Err(err) = self.validator.validate_pool_transaction_for( &transaction.pending_transaction, &account, &env, ) { warn!(target: "backend", "Skipping invalid tx execution [{:?}] {}", transaction.hash(), err); return Some(TransactionExecutionOutcome::Invalid(transaction, err)) } let mut evm = revm::EVM::new(); evm.env = env; evm.database(&mut self.db); let mut tracer = Tracer::default(); trace!(target: "backend", "[{:?}] executing", transaction.hash()); let (exit, out, gas, logs) = evm.inspect_commit(&mut tracer); if exit == Return::OutOfGas { warn!(target: "backend", "[{:?}] executed with out of gas", transaction.hash()) } trace!(target: "backend", "[{:?}] executed with out={:?}, gas ={}", transaction.hash(), out, gas); self.gas_used.saturating_add(U256::from(gas)); trace!(target: "backend::executor", "transacted [{:?}], result: {:?} gas {}", transaction.hash(), exit, gas); let tx = ExecutedTransaction { transaction, exit, out, gas, logs: logs.into_iter().map(Into::into).collect(), traces: tracer.traces.arena, }; Some(TransactionExecutionOutcome::Executed(tx)) } } fn logs_bloom(logs: Vec<Log>, bloom: &mut Bloom) { for log in logs { bloom.accrue(BloomInput::Raw(&log.address[..])); for topic in log.topics { bloom.accrue(BloomInput::Raw(&topic[..])); } } }
use crate::eth::{ backend::{db::Db, validate::TransactionValidator}, error::InvalidTransactionError, pool::transactions::PoolTransaction, }; use anvil_core::eth::{ block::{Block, BlockInfo, Header, PartialHeader}, receipt::{EIP1559Receipt, EIP2930Receipt, EIP658Receipt, Log, TypedReceipt}, transaction::{PendingTransaction, TransactionInfo, TypedTransaction}, trie, }; use ethers::{ abi::ethereum_types::BloomInput, types::{Bloom, H256, U256}, utils::rlp, }; use foundry_evm::{ executor::inspector::Tracer, revm, revm::{BlockEnv, CfgEnv, Env, Return, TransactOut}, trace::node::CallTraceNode, }; use std::sync::Arc; use tracing::{trace, warn}; pub struct ExecutedTransaction { transaction: Arc<PoolTransaction>, exit: Return, out: TransactOut, gas: u64, logs: Vec<Log>, traces: Vec<CallTraceNode>, } impl ExecutedTransaction { fn create_receipt(&self) -> TypedReceipt { let used_gas: U256 = self.gas.into(); let mut bloom = Bloom::default(); logs_bloom(self.logs.clone(), &mut bloom); let logs = self.logs.clone(); let status_code: u8 = if self.exit as u8 <= Return::SelfDestruct as u8 { 1 } else { 0 }; match &self.transaction.pending_transaction.transaction { TypedTransaction::Legacy(_) => TypedReceipt::Legacy(EIP658Receipt { status_code, gas_used: used_gas, logs_bloom: bloom, logs, }), TypedTransaction::EIP2930(_) => TypedReceipt::EIP2930(EIP2930Receipt { status_code, gas_used: used_gas, logs_bloom: bloom, logs, }), TypedTransaction::EIP1559(_) => TypedReceipt::EIP1559(EIP1559Receipt { status_code, gas_used: used_gas, logs_bloom: bloom, logs, }), } } } #[derive(Debug, Clone)] pub struct ExecutedTransactions { pub block: BlockInfo, pub included: Vec<Arc<PoolTransaction>>, pub invalid: Vec<Arc<PoolTransaction>>, } pub struct TransactionExecutor<'a, Db: ?Sized, Validator: TransactionValidator> { pub db: &'a mut Db, pub validator: Validator, pub pending: std::vec::IntoIter<Arc<PoolTransaction>>, pub block_env: BlockEnv, pub cfg_env: CfgEnv, pub parent_hash: H256, pub gas_used: U256, 
} impl<'a, DB: Db + ?Sized, Validator: TransactionValidator> TransactionExecutor<'a, DB, Validator> { pub fn execute(mut self, timestamp: u64) -> ExecutedTransactions { let mut transactions = Vec::new(); let mut transaction_infos = Vec::new(); let mut receipts = Vec::new(); let mut bloom = Bloom::default(); let mut cumulative_gas_used = U256::zero(); let mut invalid = Vec::new(); let mut included = Vec::new(); let gas_limit = self.block_env.gas_limit; let parent_hash = self.parent_hash; let block_number = self.block_env.number; let difficulty = self.block_env.difficulty; let beneficiary = self.block_env.coinbase; for (idx, tx) in self.enumerate() { let tx = match tx { TransactionExecutionOutcome::Executed(tx) => { included.push(tx.transaction.clone()); tx } TransactionExecutionOutcome::Exhausted(_) => continue, TransactionExecutionOutcome::Invalid(tx, _) => { invalid.push(tx); continue } }; let receipt = tx.create_receipt(); cumulative_gas_used = cumulative_gas_used.saturating_add(receipt.gas_used()); let ExecutedTransaction { transaction, logs, out, traces, .. 
} = tx; logs_bloom(logs.clone(), &mut bloom); let contract_address = if let TransactOut::Create(_, contract_address) = out { trace!(target: "backend", "New contract deployed: at {:?}", contract_address); contract_address } else { None }; let info = TransactionInfo { transaction_hash: *transaction.hash(), transaction_index: idx as u32, from: *transaction.pending_transaction.sender(), to: transaction.pending_transaction.transaction.to().copied(), contract_address, logs, logs_bloom: *receipt.logs_bloom(), traces, }; transaction_infos.push(info); receipts.push(receipt); transactions.push(transaction.pending_transaction.transaction.clone()); } let ommers: Vec<Header> = Vec::new(); let receipts_root = trie::ordered_trie_root(receipts.iter().map(rlp::encode)); let partial_header = PartialHeader { parent_hash, beneficiary, state_root: self.db.maybe_state_root().unwrap_or_default(), receipts_root, logs_bloom: bloom, difficulty, number: block_number, gas_limit, gas_used: cumulative_gas_used, timestamp, extra_data: Default::default(), mix_hash: Default::default(), nonce: Default::default(), }; let block = Block::new(partial_header, transactions.clone(), ommers); let block = BlockInfo { block, transactions: transaction_infos, receipts }; ExecutedTransactions { block, included, invalid } } fn env_for(&self, tx: &PendingTransaction) -> Env { Env { cfg: self.cfg_env.clone(), block: self.block_env.clone(), tx: tx.to_revm_tx_env() } } } pub enum TransactionExecutionOutcome { Executed(ExecutedTransaction), Invalid(Arc<PoolTransaction>, InvalidTransactionError), Exhausted(Arc<PoolTransaction>), } impl<'a, 'b, DB: Db + ?Sized, Validator: TransactionValidator> Iterator for &'b mut TransactionExecutor<'a, DB, Validator> { type Item = TransactionExecutionOutcome; fn next(&mut self) -> Option<Self::Item> { let transaction = self.pending.next()?; let account = self.db.basic(*transaction.pending_transaction.sender()); let env = self.env_for(&transaction.pending_transaction); let max_gas = 
self.gas_used.saturating_add(U256::from(env.tx.gas_limit)); if max_gas > env.block.gas_limit { return Some(TransactionExecutionOutcome::Exhausted(transaction)) } if let Err(err) = self.validator.validate_pool_transaction_for( &transaction.pending_transaction, &account, &env, ) { warn!(target: "backend", "Skipping invalid tx execution [{:?}] {}", transaction.hash(), err); return Some(TransactionExecutionOutcome::Invalid(transaction, err)) } let mut evm = revm::EVM::new(); evm.env = env; evm.database(&mut self.d
on { transaction, exit, out, gas, logs: logs.into_iter().map(Into::into).collect(), traces: tracer.traces.arena, }; Some(TransactionExecutionOutcome::Executed(tx)) } } fn logs_bloom(logs: Vec<Log>, bloom: &mut Bloom) { for log in logs { bloom.accrue(BloomInput::Raw(&log.address[..])); for topic in log.topics { bloom.accrue(BloomInput::Raw(&topic[..])); } } }
b); let mut tracer = Tracer::default(); trace!(target: "backend", "[{:?}] executing", transaction.hash()); let (exit, out, gas, logs) = evm.inspect_commit(&mut tracer); if exit == Return::OutOfGas { warn!(target: "backend", "[{:?}] executed with out of gas", transaction.hash()) } trace!(target: "backend", "[{:?}] executed with out={:?}, gas ={}", transaction.hash(), out, gas); self.gas_used.saturating_add(U256::from(gas)); trace!(target: "backend::executor", "transacted [{:?}], result: {:?} gas {}", transaction.hash(), exit, gas); let tx = ExecutedTransacti
function_block-random_span
[ { "content": "/// Returns the RLP for this account.\n\npub fn trie_account_rlp(info: &AccountInfo, storage: Map<U256, U256>) -> Bytes {\n\n let mut stream = RlpStream::new_list(4);\n\n stream.append(&info.nonce);\n\n stream.append(&info.balance);\n\n stream.append(&{\n\n sec_trie_root(storage...
Rust
menoh/src/model/mod.rs
Hakuyume/menoh-rs
2d463e94c0159a56821ec6766cba681cdc6a5edd
use menoh_sys; use std::ffi; use std::mem; use std::ptr; use std::slice; use Dtype; use handler::Handler; use Error; use error::check; pub struct Model { handle: menoh_sys::menoh_model_handle, } impl Model { pub fn get_variable_dims(&self, name: &str) -> Result<Vec<usize>, Error> { let name = ffi::CString::new(name)?; unsafe { let mut size = 0; check(menoh_sys::menoh_model_get_variable_dims_size(self.handle, name.as_ptr(), &mut size))?; let mut dims = Vec::with_capacity(size as _); for index in 0..size { let mut dim = 0; check(menoh_sys::menoh_model_get_variable_dims_at(self.handle, name.as_ptr(), index, &mut dim))?; dims.push(dim as _); } Ok(dims) } } fn get_variable_dtype(&self, name: &str) -> Result<menoh_sys::menoh_dtype, Error> { let name = ffi::CString::new(name)?; unsafe { let mut dtype = mem::uninitialized(); check(menoh_sys::menoh_model_get_variable_dtype(self.handle, name.as_ptr(), &mut dtype))?; Ok(dtype) } } pub fn get_variable<T>(&self, name: &str) -> Result<(Vec<usize>, &[T]), Error> where T: Dtype { T::check(self.get_variable_dtype(name)?)?; let dims = self.get_variable_dims(name)?; let name = ffi::CString::new(name)?; let mut buffer = ptr::null_mut(); unsafe { check(menoh_sys::menoh_model_get_variable_buffer_handle(self.handle, name.as_ptr(), &mut buffer))?; let buffer = slice::from_raw_parts(buffer as _, dims.iter().product()); Ok((dims, buffer)) } } pub fn get_variable_mut<T>(&mut self, name: &str) -> Result<(Vec<usize>, &mut [T]), Error> where T: Dtype { T::check(self.get_variable_dtype(name)?)?; let dims = self.get_variable_dims(name)?; let name = ffi::CString::new(name)?; let mut buffer = ptr::null_mut(); unsafe { check(menoh_sys::menoh_model_get_variable_buffer_handle(self.handle, name.as_ptr(), &mut buffer))?; let buffer = slice::from_raw_parts_mut(buffer as _, dims.iter().product()); Ok((dims, buffer)) } } pub fn run(&mut self) -> Result<(), Error> { unsafe { check(menoh_sys::menoh_model_run(self.handle)) } } } impl Handler for Model { type 
Handle = menoh_sys::menoh_model_handle; unsafe fn from_handle(handle: Self::Handle) -> Self { Self { handle } } unsafe fn handle(&self) -> Self::Handle { self.handle } } impl Drop for Model { fn drop(&mut self) { unsafe { menoh_sys::menoh_delete_model(self.handle) } } }
use menoh_sys; use std::ffi; use std::mem; use std::ptr; use std::slice; use Dtype; use handler::Handler; use Error; use error::check; pub struct Model { handle: menoh_sys::menoh_model_handle, } impl Model { pub fn get_variable_dims(&self, name: &str) -> Result<Vec<usize>, Error> { let name = ffi::CString::new(name)?; unsafe { let mut size = 0; check(menoh_sys::menoh_model_get_variable_dims_size(self.handle, name.as_ptr(), &mut size))?; let mut dims = Vec::with_capacity(size as _); for index in 0..size { let mut dim = 0; check(menoh_sys::menoh_model_get_variable_dims_at(self.handle, name.as_ptr(), index, &mut dim))?; dims.push(dim as _); } Ok(dims) } } fn get_variable_dtype(&self, name: &str) -> Result<menoh_sys::menoh_dtype, Error> { let name = ffi::CString::new(name)?; unsafe { let mut dtype = mem::uninitialized(); check(menoh_sys::menoh_model_get_variable_dtype(self.handle, name.as_ptr(), &mut dtype))?; Ok(dtype) } } pub fn get_variable<T>(&self, name: &str) -> Result<(Vec<usize>, &[T]), Error> where T: Dtype { T::check(self.get_variable_dtype(name)?)?; let dims = self.get_variable_dims(name)?; let name = ffi::CString::new(name)?; let mut buffer = ptr::null_mut(); unsaf
pub fn get_variable_mut<T>(&mut self, name: &str) -> Result<(Vec<usize>, &mut [T]), Error> where T: Dtype { T::check(self.get_variable_dtype(name)?)?; let dims = self.get_variable_dims(name)?; let name = ffi::CString::new(name)?; let mut buffer = ptr::null_mut(); unsafe { check(menoh_sys::menoh_model_get_variable_buffer_handle(self.handle, name.as_ptr(), &mut buffer))?; let buffer = slice::from_raw_parts_mut(buffer as _, dims.iter().product()); Ok((dims, buffer)) } } pub fn run(&mut self) -> Result<(), Error> { unsafe { check(menoh_sys::menoh_model_run(self.handle)) } } } impl Handler for Model { type Handle = menoh_sys::menoh_model_handle; unsafe fn from_handle(handle: Self::Handle) -> Self { Self { handle } } unsafe fn handle(&self) -> Self::Handle { self.handle } } impl Drop for Model { fn drop(&mut self) { unsafe { menoh_sys::menoh_delete_model(self.handle) } } }
e { check(menoh_sys::menoh_model_get_variable_buffer_handle(self.handle, name.as_ptr(), &mut buffer))?; let buffer = slice::from_raw_parts(buffer as _, dims.iter().product()); Ok((dims, buffer)) } }
function_block-function_prefixed
[ { "content": "pub fn check(code: menoh_sys::menoh_error_code) -> Result<(), Error> {\n\n let code = code as menoh_sys::menoh_error_code_constant;\n\n\n\n if code == menoh_sys::menoh_error_code_success {\n\n Ok(())\n\n } else {\n\n let message = unsafe {\n\n ffi::CStr::from_ptr(...
Rust
src/component/transformed.rs
DaseinPhaos/arendur
5c3b6c4dffd969131ebf37a3f8eb9b6a3cf7f5c6
use geometry::prelude::*; use super::*; use std::sync::Arc; use spectrum::*; use renderer::scene::Scene; use lighting::{LightFlag, LightSample, SampleInfo, PathInfo}; #[derive(Clone, Debug)] pub struct TransformedComposable<T> { inner: T, local_parent: Arc<Matrix4f>, parent_local: Arc<Matrix4f>, } impl<T> TransformedComposable<T> { pub fn new(inner: T, local_parent: Arc<Matrix4f>, parent_local: Arc<Matrix4f>) -> Self { #[cfg(debug)] { assert_relative_eq(*local_parent *(*parent_local), Matrix4f::identity()); } TransformedComposable{ inner: inner, local_parent: local_parent, parent_local: parent_local, } } } impl<T: Composable> Composable for TransformedComposable<T> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 1.0 as Float + self.inner.intersection_cost() } #[inline] default fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] default fn as_light(&self) -> &Light { unimplemented!(); } } impl<T: Primitive> Composable for TransformedComposable<T> { #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); ret.primitive_hit = Some(self); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { self } } impl<T: Primitive> Primitive for TransformedComposable<T> { #[inline] fn is_emissive(&self) -> bool { self.inner.is_emissive() } #[inline] fn get_material(&self) -> &Material { self.inner.get_material() } } impl<T: Primitive> Light for TransformedComposable<T> 
{ fn flags(&self) -> LightFlag { self.inner.flags() } #[inline] fn evaluate_ray(&self, rd: &RayDifferential) -> RGBSpectrumf { let rd = rd.apply_transform(&self.parent_local); self.inner.evaluate_ray(&rd) } #[inline] fn evaluate_path(&self, pos: Point3f, dir: Vector3f) -> RGBSpectrumf { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); self.inner.evaluate_path(pos, dir) } #[inline] fn evaluate_sampled(&self, pos: Point3f, sample: Point2f) -> LightSample { let pos = self.parent_local.transform_point(pos); let ls = self.inner.evaluate_sampled(pos, sample); ls.apply_transform(&*self.local_parent) } #[inline] fn generate_path(&self, samples: SampleInfo) -> PathInfo { self.inner.generate_path(samples).apply_transform(&*self.local_parent) } #[inline] fn pdf_path(&self, pos: Point3f, dir: Vector3f, norm: Vector3f) -> (Float, Float) { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); let norm = self.parent_local.transform_norm(norm); self.inner.pdf_path(pos, dir, norm) } #[inline] fn pdf(&self, pos: Point3f, wi: Vector3f) -> Float { let pos = self.parent_local.transform_point(pos); let wi = self.parent_local.transform_vector(wi); self.inner.pdf(pos, wi) } #[inline] fn power(&self) -> RGBSpectrumf { self.inner.power() } #[inline] fn preprocess(&mut self, s: &Scene) { self.inner.preprocess(s); } } impl<T: Composable> Composable for TransformedComposable<Arc<T>> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 2.0 as Float + self.inner.intersection_cost() } #[inline] default fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); } *ray = 
ray.apply_transform(&*self.local_parent); ret } #[inline] default fn as_light(&self) -> &Light { unimplemented!(); } } impl<T: Primitive> Composable for TransformedComposable<Arc<T>> { #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); ret.primitive_hit = Some(self); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { self } } impl<T: Primitive> Primitive for TransformedComposable<Arc<T>> { #[inline] fn is_emissive(&self) -> bool { self.inner.is_emissive() } #[inline] fn get_material(&self) -> &Material { self.inner.get_material() } } impl<T: Primitive> Light for TransformedComposable<Arc<T>> { fn flags(&self) -> LightFlag { self.inner.flags() } #[inline] fn evaluate_ray(&self, rd: &RayDifferential) -> RGBSpectrumf { let rd = rd.apply_transform(&self.parent_local); self.inner.evaluate_ray(&rd) } #[inline] fn evaluate_path(&self, pos: Point3f, dir: Vector3f) -> RGBSpectrumf { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); self.inner.evaluate_path(pos, dir) } #[inline] fn evaluate_sampled(&self, pos: Point3f, sample: Point2f) -> LightSample { let pos = self.parent_local.transform_point(pos); let ls = self.inner.evaluate_sampled(pos, sample); ls.apply_transform(&*self.local_parent) } #[inline] fn generate_path(&self, samples: SampleInfo) -> PathInfo { self.inner.generate_path(samples).apply_transform(&*self.local_parent) } #[inline] fn pdf_path(&self, pos: Point3f, dir: Vector3f, norm: Vector3f) -> (Float, Float) { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); let norm = self.parent_local.transform_norm(norm); self.inner.pdf_path(pos, dir, norm) } #[inline] fn pdf(&self, pos: Point3f, wi: Vector3f) -> Float { 
let pos = self.parent_local.transform_point(pos); let wi = self.parent_local.transform_vector(wi); self.inner.pdf(pos, wi) } #[inline] fn power(&self) -> RGBSpectrumf { self.inner.power() } } impl Composable for TransformedComposable<Arc<Composable>> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 2.0 as Float + self.inner.intersection_cost() } #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { unimplemented!(); } } impl Composable for TransformedComposable<Arc<Primitive>> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 2.0 as Float + self.inner.intersection_cost() } #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); ret.primitive_hit = Some(self); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { self } } impl Primitive for TransformedComposable<Arc<Primitive>> { #[inline] fn is_emissive(&self) -> bool { self.inner.is_emissive() } #[inline] fn get_material(&self) -> &Material { self.inner.get_material() } } impl Light for TransformedComposable<Arc<Primitive>> { fn flags(&self) -> LightFlag { self.inner.flags() } #[inline] fn evaluate_ray(&self, rd: &RayDifferential) -> RGBSpectrumf { let rd = rd.apply_transform(&self.parent_local); self.inner.evaluate_ray(&rd) } #[inline] fn 
evaluate_path(&self, pos: Point3f, dir: Vector3f) -> RGBSpectrumf { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); self.inner.evaluate_path(pos, dir) } #[inline] fn evaluate_sampled(&self, pos: Point3f, sample: Point2f) -> LightSample { let pos = self.parent_local.transform_point(pos); let ls = self.inner.evaluate_sampled(pos, sample); ls.apply_transform(&*self.local_parent) } #[inline] fn generate_path(&self, samples: SampleInfo) -> PathInfo { self.inner.generate_path(samples).apply_transform(&*self.local_parent) } #[inline] fn pdf_path(&self, pos: Point3f, dir: Vector3f, norm: Vector3f) -> (Float, Float) { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); let norm = self.parent_local.transform_norm(norm); self.inner.pdf_path(pos, dir, norm) } #[inline] fn pdf(&self, pos: Point3f, wi: Vector3f) -> Float { let pos = self.parent_local.transform_point(pos); let wi = self.parent_local.transform_vector(wi); self.inner.pdf(pos, wi) } #[inline] fn power(&self) -> RGBSpectrumf { self.inner.power() } }
use geometry::prelude::*; use super::*; use std::sync::Arc; use spectrum::*; use renderer::scene::Scene; use lighting::{LightFlag, LightSample, SampleInfo, PathInfo}; #[derive(Clone, Debug)] pub struct TransformedComposable<T> { inner: T, local_parent: Arc<Matrix4f>, parent_local: Arc<Matrix4f>, } impl<T> TransformedComposable<T> { pub fn new(inner: T, local_parent: Arc<Matrix4f>, parent_local: Arc<Matrix4f>) -> Self { #[cfg(debug)] { assert_relative_eq(*local_parent *(*parent_local), Matrix4f::identity()); } TransformedComposable{ inner: inner, local_parent: local_parent, parent_local: parent_local, } } } impl<T: Composable> Composable for TransformedComposable<T> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 1.0 as Float + self.inner.intersection_cost() } #[inline] default fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] default fn as_light(&self) -> &Light { unimplemented!(); } } impl<T: Primitive> Composable for TransformedComposable<T> { #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); ret.primitive_hit = Some(self); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { self } } impl<T: Primitive> Primitive for TransformedComposable<T> { #[inline] fn is_emissive(&self) -> bool { self.inner.is_emissive() } #[inline] fn get_material(&self) -> &Material { self.inner.get_material() } } impl<T: Primitive> Light for TransformedComposable<T> 
{ fn flags(&self) -> LightFlag { self.inner.flags() } #[inline] fn evaluate_ray(&self, rd: &RayDifferential) -> RGBSpectrumf { let rd = rd.apply_transform(&self.parent_local); self.inner.evaluate_ray(&rd) } #[inline] fn evaluate_path(&self, pos: Point3f, dir: Vector3f) -> RGBSpectrumf { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); self.inner.evaluate_path(pos, dir) } #[inline] fn evaluate_sampled(&self, pos: Point3f, sample: Point2f) -> LightSample { let pos = self.parent_local.transform_point(pos); let ls = self.inner.evaluate_sampled(pos, sample); ls.apply_transform(&*self.local_parent) } #[inline] fn generate_path(&self, samples: SampleInfo) -> PathInfo { self.inner.generate_path(samples).apply_transform(&*self.local_parent) } #[inline] fn pdf_path(&self, pos: Point3f, dir: Vector3f, norm: Vector3f) -> (Float, Float) { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); let norm = self.parent_local.transform_norm(norm); self.inner.pdf_path(pos, dir, norm) } #[inline] fn pdf(&self, pos: Point3f, wi: Vector3f) -> Float { let pos = self.parent_local.transform_point(pos); let wi = self.parent_local.transform_vector(wi); self.inner.pdf(pos, wi) } #[inline] fn power(&self) -> RGBSpectrumf { self.inner.power() } #[inline] fn preprocess(&mut self, s: &Scene) { self.inner.preprocess(s); } } impl<T: Composable> Composable for TransformedComposable<Arc<T>> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 2.0 as Float + self.inner.intersection_cost() } #[inline] default fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); } *ray = 
ray.apply_transform(&*self.local_parent); ret } #[inline] default fn as_light(&self) -> &Light { unimplemented!(); } } impl<T: Primitive> Composable for TransformedComposable<Arc<T>> { #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); ret.primitive_hit = Some(self); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { self } } impl<T: Primitive> Primitive for TransformedComposable<Arc<T>> { #[inline] fn is_emissive(&self) -> bool { self.inner.is_emissive() } #[inline] fn get_material(&self) -> &Material { self.inner.get_material() } } impl<T: Primitive> Light for TransformedComposable<Arc<T>> { fn flags(&self) -> LightFlag { self.inner.flags() } #[inline] fn evaluate_ray(&self, rd: &RayDifferential) -> RGBSpectrumf { let rd = rd.apply_transform(&self.parent_local); self.inner.evaluate_ray(&rd) } #[inline] fn evaluate_path(&self, pos: Point3f, dir: Vector3f) -> RGBSpectrumf { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); self.inner.evaluate_path(pos, dir) } #[inline] fn evaluate_sampled(&self, pos: Point3f, sample: Point2f) -> LightSample { let pos = self.parent_local.transform_point(pos); let ls = self.inner.evaluate_sampled(pos, sample); ls.apply_transform(&*self.local_parent) } #[inline] fn generate_path(&self, samples: SampleInfo) -> PathInfo { self.inner.generate_path(samples).apply_transform(&*self.local_parent) } #[inline] fn pdf_path(&self, pos: Point3f, dir: Vector3f, norm: Vector3f) -> (Float, Float) { let pos = self.parent_local.transform_point(po
#[inline] fn pdf(&self, pos: Point3f, wi: Vector3f) -> Float { let pos = self.parent_local.transform_point(pos); let wi = self.parent_local.transform_vector(wi); self.inner.pdf(pos, wi) } #[inline] fn power(&self) -> RGBSpectrumf { self.inner.power() } } impl Composable for TransformedComposable<Arc<Composable>> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 2.0 as Float + self.inner.intersection_cost() } #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { unimplemented!(); } } impl Composable for TransformedComposable<Arc<Primitive>> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 2.0 as Float + self.inner.intersection_cost() } #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); ret.primitive_hit = Some(self); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { self } } impl Primitive for TransformedComposable<Arc<Primitive>> { #[inline] fn is_emissive(&self) -> bool { self.inner.is_emissive() } #[inline] fn get_material(&self) -> &Material { self.inner.get_material() } } impl Light for TransformedComposable<Arc<Primitive>> { fn flags(&self) -> LightFlag { self.inner.flags() } #[inline] fn evaluate_ray(&self, rd: &RayDifferential) -> RGBSpectrumf { let rd = 
rd.apply_transform(&self.parent_local); self.inner.evaluate_ray(&rd) } #[inline] fn evaluate_path(&self, pos: Point3f, dir: Vector3f) -> RGBSpectrumf { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); self.inner.evaluate_path(pos, dir) } #[inline] fn evaluate_sampled(&self, pos: Point3f, sample: Point2f) -> LightSample { let pos = self.parent_local.transform_point(pos); let ls = self.inner.evaluate_sampled(pos, sample); ls.apply_transform(&*self.local_parent) } #[inline] fn generate_path(&self, samples: SampleInfo) -> PathInfo { self.inner.generate_path(samples).apply_transform(&*self.local_parent) } #[inline] fn pdf_path(&self, pos: Point3f, dir: Vector3f, norm: Vector3f) -> (Float, Float) { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); let norm = self.parent_local.transform_norm(norm); self.inner.pdf_path(pos, dir, norm) } #[inline] fn pdf(&self, pos: Point3f, wi: Vector3f) -> Float { let pos = self.parent_local.transform_point(pos); let wi = self.parent_local.transform_vector(wi); self.inner.pdf(pos, wi) } #[inline] fn power(&self) -> RGBSpectrumf { self.inner.power() } }
s); let dir = self.parent_local.transform_vector(dir); let norm = self.parent_local.transform_norm(norm); self.inner.pdf_path(pos, dir, norm) }
function_block-function_prefixed
[ { "content": "#[inline]\n\npub fn sample_uniform_cone(u: Point2f, cos_max: Float) -> Vector3f {\n\n let costheta = (1.0 as Float - u.x) + u.x * cos_max;\n\n let sintheta = (1.0 as Float - costheta*costheta).sqrt();\n\n let phi = u.y * (2.0 as Float * float::pi());\n\n Vector3f::new(sintheta*phi.cos(...
Rust
src/game.rs
frellica/bevy_mine_sweeper
31bf44c00615a25f03e1dabc12c61a2e4d4c666b
use std::cmp; use bevy::{ diagnostic::{Diagnostics, FrameTimeDiagnosticsPlugin}, prelude::*, }; use crate::mine_core::{ BlockType, BlockStatus, MinePlayground, MineBlock, Position, ClickResult }; pub fn game_app(config: GameConfig) { App::build() .add_resource(WindowDescriptor { vsync: false, width: cmp::max(config.width * BLOCK_WIDTH, MIN_WIDTH) as f32, height: cmp::max(config.height * BLOCK_WIDTH + Y_MARGIN, MIN_HEIGHT) as f32, title: String::from("Mine Sweeper"), resizable: false, ..Default::default() }) .add_resource(config) .add_plugins(DefaultPlugins) .add_plugin(GamePlugin) .run(); } struct GamePlugin; impl Plugin for GamePlugin { fn build(&self, app: &mut AppBuilder) { app.init_resource::<ButtonMaterials>() .add_resource(CursorLocation(Vec2::new(0.0, 0.0))) .add_plugin(FrameTimeDiagnosticsPlugin) .add_resource(State::new(GameState::Prepare)) .add_startup_system(setup.system()) .add_system(fps_update.system()) .add_system(debug_text_update.system()) .add_system(restart_button_system.system()) .add_startup_system(new_map.system()) .add_system(handle_movement.system()) .add_system(handle_click.system()) .add_system(render_map.system()) .add_stage_after(stage::UPDATE, STAGE, StateStage::<GameState>::default()) .on_state_enter(STAGE, GameState::Prepare, init_map_render.system()) .on_state_enter(STAGE, GameState::Ready, new_map.system()); } } const BLOCK_WIDTH: usize = 24; const MIN_HEIGHT: usize = 160; const MIN_WIDTH: usize = 160; const Y_MARGIN: usize = 50; const SPRITE_SIZE: f32 = 48.0; const STAGE: &str = "game_state"; const NEW_GAME_TEXT: &str = "New Game"; const HIDDEN_INDEX: usize = 10; struct RefreshButton; struct DebugText; struct MapData { map_entity: Entity, } struct WindowOffset { x: f32, y: f32, } #[derive(Debug, Clone, Copy)] pub struct GameConfig { pub width: usize, pub height: usize, pub mine_count: usize, } #[derive(Debug, Clone, PartialEq)] enum GameState { Prepare, Ready, Running, Over, } #[derive(Default, Debug)] struct CursorLocation(Vec2); 
struct LastActionText(String); struct ButtonMaterials { normal: Handle<ColorMaterial>, hovered: Handle<ColorMaterial>, pressed: Handle<ColorMaterial>, } impl FromResources for ButtonMaterials { fn from_resources(resources: &Resources) -> Self { let mut materials = resources.get_mut::<Assets<ColorMaterial>>().unwrap(); ButtonMaterials { normal: materials.add(Color::rgb(0.15, 0.15, 0.15).into()), hovered: materials.add(Color::rgb(0.25, 0.25, 0.25).into()), pressed: materials.add(Color::rgb(0.35, 0.75, 0.35).into()), } } } impl MineBlock { fn get_sprite_index(&self) -> usize { match self.bstatus { BlockStatus::Flaged => 12, BlockStatus::QuestionMarked => 11, BlockStatus::Shown => { match self.btype { BlockType::Mine => 9, BlockType::Tip(val) => val, BlockType::Space => 0, } }, BlockStatus::Hidden => HIDDEN_INDEX, } } } struct FpsRefresh; fn setup( commands: &mut Commands, asset_server: Res<AssetServer>, button_materials: Res<ButtonMaterials>, windows: ResMut<Windows>, mut texture_atlases: ResMut<Assets<TextureAtlas>>, ) { let font = asset_server.load("fonts/pointfree.ttf"); let window = windows.get_primary().unwrap(); commands .spawn(CameraUiBundle::default()) .spawn(Camera2dBundle::default()); commands.spawn(TextBundle { style: Style { align_self: AlignSelf::FlexEnd, position_type: PositionType::Absolute, position: Rect { top: Val::Px(5.0), left: Val::Px(5.0), ..Default::default() }, ..Default::default() }, text: Text { value: "debug text here".to_string(), font: font.clone(), style: TextStyle { font_size: 18.0, color: Color::rgba(0.0, 0.5, 0.5, 0.5), alignment: TextAlignment::default(), }, }, ..Default::default() }).with(DebugText); commands .spawn(TextBundle { style: Style { align_self: AlignSelf::FlexEnd, position_type: PositionType::Absolute, position: Rect { bottom: Val::Px(5.0), right: Val::Px(5.0), ..Default::default() }, ..Default::default() }, text: Text { value: "-".to_string(), font: font.clone(), style: TextStyle { font_size: 20.0, color: Color::rgba(0.0, 
0.5, 0.5, 0.5), alignment: TextAlignment::default(), }, }, ..Default::default() }) .with(FpsRefresh); commands.insert_resource(WindowOffset { x: window.width() as f32 / 2.0 - BLOCK_WIDTH as f32 / 2.0, y: window.height() as f32 / 2.0 - BLOCK_WIDTH as f32 / 2.0, }); commands .insert_resource(LastActionText(NEW_GAME_TEXT.to_string())) .spawn(ButtonBundle { style: Style { size: Size::new(Val::Px(100.0), Val::Px(25.0)), position_type: PositionType::Absolute, position: Rect { left: Val::Px((window.width() as f32) / 2.0 - 50.0), top: Val::Px(12.5), ..Default::default() }, justify_content: JustifyContent::Center, align_items: AlignItems::Center, ..Default::default() }, material: button_materials.normal.clone(), ..Default::default() }) .with_children(|parent| { parent.spawn(TextBundle { text: Text { value: "New Game".to_string(), font: asset_server.load("fonts/pointfree.ttf"), style: TextStyle { font_size: 20.0, color: Color::rgb(0.9, 0.9, 0.9), ..Default::default() }, }, ..Default::default() }).with(RefreshButton); }); let texture_handle = asset_server.load("textures/block.png"); let texture_atlas = TextureAtlas::from_grid(texture_handle, Vec2::new(SPRITE_SIZE, SPRITE_SIZE), 13, 1); let texture_atlas_handle = texture_atlases.add(texture_atlas); commands.insert_resource(texture_atlas_handle); } struct RenderBlock { pos: Position, } fn new_map( commands: &mut Commands, config: Res<GameConfig>, ) { commands .insert_resource(MinePlayground::init(&config.width, &config.height, &config.mine_count).unwrap()); commands.spawn((MinePlayground::init(&config.width, &config.height, &config.mine_count).unwrap(), )); commands.insert_resource(MapData { map_entity: commands.current_entity().unwrap(), }); } fn init_map_render( commands: &mut Commands, texture_atlases: Res<Assets<TextureAtlas>>, atlas_handle: Res<Handle<TextureAtlas>>, window_offset: Res<WindowOffset>, config: Res<GameConfig>, mut game_state: ResMut<State<GameState>>, ) { println!("111init_map_render run once"); for y in 
0..config.height { for x in 0..config.width { let texture_atlas = texture_atlases.get_handle(atlas_handle.clone()); commands .spawn(SpriteSheetBundle { transform: Transform { translation: Vec3::new( (x * BLOCK_WIDTH) as f32 - window_offset.x, (y * BLOCK_WIDTH) as f32 - window_offset.y, 0.0 ), scale: Vec3::splat(0.5), ..Default::default() }, texture_atlas, sprite: TextureAtlasSprite::new(HIDDEN_INDEX as u32), ..Default::default() }) .with(RenderBlock { pos: Position { x, y } }); } } println!("{:?}", game_state.current()); game_state.set_next(GameState::Ready).unwrap(); } fn render_map ( query: Query< &MinePlayground, Changed<MinePlayground>, >, mut sprites: Query<(&mut TextureAtlasSprite, &RenderBlock)>, ) { for mp in query.iter() { println!("detect mp changed{:?}", mp.shown_count); for (mut sprite, rb) in sprites.iter_mut() { sprite.index = mp.map[rb.pos.y][rb.pos.x].get_sprite_index() as u32; } } } fn handle_movement( mut cursor_pos: ResMut<CursorLocation>, cursor_moved_events: Res<Events<CursorMoved>>, mut evr_cursor: Local<EventReader<CursorMoved>>, ) { for ev in evr_cursor.iter(&cursor_moved_events) { cursor_pos.0 = ev.position; } } fn handle_click( btns: Res<Input<MouseButton>>, cursor_pos: Res<CursorLocation>, config: Res<GameConfig>, mut mquery: Query<&mut MinePlayground>, map_data: Res<MapData>, mut text_query: Query<&mut Text, With<RefreshButton>>, mut last_action_text: ResMut<LastActionText>, mut game_state: ResMut<State<GameState>>, ) { if let GameState::Over = game_state.current() { return; } if btns.just_released(MouseButton::Left) { if let Some((x, y)) = get_block_index_by_cursor_pos(cursor_pos.0, *config) { println!("{:?}-{:?}", x, y); let mut mp: Mut<MinePlayground> = mquery.get_component_mut(map_data.map_entity).unwrap(); if let GameState::Ready = game_state.current() { if let BlockType::Mine = mp.map[y][x].btype { mp.fix(&x, &y); } } let click_result = mp.click(&x, &y); println!("{:?}", click_result); match click_result { ClickResult::Wasted => { 
let mut text = text_query.iter_mut().next().unwrap(); text.value = String::from("Game Over"); *last_action_text = LastActionText(String::from("Game Over")); game_state.set_next(GameState::Over).unwrap(); return; }, ClickResult::Win => { let mut text = text_query.iter_mut().next().unwrap(); text.value = String::from("Finished!"); *last_action_text = LastActionText(String::from("Finished!")); game_state.set_next(GameState::Over).unwrap(); return; } _ => {} } if let GameState::Ready = game_state.current() { game_state.set_next(GameState::Running).unwrap(); } } } if btns.just_released(MouseButton::Right) { if let Some((x, y)) = get_block_index_by_cursor_pos(cursor_pos.0, *config) { println!("{:?}-{:?}", x, y); if let GameState::Ready = game_state.current() { game_state.set_next(GameState::Running).unwrap(); } let mut mp: Mut<MinePlayground> = mquery.get_component_mut(map_data.map_entity).unwrap(); mp.right_click(&x, &y); } } } fn fps_update( diagnostics: Res<Diagnostics>, mut query: Query<&mut Text, With<FpsRefresh>>, ) { for mut text in query.iter_mut() { let mut fps = 0.0; if let Some(fps_diagnostic) = diagnostics.get(FrameTimeDiagnosticsPlugin::FPS) { if let Some(fps_avg) = fps_diagnostic.average() { fps = fps_avg; } } text.value = format!( "{:.1} fps", fps, ); } } fn debug_text_update( mut query: Query<&mut Text, With<DebugText>>, game_state: Res<State<GameState>>, ) { for mut text in query.iter_mut() { text.value = format!("state: {:?}", game_state.current()); } } fn restart_button_system( button_materials: Res<ButtonMaterials>, mut interaction_query: Query< (&Interaction, &mut Handle<ColorMaterial>, &Children), (Mutated<Interaction>, With<Button>), >, mut text_query: Query<&mut Text>, mut last_action_text: ResMut<LastActionText>, mut game_state: ResMut<State<GameState>>, ) { for (interaction, mut material, children) in interaction_query.iter_mut() { let mut text = text_query.get_mut(children[0]).unwrap(); match *interaction { Interaction::Clicked => { *material = 
button_materials.pressed.clone(); text.value = NEW_GAME_TEXT.to_string(); *last_action_text = LastActionText(NEW_GAME_TEXT.to_string()); if *game_state.current() != GameState::Prepare { game_state.set_next(GameState::Prepare).unwrap(); } } Interaction::Hovered => { *material = button_materials.hovered.clone(); text.value = NEW_GAME_TEXT.to_string(); } Interaction::None => { *material = button_materials.normal.clone(); text.value = (*last_action_text.0).to_string(); } } } } fn get_block_index_by_cursor_pos(pos: Vec2, config: GameConfig) -> Option<(usize, usize)> { let x = (pos.x / BLOCK_WIDTH as f32).floor() as usize; let y = (pos.y / BLOCK_WIDTH as f32).floor() as usize; if (0..config.height).contains(&y) && (0..config.width).contains(&x) { return Some((x, y)); } None }
use std::cmp; use bevy::{ diagnostic::{Diagnostics, FrameTimeDiagnosticsPlugin}, prelude::*, }; use crate::mine_core::{ BlockType, BlockStatus, MinePlayground, MineBlock, Position, ClickResult }; pub fn game_app(config: GameConfig) { App::build() .add_resource(WindowDescriptor { vsync: false, width: cmp::max(config.width * BLOCK_WIDTH, MIN_WIDTH) as f32, height: cmp::max(config.height * BLOCK_WIDTH + Y_MARGIN, MIN_HEIGHT) as f32, title: String::from("Mine Sweeper"), resizable: false, ..Default::default() }) .add_resource(config) .add_plugins(DefaultPlugins) .add_plugin(GamePlugin) .run(); } struct GamePlugin; impl Plugin for GamePlugin { fn build(&self, app: &mut AppBuilder) { app.init_resource::<ButtonMaterials>() .add_resource(CursorLocation(Vec2::new(0.0, 0.0))) .add_plugin(FrameTimeDiagnosticsPlugin) .add_resource(State::new(GameState::Prepare)) .add_startup_system(setup.system()) .add_system(fps_update.system()) .add_system(debug_text_update.system()) .add_system(restart_button_system.system()) .add_startup_system(new_map.system()) .add_system(handle_movement.system()) .add_system(handle_click.system()) .add_system(render_map.system()) .add_stage_after(stage::UPDATE, STAGE, StateStage::<GameState>::default()) .on_state_enter(STAGE, GameState::Prepare, init_map_render.system()) .on_state_enter(STAGE, GameState::Ready, new_map.system()); } } const BLOCK_WIDTH: usize = 24; const MIN_HEIGHT: usize = 160; const MIN_WIDTH: usize = 160; const Y_MARGIN: usize = 50; const SPRITE_SIZE: f32 = 48.0; const STAGE: &str = "game_state"; const NEW_GAME_TEXT: &str = "New Game"; const HIDDEN_INDEX: usize = 10; struct RefreshButton; struct DebugText; struct MapData { map_entity: Entity, } struct WindowOffset { x: f32, y: f32, } #[derive(Debug, Clone, Copy)] pub struct GameConfig { pub width: usize, pub height: usize, pub mine_count: usi
lexEnd, position_type: PositionType::Absolute, position: Rect { top: Val::Px(5.0), left: Val::Px(5.0), ..Default::default() }, ..Default::default() }, text: Text { value: "debug text here".to_string(), font: font.clone(), style: TextStyle { font_size: 18.0, color: Color::rgba(0.0, 0.5, 0.5, 0.5), alignment: TextAlignment::default(), }, }, ..Default::default() }).with(DebugText); commands .spawn(TextBundle { style: Style { align_self: AlignSelf::FlexEnd, position_type: PositionType::Absolute, position: Rect { bottom: Val::Px(5.0), right: Val::Px(5.0), ..Default::default() }, ..Default::default() }, text: Text { value: "-".to_string(), font: font.clone(), style: TextStyle { font_size: 20.0, color: Color::rgba(0.0, 0.5, 0.5, 0.5), alignment: TextAlignment::default(), }, }, ..Default::default() }) .with(FpsRefresh); commands.insert_resource(WindowOffset { x: window.width() as f32 / 2.0 - BLOCK_WIDTH as f32 / 2.0, y: window.height() as f32 / 2.0 - BLOCK_WIDTH as f32 / 2.0, }); commands .insert_resource(LastActionText(NEW_GAME_TEXT.to_string())) .spawn(ButtonBundle { style: Style { size: Size::new(Val::Px(100.0), Val::Px(25.0)), position_type: PositionType::Absolute, position: Rect { left: Val::Px((window.width() as f32) / 2.0 - 50.0), top: Val::Px(12.5), ..Default::default() }, justify_content: JustifyContent::Center, align_items: AlignItems::Center, ..Default::default() }, material: button_materials.normal.clone(), ..Default::default() }) .with_children(|parent| { parent.spawn(TextBundle { text: Text { value: "New Game".to_string(), font: asset_server.load("fonts/pointfree.ttf"), style: TextStyle { font_size: 20.0, color: Color::rgb(0.9, 0.9, 0.9), ..Default::default() }, }, ..Default::default() }).with(RefreshButton); }); let texture_handle = asset_server.load("textures/block.png"); let texture_atlas = TextureAtlas::from_grid(texture_handle, Vec2::new(SPRITE_SIZE, SPRITE_SIZE), 13, 1); let texture_atlas_handle = texture_atlases.add(texture_atlas); 
commands.insert_resource(texture_atlas_handle); } struct RenderBlock { pos: Position, } fn new_map( commands: &mut Commands, config: Res<GameConfig>, ) { commands .insert_resource(MinePlayground::init(&config.width, &config.height, &config.mine_count).unwrap()); commands.spawn((MinePlayground::init(&config.width, &config.height, &config.mine_count).unwrap(), )); commands.insert_resource(MapData { map_entity: commands.current_entity().unwrap(), }); } fn init_map_render( commands: &mut Commands, texture_atlases: Res<Assets<TextureAtlas>>, atlas_handle: Res<Handle<TextureAtlas>>, window_offset: Res<WindowOffset>, config: Res<GameConfig>, mut game_state: ResMut<State<GameState>>, ) { println!("111init_map_render run once"); for y in 0..config.height { for x in 0..config.width { let texture_atlas = texture_atlases.get_handle(atlas_handle.clone()); commands .spawn(SpriteSheetBundle { transform: Transform { translation: Vec3::new( (x * BLOCK_WIDTH) as f32 - window_offset.x, (y * BLOCK_WIDTH) as f32 - window_offset.y, 0.0 ), scale: Vec3::splat(0.5), ..Default::default() }, texture_atlas, sprite: TextureAtlasSprite::new(HIDDEN_INDEX as u32), ..Default::default() }) .with(RenderBlock { pos: Position { x, y } }); } } println!("{:?}", game_state.current()); game_state.set_next(GameState::Ready).unwrap(); } fn render_map ( query: Query< &MinePlayground, Changed<MinePlayground>, >, mut sprites: Query<(&mut TextureAtlasSprite, &RenderBlock)>, ) { for mp in query.iter() { println!("detect mp changed{:?}", mp.shown_count); for (mut sprite, rb) in sprites.iter_mut() { sprite.index = mp.map[rb.pos.y][rb.pos.x].get_sprite_index() as u32; } } } fn handle_movement( mut cursor_pos: ResMut<CursorLocation>, cursor_moved_events: Res<Events<CursorMoved>>, mut evr_cursor: Local<EventReader<CursorMoved>>, ) { for ev in evr_cursor.iter(&cursor_moved_events) { cursor_pos.0 = ev.position; } } fn handle_click( btns: Res<Input<MouseButton>>, cursor_pos: Res<CursorLocation>, config: Res<GameConfig>, 
mut mquery: Query<&mut MinePlayground>, map_data: Res<MapData>, mut text_query: Query<&mut Text, With<RefreshButton>>, mut last_action_text: ResMut<LastActionText>, mut game_state: ResMut<State<GameState>>, ) { if let GameState::Over = game_state.current() { return; } if btns.just_released(MouseButton::Left) { if let Some((x, y)) = get_block_index_by_cursor_pos(cursor_pos.0, *config) { println!("{:?}-{:?}", x, y); let mut mp: Mut<MinePlayground> = mquery.get_component_mut(map_data.map_entity).unwrap(); if let GameState::Ready = game_state.current() { if let BlockType::Mine = mp.map[y][x].btype { mp.fix(&x, &y); } } let click_result = mp.click(&x, &y); println!("{:?}", click_result); match click_result { ClickResult::Wasted => { let mut text = text_query.iter_mut().next().unwrap(); text.value = String::from("Game Over"); *last_action_text = LastActionText(String::from("Game Over")); game_state.set_next(GameState::Over).unwrap(); return; }, ClickResult::Win => { let mut text = text_query.iter_mut().next().unwrap(); text.value = String::from("Finished!"); *last_action_text = LastActionText(String::from("Finished!")); game_state.set_next(GameState::Over).unwrap(); return; } _ => {} } if let GameState::Ready = game_state.current() { game_state.set_next(GameState::Running).unwrap(); } } } if btns.just_released(MouseButton::Right) { if let Some((x, y)) = get_block_index_by_cursor_pos(cursor_pos.0, *config) { println!("{:?}-{:?}", x, y); if let GameState::Ready = game_state.current() { game_state.set_next(GameState::Running).unwrap(); } let mut mp: Mut<MinePlayground> = mquery.get_component_mut(map_data.map_entity).unwrap(); mp.right_click(&x, &y); } } } fn fps_update( diagnostics: Res<Diagnostics>, mut query: Query<&mut Text, With<FpsRefresh>>, ) { for mut text in query.iter_mut() { let mut fps = 0.0; if let Some(fps_diagnostic) = diagnostics.get(FrameTimeDiagnosticsPlugin::FPS) { if let Some(fps_avg) = fps_diagnostic.average() { fps = fps_avg; } } text.value = format!( 
"{:.1} fps", fps, ); } } fn debug_text_update( mut query: Query<&mut Text, With<DebugText>>, game_state: Res<State<GameState>>, ) { for mut text in query.iter_mut() { text.value = format!("state: {:?}", game_state.current()); } } fn restart_button_system( button_materials: Res<ButtonMaterials>, mut interaction_query: Query< (&Interaction, &mut Handle<ColorMaterial>, &Children), (Mutated<Interaction>, With<Button>), >, mut text_query: Query<&mut Text>, mut last_action_text: ResMut<LastActionText>, mut game_state: ResMut<State<GameState>>, ) { for (interaction, mut material, children) in interaction_query.iter_mut() { let mut text = text_query.get_mut(children[0]).unwrap(); match *interaction { Interaction::Clicked => { *material = button_materials.pressed.clone(); text.value = NEW_GAME_TEXT.to_string(); *last_action_text = LastActionText(NEW_GAME_TEXT.to_string()); if *game_state.current() != GameState::Prepare { game_state.set_next(GameState::Prepare).unwrap(); } } Interaction::Hovered => { *material = button_materials.hovered.clone(); text.value = NEW_GAME_TEXT.to_string(); } Interaction::None => { *material = button_materials.normal.clone(); text.value = (*last_action_text.0).to_string(); } } } } fn get_block_index_by_cursor_pos(pos: Vec2, config: GameConfig) -> Option<(usize, usize)> { let x = (pos.x / BLOCK_WIDTH as f32).floor() as usize; let y = (pos.y / BLOCK_WIDTH as f32).floor() as usize; if (0..config.height).contains(&y) && (0..config.width).contains(&x) { return Some((x, y)); } None }
ze, } #[derive(Debug, Clone, PartialEq)] enum GameState { Prepare, Ready, Running, Over, } #[derive(Default, Debug)] struct CursorLocation(Vec2); struct LastActionText(String); struct ButtonMaterials { normal: Handle<ColorMaterial>, hovered: Handle<ColorMaterial>, pressed: Handle<ColorMaterial>, } impl FromResources for ButtonMaterials { fn from_resources(resources: &Resources) -> Self { let mut materials = resources.get_mut::<Assets<ColorMaterial>>().unwrap(); ButtonMaterials { normal: materials.add(Color::rgb(0.15, 0.15, 0.15).into()), hovered: materials.add(Color::rgb(0.25, 0.25, 0.25).into()), pressed: materials.add(Color::rgb(0.35, 0.75, 0.35).into()), } } } impl MineBlock { fn get_sprite_index(&self) -> usize { match self.bstatus { BlockStatus::Flaged => 12, BlockStatus::QuestionMarked => 11, BlockStatus::Shown => { match self.btype { BlockType::Mine => 9, BlockType::Tip(val) => val, BlockType::Space => 0, } }, BlockStatus::Hidden => HIDDEN_INDEX, } } } struct FpsRefresh; fn setup( commands: &mut Commands, asset_server: Res<AssetServer>, button_materials: Res<ButtonMaterials>, windows: ResMut<Windows>, mut texture_atlases: ResMut<Assets<TextureAtlas>>, ) { let font = asset_server.load("fonts/pointfree.ttf"); let window = windows.get_primary().unwrap(); commands .spawn(CameraUiBundle::default()) .spawn(Camera2dBundle::default()); commands.spawn(TextBundle { style: Style { align_self: AlignSelf::F
random
[ { "content": "fn get_surroundings(&x: &usize, &y: &usize, &max_width: &usize, &max_height: &usize) -> Vec<(usize, usize)> {\n\n let max_x = max_width - 1;\n\n let max_y = max_height - 1;\n\n let mut r = vec![];\n\n if x > 0 { r.push((x - 1, y)); }\n\n if x < max_x { r.push((x + 1, y)); }\n\n i...
Rust
src/parser/error.rs
boxofrox/combine
5b89f1913d7932b20c37e6a11bebd347a5942df1
use crate::{ error::{ ErrorInfo, ParseError, ParseResult::{self, *}, StreamError, Tracked, }, lib::marker::PhantomData, parser::ParseMode, Parser, Stream, StreamOnce, }; #[derive(Clone)] pub struct Unexpected<I, T, E>(E, PhantomData<fn(I) -> (I, T)>) where I: Stream; impl<Input, T, E> Parser<Input> for Unexpected<Input, T, E> where Input: Stream, E: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { type Output = T; type PartialState = (); #[inline] fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<T, <Input as StreamOnce>::Error> { EmptyErr(<Input as StreamOnce>::Error::empty(input.position()).into()) } fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) { errors.error.add(StreamError::unexpected(&self.0)); } } pub fn unexpected<Input, S>(message: S) -> Unexpected<Input, (), S> where Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { unexpected_any(message) } pub fn unexpected_any<Input, S, T>(message: S) -> Unexpected<Input, T, S> where Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { Unexpected(message, PhantomData) } #[derive(Clone)] pub struct Message<P, S>(P, S); impl<Input, P, S> Parser<Input> for Message<P, S> where Input: Stream, P: Parser<Input>, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { type Output = P::Output; type PartialState = P::PartialState; parse_mode!(Input); #[inline] fn parse_mode_impl<M>( &mut self, mode: M, input: &mut Input, state: &mut Self::PartialState, ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> where M: ParseMode, { match self.0.parse_mode(mode, input, state) { ConsumedOk(x) => ConsumedOk(x), EmptyOk(x) => EmptyOk(x), ConsumedErr(mut err) => { err.add_message(&self.1); ConsumedErr(err) } EmptyErr(err) => EmptyErr(err), } } fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) { self.0.add_error(errors); errors.error.add_message(&self.1); } forward_parser!(Input, parser_count add_consumed_expected_error, 0); } 
pub fn message<Input, P, S>(p: P, msg: S) -> Message<P, S> where P: Parser<Input>, Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { Message(p, msg) } #[derive(Clone)] pub struct Expected<P, S>(P, S); impl<Input, P, S> Parser<Input> for Expected<P, S> where P: Parser<Input>, Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { type Output = P::Output; type PartialState = P::PartialState; parse_mode!(Input); #[inline] fn parse_mode_impl<M>( &mut self, mode: M, input: &mut Input, state: &mut Self::PartialState, ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> where M: ParseMode, { self.0.parse_mode(mode, input, state) } fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) { ParseError::set_expected(errors, StreamError::expected(&self.1), |errors| { self.0.add_error(errors); }) } forward_parser!(Input, parser_count add_consumed_expected_error, 0); } pub fn expected<Input, P, S>(p: P, info: S) -> Expected<P, S> where P: Parser<Input>, Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { Expected(p, info) } #[derive(Clone)] pub struct Silent<P>(P); impl<Input, P> Parser<Input> for Silent<P> where P: Parser<Input>, Input: Stream, { type Output = P::Output; type PartialState = P::PartialState; parse_mode!(Input); #[inline] fn parse_mode_impl<M>( &mut self, mode: M, input: &mut Input, state: &mut Self::PartialState, ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> where M: ParseMode, { self.0.parse_mode(mode, input, state).map_err(|mut err| { err.clear_expected(); err }) } fn add_error(&mut self, _errors: &mut Tracked<<Input as StreamOnce>::Error>) {} fn add_consumed_expected_error(&mut self, _errors: &mut Tracked<<Input as StreamOnce>::Error>) { } forward_parser!(Input, parser_count, 0); } pub fn silent<Input, P>(p: P) -> Silent<P> where P: Parser<Input>, Input: Stream, { Silent(p) }
use crate::{ error::{ ErrorInfo, ParseError, ParseResult::{self, *}, StreamError, Tracked, }, lib::marker::PhantomData, parser::ParseMode, Parser, Stream, StreamOnce, }; #[derive(Clone)] pub struct Unexpected<I, T, E>(E, PhantomData<fn(I) -> (I, T)>) where I: Stream; impl<Input, T, E> Parser<Input> for Unexpected<Input, T, E> where Input: Stream, E: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { type Output = T; type PartialState = (); #[inline] fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<T, <Input as StreamOnce>::Error> { EmptyErr(<Input as StreamOnce>::Error::empty(input.position()).into()) } fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) { errors.error.add(StreamError::unexpected(&self.0)); } } pub fn unexpected<Input, S>(message: S) -> Unexpected<Input, (), S> where Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { unexpected_any(message) } pub fn unexpected_any<Input, S, T>(message: S) -> Unexpected<Input, T, S> where Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { Unexpected(message, PhantomData) } #[derive(Clone)] pub struct Message<P, S>(P, S); impl<Input, P, S> Parser<Input> for Message<P, S> where Input: Stream, P: Parser<Input>, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { type Output = P::Output; type PartialState = P::PartialState; parse_mode!(Input); #[inline] fn parse_mode_impl<M>( &mut self, mode: M, input: &mut Input, state: &mut Self::PartialState, ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> where M: ParseMode, { match self.0.parse_mode(mode, input, state) { ConsumedOk(x) => ConsumedOk(x), EmptyOk(x) => EmptyOk(x), ConsumedErr(mut err) => { err.add_message(&self.1); ConsumedErr(err) } EmptyErr(err) => EmptyErr(err), } } fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) { self.0.add_error(errors); errors.error.add_message(&self.1); } forward_parser!(Input, parser_count add_consumed_expected_error, 0); } 
pub fn message<Input, P, S>(p: P, msg: S) -> Message<P, S> where P: Parser<Input>, Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { Message(p, msg) } #[derive(Clone)] pub struct Expected<P, S>(P, S); impl<Input, P, S> Parser<Input> for Expected<P, S> where P: Parser<Input>, Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { type Output = P::Output; type PartialState = P::PartialState; parse_mode!(Input); #[inline] fn parse_mode_impl<M>( &mut self, mode: M, input: &mut Input, state: &mut Self::PartialState, ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> where M: ParseMode, { self.0.parse_mode(mode, input, state) } fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) { ParseError::set_expected(errors, StreamError::expected(&self.1), |errors| { self.0.add_error(errors); }) } forward_parser!(Input, parser_count add_consumed_expected_error, 0); } pub fn expected<Input, P, S>(p: P, info: S) -> Expected<P, S> where P: Parser<Input>, Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { Expected(p, info) } #[derive(Clone)] pub struct Silent<P>(P); impl<Input, P> Parser<Input> for Silent<P> where P: Parser<Input>, Input: Stream, { type Output = P::Output; type PartialState = P::PartialState; parse_mode!(Input); #[inline] fn parse_mode_impl<M>( &mut self, mode: M, inpu
pected(); err }) } fn add_error(&mut self, _errors: &mut Tracked<<Input as StreamOnce>::Error>) {} fn add_consumed_expected_error(&mut self, _errors: &mut Tracked<<Input as StreamOnce>::Error>) { } forward_parser!(Input, parser_count, 0); } pub fn silent<Input, P>(p: P) -> Silent<P> where P: Parser<Input>, Input: Stream, { Silent(p) }
t: &mut Input, state: &mut Self::PartialState, ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> where M: ParseMode, { self.0.parse_mode(mode, input, state).map_err(|mut err| { err.clear_ex
function_block-random_span
[ { "content": "pub fn repeat_until<F, Input, P, E>(parser: P, end: E) -> RepeatUntil<F, P, E>\n\nwhere\n\n Input: Stream,\n\n F: Extend<P::Output> + Default,\n\n P: Parser<Input>,\n\n E: Parser<Input>,\n\n{\n\n RepeatUntil {\n\n parser,\n\n end,\n\n _marker: PhantomData,\n\n ...
Rust
providers/nitro/nitro-helper/src/command/nitro_enclave.rs
chatchai-hub/tmkms-light
972a739277002704308bfc4e75a0cfa79f62bbb6
use crate::command::check_vsock_proxy; use crate::config::{EnclaveOpt, VSockProxyOpt}; use crate::enclave_log_server::LogServer; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use std::process::{Command, Output}; use std::sync::mpsc::Receiver; #[derive(Clone, Serialize, Deserialize)] pub struct EnclaveDescribeInfo { #[serde(rename = "EnclaveID")] pub enclave_id: String, #[serde(rename = "ProcessID")] pub process_id: u32, #[serde(rename = "EnclaveCID")] pub enclave_cid: u64, #[serde(rename = "NumberOfCPUs")] pub cpu_count: u64, #[serde(rename = "CPUIDs")] pub cpu_ids: Vec<u32>, #[serde(rename = "MemoryMiB")] pub memory_mib: u64, #[serde(rename = "State")] pub state: String, #[serde(rename = "Flags")] pub flags: String, } #[derive(Clone, Serialize, Deserialize)] pub struct EnclaveRunInfo { #[serde(rename = "EnclaveID")] pub enclave_id: String, #[serde(rename = "ProcessID")] pub process_id: u32, #[serde(rename = "EnclaveCID")] pub enclave_cid: u64, #[serde(rename = "NumberOfCPUs")] pub cpu_count: usize, #[serde(rename = "CPUIDs")] pub cpu_ids: Vec<u32>, #[serde(rename = "MemoryMiB")] pub memory_mib: u64, } #[derive(Clone, Serialize, Deserialize)] pub struct EnclaveTerminateInfo { #[serde(rename = "EnclaveID")] pub enclave_id: String, #[serde(rename = "Terminated")] pub terminated: bool, } fn parse_output<T: DeserializeOwned>(output: Output) -> Result<T, String> { if !output.status.success() { return Err(format!( "{}, status code: {:?}", String::from_utf8_lossy(output.stderr.as_slice()), output.status.code(), )); } serde_json::from_slice(output.stdout.as_slice()) .map_err(|_| "command invalid output".to_string()) } fn run_enclave_daemon( image_path: &str, cpu_count: usize, memory_mib: u64, cid: Option<u64>, ) -> Result<EnclaveRunInfo, String> { let mut cmd = Command::new("nitro-cli"); cmd.arg("run-enclave") .args(&["--eif-path", image_path]) .args(&["--cpu-count", &format!("{}", cpu_count)]) .args(&["--memory", &format!("{}", memory_mib)]); if 
let Some(cid) = cid { cmd.args(&["--cid", &cid.to_string()]); } let output = cmd .output() .map_err(|e| format!("execute nitro-cli error: {}", e))?; parse_output(output) } pub fn run_enclave(opt: &EnclaveOpt, stop_receiver: Receiver<()>) -> Result<(), String> { let enclave_info = describe_enclave()?; if !enclave_info.is_empty() { let info = serde_json::to_string_pretty(&enclave_info).expect("get invalid enclave info"); return Err(format!( "the following enclave is already active, please stop and try again:\n{:?}", info )); } tracing::info!("start enclave log server at port {}", opt.log_server_port); let enclave_log_server = LogServer::new( opt.log_server_port, opt.log_to_console, opt.log_file.clone(), ) .map_err(|e| format!("{:?}", e))?; enclave_log_server.launch(); let info = run_enclave_daemon( &opt.eif_path, opt.cpu_count, opt.memory_mib, opt.enclave_cid, )?; let s = serde_json::to_string_pretty(&info).unwrap(); tracing::info!("run enclave success:\n{}", s); let _ = stop_receiver.recv(); let _ = stop_enclave(Some(info.enclave_id)); Ok(()) } pub fn stop_enclave(cid: Option<String>) -> Result<EnclaveTerminateInfo, String> { let mut cmd = Command::new("nitro-cli"); cmd.arg("terminate-enclave"); if let Some(id) = cid { cmd.args(&["--enclave-id", &id]); } else { cmd.arg("--all"); } let output = cmd .output() .map_err(|e| format!("execute nitro-cli error: {:?}", e))?; parse_output(output) } pub fn describe_enclave() -> Result<Vec<EnclaveDescribeInfo>, String> { let output = Command::new("nitro-cli") .arg("describe-enclaves") .output() .map_err(|e| format!("execute nitro-cli error: {:?}", e))?; parse_output(output) } pub fn run_vsock_proxy(opt: &VSockProxyOpt, stop_receiver: Receiver<()>) -> Result<(), String> { tracing::debug!("run vsock proxy with config: {:?}", opt); if check_vsock_proxy() { tracing::warn!("vsock proxy is already running, ignore this start"); return Ok(()); } let mut child = Command::new("vsock-proxy") .args(&["--num_workers", &format!("{}", 
opt.num_workers)]) .args(&["--config", &opt.config_file]) .arg(opt.local_port.to_string()) .arg(&opt.remote_addr) .arg(opt.remote_port.to_string()) .spawn() .map_err(|e| format!("spawn vsock proxy error: {:?}", e))?; let _ = stop_receiver.recv(); if child.kill().is_ok() { tracing::info!("vsock proxy stopped"); } Ok(()) }
use crate::command::check_vsock_proxy; use crate::config::{EnclaveOpt, VSockProxyOpt}; use crate::enclave_log_server::LogServer; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use std::process::{Command, Output}; use std::sync::mpsc::Receiver; #[derive(Clone, Serialize, Deserialize)] pub struct EnclaveDescribeInfo { #[serde(rename = "EnclaveID")] pub enclave_id: String, #[serde(rename = "ProcessID")] pub process_id: u32, #[serde(rename = "EnclaveCID")] pub enclave_cid: u64, #[serde(rename = "NumberOfCPUs")] pub cpu_count: u64, #[serde(rename = "CPUIDs")] pub cpu_ids: Vec<u32>, #[serde(rename = "MemoryMiB")] pub memory_mib: u64, #[serde(rename = "State")] pub state: String, #[serde(rename = "Flags")] pub flags: String, } #[derive(Clone, Serialize, Deserialize)] pub struct EnclaveRunInfo { #[serde(rename = "EnclaveID")] pub enclave_id: String, #[serde(rename = "ProcessID")] pub process_id: u32, #[serde(rename = "EnclaveCID")] pub enclave_cid: u64, #[serde(rename = "NumberOfCPUs")] pub cpu_count: usize, #[serde(rename = "CPUIDs")] pub cpu_ids: Vec<u32>, #[serde(rename = "MemoryMiB")] pub memory_mib: u64, } #[derive(Clone, Serialize, Deserialize)] pub struct EnclaveTerminateInfo { #[serde(rename = "EnclaveID")] pub enclave_id: String, #[serde(rename = "Terminated")] pub terminated: bool, } fn parse_output<T: DeserializeOwned>(output: Output) -> Result<T, String> { if !output.status.success() { return
; } serde_json::from_slice(output.stdout.as_slice()) .map_err(|_| "command invalid output".to_string()) } fn run_enclave_daemon( image_path: &str, cpu_count: usize, memory_mib: u64, cid: Option<u64>, ) -> Result<EnclaveRunInfo, String> { let mut cmd = Command::new("nitro-cli"); cmd.arg("run-enclave") .args(&["--eif-path", image_path]) .args(&["--cpu-count", &format!("{}", cpu_count)]) .args(&["--memory", &format!("{}", memory_mib)]); if let Some(cid) = cid { cmd.args(&["--cid", &cid.to_string()]); } let output = cmd .output() .map_err(|e| format!("execute nitro-cli error: {}", e))?; parse_output(output) } pub fn run_enclave(opt: &EnclaveOpt, stop_receiver: Receiver<()>) -> Result<(), String> { let enclave_info = describe_enclave()?; if !enclave_info.is_empty() { let info = serde_json::to_string_pretty(&enclave_info).expect("get invalid enclave info"); return Err(format!( "the following enclave is already active, please stop and try again:\n{:?}", info )); } tracing::info!("start enclave log server at port {}", opt.log_server_port); let enclave_log_server = LogServer::new( opt.log_server_port, opt.log_to_console, opt.log_file.clone(), ) .map_err(|e| format!("{:?}", e))?; enclave_log_server.launch(); let info = run_enclave_daemon( &opt.eif_path, opt.cpu_count, opt.memory_mib, opt.enclave_cid, )?; let s = serde_json::to_string_pretty(&info).unwrap(); tracing::info!("run enclave success:\n{}", s); let _ = stop_receiver.recv(); let _ = stop_enclave(Some(info.enclave_id)); Ok(()) } pub fn stop_enclave(cid: Option<String>) -> Result<EnclaveTerminateInfo, String> { let mut cmd = Command::new("nitro-cli"); cmd.arg("terminate-enclave"); if let Some(id) = cid { cmd.args(&["--enclave-id", &id]); } else { cmd.arg("--all"); } let output = cmd .output() .map_err(|e| format!("execute nitro-cli error: {:?}", e))?; parse_output(output) } pub fn describe_enclave() -> Result<Vec<EnclaveDescribeInfo>, String> { let output = Command::new("nitro-cli") .arg("describe-enclaves") .output() 
.map_err(|e| format!("execute nitro-cli error: {:?}", e))?; parse_output(output) } pub fn run_vsock_proxy(opt: &VSockProxyOpt, stop_receiver: Receiver<()>) -> Result<(), String> { tracing::debug!("run vsock proxy with config: {:?}", opt); if check_vsock_proxy() { tracing::warn!("vsock proxy is already running, ignore this start"); return Ok(()); } let mut child = Command::new("vsock-proxy") .args(&["--num_workers", &format!("{}", opt.num_workers)]) .args(&["--config", &opt.config_file]) .arg(opt.local_port.to_string()) .arg(&opt.remote_addr) .arg(opt.remote_port.to_string()) .spawn() .map_err(|e| format!("spawn vsock proxy error: {:?}", e))?; let _ = stop_receiver.recv(); if child.kill().is_ok() { tracing::info!("vsock proxy stopped"); } Ok(()) }
Err(format!( "{}, status code: {:?}", String::from_utf8_lossy(output.stderr.as_slice()), output.status.code(), ))
call_expression
[]
Rust
src/main.rs
mverleg/next_semver
c9c9833f41e4fb768354479a05f32f6440ff2325
use ::std::fmt; use ::rocket::get; use ::rocket::launch; use ::rocket::request::FromParam; use ::rocket::response::status; use ::rocket::routes; use ::rocket::Build; use ::rocket::Rocket; use ::semver::Version; use ::next_semver::bump; use ::next_semver::Part; #[cfg(feature = "jemalloc")] #[global_allocator] static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; #[derive(Debug, Clone, Copy)] pub enum BumpPart { Major, Minor, Patch, } impl From<BumpPart> for Part { fn from(part: BumpPart) -> Self { match part { BumpPart::Major => Part::Major, BumpPart::Minor => Part::Minor, BumpPart::Patch => Part::Patch, } } } impl<'a> FromParam<'a> for BumpPart { type Error = (); fn from_param(param: &'a str) -> Result<Self, Self::Error> { Ok(match param { "ma" | "major" | "breaking" => BumpPart::Major, "mi" | "minor" | "feature" => BumpPart::Minor, "pa" | "patch" | "fix" => BumpPart::Patch, _ => return Err(()), }) } } #[derive(Debug, Clone)] pub struct BumpVersion { version: Version, } impl From<BumpVersion> for Version { fn from(version: BumpVersion) -> Self { version.version } } impl fmt::Display for BumpVersion { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.version) } } impl<'a> FromParam<'a> for BumpVersion { type Error = (); fn from_param(param: &'a str) -> Result<Self, Self::Error> { Ok(BumpVersion { version: Version::parse(param).map_err(|_| ())?, }) } } #[derive(Debug, Clone)] pub struct PrefixBumpVersion { version: Version, } impl From<PrefixBumpVersion> for Version { fn from(version: PrefixBumpVersion) -> Self { version.version } } impl<'a> FromParam<'a> for PrefixBumpVersion { type Error = (); fn from_param(param: &'a str) -> Result<Self, Self::Error> { if !param.starts_with('v') { return Err(()); }; Ok(PrefixBumpVersion { version: Version::parse(&param[1..]).map_err(|_| ())?, }) } } #[get("/<part>/<version>", rank = 1)] fn next(part: BumpPart, version: BumpVersion) -> String { bump(&version.into(), part.into()).to_string() } 
#[get("/<part>/<version>", rank = 2)] fn next_prefix(part: BumpPart, version: PrefixBumpVersion) -> String { bump(&version.into(), part.into()).to_string() } #[get("/<part>/<_>", rank = 3)] fn part_err(part: &str) -> status::BadRequest<String> { status::BadRequest(Some(format!( "cannot parse part (first part of path): '{}' \ should be one of 'major', 'minor' or 'patch'", part ))) } #[get("/<_>/<version>", rank = 4)] fn version_err(version: &str) -> status::BadRequest<String> { status::BadRequest(Some(format!( "cannot parse version (second part of path): '{}' \ should be a semver, e.g. '1.2.4'", version ))) } #[get("/<_>/<_>/<_>")] fn three_parts() -> status::BadRequest<String> { status::BadRequest(Some( "path too long, expected two parts, e.g. /major/1.2.4 or /patch/0.2.0".to_owned(), )) } #[get("/<_>/<_>/<_>/<_>")] fn four_parts() -> status::BadRequest<String> { status::BadRequest(Some( "path too long, expected two parts, e.g. /major/1.2.4 or /patch/0.2.0".to_owned(), )) } #[get("/<param>")] fn missing_part(param: &str) -> status::BadRequest<String> { status::BadRequest(Some(format!( "found only one path part ('{}'), expected two \ parts, e.g. /major/1.2.4 or /patch/0.2.0", param ))) } #[get("/")] fn fallback() -> status::BadRequest<String> { status::BadRequest(Some( ("Welcome to next_semver! This service gives you \ bumped version numbers. Are you on version 1.2.5 and have a new feature? Request \ /minor/1.2.5 and you get your next version: 1.3.0. It is extremely simple. First path \ part is major, minor or patch, second part is the current semantic version.") .to_owned(), )) } #[launch] fn rocket() -> Rocket<Build> { rocket::build().mount( "/", routes![ next, next_prefix, part_err, version_err, three_parts, four_parts, missing_part, fallback, ], ) }
use ::std::fmt; use ::rocket::get; use ::rocket::launch; use ::rocket::request::FromParam; use ::rocket::response::status; use ::rocket::routes; use ::rocket::Build; use ::rocket::Rocket; use ::semver::Version; use ::next_semver::bump; use ::next_semver::Part; #[cfg(feature = "jemalloc")] #[global_allocator] static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; #[derive(Debug, Clone, Copy)] pub enum BumpPart { Major, Minor, Patch, } impl From<BumpPart> for Part { fn from(part: BumpPart) -> Self { match part { BumpPart::Major => Part::Major, BumpPart::Minor => Part::Minor, BumpPart::Patch => Part::Patch, } } } impl<'a> FromParam<'a> for BumpPart { type Error = (); fn from_param(param: &'a str) -> Result<Self, Self::Error> { Ok(
) } } #[derive(Debug, Clone)] pub struct BumpVersion { version: Version, } impl From<BumpVersion> for Version { fn from(version: BumpVersion) -> Self { version.version } } impl fmt::Display for BumpVersion { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.version) } } impl<'a> FromParam<'a> for BumpVersion { type Error = (); fn from_param(param: &'a str) -> Result<Self, Self::Error> { Ok(BumpVersion { version: Version::parse(param).map_err(|_| ())?, }) } } #[derive(Debug, Clone)] pub struct PrefixBumpVersion { version: Version, } impl From<PrefixBumpVersion> for Version { fn from(version: PrefixBumpVersion) -> Self { version.version } } impl<'a> FromParam<'a> for PrefixBumpVersion { type Error = (); fn from_param(param: &'a str) -> Result<Self, Self::Error> { if !param.starts_with('v') { return Err(()); }; Ok(PrefixBumpVersion { version: Version::parse(&param[1..]).map_err(|_| ())?, }) } } #[get("/<part>/<version>", rank = 1)] fn next(part: BumpPart, version: BumpVersion) -> String { bump(&version.into(), part.into()).to_string() } #[get("/<part>/<version>", rank = 2)] fn next_prefix(part: BumpPart, version: PrefixBumpVersion) -> String { bump(&version.into(), part.into()).to_string() } #[get("/<part>/<_>", rank = 3)] fn part_err(part: &str) -> status::BadRequest<String> { status::BadRequest(Some(format!( "cannot parse part (first part of path): '{}' \ should be one of 'major', 'minor' or 'patch'", part ))) } #[get("/<_>/<version>", rank = 4)] fn version_err(version: &str) -> status::BadRequest<String> { status::BadRequest(Some(format!( "cannot parse version (second part of path): '{}' \ should be a semver, e.g. '1.2.4'", version ))) } #[get("/<_>/<_>/<_>")] fn three_parts() -> status::BadRequest<String> { status::BadRequest(Some( "path too long, expected two parts, e.g. 
/major/1.2.4 or /patch/0.2.0".to_owned(), )) } #[get("/<_>/<_>/<_>/<_>")] fn four_parts() -> status::BadRequest<String> { status::BadRequest(Some( "path too long, expected two parts, e.g. /major/1.2.4 or /patch/0.2.0".to_owned(), )) } #[get("/<param>")] fn missing_part(param: &str) -> status::BadRequest<String> { status::BadRequest(Some(format!( "found only one path part ('{}'), expected two \ parts, e.g. /major/1.2.4 or /patch/0.2.0", param ))) } #[get("/")] fn fallback() -> status::BadRequest<String> { status::BadRequest(Some( ("Welcome to next_semver! This service gives you \ bumped version numbers. Are you on version 1.2.5 and have a new feature? Request \ /minor/1.2.5 and you get your next version: 1.3.0. It is extremely simple. First path \ part is major, minor or patch, second part is the current semantic version.") .to_owned(), )) } #[launch] fn rocket() -> Rocket<Build> { rocket::build().mount( "/", routes![ next, next_prefix, part_err, version_err, three_parts, four_parts, missing_part, fallback, ], ) }
match param { "ma" | "major" | "breaking" => BumpPart::Major, "mi" | "minor" | "feature" => BumpPart::Minor, "pa" | "patch" | "fix" => BumpPart::Patch, _ => return Err(()), }
if_condition
[ { "content": "pub fn bump(version: impl Borrow<Version>, part: Part) -> Version {\n\n let version = version.borrow();\n\n match part {\n\n Part::Major => Version {\n\n major: version.major + 1,\n\n minor: 0,\n\n patch: 0,\n\n pre: version.pre.clone(),\n\n...
Rust
plotters/src/coord/ranged2d/cartesian.rs
facorread/plotters
f86adaec5236551d9be3adf5c631549a1bc1c977
/*! The 2-dimensional cartesian coordinate system. This module provides the 2D cartesian coordinate system, which is composed by two independent ranged 1D coordinate sepcification. This types of coordinate system is used by the chart constructed with [ChartBuilder::build_cartesian_2d](../../chart/ChartBuilder.html#method.build_cartesian_2d). */ use crate::coord::ranged1d::{KeyPointHint, Ranged, ReversibleRanged}; use crate::coord::{CoordTranslate, ReverseCoordTranslate}; use crate::style::ShapeStyle; use plotters_backend::{BackendCoord, DrawingBackend, DrawingErrorKind}; use std::ops::Range; #[derive(Clone)] pub struct Cartesian2d<X: Ranged, Y: Ranged> { logic_x: X, logic_y: Y, back_x: (i32, i32), back_y: (i32, i32), } impl<X: Ranged, Y: Ranged> Cartesian2d<X, Y> { pub fn new<IntoX: Into<X>, IntoY: Into<Y>>( logic_x: IntoX, logic_y: IntoY, actual: (Range<i32>, Range<i32>), ) -> Self { Self { logic_x: logic_x.into(), logic_y: logic_y.into(), back_x: (actual.0.start, actual.0.end), back_y: (actual.1.start, actual.1.end), } } pub fn draw_mesh< E, DrawMesh: FnMut(MeshLine<X, Y>) -> Result<(), E>, XH: KeyPointHint, YH: KeyPointHint, >( &self, h_limit: YH, v_limit: XH, mut draw_mesh: DrawMesh, ) -> Result<(), E> { let (xkp, ykp) = ( self.logic_x.key_points(v_limit), self.logic_y.key_points(h_limit), ); for logic_x in xkp { let x = self.logic_x.map(&logic_x, self.back_x); draw_mesh(MeshLine::XMesh( (x, self.back_y.0), (x, self.back_y.1), &logic_x, ))?; } for logic_y in ykp { let y = self.logic_y.map(&logic_y, self.back_y); draw_mesh(MeshLine::YMesh( (self.back_x.0, y), (self.back_x.1, y), &logic_y, ))?; } Ok(()) } pub fn get_x_range(&self) -> Range<X::ValueType> { self.logic_x.range() } pub fn get_y_range(&self) -> Range<Y::ValueType> { self.logic_y.range() } pub fn get_x_axis_pixel_range(&self) -> Range<i32> { self.logic_x.axis_pixel_range(self.back_x) } pub fn get_y_axis_pixel_range(&self) -> Range<i32> { self.logic_y.axis_pixel_range(self.back_y) } pub fn x_spec(&self) 
-> &X { &self.logic_x } pub fn y_spec(&self) -> &Y { &self.logic_y } } impl<X: Ranged, Y: Ranged> CoordTranslate for Cartesian2d<X, Y> { type From = (X::ValueType, Y::ValueType); fn translate(&self, from: &Self::From) -> BackendCoord { ( self.logic_x.map(&from.0, self.back_x), self.logic_y.map(&from.1, self.back_y), ) } } impl<X: ReversibleRanged, Y: ReversibleRanged> ReverseCoordTranslate for Cartesian2d<X, Y> { fn reverse_translate(&self, input: BackendCoord) -> Option<Self::From> { Some(( self.logic_x.unmap(input.0, self.back_x)?, self.logic_y.unmap(input.1, self.back_y)?, )) } } pub enum MeshLine<'a, X: Ranged, Y: Ranged> { XMesh(BackendCoord, BackendCoord, &'a X::ValueType), YMesh(BackendCoord, BackendCoord, &'a Y::ValueType), } impl<'a, X: Ranged, Y: Ranged> MeshLine<'a, X, Y> { pub fn draw<DB: DrawingBackend>( &self, backend: &mut DB, style: &ShapeStyle, ) -> Result<(), DrawingErrorKind<DB::ErrorType>> { let (&left, &right) = match self { MeshLine::XMesh(a, b, _) => (a, b), MeshLine::YMesh(a, b, _) => (a, b), }; backend.draw_line(left, right, style) } }
/*! The 2-dimensional cartesian coordinate system. This module provides the 2D cartesian coordinate system, which is composed by two independent ranged 1D coordinate sepcification. This types of coordinate system is used by the chart constructed with [ChartBuilder::build_cartesian_2d](../../chart/ChartBuilder.html#method.build_cartesian_2d). */ use crate::coord::ranged1d::{KeyPointHint, Ranged, ReversibleRanged}; use crate::coord::{CoordTranslate, ReverseCoordTranslate}; use crate::style::ShapeStyle; use plotters_backend::{BackendCoord, DrawingBackend, DrawingErrorKind}; use std::ops::Range; #[derive(Clone)] pub struct Cartesian2d<X: Ranged, Y: Ranged> { logic_x: X, logic_y: Y, back_x: (i32, i32), back_y: (i32, i32), } impl<X: Ranged, Y: Ranged> Cartesian2d<X, Y> { pub fn new<IntoX: Into<X>, IntoY: Into<Y>>( logic_x: IntoX, logic_y: IntoY, actual: (Range<i32>, Range<i32>), ) -> Self { Self { logic_x: logic_x.into(), logic_y: logic_y.into(), back_x: (actual.0.start, actual.0.end), back_y: (actual.1.start, actual.1.end), } } pub fn draw_mesh< E, DrawMesh: FnMut(MeshLine<X, Y>) -> Result<(), E>, XH: KeyPointHint, YH: KeyPointHint, >( &self, h_limit: YH, v_limit: XH, mut draw_mesh: DrawMesh, ) -> Result<(), E> { let (xkp, ykp) = ( self.logic_x.key_points(v_limit), self.logic_y.key_points(h_limit), ); for logic_x in xkp { let x = self.logic_x.map(&logic_x, self.back_x); draw_mesh(MeshLine::XMesh( (x, self.back_y.0), (x, self.back_y.1), &logic_x, ))?; } for logic_y in ykp { let y = self.logic_y.map(&logic_y, self.back_y);
?; } Ok(()) } pub fn get_x_range(&self) -> Range<X::ValueType> { self.logic_x.range() } pub fn get_y_range(&self) -> Range<Y::ValueType> { self.logic_y.range() } pub fn get_x_axis_pixel_range(&self) -> Range<i32> { self.logic_x.axis_pixel_range(self.back_x) } pub fn get_y_axis_pixel_range(&self) -> Range<i32> { self.logic_y.axis_pixel_range(self.back_y) } pub fn x_spec(&self) -> &X { &self.logic_x } pub fn y_spec(&self) -> &Y { &self.logic_y } } impl<X: Ranged, Y: Ranged> CoordTranslate for Cartesian2d<X, Y> { type From = (X::ValueType, Y::ValueType); fn translate(&self, from: &Self::From) -> BackendCoord { ( self.logic_x.map(&from.0, self.back_x), self.logic_y.map(&from.1, self.back_y), ) } } impl<X: ReversibleRanged, Y: ReversibleRanged> ReverseCoordTranslate for Cartesian2d<X, Y> { fn reverse_translate(&self, input: BackendCoord) -> Option<Self::From> { Some(( self.logic_x.unmap(input.0, self.back_x)?, self.logic_y.unmap(input.1, self.back_y)?, )) } } pub enum MeshLine<'a, X: Ranged, Y: Ranged> { XMesh(BackendCoord, BackendCoord, &'a X::ValueType), YMesh(BackendCoord, BackendCoord, &'a Y::ValueType), } impl<'a, X: Ranged, Y: Ranged> MeshLine<'a, X, Y> { pub fn draw<DB: DrawingBackend>( &self, backend: &mut DB, style: &ShapeStyle, ) -> Result<(), DrawingErrorKind<DB::ErrorType>> { let (&left, &right) = match self { MeshLine::XMesh(a, b, _) => (a, b), MeshLine::YMesh(a, b, _) => (a, b), }; backend.draw_line(left, right, style) } }
draw_mesh(MeshLine::YMesh( (self.back_x.0, y), (self.back_x.1, y), &logic_y, ))
call_expression
[ { "content": "/// Draw power function f(x) = x^power.\n\npub fn draw(canvas_id: &str, power: i32) -> DrawResult<impl Fn((i32, i32)) -> Option<(f32, f32)>> {\n\n let backend = CanvasBackend::new(canvas_id).expect(\"cannot find canvas\");\n\n let root = backend.into_drawing_area();\n\n let font: FontDesc...
Rust
src/fs/mmap.rs
Ryanmtate/future-aio
be6fd0ab83358c808248ffb7f2b5d127a3aaa4cc
use std::fs::OpenOptions; use std::io::Error as IoError; use std::path::Path; use std::sync::Arc; use std::sync::RwLock; use std::sync::RwLockReadGuard; use std::sync::RwLockWriteGuard; use memmap::Mmap; use memmap::MmapMut; use crate::task::spawn_blocking; use crate::fs::File; pub struct MemoryMappedMutFile(Arc<RwLock<MmapMut>>); impl MemoryMappedMutFile { pub async fn create(m_path: &Path, len: u64) -> Result<(Self,File), IoError> { let owned_path = m_path.to_owned(); let (m_map, mfile,_) = spawn_blocking ( move || { let inner_path = owned_path.clone(); let mfile = OpenOptions::new() .read(true) .write(true) .create(true) .open(inner_path) .unwrap(); mfile.set_len(len)?; unsafe { MmapMut::map_mut(&mfile) }.map(|mm_file| (mm_file, mfile,owned_path)) }).await?; Ok(( MemoryMappedMutFile::from_mmap(m_map), mfile.into() )) } fn from_mmap(mmap: MmapMut) -> MemoryMappedMutFile { MemoryMappedMutFile(Arc::new(RwLock::new(mmap))) } pub fn inner(&self) -> RwLockReadGuard<MmapMut> { self.0.read().unwrap() } pub fn inner_map(&self) -> Arc<RwLock<MmapMut>> { self.0.clone() } pub fn mut_inner(&self) -> RwLockWriteGuard<MmapMut> { self.0.write().unwrap() } pub fn write_bytes(&mut self, pos: usize, bytes: &Vec<u8>) { let mut m_file = self.mut_inner(); let m_array = &mut m_file[..]; for i in 0..bytes.len() { m_array[i + pos] = bytes[i]; } } pub async fn flush_ft(&self) -> Result<(),IoError> { let inner = self.0.clone(); spawn_blocking(move || { let inner_map = inner.write().unwrap(); let res = inner_map.flush(); drop(inner_map); res }).await } pub async fn flush_async_ft(&self) -> Result<(), IoError> { let inner = self.0.clone(); spawn_blocking(move || { let inner_map = inner.write().unwrap(); inner_map.flush_async() }).await } pub async fn flush_range_ft( &self, offset: usize, len: usize, ) -> Result<(), IoError> { let inner = self.0.clone(); spawn_blocking(move || { let inner_map = inner.write().unwrap(); inner_map.flush_range(offset, len) }).await } } pub struct 
MemoryMappedFile(Arc<RwLock<Mmap>>); impl MemoryMappedFile { pub async fn open<P>(path: P,min_len: u64) -> Result<(Self, File), IoError> where P: AsRef<Path> { let m_path = path.as_ref().to_owned(); let (m_map, mfile,_) = spawn_blocking (move || { let mfile = OpenOptions::new().read(true).open(&m_path).unwrap(); let meta = mfile.metadata().unwrap(); if meta.len() == 0 { mfile.set_len(min_len)?; } unsafe { Mmap::map(&mfile) }.map(|mm_file| (mm_file, mfile,m_path)) }).await?; Ok(( MemoryMappedFile::from_mmap(m_map), mfile.into() )) } fn from_mmap(mmap: Mmap) -> MemoryMappedFile { MemoryMappedFile(Arc::new(RwLock::new(mmap))) } pub fn inner(&self) -> RwLockReadGuard<Mmap> { self.0.read().unwrap() } } #[cfg(test)] mod tests { use std::env::temp_dir; use std::fs::File; use std::io::Error as IoError; use std::io::Read; use flv_util::fixture::ensure_clean_file; use crate::test_async; use super::MemoryMappedMutFile; #[test_async] async fn test_mmap_write_slice() -> Result<(),IoError> { let index_path = temp_dir().join("test.index"); ensure_clean_file(&index_path.clone()); let result = MemoryMappedMutFile::create(&index_path,3).await; assert!(result.is_ok()); let (mm_file, _) = result.unwrap(); { let mut mm = mm_file.mut_inner(); let src = [0x01, 0x02, 0x03]; mm.copy_from_slice(&src); } mm_file.flush_ft().await?; let mut f = File::open(&index_path)?; let mut buffer = vec![0; 3]; f.read(&mut buffer)?; assert_eq!(buffer[0], 0x01); assert_eq!(buffer[1], 0x02); assert_eq!(buffer[2], 0x03); Ok(()) } #[test_async] async fn test_mmap_write_pair_slice() -> Result<(),IoError> { let index_path = temp_dir().join("pairslice.index"); ensure_clean_file(&index_path.clone()); let result = MemoryMappedMutFile::create(&index_path, 24).await; assert!(result.is_ok()); let (mm_file, _) = result.unwrap(); { let mut mm = mm_file.mut_inner(); let src: [(u32, u32); 3] = [(5, 10), (11, 22), (50, 100)]; let (_, bytes, _) = unsafe { src.align_to::<u8>() }; assert_eq!(bytes.len(), 24); 
mm.copy_from_slice(&bytes); } mm_file.flush_ft().await?; let (mm_file2, _) = MemoryMappedMutFile::create(&index_path, 24).await?; let mm2 = mm_file2.mut_inner(); let (_, pairs, _) = unsafe { mm2.align_to::<(u32, u32)>() }; assert_eq!(pairs.len(), 3); assert_eq!(pairs[0].0, 5); assert_eq!(pairs[2].1, 100); Ok(()) } #[test_async] async fn test_mmap_write_with_pos() -> Result<(),IoError> { let index_path = temp_dir().join("testpos.index"); ensure_clean_file(&index_path.clone()); let (mut mm_file, _) = MemoryMappedMutFile::create(&index_path, 10).await?; let src = vec![0x05, 0x10, 0x44]; mm_file.write_bytes(5, &src); mm_file.flush_ft().await?; let mut f = File::open(&index_path)?; let mut buffer = vec![0; 10]; f.read(&mut buffer)?; assert_eq!(buffer[5], 0x05); assert_eq!(buffer[6], 0x10); assert_eq!(buffer[7], 0x44); Ok(()) } /* use std::fs::OpenOptions; use std::path::PathBuf; use memmap::MmapMut; #[test] fn debug_kafka_inspect() -> io::Result<()> { let path = "/tmp/kafka-logs/test-0/00000000000000000000.index"; let file = OpenOptions::new() .read(true) .write(true) .open(path)?; let mut mmap = unsafe { MmapMut::map_mut(&file)? }; println!("file size: {}",mmap.len()); Ok(()) } #[test] fn debug_file_inspect() -> io::Result<()> { let path = "/tmp/kafka-logs/test-0/00000000000000000000.index"; let file = File::open(path)?; let metadata = file.metadata()?; println!("file len: {:#?}",metadata.len()); Ok(()) } */ }
use std::fs::OpenOptions; use std::io::Error as IoError; use std::path::Path; use std::sync::Arc; use std::sync::RwLock; use std::sync::RwLockReadGuard; use std::sync::RwLockWriteGuard; use memmap::Mmap; use memmap::MmapMut; use crate::task::spawn_blocking; use crate::fs::File; pub struct MemoryMappedMutFile(Arc<RwLock<MmapMut>>); impl MemoryMappedMutFile { pub async fn create(m_path: &Path, len: u64) -> Result<(Self,File), IoError> { let owned_path = m_path.to_owned(); let (m_map, mfile,_) = spawn_blocking ( move || { let inner_path = owned_path.clone(); let mfile = OpenOptions::new() .read(true) .write(true) .create(true) .open(inner_path) .unwrap(); mfile.set_len(len)?; unsafe { MmapMut::map_mut(&mfile) }.map(|mm_file| (mm_file, mfile,owned_path)) }).await?; Ok(( MemoryMappedMutFile::from_mmap(m_map), mfile.into() )) } fn from_mmap(mmap: MmapMut) -> MemoryMappedMutFile { MemoryMappedMutFile(Arc::new(RwLock::new(mmap))) } pub fn inner(&self) -> RwLockReadGuard<MmapMut> { self.0.read().unwrap() } pub fn inner_map(&self) -> Arc<RwLock<MmapMut>> { self.0.clone() } pub fn mut_inner(&self) -> RwLockWriteGuard<MmapMut> { self.0.write().unwrap() } pub fn write_bytes(&mut self, pos: usize, bytes: &Vec<u8>) { let mut m_file = self.mut_inner(); let m_array = &mut m_file[..]; for i in 0..bytes.len() { m_array[i + pos] = bytes[i]; } } pub async fn flush_ft(&self) -> Result<(),IoError> { let inner = self.0.clone(); spawn_blocking(move || { let inner_map = inner.write().unwrap(); let res = inner_map.flush(); drop(inner_map); res }).await } pub async fn flush_async_ft(&self) -> Result<(), IoError> { let inner = self.0.clone(); spawn_blocking(move || { let inner_map = inner.write().unwrap(); inner_map.flush_async() }).await } pub async fn flush_range_ft( &self, offset: usize, len: usize, ) -> Result<(), IoError> { let inner = self.0.clone(); spawn_blocking(move || { let inner_map = inner.write().unwrap(); inner_map.flush_range(offset, len) }).await } } pub struct 
MemoryMappedFile(Arc<RwLock<Mmap>>); impl MemoryMappedFile {
fn from_mmap(mmap: Mmap) -> MemoryMappedFile { MemoryMappedFile(Arc::new(RwLock::new(mmap))) } pub fn inner(&self) -> RwLockReadGuard<Mmap> { self.0.read().unwrap() } } #[cfg(test)] mod tests { use std::env::temp_dir; use std::fs::File; use std::io::Error as IoError; use std::io::Read; use flv_util::fixture::ensure_clean_file; use crate::test_async; use super::MemoryMappedMutFile; #[test_async] async fn test_mmap_write_slice() -> Result<(),IoError> { let index_path = temp_dir().join("test.index"); ensure_clean_file(&index_path.clone()); let result = MemoryMappedMutFile::create(&index_path,3).await; assert!(result.is_ok()); let (mm_file, _) = result.unwrap(); { let mut mm = mm_file.mut_inner(); let src = [0x01, 0x02, 0x03]; mm.copy_from_slice(&src); } mm_file.flush_ft().await?; let mut f = File::open(&index_path)?; let mut buffer = vec![0; 3]; f.read(&mut buffer)?; assert_eq!(buffer[0], 0x01); assert_eq!(buffer[1], 0x02); assert_eq!(buffer[2], 0x03); Ok(()) } #[test_async] async fn test_mmap_write_pair_slice() -> Result<(),IoError> { let index_path = temp_dir().join("pairslice.index"); ensure_clean_file(&index_path.clone()); let result = MemoryMappedMutFile::create(&index_path, 24).await; assert!(result.is_ok()); let (mm_file, _) = result.unwrap(); { let mut mm = mm_file.mut_inner(); let src: [(u32, u32); 3] = [(5, 10), (11, 22), (50, 100)]; let (_, bytes, _) = unsafe { src.align_to::<u8>() }; assert_eq!(bytes.len(), 24); mm.copy_from_slice(&bytes); } mm_file.flush_ft().await?; let (mm_file2, _) = MemoryMappedMutFile::create(&index_path, 24).await?; let mm2 = mm_file2.mut_inner(); let (_, pairs, _) = unsafe { mm2.align_to::<(u32, u32)>() }; assert_eq!(pairs.len(), 3); assert_eq!(pairs[0].0, 5); assert_eq!(pairs[2].1, 100); Ok(()) } #[test_async] async fn test_mmap_write_with_pos() -> Result<(),IoError> { let index_path = temp_dir().join("testpos.index"); ensure_clean_file(&index_path.clone()); let (mut mm_file, _) = MemoryMappedMutFile::create(&index_path, 
10).await?; let src = vec![0x05, 0x10, 0x44]; mm_file.write_bytes(5, &src); mm_file.flush_ft().await?; let mut f = File::open(&index_path)?; let mut buffer = vec![0; 10]; f.read(&mut buffer)?; assert_eq!(buffer[5], 0x05); assert_eq!(buffer[6], 0x10); assert_eq!(buffer[7], 0x44); Ok(()) } /* use std::fs::OpenOptions; use std::path::PathBuf; use memmap::MmapMut; #[test] fn debug_kafka_inspect() -> io::Result<()> { let path = "/tmp/kafka-logs/test-0/00000000000000000000.index"; let file = OpenOptions::new() .read(true) .write(true) .open(path)?; let mut mmap = unsafe { MmapMut::map_mut(&file)? }; println!("file size: {}",mmap.len()); Ok(()) } #[test] fn debug_file_inspect() -> io::Result<()> { let path = "/tmp/kafka-logs/test-0/00000000000000000000.index"; let file = File::open(path)?; let metadata = file.metadata()?; println!("file len: {:#?}",metadata.len()); Ok(()) } */ }
pub async fn open<P>(path: P,min_len: u64) -> Result<(Self, File), IoError> where P: AsRef<Path> { let m_path = path.as_ref().to_owned(); let (m_map, mfile,_) = spawn_blocking (move || { let mfile = OpenOptions::new().read(true).open(&m_path).unwrap(); let meta = mfile.metadata().unwrap(); if meta.len() == 0 { mfile.set_len(min_len)?; } unsafe { Mmap::map(&mfile) }.map(|mm_file| (mm_file, mfile,m_path)) }).await?; Ok(( MemoryMappedFile::from_mmap(m_map), mfile.into() )) }
function_block-full_function
[ { "content": "#[proc_macro_attribute]\n\npub fn test_async(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n\n\n let input = syn::parse_macro_input!(item as ItemFn);\n\n let name = &input.sig.ident;\n\n let sync_name = format!(\"{}_sync\",name);\n\n let out_fn_iden = Ident::new(&sync_name,...
Rust
src/board.rs
jstnlef/rustris
0133b9e43c22cf26f640da8e74cef9c365c5b7ad
use std::collections::VecDeque; use piston_window::{Context, G2d, Line, Transformed, types, Rectangle}; use piston_window::rectangle; use piston_window::grid::Grid; use colors::GREY; use tetromino::{Piece, Block}; use settings::*; type GridRow = [CellState; WIDTH_IN_BLOCKS as usize]; pub struct Board { grid: VecDeque<GridRow> } impl Board { pub fn new() -> Board { Board { grid: Self::create_empty_grid() } } pub fn set_piece(&mut self, piece: &Piece) { for block in piece.blocks_iter() { self.set_cell_state(block, CellState::Block(piece.get_color())); } } pub fn is_space_occupied(&self, block: Block) -> bool { match self.get_cell_state(block.x, block.y) { CellState::Block(_) => true, CellState::Empty => false } } pub fn remove_completed_rows(&mut self) -> u32 { let completed_row_indexes = self.find_completed_row_indexes(); for i in &completed_row_indexes { self.grid.remove(*i); } for _ in &completed_row_indexes { self.grid.push_front(Self::create_empty_row()); } debug_assert!(self.grid.len() == HEIGHT_IN_BLOCKS as usize); completed_row_indexes.len() as u32 } fn find_completed_row_indexes(&self) -> Vec<usize> { let mut completed_row_indexes = Vec::new(); let rows = self.grid.iter().rev().take_while(|&row| !Self::row_is_empty(row)); for (i, row) in rows.enumerate() { if Self::row_is_complete(row) { let grid_index = (HEIGHT_IN_BLOCKS - 1) as usize - i; completed_row_indexes.push(grid_index); } } completed_row_indexes } fn get_cell_state(&self, x: i32, y: i32) -> CellState { self.grid[y as usize][x as usize] } fn set_cell_state(&mut self, block: Block, cell_state: CellState) { self.grid[block.y as usize][block.x as usize] = cell_state; } fn create_empty_grid() -> VecDeque<GridRow> { let mut grid = VecDeque::with_capacity(HEIGHT_IN_BLOCKS as usize); for _ in 0..HEIGHT_IN_BLOCKS { grid.push_back(Self::create_empty_row()); } grid } fn create_empty_row() -> GridRow { [CellState::Empty; WIDTH_IN_BLOCKS as usize] } fn row_is_empty(row: &GridRow) -> bool { 
row.iter().all(|&block| block == CellState::Empty) } fn row_is_complete(row: &GridRow) -> bool { row.iter().all(|&block| block != CellState::Empty) } pub fn render(&self, context: Context, graphics: &mut G2d) { let grid = Grid { cols: WIDTH_IN_BLOCKS as u32, rows: HEIGHT_IN_BLOCKS as u32, units: BLOCK_SIZE }; let line = Line::new(GREY, GRID_LINE_WIDTH); let transform = context.transform.trans(GRID_X_OFFSET, GRID_Y_OFFSET); grid.draw(&line, &Default::default(), transform, graphics); for x in 0..WIDTH_IN_BLOCKS { for y in 0..HEIGHT_IN_BLOCKS { self.get_cell_state(x, y).render(x, y, context, graphics); } } } } #[derive(Clone, Copy, Debug, PartialEq)] pub enum CellState { Empty, Block(types::Color) } impl CellState { fn render(&self, x: i32, y: i32, context: Context, graphics: &mut G2d) { match *self { CellState::Block(color) => { let rect = Rectangle { color: color, shape: rectangle::Shape::Square, border: None }; Block::new(x, y).render_in_grid(rect, context, graphics); }, CellState::Empty => {} } } } #[cfg(test)] mod tests { use super::*; use colors::{RED, CYAN}; use tetromino::{Block, Piece, I}; use settings::*; #[test] fn test_set_piece() { let mut board = Board::new(); let piece = Piece::create(&I); board.set_piece(&piece); assert_eq!(board.get_cell_state(2, 1), CellState::Empty); assert_eq!(board.get_cell_state(3, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(4, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(5, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(6, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(7, 1), CellState::Empty); } #[test] fn test_is_space_occupied() { let mut board = Board::new(); let block = Block{x: 2, y: 2}; board.set_cell_state(block, CellState::Block(RED)); assert!(board.is_space_occupied(block)); assert!(!board.is_space_occupied(Block{x: 0, y: 0})); } #[test] fn test_set_cell_state() { let mut board = Board::new(); assert_eq!(board.get_cell_state(0, 0), CellState::Empty); 
board.set_cell_state(Block{x: 2, y: 2}, CellState::Block(RED)); assert_eq!(board.get_cell_state(2, 2), CellState::Block(RED)); } #[test] fn test_row_is_empty() { let mut empty_row = [CellState::Empty; WIDTH_IN_BLOCKS as usize]; assert!(Board::row_is_empty(&empty_row)); empty_row[1] = CellState::Block(RED); assert!(!Board::row_is_empty(&empty_row)); } #[test] fn test_row_is_complete() { let mut complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; assert!(Board::row_is_complete(&complete_row)); complete_row[1] = CellState::Empty; assert!(!Board::row_is_complete(&complete_row)); } #[test] fn test_create_empty_row() { let row = Board::create_empty_row(); assert!(Board::row_is_empty(&row)); } #[test] fn test_find_completed_row_indexes_simple() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; let result = board.find_completed_row_indexes(); let expected = vec![i]; assert_eq!(result, expected); } #[test] fn test_find_completed_row_indexes_multiple() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; board.grid[i - 1] = complete_row; let result = board.find_completed_row_indexes(); let expected = vec![i, i - 1]; assert_eq!(result, expected); } #[test] fn test_find_completed_row_indexes_skip_row() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; let mut incomplete_row = complete_row; incomplete_row[4] = CellState::Empty; board.grid[i] = complete_row; board.grid[i - 1] = incomplete_row; board.grid[i - 2] = complete_row; let result = board.find_completed_row_indexes(); let expected = vec![i, i - 2]; assert_eq!(result, expected); } #[test] fn test_remove_completed_rows_simple() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut 
board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; let n = board.remove_completed_rows(); assert_eq!(n, 1); assert_eq!(board.grid, Board::create_empty_grid()); } #[test] fn test_remove_completed_rows_moves_down() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; board.grid[i-1][0] = CellState::Block(RED); let n = board.remove_completed_rows(); let mut expected_grid = Board::create_empty_grid(); expected_grid[i][0] = CellState::Block(RED); assert_eq!(n, 1); assert_eq!(board.grid, expected_grid); } #[test] fn test_remove_completed_rows_moves_two_down() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; board.grid[i-1][0] = CellState::Block(RED); board.grid[i-2] = complete_row; board.grid[i-3][1] = CellState::Block(RED); let n = board.remove_completed_rows(); let mut expected_grid = Board::create_empty_grid(); expected_grid[i][0] = CellState::Block(RED); expected_grid[i-1][1] = CellState::Block(RED); assert_eq!(n, 2); assert_eq!(board.grid, expected_grid); } }
use std::collections::VecDeque; use piston_window::{Context, G2d, Line, Transformed, types, Rectangle}; use piston_window::rectangle; use piston_window::grid::Grid; use colors::GREY; use tetromino::{Piece, Block}; use settings::*; type GridRow = [CellState; WIDTH_IN_BLOCKS as usize]; pub struct Board { grid: VecDeque<GridRow> } impl Board { pub fn new() -> Board { Board { grid: Self::create_empty_grid() } } pub fn set_piece(&mut self, piece: &Piece) { for block in piece.blocks_iter() { self.set_cell_state(block, CellState::Block(piece.get_color())); } } pub fn is_space_occupied(&self, block: Block) -> bool { match self.get_cell_state(block.x, block.y) { CellState::Block(_) => true, CellState::Empty => false } } pub fn remove_completed_rows(&mut self) -> u32 { let completed_row_indexes = self.find_completed_row_indexes(); for i in &completed_row_indexes { self.grid.remove(*i); } for _ in &completed_row_indexes { self.grid.push_front(Self::create_empty_row()); } debug_assert!(self.grid.len() == HEIGHT_IN_BLOCKS as usize); completed_row_indexes.len() as u32 } fn find_completed_row_indexes(&self) -> Vec<usize> { let mut completed_row_indexes = Vec::new(); let rows = self.grid.iter().rev().take_while(|&row| !Self::row_is_empty(row)); for (i, row) in rows.enumerate() { if Self::row_is_complete(row) { let grid_index = (HEIGHT_IN_BLOCKS - 1) as usize - i; completed_row_indexes.push(grid_index); } } completed_row_indexes } fn get_cell_state(&self, x: i32, y: i32) -> CellState { self.grid[y as usize][x as usize] } fn set_cell_state(&mut self, block: Block, cell_state: CellState) { self.grid[block.y as usize][block.x as usize] = cell_state; }
fn create_empty_row() -> GridRow { [CellState::Empty; WIDTH_IN_BLOCKS as usize] } fn row_is_empty(row: &GridRow) -> bool { row.iter().all(|&block| block == CellState::Empty) } fn row_is_complete(row: &GridRow) -> bool { row.iter().all(|&block| block != CellState::Empty) } pub fn render(&self, context: Context, graphics: &mut G2d) { let grid = Grid { cols: WIDTH_IN_BLOCKS as u32, rows: HEIGHT_IN_BLOCKS as u32, units: BLOCK_SIZE }; let line = Line::new(GREY, GRID_LINE_WIDTH); let transform = context.transform.trans(GRID_X_OFFSET, GRID_Y_OFFSET); grid.draw(&line, &Default::default(), transform, graphics); for x in 0..WIDTH_IN_BLOCKS { for y in 0..HEIGHT_IN_BLOCKS { self.get_cell_state(x, y).render(x, y, context, graphics); } } } } #[derive(Clone, Copy, Debug, PartialEq)] pub enum CellState { Empty, Block(types::Color) } impl CellState { fn render(&self, x: i32, y: i32, context: Context, graphics: &mut G2d) { match *self { CellState::Block(color) => { let rect = Rectangle { color: color, shape: rectangle::Shape::Square, border: None }; Block::new(x, y).render_in_grid(rect, context, graphics); }, CellState::Empty => {} } } } #[cfg(test)] mod tests { use super::*; use colors::{RED, CYAN}; use tetromino::{Block, Piece, I}; use settings::*; #[test] fn test_set_piece() { let mut board = Board::new(); let piece = Piece::create(&I); board.set_piece(&piece); assert_eq!(board.get_cell_state(2, 1), CellState::Empty); assert_eq!(board.get_cell_state(3, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(4, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(5, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(6, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(7, 1), CellState::Empty); } #[test] fn test_is_space_occupied() { let mut board = Board::new(); let block = Block{x: 2, y: 2}; board.set_cell_state(block, CellState::Block(RED)); assert!(board.is_space_occupied(block)); assert!(!board.is_space_occupied(Block{x: 0, y: 0})); } #[test] 
fn test_set_cell_state() { let mut board = Board::new(); assert_eq!(board.get_cell_state(0, 0), CellState::Empty); board.set_cell_state(Block{x: 2, y: 2}, CellState::Block(RED)); assert_eq!(board.get_cell_state(2, 2), CellState::Block(RED)); } #[test] fn test_row_is_empty() { let mut empty_row = [CellState::Empty; WIDTH_IN_BLOCKS as usize]; assert!(Board::row_is_empty(&empty_row)); empty_row[1] = CellState::Block(RED); assert!(!Board::row_is_empty(&empty_row)); } #[test] fn test_row_is_complete() { let mut complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; assert!(Board::row_is_complete(&complete_row)); complete_row[1] = CellState::Empty; assert!(!Board::row_is_complete(&complete_row)); } #[test] fn test_create_empty_row() { let row = Board::create_empty_row(); assert!(Board::row_is_empty(&row)); } #[test] fn test_find_completed_row_indexes_simple() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; let result = board.find_completed_row_indexes(); let expected = vec![i]; assert_eq!(result, expected); } #[test] fn test_find_completed_row_indexes_multiple() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; board.grid[i - 1] = complete_row; let result = board.find_completed_row_indexes(); let expected = vec![i, i - 1]; assert_eq!(result, expected); } #[test] fn test_find_completed_row_indexes_skip_row() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; let mut incomplete_row = complete_row; incomplete_row[4] = CellState::Empty; board.grid[i] = complete_row; board.grid[i - 1] = incomplete_row; board.grid[i - 2] = complete_row; let result = board.find_completed_row_indexes(); let expected = vec![i, i - 2]; 
assert_eq!(result, expected); } #[test] fn test_remove_completed_rows_simple() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; let n = board.remove_completed_rows(); assert_eq!(n, 1); assert_eq!(board.grid, Board::create_empty_grid()); } #[test] fn test_remove_completed_rows_moves_down() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; board.grid[i-1][0] = CellState::Block(RED); let n = board.remove_completed_rows(); let mut expected_grid = Board::create_empty_grid(); expected_grid[i][0] = CellState::Block(RED); assert_eq!(n, 1); assert_eq!(board.grid, expected_grid); } #[test] fn test_remove_completed_rows_moves_two_down() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; board.grid[i-1][0] = CellState::Block(RED); board.grid[i-2] = complete_row; board.grid[i-3][1] = CellState::Block(RED); let n = board.remove_completed_rows(); let mut expected_grid = Board::create_empty_grid(); expected_grid[i][0] = CellState::Block(RED); expected_grid[i-1][1] = CellState::Block(RED); assert_eq!(n, 2); assert_eq!(board.grid, expected_grid); } }
fn create_empty_grid() -> VecDeque<GridRow> { let mut grid = VecDeque::with_capacity(HEIGHT_IN_BLOCKS as usize); for _ in 0..HEIGHT_IN_BLOCKS { grid.push_back(Self::create_empty_row()); } grid }
function_block-full_function
[ { "content": "pub fn set_ui(ref mut ui: UICell, game: &mut Rustris) {\n\n Canvas::new().flow_right(&[\n\n (LEFT_COLUMN, Canvas::new().color(color::DARK_CHARCOAL).pad(20.0)),\n\n (MIDDLE_COLUMN, Canvas::new().color(color::TRANSPARENT).length(300.0)),\n\n (RIGHT_COLUMN, Canvas::new().color...
Rust
src/text_input.rs
khyperia/scopie
af97b9e1286583c095f3e5f0c2665bdb326f8fe6
use crate::Result; use glutin::{self, event::VirtualKeyCode as Key}; use khygl::{render_text::TextRenderer, render_texture::TextureRenderer, Rect}; use std::{convert::TryInto, mem::replace}; pub struct TextInput { old_inputs: Vec<String>, old_input_index: usize, input_text: String, message: String, exec: bool, okay: bool, } impl TextInput { pub fn new() -> Self { Self { old_inputs: Vec::new(), old_input_index: 0, input_text: String::new(), message: String::new(), okay: true, exec: false, } } pub fn try_get_exec_cmd(&mut self) -> Option<String> { if self.exec { self.exec = false; let result = replace(&mut self.input_text, String::new()); if self.old_inputs.last() != Some(&result) { self.old_inputs.push(result.clone()); if self.old_inputs.len() > 100 { self.old_inputs.remove(0); } } self.old_input_index = self.old_inputs.len(); Some(result) } else { None } } pub fn set_exec_result(&mut self, message: String, okay: bool) { self.message = message; self.okay = okay; } pub fn key_down(&mut self, key: Key) { match key { Key::Back => { self.old_input_index = self.old_inputs.len(); self.input_text.pop(); } Key::Return => self.exec = true, Key::Up => { if self.old_input_index > 0 { self.old_input_index -= 1; } self.set_input_text(); } Key::Down => { self.old_input_index += 1; if self.old_input_index > self.old_inputs.len() { self.old_input_index = self.old_inputs.len(); } self.set_input_text(); } _ => (), } } fn set_input_text(&mut self) { self.input_text = self .old_inputs .get(self.old_input_index) .cloned() .unwrap_or_default(); } pub fn received_character(&mut self, ch: char) { if ch >= ' ' { self.old_input_index = self.old_inputs.len(); self.input_text.push(ch); } } pub fn render( &mut self, texture_renderer: &TextureRenderer, text_renderer: &mut TextRenderer, screen_size: (usize, usize), ) -> Result<usize> { let input_pos_y = screen_size.1 as isize - text_renderer.spacing as isize - 1; let input_pos_y = input_pos_y.try_into().unwrap_or(0); let input_pos = (10, 
input_pos_y); text_renderer.render( texture_renderer, &self.input_text, [1.0, 1.0, 1.0, 1.0], input_pos, screen_size, )?; let error_pos_y = screen_size.1 as isize - 2 * text_renderer.spacing as isize - 1; let error_pos_y = error_pos_y.try_into().unwrap_or(0); let error_pos = (10, error_pos_y); text_renderer.render( texture_renderer, &self.message, [1.0, 1.0, 1.0, 1.0], error_pos, screen_size, )?; let command_color = if self.okay { [0.5, 0.5, 0.5, 1.0] } else { [1.0, 0.5, 0.5, 1.0] }; texture_renderer.rect( Rect::new( input_pos.0, input_pos.1, (screen_size.0 as isize - input_pos.0 as isize * 2) .try_into() .unwrap_or(2), text_renderer.spacing, ), command_color, (screen_size.0 as f32, screen_size.1 as f32), )?; Ok(if self.message.is_empty() { input_pos_y } else { error_pos_y }) } }
use crate::Result; use glutin::{self, event::VirtualKeyCode as Key}; use khygl::{render_text::TextRenderer, render_texture::TextureRenderer, Rect}; use std::{convert::TryInto, mem::replace}; pub struct TextInput { old_inputs: Vec<String>, old_input_index: usize, input_text: String, message: String, exec: bool, okay: bool, } impl TextInput { pub fn new() -> Self { Self { old_inputs: Vec::new(), old_input_index: 0, input_text: String::new(), message: String::new(), okay: true, exec: false, } } pub fn try_get_exec_cmd(&mut self) -> Option<String> { if self.exec { self.exec = false; let result = replace(&mut self.input_text, String::new()); if self.old_inputs.last() != Some(&result) { self.old_inputs.push(result.clone()); if self.old_inputs.len() > 100 { self.old_inputs.remove(0); } } self.old_input_index = self.old_inputs.len(); Some(result) } else { None } } pub fn set_exec_result(&mut self, message: String, okay: bool) { self.message = message; self.okay = okay; } pub fn key_down(&mut self, key: Key) { match key { Key::Back => { self.old_input_index = self.old_inputs.len(); self.input_text.pop(); } Key::Return => self.exec = true, Key::Up => { if self.old_input_index > 0 { self.old_input_index -= 1; } self.set_input_text(); } Key::Down => { self.old_input_index += 1; if self.old_input_index > self.old_inputs.len() { self.old_input_index = self.old_inputs.len(); } self.set_input_text(); } _ => (), } } fn set_input_text(&mut self) { self.input_text = self .old_inputs .get(self.old_input_index) .cloned() .unwrap_or_default(); } pub fn received_character(&mut self, ch: char) { if ch >= ' ' { self.old_input_index = self.old_inputs.len(); self.input_text.push(ch); } }
}
pub fn render( &mut self, texture_renderer: &TextureRenderer, text_renderer: &mut TextRenderer, screen_size: (usize, usize), ) -> Result<usize> { let input_pos_y = screen_size.1 as isize - text_renderer.spacing as isize - 1; let input_pos_y = input_pos_y.try_into().unwrap_or(0); let input_pos = (10, input_pos_y); text_renderer.render( texture_renderer, &self.input_text, [1.0, 1.0, 1.0, 1.0], input_pos, screen_size, )?; let error_pos_y = screen_size.1 as isize - 2 * text_renderer.spacing as isize - 1; let error_pos_y = error_pos_y.try_into().unwrap_or(0); let error_pos = (10, error_pos_y); text_renderer.render( texture_renderer, &self.message, [1.0, 1.0, 1.0, 1.0], error_pos, screen_size, )?; let command_color = if self.okay { [0.5, 0.5, 0.5, 1.0] } else { [1.0, 0.5, 0.5, 1.0] }; texture_renderer.rect( Rect::new( input_pos.0, input_pos.1, (screen_size.0 as isize - input_pos.0 as isize * 2) .try_into() .unwrap_or(2), text_renderer.spacing, ), command_color, (screen_size.0 as f32, screen_size.1 as f32), )?; Ok(if self.message.is_empty() { input_pos_y } else { error_pos_y }) }
function_block-full_function
[ { "content": "pub fn autoconnect(live: bool) -> Result<Camera> {\n\n init_qhyccd_resource();\n\n let mut best = None;\n\n for id in 0..Camera::num_cameras() {\n\n let info = CameraInfo::new(id)?;\n\n let is163 = info.name.contains(\"163\");\n\n if best.is_none() || is163 {\n\n ...
Rust
src/app/lua/render.rs
nokevair/nokevair
40494bbe843394f3757f0f525bdcac5acfda4538
use hyper::{Response, Body}; use rlua::Value as LV; use std::fs; use std::path::PathBuf; use crate::conv; use crate::utils::SourceChain; use super::{Ctx, Version, Result, AppState}; pub fn with_entries<F: FnMut(String, PathBuf)>(app_ctx: &Ctx, mut f: F) { let dir = match fs::read_dir(&app_ctx.cfg.paths.render) { Ok(dir) => dir, Err(e) => { app_ctx.log.err(format_args!("failed to read render dir: {}", e)); return } }; for entry in dir { let entry = match entry { Ok(entry) => entry, Err(e) => { app_ctx.log.err(format_args!("failed while reading render dir: {}", e)); continue } }; let path = entry.path(); if !path.is_dir() { continue } let name = match entry.file_name().to_str() { Some(s) => s.to_string(), None => { app_ctx.log.err(format_args!( "failed to load focus at '{}': invalid UTF-8", path.display())); continue } }; f(name, path); } } impl super::Backend { pub(super) fn unload_focuses(&mut self) { self.focuses.clear(); self.lua.context(|ctx| ctx.expire_registry_values()); } pub(super) fn load_focuses(&mut self, app_ctx: &Ctx) { with_entries(app_ctx, |name, mut path| { path.push("focus.lua"); let code = match fs::read_to_string(&path) { Ok(code) => code, Err(e) => { app_ctx.log.err(format_args!( "failed to read file '{}': {}", path.display(), e )); return } }; let focuses = &mut self.focuses; let res = self.lua.context(|ctx| { let focus_fn = ctx.load(&code) .eval::<rlua::Function>()?; let key = ctx.create_registry_value(focus_fn)?; focuses.insert(name, key); Ok::<(), rlua::Error>(()) }); if let Err(e) = res { app_ctx.log.err(format_args!( "lua ('{}' -> focus):\n{}", path.display(), SourceChain(e), )); } }); let len = self.focuses.len(); app_ctx.log.info(format_args!( "loaded {} focus function{}", len, if len == 1 { "" } else { "s" } )); } pub(super) fn render( &mut self, ver: Version, name: &str, query_param: Option<String>, app_state: &AppState, ) -> Result<Response<Body>> { macro_rules! 
render_call { () => { match &query_param { Some(param) => format!("'{}' with arg '{}'", name, param), None => format!("'{}'", name), } } } self.ensure_loaded(ver, &app_state.ctx); self.lua.context(|ctx| { let focus_fn_key = self.focuses.get(name) .ok_or(()) .or_else(|_| app_state.error_404())?; let focus_fn: rlua::Function = ctx.registry_value(focus_fn_key) .or_else(|_| app_state.error_500("invalid focus fn key"))?; let state_key = self.state_versions.get(ver.as_usize()) .ok_or(()) .or_else(|_| app_state.error_404_no_state(ver))?; let state: LV = ctx.registry_value(state_key) .or_else(|_| app_state.error_500("invalid state key"))?; let ctx: Option<rlua::Table> = focus_fn.call((state, query_param.clone())) .or_else(|e| app_state.error_500(format_args!( "lua (focus {}):\n{}", render_call!(), SourceChain(e), )))?; let ctx = ctx.ok_or(()).or_else(|_| app_state.error_404())?; let ctx = conv::lua_to_json(LV::Table(ctx)) .or_else(|e| app_state.error_500(format_args!( "lua (focus {} -> JSON):\n{}", render_call!(), SourceChain(e) )))?; let ctx = tera::Context::from_serialize(ctx) .or_else(|e| app_state.error_500(format_args!( "tera (focus {} -> Tera ctx):\n{}", render_call!(), SourceChain(e), )))?; let template = format!("render/{}.html", name); app_state.render(&template, &ctx) }) } }
use hyper::{Response, Body}; use rlua::Value as LV; use std::fs; use std::path::PathBuf; use crate::conv; use crate::utils::SourceChain; use super::{Ctx, Version, Result, AppState}; pub fn with_entries<F: FnMut(String, PathBuf)>(app_ctx: &Ctx, mut f: F) { let dir = match fs::read_dir(&app_ctx.cfg.paths.render) { Ok(dir) => dir, Err(e) => { app_ctx.log.err(format_args!("failed to read render dir: {}", e)); return } }; for entry in dir { let entry = match entry { Ok(entry) => entry, Err(e) => { app_ctx.log.err(format_args!("failed while reading render dir: {}", e)); continue } }; let path = entry.path(); if !path.is_dir() { continue } let name = match entry.file_name().to_str() { Some(s) => s.to_string(), None => { app_ctx.log.err(format_args!( "failed to load focus at '{}': invalid UTF-8", path.display())); continue } }; f(name, path); } } impl super::Backend { pub(super) fn unload_focuses(&mut self) { self.focuses.clear()
ocus.lua"); let code = match fs::read_to_string(&path) { Ok(code) => code, Err(e) => { app_ctx.log.err(format_args!( "failed to read file '{}': {}", path.display(), e )); return } }; let focuses = &mut self.focuses; let res = self.lua.context(|ctx| { let focus_fn = ctx.load(&code) .eval::<rlua::Function>()?; let key = ctx.create_registry_value(focus_fn)?; focuses.insert(name, key); Ok::<(), rlua::Error>(()) }); if let Err(e) = res { app_ctx.log.err(format_args!( "lua ('{}' -> focus):\n{}", path.display(), SourceChain(e), )); } }); let len = self.focuses.len(); app_ctx.log.info(format_args!( "loaded {} focus function{}", len, if len == 1 { "" } else { "s" } )); } pub(super) fn render( &mut self, ver: Version, name: &str, query_param: Option<String>, app_state: &AppState, ) -> Result<Response<Body>> { macro_rules! render_call { () => { match &query_param { Some(param) => format!("'{}' with arg '{}'", name, param), None => format!("'{}'", name), } } } self.ensure_loaded(ver, &app_state.ctx); self.lua.context(|ctx| { let focus_fn_key = self.focuses.get(name) .ok_or(()) .or_else(|_| app_state.error_404())?; let focus_fn: rlua::Function = ctx.registry_value(focus_fn_key) .or_else(|_| app_state.error_500("invalid focus fn key"))?; let state_key = self.state_versions.get(ver.as_usize()) .ok_or(()) .or_else(|_| app_state.error_404_no_state(ver))?; let state: LV = ctx.registry_value(state_key) .or_else(|_| app_state.error_500("invalid state key"))?; let ctx: Option<rlua::Table> = focus_fn.call((state, query_param.clone())) .or_else(|e| app_state.error_500(format_args!( "lua (focus {}):\n{}", render_call!(), SourceChain(e), )))?; let ctx = ctx.ok_or(()).or_else(|_| app_state.error_404())?; let ctx = conv::lua_to_json(LV::Table(ctx)) .or_else(|e| app_state.error_500(format_args!( "lua (focus {} -> JSON):\n{}", render_call!(), SourceChain(e) )))?; let ctx = tera::Context::from_serialize(ctx) .or_else(|e| app_state.error_500(format_args!( "tera (focus {} -> Tera ctx):\n{}", 
render_call!(), SourceChain(e), )))?; let template = format!("render/{}.html", name); app_state.render(&template, &ctx) }) } }
; self.lua.context(|ctx| ctx.expire_registry_values()); } pub(super) fn load_focuses(&mut self, app_ctx: &Ctx) { with_entries(app_ctx, |name, mut path| { path.push("f
random
[]
Rust
src/bin/debugger.rs
Hexilee/tifs
184363cf1cc8ece62421d376d8cd97eb001b25a8
use std::fmt::Debug; use std::io::{stdin, stdout, BufRead, BufReader, Write}; use anyhow::{anyhow, Result}; use clap::{crate_version, App, Arg}; use tifs::fs::inode::Inode; use tifs::fs::key::{ScopedKey, ROOT_INODE}; use tifs::fs::tikv_fs::TiFs; use tifs::fs::transaction::Txn; use tikv_client::TransactionClient; use tracing_subscriber::EnvFilter; #[async_std::main] async fn main() -> Result<()> { let matches = App::new("TiFS Debugger") .version(crate_version!()) .author("Hexi Lee") .arg( Arg::with_name("pd") .long("pd-endpoints") .multiple(true) .value_name("ENDPOINTS") .default_value("127.0.0.1:2379") .help("set all pd endpoints of the tikv cluster") .takes_value(true), ) .get_matches(); tracing_subscriber::fmt() .with_env_filter(EnvFilter::from_default_env()) .try_init() .unwrap(); let endpoints: Vec<&str> = matches .values_of("pd") .unwrap_or_default() .to_owned() .collect(); let console = Console::construct(endpoints).await?; loop { match console.interact().await { Ok(true) => break Ok(()), Err(err) => eprintln!("{}", err), _ => continue, } } } struct Console { pd_endpoints: Vec<String>, client: TransactionClient, } impl Console { async fn construct<S>(pd_endpoints: Vec<S>) -> Result<Self> where S: Clone + Debug + Into<String>, { let client = TransactionClient::new_with_config(pd_endpoints.clone(), Default::default()) .await .map_err(|err| anyhow!("{}", err))?; Ok(Self { client, pd_endpoints: pd_endpoints.into_iter().map(Into::into).collect(), }) } async fn interact(&self) -> Result<bool> { let mut txn = Txn::begin_optimistic( &self.client, TiFs::DEFAULT_BLOCK_SIZE, None, TiFs::MAX_NAME_LEN, ) .await?; match self.interact_with_txn(&mut txn).await { Ok(exit) => { txn.commit().await?; Ok(exit) } Err(err) => { txn.rollback().await?; Err(err) } } } async fn interact_with_txn(&self, txn: &mut Txn) -> Result<bool> { print!("{:?}> ", &self.pd_endpoints); stdout().flush()?; let mut buffer = String::new(); BufReader::new(stdin()).read_line(&mut buffer)?; let commands: 
Vec<&str> = buffer.split(" ").map(|seg| seg.trim()).collect(); if commands.len() == 0 { return Ok(false); } match commands[0] { "exit" => return Ok(true), "reset" => self.reset(txn).await?, "get" => self.get_block(txn, &commands[1..]).await?, "get_str" => self.get_block_str(txn, &commands[1..]).await?, "get_attr" => self.get_attr(txn, &commands[1..]).await?, "get_raw" => self.get_attr_raw(txn, &commands[1..]).await?, "get_inline" => self.get_inline(txn, &commands[1..]).await?, "rm" => self.delete_block(txn, &commands[1..]).await?, cmd => return Err(anyhow!("unknow command `{}`", cmd)), } Ok(false) } async fn reset(&self, txn: &mut Txn) -> Result<()> { let next_inode = txn .read_meta() .await? .map(|meta| meta.inode_next) .unwrap_or(ROOT_INODE); for inode in txn .scan( ScopedKey::inode_range(ROOT_INODE..next_inode), (next_inode - ROOT_INODE) as u32, ) .await? .map(|pair| Inode::deserialize(pair.value())) { let inode = inode?; txn.clear_data(inode.ino).await?; txn.remove_inode(inode.ino).await?; } txn.delete(ScopedKey::meta()).await?; Ok(()) } async fn get_block(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 2 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn .get(ScopedKey::block(args[0].parse()?, args[1].parse()?)) .await? { Some(value) => println!("{:?}", &value[args.get(2).unwrap_or(&"0").parse()?..]), None => println!("Not Found"), } Ok(()) } async fn get_block_str(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 2 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn .get(ScopedKey::block(args[0].parse()?, args[1].parse()?)) .await? { Some(value) => println!("{:?}", String::from_utf8_lossy(&value)), None => println!("Not Found"), } Ok(()) } async fn get_attr(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 1 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn.get(ScopedKey::inode(args[0].parse()?)).await? 
{ Some(value) => println!("{:?}", Inode::deserialize(&value)?), None => println!("Not Found"), } Ok(()) } async fn get_attr_raw(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 1 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn.get(ScopedKey::inode(args[0].parse()?)).await? { Some(value) => println!("{}", &*String::from_utf8_lossy(&value)), None => println!("Not Found"), } Ok(()) } async fn get_inline(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 1 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn.get(ScopedKey::inode(args[0].parse()?)).await? { Some(value) => { let inline = Inode::deserialize(&value)? .inline_data .unwrap_or_else(Vec::new); println!("{}", String::from_utf8_lossy(&inline)); } None => println!("Not Found"), } Ok(()) } async fn delete_block(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 2 { return Err(anyhow!("invalid arguments `{:?}`", args)); } txn.delete(ScopedKey::block(args[0].parse()?, args[1].parse()?)) .await?; Ok(()) } }
use std::fmt::Debug; use std::io::{stdin, stdout, BufRead, BufReader, Write}; use anyhow::{anyhow, Result}; use clap::{crate_version, App, Arg}; use tifs::fs::inode::Inode; use tifs::fs::key::{ScopedKey, ROOT_INODE}; use tifs::fs::tikv_fs::TiFs; use tifs::fs::transaction::Txn; use tikv_client::TransactionClient; use tracing_subscriber::EnvFilter; #[async_std::main] async fn main() -> Result<()> { let matches = App::new("TiFS Debugger") .version(crate_version!()) .author("Hexi Lee") .arg( Arg::with_name("pd") .long("pd-endpoints") .multiple(true) .value
=> return Ok(true), "reset" => self.reset(txn).await?, "get" => self.get_block(txn, &commands[1..]).await?, "get_str" => self.get_block_str(txn, &commands[1..]).await?, "get_attr" => self.get_attr(txn, &commands[1..]).await?, "get_raw" => self.get_attr_raw(txn, &commands[1..]).await?, "get_inline" => self.get_inline(txn, &commands[1..]).await?, "rm" => self.delete_block(txn, &commands[1..]).await?, cmd => return Err(anyhow!("unknow command `{}`", cmd)), } Ok(false) } async fn reset(&self, txn: &mut Txn) -> Result<()> { let next_inode = txn .read_meta() .await? .map(|meta| meta.inode_next) .unwrap_or(ROOT_INODE); for inode in txn .scan( ScopedKey::inode_range(ROOT_INODE..next_inode), (next_inode - ROOT_INODE) as u32, ) .await? .map(|pair| Inode::deserialize(pair.value())) { let inode = inode?; txn.clear_data(inode.ino).await?; txn.remove_inode(inode.ino).await?; } txn.delete(ScopedKey::meta()).await?; Ok(()) } async fn get_block(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 2 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn .get(ScopedKey::block(args[0].parse()?, args[1].parse()?)) .await? { Some(value) => println!("{:?}", &value[args.get(2).unwrap_or(&"0").parse()?..]), None => println!("Not Found"), } Ok(()) } async fn get_block_str(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 2 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn .get(ScopedKey::block(args[0].parse()?, args[1].parse()?)) .await? { Some(value) => println!("{:?}", String::from_utf8_lossy(&value)), None => println!("Not Found"), } Ok(()) } async fn get_attr(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 1 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn.get(ScopedKey::inode(args[0].parse()?)).await? 
{ Some(value) => println!("{:?}", Inode::deserialize(&value)?), None => println!("Not Found"), } Ok(()) } async fn get_attr_raw(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 1 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn.get(ScopedKey::inode(args[0].parse()?)).await? { Some(value) => println!("{}", &*String::from_utf8_lossy(&value)), None => println!("Not Found"), } Ok(()) } async fn get_inline(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 1 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn.get(ScopedKey::inode(args[0].parse()?)).await? { Some(value) => { let inline = Inode::deserialize(&value)? .inline_data .unwrap_or_else(Vec::new); println!("{}", String::from_utf8_lossy(&inline)); } None => println!("Not Found"), } Ok(()) } async fn delete_block(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 2 { return Err(anyhow!("invalid arguments `{:?}`", args)); } txn.delete(ScopedKey::block(args[0].parse()?, args[1].parse()?)) .await?; Ok(()) } }
_name("ENDPOINTS") .default_value("127.0.0.1:2379") .help("set all pd endpoints of the tikv cluster") .takes_value(true), ) .get_matches(); tracing_subscriber::fmt() .with_env_filter(EnvFilter::from_default_env()) .try_init() .unwrap(); let endpoints: Vec<&str> = matches .values_of("pd") .unwrap_or_default() .to_owned() .collect(); let console = Console::construct(endpoints).await?; loop { match console.interact().await { Ok(true) => break Ok(()), Err(err) => eprintln!("{}", err), _ => continue, } } } struct Console { pd_endpoints: Vec<String>, client: TransactionClient, } impl Console { async fn construct<S>(pd_endpoints: Vec<S>) -> Result<Self> where S: Clone + Debug + Into<String>, { let client = TransactionClient::new_with_config(pd_endpoints.clone(), Default::default()) .await .map_err(|err| anyhow!("{}", err))?; Ok(Self { client, pd_endpoints: pd_endpoints.into_iter().map(Into::into).collect(), }) } async fn interact(&self) -> Result<bool> { let mut txn = Txn::begin_optimistic( &self.client, TiFs::DEFAULT_BLOCK_SIZE, None, TiFs::MAX_NAME_LEN, ) .await?; match self.interact_with_txn(&mut txn).await { Ok(exit) => { txn.commit().await?; Ok(exit) } Err(err) => { txn.rollback().await?; Err(err) } } } async fn interact_with_txn(&self, txn: &mut Txn) -> Result<bool> { print!("{:?}> ", &self.pd_endpoints); stdout().flush()?; let mut buffer = String::new(); BufReader::new(stdin()).read_line(&mut buffer)?; let commands: Vec<&str> = buffer.split(" ").map(|seg| seg.trim()).collect(); if commands.len() == 0 { return Ok(false); } match commands[0] { "exit"
random
[ { "content": "fn default_tls_config_path() -> anyhow::Result<PathBuf> {\n\n Ok(DEFAULT_TLS_CONFIG_PATH.parse()?)\n\n}\n\n\n\nmacro_rules! define_options {\n\n {\n\n $name: ident ($type: ident) {\n\n $(builtin $($optname: literal)? $opt: ident,)*\n\n $(define $($newoptname: lit...
Rust
src/service.rs
wpbrown/ntex-mqtt
be783119479e532705848ee224297a19ec524184
use std::task::{Context, Poll}; use std::{fmt, future::Future, marker::PhantomData, pin::Pin, rc::Rc}; use ntex::codec::{AsyncRead, AsyncWrite, Decoder, Encoder}; use ntex::service::{IntoServiceFactory, Service, ServiceFactory}; use ntex::time::{Millis, Seconds, Sleep}; use ntex::util::{select, Either, Pool}; use super::io::{DispatchItem, Dispatcher, State, Timer}; type ResponseItem<U> = Option<<U as Encoder>::Item>; pub(crate) struct FramedService<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, disconnect_timeout: Seconds, time: Timer, pool: Pool, _t: PhantomData<(St, Io, Codec)>, } impl<St, C, T, Io, Codec> FramedService<St, C, T, Io, Codec> { pub(crate) fn new(connect: C, service: T, pool: Pool, disconnect_timeout: Seconds) -> Self { FramedService { pool, connect, disconnect_timeout, handler: Rc::new(service), time: Timer::new(Millis::ONE_SEC), _t: PhantomData, } } } impl<St, C, T, Io, Codec> ServiceFactory for FramedService<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: ServiceFactory<Config = (), Request = Io, Response = (Io, State, Codec, St, Seconds)>, C::Error: fmt::Debug, C::Future: 'static, <C::Service as Service>::Future: 'static, T: ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Config = (); type Request = Io; type Response = (); type Error = C::Error; type InitError = C::InitError; type Service = FramedServiceImpl<St, C::Service, T, Io, Codec>; type Future = Pin<Box<dyn Future<Output = Result<Self::Service, Self::InitError>>>>; fn new_service(&self, _: ()) -> Self::Future { let fut = self.connect.new_service(()); let handler = self.handler.clone(); let disconnect_timeout = self.disconnect_timeout; let time = self.time.clone(); let pool = self.pool.clone(); 
Box::pin(async move { Ok(FramedServiceImpl { handler, disconnect_timeout, pool, time, connect: fut.await?, _t: PhantomData, }) }) } } pub(crate) struct FramedServiceImpl<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, disconnect_timeout: Seconds, pool: Pool, time: Timer, _t: PhantomData<(St, Io, Codec)>, } impl<St, C, T, Io, Codec> Service for FramedServiceImpl<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: Service<Request = Io, Response = (Io, State, Codec, St, Seconds)>, C::Error: fmt::Debug, C::Future: 'static, T: ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Request = Io; type Response = (); type Error = C::Error; type Future = Pin<Box<dyn Future<Output = Result<(), Self::Error>>>>; #[inline] fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { let ready1 = self.connect.poll_ready(cx)?.is_ready(); let ready2 = self.pool.poll_ready(cx).is_ready(); if ready1 && ready2 { Poll::Ready(Ok(())) } else { Poll::Pending } } #[inline] fn poll_shutdown(&self, cx: &mut Context<'_>, is_error: bool) -> Poll<()> { self.connect.poll_shutdown(cx, is_error) } #[inline] fn call(&self, req: Io) -> Self::Future { log::trace!("Start connection handshake"); let handler = self.handler.clone(); let timeout = self.disconnect_timeout; let handshake = self.connect.call(req); let time = self.time.clone(); Box::pin(async move { let (io, st, codec, session, keepalive) = handshake.await.map_err(|e| { log::trace!("Connection handshake failed: {:?}", e); e })?; log::trace!("Connection handshake succeeded"); let handler = handler.new_service(session).await?; log::trace!("Connection handler is created, starting dispatcher"); Dispatcher::with(io, st, codec, handler, time) 
.keepalive_timeout(keepalive) .disconnect_timeout(timeout) .await }) } } pub(crate) struct FramedService2<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, disconnect_timeout: Seconds, pool: Pool, time: Timer, _t: PhantomData<(St, Io, Codec)>, } impl<St, C, T, Io, Codec> FramedService2<St, C, T, Io, Codec> { pub(crate) fn new(connect: C, service: T, pool: Pool, disconnect_timeout: Seconds) -> Self { FramedService2 { connect, pool, disconnect_timeout, handler: Rc::new(service), time: Timer::new(Millis::ONE_SEC), _t: PhantomData, } } } impl<St, C, T, Io, Codec> ServiceFactory for FramedService2<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: ServiceFactory< Config = (), Request = (Io, State), Response = (Io, State, Codec, St, Seconds), >, C::Error: fmt::Debug, C::Future: 'static, <C::Service as Service>::Future: 'static, T: ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Config = (); type Request = (Io, State, Option<Sleep>); type Response = (); type Error = C::Error; type InitError = C::InitError; type Service = FramedServiceImpl2<St, C::Service, T, Io, Codec>; type Future = Pin<Box<dyn Future<Output = Result<Self::Service, Self::InitError>>>>; fn new_service(&self, _: ()) -> Self::Future { let fut = self.connect.new_service(()); let handler = self.handler.clone(); let disconnect_timeout = self.disconnect_timeout; let time = self.time.clone(); let pool = self.pool.clone(); Box::pin(async move { Ok(FramedServiceImpl2 { handler, disconnect_timeout, time, pool, connect: fut.await?, _t: PhantomData, }) }) } } pub(crate) struct FramedServiceImpl2<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, pool: Pool, disconnect_timeout: Seconds, time: Timer, _t: PhantomData<(St, Io, 
Codec)>, } impl<St, C, T, Io, Codec> Service for FramedServiceImpl2<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: Service<Request = (Io, State), Response = (Io, State, Codec, St, Seconds)>, C::Error: fmt::Debug, C::Future: 'static, T: ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Request = (Io, State, Option<Sleep>); type Response = (); type Error = C::Error; type Future = Pin<Box<dyn Future<Output = Result<(), Self::Error>>>>; #[inline] fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { let ready1 = self.connect.poll_ready(cx)?.is_ready(); let ready2 = self.pool.poll_ready(cx).is_ready(); if ready1 && ready2 { Poll::Ready(Ok(())) } else { Poll::Pending } } #[inline] fn poll_shutdown(&self, cx: &mut Context<'_>, is_error: bool) -> Poll<()> { self.connect.poll_shutdown(cx, is_error) } #[inline] fn call(&self, (req, state, delay): (Io, State, Option<Sleep>)) -> Self::Future { log::trace!("Start connection handshake"); let handler = self.handler.clone(); let timeout = self.disconnect_timeout; let handshake = self.connect.call((req, state)); let time = self.time.clone(); Box::pin(async move { let (io, state, codec, ka, handler) = if let Some(delay) = delay { let res = select( delay, Box::pin(async { let (io, state, codec, st, ka) = handshake.await.map_err(|e| { log::trace!("Connection handshake failed: {:?}", e); e })?; log::trace!("Connection handshake succeeded"); let handler = handler.new_service(st).await?; log::trace!("Connection handler is created, starting dispatcher"); Ok::<_, C::Error>((io, state, codec, ka, handler)) }), ) .await; match res { Either::Left(_) => { log::warn!("Handshake timed out"); return Ok(()); } Either::Right(item) => 
item?, } } else { let (io, state, codec, st, ka) = handshake.await.map_err(|e| { log::trace!("Connection handshake failed: {:?}", e); e })?; log::trace!("Connection handshake succeeded"); let handler = handler.new_service(st).await?; log::trace!("Connection handler is created, starting dispatcher"); (io, state, codec, ka, handler) }; Dispatcher::with(io, state, codec, handler, time) .keepalive_timeout(ka) .disconnect_timeout(timeout) .await }) } }
use std::task::{Context, Poll}; use std::{fmt, future::Future, marker::PhantomData, pin::Pin, rc::Rc}; use ntex::codec::{AsyncRead, AsyncWrite, Decoder, Encoder}; use ntex::service::{IntoServiceFactory, Service, ServiceFactory}; use ntex::time::{Millis, Seconds, Sleep}; use ntex::util::{select, Either, Pool}; use super::io::{DispatchItem, Dispatcher, State, Timer}; type ResponseItem<U> = Option<<U as Encoder>::Item>; pub(crate) struct FramedService<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, disconnect_timeout: Seconds, time: Timer, pool: Pool, _t: PhantomData<(St, Io, Codec)>, } impl<St, C, T, Io, Codec> FramedService<St, C, T, Io, Codec> { pub(crate) fn new(connect: C, service: T, pool: Pool, disconnect_timeout: Seconds) -> Self { FramedService { pool, connect, disconnect_timeout, handler: Rc::new(service), time: Timer::new(Millis::ONE_SEC), _t: PhantomData, } } } impl<St, C, T, Io, Codec> ServiceFactory for FramedService<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: ServiceFactory<Config = (), Request = Io, Response = (Io, State, Codec, St, Seconds)>, C::Error: fmt::Debug, C::Future: 'static, <C::Service as Service>::Future: 'static, T: ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Config = (); type Request = Io; type Response = (); type Error = C::Error; type InitError = C::InitError; type Service = FramedServiceImpl<St, C::Service, T, Io, Codec>; type Future = Pin<Box<dyn Future<Output = Result<Self::Service, Self::InitError>>>>; fn new_service(&self, _: ()) -> Self::Future { let fut = self.connect.new_service(()); let handler = self.handler.clone(); let disconnect_timeout = self.disconnect_timeout; let time = self.time.clone(); let pool = self.pool.clone(); 
Box::pin(async move { Ok(FramedServiceImpl { handler, disconnect_timeout, pool, time, connect: fut.await?, _t: PhantomData, }) }) } } pub(crate) struct FramedServiceImpl<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, disconnect_timeout: Seconds, pool: Poo
{:?}", e); e })?; log::trace!("Connection handshake succeeded"); let handler = handler.new_service(st).await?; log::trace!("Connection handler is created, starting dispatcher"); (io, state, codec, ka, handler) }; Dispatcher::with(io, state, codec, handler, time) .keepalive_timeout(ka) .disconnect_timeout(timeout) .await }) } }
l, time: Timer, _t: PhantomData<(St, Io, Codec)>, } impl<St, C, T, Io, Codec> Service for FramedServiceImpl<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: Service<Request = Io, Response = (Io, State, Codec, St, Seconds)>, C::Error: fmt::Debug, C::Future: 'static, T: ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Request = Io; type Response = (); type Error = C::Error; type Future = Pin<Box<dyn Future<Output = Result<(), Self::Error>>>>; #[inline] fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { let ready1 = self.connect.poll_ready(cx)?.is_ready(); let ready2 = self.pool.poll_ready(cx).is_ready(); if ready1 && ready2 { Poll::Ready(Ok(())) } else { Poll::Pending } } #[inline] fn poll_shutdown(&self, cx: &mut Context<'_>, is_error: bool) -> Poll<()> { self.connect.poll_shutdown(cx, is_error) } #[inline] fn call(&self, req: Io) -> Self::Future { log::trace!("Start connection handshake"); let handler = self.handler.clone(); let timeout = self.disconnect_timeout; let handshake = self.connect.call(req); let time = self.time.clone(); Box::pin(async move { let (io, st, codec, session, keepalive) = handshake.await.map_err(|e| { log::trace!("Connection handshake failed: {:?}", e); e })?; log::trace!("Connection handshake succeeded"); let handler = handler.new_service(session).await?; log::trace!("Connection handler is created, starting dispatcher"); Dispatcher::with(io, st, codec, handler, time) .keepalive_timeout(keepalive) .disconnect_timeout(timeout) .await }) } } pub(crate) struct FramedService2<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, disconnect_timeout: Seconds, pool: Pool, time: Timer, _t: PhantomData<(St, Io, Codec)>, } impl<St, C, T, Io, 
Codec> FramedService2<St, C, T, Io, Codec> { pub(crate) fn new(connect: C, service: T, pool: Pool, disconnect_timeout: Seconds) -> Self { FramedService2 { connect, pool, disconnect_timeout, handler: Rc::new(service), time: Timer::new(Millis::ONE_SEC), _t: PhantomData, } } } impl<St, C, T, Io, Codec> ServiceFactory for FramedService2<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: ServiceFactory< Config = (), Request = (Io, State), Response = (Io, State, Codec, St, Seconds), >, C::Error: fmt::Debug, C::Future: 'static, <C::Service as Service>::Future: 'static, T: ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Config = (); type Request = (Io, State, Option<Sleep>); type Response = (); type Error = C::Error; type InitError = C::InitError; type Service = FramedServiceImpl2<St, C::Service, T, Io, Codec>; type Future = Pin<Box<dyn Future<Output = Result<Self::Service, Self::InitError>>>>; fn new_service(&self, _: ()) -> Self::Future { let fut = self.connect.new_service(()); let handler = self.handler.clone(); let disconnect_timeout = self.disconnect_timeout; let time = self.time.clone(); let pool = self.pool.clone(); Box::pin(async move { Ok(FramedServiceImpl2 { handler, disconnect_timeout, time, pool, connect: fut.await?, _t: PhantomData, }) }) } } pub(crate) struct FramedServiceImpl2<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, pool: Pool, disconnect_timeout: Seconds, time: Timer, _t: PhantomData<(St, Io, Codec)>, } impl<St, C, T, Io, Codec> Service for FramedServiceImpl2<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: Service<Request = (Io, State), Response = (Io, State, Codec, St, Seconds)>, C::Error: fmt::Debug, C::Future: 'static, T: 
ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Request = (Io, State, Option<Sleep>); type Response = (); type Error = C::Error; type Future = Pin<Box<dyn Future<Output = Result<(), Self::Error>>>>; #[inline] fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { let ready1 = self.connect.poll_ready(cx)?.is_ready(); let ready2 = self.pool.poll_ready(cx).is_ready(); if ready1 && ready2 { Poll::Ready(Ok(())) } else { Poll::Pending } } #[inline] fn poll_shutdown(&self, cx: &mut Context<'_>, is_error: bool) -> Poll<()> { self.connect.poll_shutdown(cx, is_error) } #[inline] fn call(&self, (req, state, delay): (Io, State, Option<Sleep>)) -> Self::Future { log::trace!("Start connection handshake"); let handler = self.handler.clone(); let timeout = self.disconnect_timeout; let handshake = self.connect.call((req, state)); let time = self.time.clone(); Box::pin(async move { let (io, state, codec, ka, handler) = if let Some(delay) = delay { let res = select( delay, Box::pin(async { let (io, state, codec, st, ka) = handshake.await.map_err(|e| { log::trace!("Connection handshake failed: {:?}", e); e })?; log::trace!("Connection handshake succeeded"); let handler = handler.new_service(st).await?; log::trace!("Connection handler is created, starting dispatcher"); Ok::<_, C::Error>((io, state, codec, ka, handler)) }), ) .await; match res { Either::Left(_) => { log::warn!("Handshake timed out"); return Ok(()); } Either::Right(item) => item?, } } else { let (io, state, codec, st, ka) = handshake.await.map_err(|e| { log::trace!("Connection handshake failed:
random
[ { "content": "struct DispatcherState<S: Service, U: Encoder + Decoder> {\n\n error: Option<IoDispatcherError<S::Error, <U as Encoder>::Error>>,\n\n base: usize,\n\n queue: VecDeque<ServiceResult<Result<S::Response, S::Error>>>,\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 0, "score": 2365...
Rust
imxrt1062-pac/imxrt1062-dmamux/src/chcfg.rs
Shock-1/teensy4-rs
effc3b290f1be3c7aef62a78e82dbfbc27aa6370
#[doc = "Reader of register CHCFG[%s]"] pub type R = crate::R<u32, super::CHCFG>; #[doc = "Writer for register CHCFG[%s]"] pub type W = crate::W<u32, super::CHCFG>; #[doc = "Register CHCFG[%s] `reset()`'s with value 0"] impl crate::ResetValue for super::CHCFG { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `SOURCE`"] pub type SOURCE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `SOURCE`"] pub struct SOURCE_W<'a> { w: &'a mut W, } impl<'a> SOURCE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x7f) | ((value as u32) & 0x7f); self.w } } #[doc = "DMA Channel Always Enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum A_ON_A { #[doc = "0: DMA Channel Always ON function is disabled"] A_ON_0 = 0, #[doc = "1: DMA Channel Always ON function is enabled"] A_ON_1 = 1, } impl From<A_ON_A> for bool { #[inline(always)] fn from(variant: A_ON_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `A_ON`"] pub type A_ON_R = crate::R<bool, A_ON_A>; impl A_ON_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> A_ON_A { match self.bits { false => A_ON_A::A_ON_0, true => A_ON_A::A_ON_1, } } #[doc = "Checks if the value of the field is `A_ON_0`"] #[inline(always)] pub fn is_a_on_0(&self) -> bool { *self == A_ON_A::A_ON_0 } #[doc = "Checks if the value of the field is `A_ON_1`"] #[inline(always)] pub fn is_a_on_1(&self) -> bool { *self == A_ON_A::A_ON_1 } } #[doc = "Write proxy for field `A_ON`"] pub struct A_ON_W<'a> { w: &'a mut W, } impl<'a> A_ON_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: A_ON_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "DMA Channel Always ON function is disabled"] #[inline(always)] pub fn a_on_0(self) -> &'a mut W { self.variant(A_ON_A::A_ON_0) } #[doc = "DMA Channel Always 
ON function is enabled"] #[inline(always)] pub fn a_on_1(self) -> &'a mut W { self.variant(A_ON_A::A_ON_1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29); self.w } } #[doc = "DMA Channel Trigger Enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum TRIG_A { #[doc = "0: Triggering is disabled. If triggering is disabled and ENBL is set, the DMA Channel will simply route the specified source to the DMA channel. (Normal mode)"] TRIG_0 = 0, #[doc = "1: Triggering is enabled. If triggering is enabled and ENBL is set, the DMA_CH_MUX is in Periodic Trigger mode."] TRIG_1 = 1, } impl From<TRIG_A> for bool { #[inline(always)] fn from(variant: TRIG_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `TRIG`"] pub type TRIG_R = crate::R<bool, TRIG_A>; impl TRIG_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> TRIG_A { match self.bits { false => TRIG_A::TRIG_0, true => TRIG_A::TRIG_1, } } #[doc = "Checks if the value of the field is `TRIG_0`"] #[inline(always)] pub fn is_trig_0(&self) -> bool { *self == TRIG_A::TRIG_0 } #[doc = "Checks if the value of the field is `TRIG_1`"] #[inline(always)] pub fn is_trig_1(&self) -> bool { *self == TRIG_A::TRIG_1 } } #[doc = "Write proxy for field `TRIG`"] pub struct TRIG_W<'a> { w: &'a mut W, } impl<'a> TRIG_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: TRIG_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Triggering is disabled. If triggering is disabled and ENBL is set, the DMA Channel will simply route the specified source to the DMA channel. 
(Normal mode)"] #[inline(always)] pub fn trig_0(self) -> &'a mut W { self.variant(TRIG_A::TRIG_0) } #[doc = "Triggering is enabled. If triggering is enabled and ENBL is set, the DMA_CH_MUX is in Periodic Trigger mode."] #[inline(always)] pub fn trig_1(self) -> &'a mut W { self.variant(TRIG_A::TRIG_1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30); self.w } } #[doc = "DMA Mux Channel Enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum ENBL_A { #[doc = "0: DMA Mux channel is disabled"] ENBL_0 = 0, #[doc = "1: DMA Mux channel is enabled"] ENBL_1 = 1, } impl From<ENBL_A> for bool { #[inline(always)] fn from(variant: ENBL_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `ENBL`"] pub type ENBL_R = crate::R<bool, ENBL_A>; impl ENBL_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> ENBL_A { match self.bits { false => ENBL_A::ENBL_0, true => ENBL_A::ENBL_1, } } #[doc = "Checks if the value of the field is `ENBL_0`"] #[inline(always)] pub fn is_enbl_0(&self) -> bool { *self == ENBL_A::ENBL_0 } #[doc = "Checks if the value of the field is `ENBL_1`"] #[inline(always)] pub fn is_enbl_1(&self) -> bool { *self == ENBL_A::ENBL_1 } } #[doc = "Write proxy for field `ENBL`"] pub struct ENBL_W<'a> { w: &'a mut W, } impl<'a> ENBL_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: ENBL_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "DMA Mux channel is disabled"] #[inline(always)] pub fn enbl_0(self) -> &'a mut W { self.variant(ENBL_A::ENBL_0) } #[doc = "DMA Mux channel is enabled"] #[inline(always)] pub fn 
enbl_1(self) -> &'a mut W { self.variant(ENBL_A::ENBL_1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl R { #[doc = "Bits 0:6 - DMA Channel Source (Slot Number)"] #[inline(always)] pub fn source(&self) -> SOURCE_R { SOURCE_R::new((self.bits & 0x7f) as u8) } #[doc = "Bit 29 - DMA Channel Always Enable"] #[inline(always)] pub fn a_on(&self) -> A_ON_R { A_ON_R::new(((self.bits >> 29) & 0x01) != 0) } #[doc = "Bit 30 - DMA Channel Trigger Enable"] #[inline(always)] pub fn trig(&self) -> TRIG_R { TRIG_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - DMA Mux Channel Enable"] #[inline(always)] pub fn enbl(&self) -> ENBL_R { ENBL_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bits 0:6 - DMA Channel Source (Slot Number)"] #[inline(always)] pub fn source(&mut self) -> SOURCE_W { SOURCE_W { w: self } } #[doc = "Bit 29 - DMA Channel Always Enable"] #[inline(always)] pub fn a_on(&mut self) -> A_ON_W { A_ON_W { w: self } } #[doc = "Bit 30 - DMA Channel Trigger Enable"] #[inline(always)] pub fn trig(&mut self) -> TRIG_W { TRIG_W { w: self } } #[doc = "Bit 31 - DMA Mux Channel Enable"] #[inline(always)] pub fn enbl(&mut self) -> ENBL_W { ENBL_W { w: self } } }
#[doc = "Reader of register CHCFG[%s]"] pub type R = crate::R<u32, super::CHCFG>; #[doc = "Writer for register CHCFG[%s]"] pub type W = crate::W<u32, super::CHCFG>; #[doc = "Register CHCFG[%s] `reset()`'s with value 0"] impl crate::ResetValue for super::CHCFG { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `SOURCE`"] pub type SOURCE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `SOURCE`"] pub struct SOURCE_W<'a> { w: &'a mut W, } impl<'a> SOURCE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x7f) | ((value as u32) & 0x7f); self.w } } #[doc = "DMA Channel Always Enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum A_ON_A { #[doc = "0: DMA Channel Always ON function is disabled"] A_ON_0 = 0, #[doc = "1: DMA Channel Always ON function is enabled"] A_ON_1 = 1, } impl From<A_ON_A> for bool { #[inline(always)] fn from(variant: A_ON_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `A_ON`"] pub type A_ON_R = crate::R<bool, A_ON_A>; impl A_ON_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> A_ON_A { match self.bits { false => A_ON_A::A_ON_0, true => A_ON_A::A_ON_1, } } #[doc = "Checks if the value of the field is `A_ON_0`"] #[inline(always)] pub fn is_a_on_0(&self) -> bool { *self == A_ON_A::A_ON_0 } #[doc = "Checks if the value of the field is `A_ON_1`"] #[inline(always)] pub fn is_a_on_1(&self) -> bool { *self == A_ON_A::A_ON_1 } } #[doc = "Write proxy for field `A_ON`"] pub struct A_ON_W<'a> { w: &'a mut W, } impl<'a> A_ON_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: A_ON_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "DMA Channel Always ON function is disabled"] #[inline(always)] pub fn a_on_0(self) -> &'a mut W { self.variant(A_ON_A::A_ON_0) } #[doc = "DMA Channel Always 
ON function is enabled"] #[inline(always)] pub fn a_on_1(self) -> &'a mut W { self.variant(A_ON_A::A_ON_1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29); self.w } } #[doc = "DMA Channel Trigger Enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum TRIG_A { #[doc = "0: Triggering is disabled. If triggering is disabled and ENBL is set, the DMA Channel will simply route the specified source to the DMA channel. (Normal mode)"] TRIG_0 = 0, #[doc = "1: Triggering is enabled. If triggering is enabled and ENBL is set, the DMA_CH_MUX is in Periodic Trigger mode."] TRIG_1 = 1, } impl From<TRIG_A> for bool { #[inline(always)] fn from(variant: TRIG_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `TRIG`"] pub type TRIG_R = crate::R<bool, TRIG_A>; impl TRIG_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> TRIG_A { match self.bits { false => TRIG_A::TRIG_0, true => TRIG_A::TRIG_1, } } #[doc = "Checks if the value of the field is `TRIG_0`"] #[inline(always)] pub fn is_trig_0(&self) -> bool { *self == TRIG_A::TRIG_0 } #[doc = "Checks if the value of the field is `TRIG_1`"] #[inline(always)] pub fn is_trig_1(&self) -> bool { *self == TRIG_A::TRIG_1 } } #[doc = "Write proxy for field `TRIG`"] pub struct TRIG_W<'a> { w: &'a mut W, } impl<'a> TRIG_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: TRIG_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Triggering is disabled. If triggering is disabled and ENBL is set, the DMA Channel will simply route the specified source to the DMA channel.
L_0) } #[doc = "DMA Mux channel is enabled"] #[inline(always)] pub fn enbl_1(self) -> &'a mut W { self.variant(ENBL_A::ENBL_1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl R { #[doc = "Bits 0:6 - DMA Channel Source (Slot Number)"] #[inline(always)] pub fn source(&self) -> SOURCE_R { SOURCE_R::new((self.bits & 0x7f) as u8) } #[doc = "Bit 29 - DMA Channel Always Enable"] #[inline(always)] pub fn a_on(&self) -> A_ON_R { A_ON_R::new(((self.bits >> 29) & 0x01) != 0) } #[doc = "Bit 30 - DMA Channel Trigger Enable"] #[inline(always)] pub fn trig(&self) -> TRIG_R { TRIG_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - DMA Mux Channel Enable"] #[inline(always)] pub fn enbl(&self) -> ENBL_R { ENBL_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bits 0:6 - DMA Channel Source (Slot Number)"] #[inline(always)] pub fn source(&mut self) -> SOURCE_W { SOURCE_W { w: self } } #[doc = "Bit 29 - DMA Channel Always Enable"] #[inline(always)] pub fn a_on(&mut self) -> A_ON_W { A_ON_W { w: self } } #[doc = "Bit 30 - DMA Channel Trigger Enable"] #[inline(always)] pub fn trig(&mut self) -> TRIG_W { TRIG_W { w: self } } #[doc = "Bit 31 - DMA Mux Channel Enable"] #[inline(always)] pub fn enbl(&mut self) -> ENBL_W { ENBL_W { w: self } } }
(Normal mode)"] #[inline(always)] pub fn trig_0(self) -> &'a mut W { self.variant(TRIG_A::TRIG_0) } #[doc = "Triggering is enabled. If triggering is enabled and ENBL is set, the DMA_CH_MUX is in Periodic Trigger mode."] #[inline(always)] pub fn trig_1(self) -> &'a mut W { self.variant(TRIG_A::TRIG_1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30); self.w } } #[doc = "DMA Mux Channel Enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum ENBL_A { #[doc = "0: DMA Mux channel is disabled"] ENBL_0 = 0, #[doc = "1: DMA Mux channel is enabled"] ENBL_1 = 1, } impl From<ENBL_A> for bool { #[inline(always)] fn from(variant: ENBL_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `ENBL`"] pub type ENBL_R = crate::R<bool, ENBL_A>; impl ENBL_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> ENBL_A { match self.bits { false => ENBL_A::ENBL_0, true => ENBL_A::ENBL_1, } } #[doc = "Checks if the value of the field is `ENBL_0`"] #[inline(always)] pub fn is_enbl_0(&self) -> bool { *self == ENBL_A::ENBL_0 } #[doc = "Checks if the value of the field is `ENBL_1`"] #[inline(always)] pub fn is_enbl_1(&self) -> bool { *self == ENBL_A::ENBL_1 } } #[doc = "Write proxy for field `ENBL`"] pub struct ENBL_W<'a> { w: &'a mut W, } impl<'a> ENBL_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: ENBL_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "DMA Mux channel is disabled"] #[inline(always)] pub fn enbl_0(self) -> &'a mut W { self.variant(ENBL_A::ENB
random
[ { "content": "/// Migrate the `lib.rs` of the PAC subscrate, adding\n\n/// our necessary header to the top of the file.\n\nfn write_lib<R: Read>(crate_path: &Path, mut src: R) {\n\n static LIB_PRELUDE: &str = r#\"#![deny(warnings)]\n\n#![allow(non_camel_case_types)]\n\n#![allow(clippy::all)]\n\n#![no_std]\n\...
Rust
all-crate-storage/blob_storage.rs
est31/cargo-local-serve
eabb70eb45ce390d927a781b2a24bbd9101df52f
use std::io::{Read, Write, Seek, SeekFrom, Result as IoResult, ErrorKind}; use std::collections::HashMap; use std::collections::hash_map::Entry; use byteorder::{ReadBytesExt, WriteBytesExt, BigEndian}; use super::hash_ctx::Digest; pub struct BlobStorage<S> { blob_offsets :HashMap<Digest, u64>, pub name_index :HashMap<String, Digest>, pub digest_to_multi_blob :HashMap<Digest, Digest>, storage :S, index_offset :u64, } pub(crate) fn write_delim_byte_slice<W :Write>(mut wtr :W, sl :&[u8]) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(sl.len() as u64)); try!(wtr.write(sl)); Ok(()) } pub(crate) fn read_delim_byte_slice<R :Read>(mut rdr :R) -> IoResult<Vec<u8>> { let len = try!(rdr.read_u64::<BigEndian>()); let mut res = vec![0; len as usize]; try!(rdr.read_exact(&mut res)); Ok(res) } impl<S :Read + Seek> BlobStorage<S> { pub fn empty(storage :S) -> Self { BlobStorage { name_index : HashMap::new(), digest_to_multi_blob : HashMap::new(), blob_offsets : HashMap::new(), storage, index_offset : 64, } } pub fn new(mut storage :S) -> IoResult<Self> { try!(storage.seek(SeekFrom::Start(0))); match storage.read_u64::<BigEndian>() { Ok(v) if v == BLOB_MAGIC => BlobStorage::load(storage), Ok(_) => panic!("Invalid header"), Err(ref e) if e.kind() == ErrorKind::UnexpectedEof => Ok(BlobStorage::empty(storage)), Err(e) => Err(e), } } pub fn load(mut storage :S) -> IoResult<Self> { try!(storage.seek(SeekFrom::Start(0))); let index_offset = try!(read_hdr(&mut storage)); try!(storage.seek(SeekFrom::Start(index_offset))); let blob_offsets = try!(read_offset_table(&mut storage)); let name_index = try!(read_name_idx(&mut storage)); let digest_to_multi_blob = try!(read_digest_to_multi_blob(&mut storage)); Ok(BlobStorage { blob_offsets, name_index, digest_to_multi_blob, storage, index_offset, }) } pub fn has(&self, digest :&Digest) -> bool { self.blob_offsets.get(digest).is_some() } pub fn get(&mut self, digest :&Digest) -> IoResult<Option<Vec<u8>>> { let blob_offs = match 
self.blob_offsets.get(digest) { Some(d) => *d, None => return Ok(None), }; try!(self.storage.seek(SeekFrom::Start(blob_offs))); let content = try!(read_delim_byte_slice(&mut self.storage)); Ok(Some(content)) } } impl<S :Seek + Write> BlobStorage<S> { pub fn insert_named_blob(&mut self, name :Option<String>, digest :Digest, content :&[u8]) -> IoResult<()> { if let Some(n) = name { self.name_index.insert(n, digest); } try!(self.insert(digest, &content)); Ok(()) } pub fn insert(&mut self, digest :Digest, content :&[u8]) -> IoResult<bool> { let e = self.blob_offsets.entry(digest); match e { Entry::Occupied(_) => return Ok(false), Entry::Vacant(v) => v.insert(self.index_offset), }; try!(self.storage.seek(SeekFrom::Start(self.index_offset))); try!(write_delim_byte_slice(&mut self.storage, content)); self.index_offset = try!(self.storage.seek(SeekFrom::Current(0))); Ok(true) } pub fn write_header_and_index(&mut self) -> IoResult<()> { try!(self.storage.seek(SeekFrom::Start(0))); try!(write_hdr(&mut self.storage, self.index_offset)); try!(self.storage.seek(SeekFrom::Start(self.index_offset))); try!(write_offset_table(&mut self.storage, &self.blob_offsets)); try!(write_name_idx(&mut self.storage, &self.name_index)); try!(write_digest_to_multi_blob(&mut self.storage, &self.digest_to_multi_blob)); Ok(()) } } const BLOB_MAGIC :u64 = 0x42_4C_4F_42_53_54_52_45; fn read_hdr<R :Read>(mut rdr :R) -> IoResult<u64> { let magic = try!(rdr.read_u64::<BigEndian>()); assert_eq!(magic, BLOB_MAGIC); let index_offset = try!(rdr.read_u64::<BigEndian>()); Ok(index_offset) } fn write_hdr<W :Write>(mut wtr :W, index_offset :u64) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(BLOB_MAGIC)); try!(wtr.write_u64::<BigEndian>(index_offset)); Ok(()) } fn read_offset_table<R :Read>(mut rdr :R) -> IoResult<HashMap<Digest, u64>> { let len = try!(rdr.read_u64::<BigEndian>()); let mut tbl = HashMap::new(); for _ in 0 .. 
len { let mut d :Digest = [0; 32]; try!(rdr.read_exact(&mut d)); let offset = try!(rdr.read_u64::<BigEndian>()); tbl.insert(d, offset); } Ok(tbl) } fn write_offset_table<W :Write>(mut wtr :W, tbl :&HashMap<Digest, u64>) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(tbl.len() as u64)); for (d, o) in tbl.iter() { try!(wtr.write(d)); try!(wtr.write_u64::<BigEndian>(*o)); } Ok(()) } fn read_name_idx<R :Read>(mut rdr :R) -> IoResult<HashMap<String, Digest>> { let nidx_len = try!(rdr.read_u64::<BigEndian>()); let mut nidx = HashMap::new(); for _ in 0 .. nidx_len { let s_bytes = try!(read_delim_byte_slice(&mut rdr)); let mut d :Digest = [0; 32]; try!(rdr.read_exact(&mut d)); let s = String::from_utf8(s_bytes).unwrap(); nidx.insert(s, d); } Ok(nidx) } fn write_name_idx<W :Write>(mut wtr :W, nidx :&HashMap<String, Digest>) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(nidx.len() as u64)); for (s,d) in nidx.iter() { try!(write_delim_byte_slice(&mut wtr, s.as_bytes())); try!(wtr.write(d)); } Ok(()) } fn read_digest_to_multi_blob<R :Read>(mut rdr :R) -> IoResult<HashMap<Digest, Digest>> { let res_len = try!(rdr.read_u64::<BigEndian>()); let mut res = HashMap::new(); for _ in 0 .. res_len { let mut d :Digest = [0; 32]; let mut d_multi :Digest = [0; 32]; try!(rdr.read_exact(&mut d)); try!(rdr.read_exact(&mut d_multi)); res.insert(d, d_multi); } Ok(res) } fn write_digest_to_multi_blob<W :Write>(mut wtr :W, nidx :&HashMap<Digest, Digest>) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(nidx.len() as u64)); for (d, d_multi) in nidx.iter() { try!(wtr.write(d)); try!(wtr.write(d_multi)); } Ok(()) }
use std::io::{Read, Write, Seek, SeekFrom, Result as IoResult, ErrorKind}; use std::collections::HashMap; use std::collections::hash_map::Entry; use byteorder::{ReadBytesExt, WriteBytesExt, BigEndian}; use super::hash_ctx::Digest; pub struct BlobStorage<S> { blob_offsets :HashMap<Digest, u64>, pub name_index :HashMap<String, Digest>, pub digest_to_multi_blob :HashMap<Digest, Digest>, storage :S, index_offset :u64, } pub(crate) fn write_delim_byte_slice<W :Write>(mut wtr :W, sl :&[u8]) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(sl.len() as u64)); try!(wtr.write(sl)); Ok(()) } pub(crate) fn read_delim_byte_slice<R :Read>(mut rdr :R) -> IoResult<Vec<u8>> { let len = try!(rdr.read_u64::<BigEndian>()); let mut res = vec![0; len as usize]; try!(rdr.read_exact(&mut res)); Ok(res) } impl<S :Read + Seek> BlobStorage<S> { pub fn empty(storage :S) -> Self { BlobStorage { name_index : HashMap::new(), digest_to_multi_blob : HashMap::new(), blob_offsets : HashMap::new(), storage, index_offset : 64, } } pub fn new(mut storage :S) -> IoResult<Self> { try!(storage.seek(SeekFrom::Start(0))); match storage.read_u64::<BigEndian>() { Ok(v) if v == BLOB_MAGIC => BlobStorage::load(storage), Ok(_) => panic!("Invalid header"), Err(ref e) if e.kind() == ErrorKind::UnexpectedEof => Ok(BlobStorage::empty(storage)), Err(e) => Err(e), } } pub fn load(mut storage :S) -> IoResult<Self> { try!(storage.seek(SeekFrom::Start(0))); let index_offset = try!(read_hdr(&mut storage)); try!(storage.seek(SeekFrom::Start(index_offset))); let blob_offsets = try!(read_offset_table(&mut storage)); let name_index = try!(read_name_idx(&mut storage)); let digest_to_multi_blob = try!(read_digest_to_multi_blob(&mut storage)); Ok(BlobStorage { blob_offsets, name_index, dig
&mut d)); let s = String::from_utf8(s_bytes).unwrap(); nidx.insert(s, d); } Ok(nidx) } fn write_name_idx<W :Write>(mut wtr :W, nidx :&HashMap<String, Digest>) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(nidx.len() as u64)); for (s,d) in nidx.iter() { try!(write_delim_byte_slice(&mut wtr, s.as_bytes())); try!(wtr.write(d)); } Ok(()) } fn read_digest_to_multi_blob<R :Read>(mut rdr :R) -> IoResult<HashMap<Digest, Digest>> { let res_len = try!(rdr.read_u64::<BigEndian>()); let mut res = HashMap::new(); for _ in 0 .. res_len { let mut d :Digest = [0; 32]; let mut d_multi :Digest = [0; 32]; try!(rdr.read_exact(&mut d)); try!(rdr.read_exact(&mut d_multi)); res.insert(d, d_multi); } Ok(res) } fn write_digest_to_multi_blob<W :Write>(mut wtr :W, nidx :&HashMap<Digest, Digest>) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(nidx.len() as u64)); for (d, d_multi) in nidx.iter() { try!(wtr.write(d)); try!(wtr.write(d_multi)); } Ok(()) }
est_to_multi_blob, storage, index_offset, }) } pub fn has(&self, digest :&Digest) -> bool { self.blob_offsets.get(digest).is_some() } pub fn get(&mut self, digest :&Digest) -> IoResult<Option<Vec<u8>>> { let blob_offs = match self.blob_offsets.get(digest) { Some(d) => *d, None => return Ok(None), }; try!(self.storage.seek(SeekFrom::Start(blob_offs))); let content = try!(read_delim_byte_slice(&mut self.storage)); Ok(Some(content)) } } impl<S :Seek + Write> BlobStorage<S> { pub fn insert_named_blob(&mut self, name :Option<String>, digest :Digest, content :&[u8]) -> IoResult<()> { if let Some(n) = name { self.name_index.insert(n, digest); } try!(self.insert(digest, &content)); Ok(()) } pub fn insert(&mut self, digest :Digest, content :&[u8]) -> IoResult<bool> { let e = self.blob_offsets.entry(digest); match e { Entry::Occupied(_) => return Ok(false), Entry::Vacant(v) => v.insert(self.index_offset), }; try!(self.storage.seek(SeekFrom::Start(self.index_offset))); try!(write_delim_byte_slice(&mut self.storage, content)); self.index_offset = try!(self.storage.seek(SeekFrom::Current(0))); Ok(true) } pub fn write_header_and_index(&mut self) -> IoResult<()> { try!(self.storage.seek(SeekFrom::Start(0))); try!(write_hdr(&mut self.storage, self.index_offset)); try!(self.storage.seek(SeekFrom::Start(self.index_offset))); try!(write_offset_table(&mut self.storage, &self.blob_offsets)); try!(write_name_idx(&mut self.storage, &self.name_index)); try!(write_digest_to_multi_blob(&mut self.storage, &self.digest_to_multi_blob)); Ok(()) } } const BLOB_MAGIC :u64 = 0x42_4C_4F_42_53_54_52_45; fn read_hdr<R :Read>(mut rdr :R) -> IoResult<u64> { let magic = try!(rdr.read_u64::<BigEndian>()); assert_eq!(magic, BLOB_MAGIC); let index_offset = try!(rdr.read_u64::<BigEndian>()); Ok(index_offset) } fn write_hdr<W :Write>(mut wtr :W, index_offset :u64) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(BLOB_MAGIC)); try!(wtr.write_u64::<BigEndian>(index_offset)); Ok(()) } fn read_offset_table<R 
:Read>(mut rdr :R) -> IoResult<HashMap<Digest, u64>> { let len = try!(rdr.read_u64::<BigEndian>()); let mut tbl = HashMap::new(); for _ in 0 .. len { let mut d :Digest = [0; 32]; try!(rdr.read_exact(&mut d)); let offset = try!(rdr.read_u64::<BigEndian>()); tbl.insert(d, offset); } Ok(tbl) } fn write_offset_table<W :Write>(mut wtr :W, tbl :&HashMap<Digest, u64>) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(tbl.len() as u64)); for (d, o) in tbl.iter() { try!(wtr.write(d)); try!(wtr.write_u64::<BigEndian>(*o)); } Ok(()) } fn read_name_idx<R :Read>(mut rdr :R) -> IoResult<HashMap<String, Digest>> { let nidx_len = try!(rdr.read_u64::<BigEndian>()); let mut nidx = HashMap::new(); for _ in 0 .. nidx_len { let s_bytes = try!(read_delim_byte_slice(&mut rdr)); let mut d :Digest = [0; 32]; try!(rdr.read_exact(
random
[ { "content": "fn handle_blocking_task<ET :FnMut(ParallelTask), S :Read + Seek + Write>(task :BlockingTask,\n\n\t\tblob_store :&mut BlobStorage<S>, blobs_to_store :&mut HashSet<Digest>,\n\n\t\tmut emit_task :ET) {\n\n\tmatch task {\n\n\t\tBlockingTask::StoreCrateUndeduplicated(crate_file_name, crate_blob) => {\n...
Rust
chain/cosmos/src/adapter.rs
Perpetual-Altruism-Ltd/graph-node
abbb2d04713d9e988419814d2e6ca433ee165bd1
use std::collections::HashSet; use prost::Message; use prost_types::Any; use crate::capabilities::NodeCapabilities; use crate::{data_source::DataSource, Chain}; use graph::blockchain as bc; use graph::firehose::EventTypeFilter; use graph::prelude::*; const EVENT_TYPE_FILTER_TYPE_URL: &str = "type.googleapis.com/sf.cosmos.transform.v1.EventTypeFilter"; #[derive(Clone, Debug, Default)] pub struct TriggerFilter { pub(crate) event_type_filter: CosmosEventTypeFilter, pub(crate) block_filter: CosmosBlockFilter, } impl bc::TriggerFilter<Chain> for TriggerFilter { fn extend<'a>(&mut self, data_sources: impl Iterator<Item = &'a DataSource> + Clone) { self.event_type_filter .extend_from_data_sources(data_sources.clone()); self.block_filter.extend_from_data_sources(data_sources); } fn node_capabilities(&self) -> NodeCapabilities { NodeCapabilities {} } fn extend_with_template( &mut self, _data_source: impl Iterator<Item = <Chain as bc::Blockchain>::DataSourceTemplate>, ) { } fn to_firehose_filter(self) -> Vec<prost_types::Any> { if self.block_filter.trigger_every_block { return vec![]; } if self.event_type_filter.event_types.is_empty() { return vec![]; } let filter = EventTypeFilter { event_types: Vec::from_iter(self.event_type_filter.event_types), }; vec![Any { type_url: EVENT_TYPE_FILTER_TYPE_URL.to_string(), value: filter.encode_to_vec(), }] } } pub type EventType = String; #[derive(Clone, Debug, Default)] pub(crate) struct CosmosEventTypeFilter { pub event_types: HashSet<EventType>, } impl CosmosEventTypeFilter { pub(crate) fn matches(&self, event_type: &EventType) -> bool { self.event_types.contains(event_type) } fn extend_from_data_sources<'a>(&mut self, data_sources: impl Iterator<Item = &'a DataSource>) { self.event_types.extend( data_sources.flat_map(|data_source| data_source.events().map(ToString::to_string)), ); } } #[derive(Clone, Debug, Default)] pub(crate) struct CosmosBlockFilter { pub trigger_every_block: bool, } impl CosmosBlockFilter { fn 
extend_from_data_sources<'a>( &mut self, mut data_sources: impl Iterator<Item = &'a DataSource>, ) { if !self.trigger_every_block { self.trigger_every_block = data_sources.any(DataSource::has_block_handler); } } } #[cfg(test)] mod test { use graph::blockchain::TriggerFilter as _; use super::*; #[test] fn test_trigger_filters() { let cases = [ (TriggerFilter::test_new(false, &[]), None), (TriggerFilter::test_new(true, &[]), None), (TriggerFilter::test_new(true, &["event_1", "event_2"]), None), ( TriggerFilter::test_new(false, &["event_1", "event_2", "event_3"]), Some(event_type_filter_with(&["event_1", "event_2", "event_3"])), ), ]; for (trigger_filter, expected_filter) in cases { let firehose_filter = trigger_filter.to_firehose_filter(); let decoded_filter = decode_filter(firehose_filter); assert_eq!(decoded_filter.is_some(), expected_filter.is_some()); if let (Some(mut expected_filter), Some(mut decoded_filter)) = (expected_filter, decoded_filter) { expected_filter.event_types.sort(); decoded_filter.event_types.sort(); assert_eq!(decoded_filter, expected_filter); } } } impl TriggerFilter { pub(crate) fn test_new(trigger_every_block: bool, event_types: &[&str]) -> TriggerFilter { TriggerFilter { event_type_filter: CosmosEventTypeFilter { event_types: event_types.iter().map(ToString::to_string).collect(), }, block_filter: CosmosBlockFilter { trigger_every_block, }, } } } fn event_type_filter_with(event_types: &[&str]) -> EventTypeFilter { EventTypeFilter { event_types: event_types.iter().map(ToString::to_string).collect(), } } fn decode_filter(proto_filters: Vec<Any>) -> Option<EventTypeFilter> { assert!(proto_filters.len() <= 1); let proto_filter = proto_filters.get(0)?; assert_eq!(proto_filter.type_url, EVENT_TYPE_FILTER_TYPE_URL); let firehose_filter = EventTypeFilter::decode(&*proto_filter.value) .expect("Could not decode EventTypeFilter from protobuf Any"); Some(firehose_filter) } }
use std::collections::HashSet; use prost::Message; use prost_types::Any; use crate::capabilities::NodeCapabilities; use crate::{data_source::DataSource, Chain}; use graph::blockchain as bc; use graph::firehose::EventTypeFilter; use graph::prelude::*; const EVENT_TYPE_FILTER_TYPE_URL: &str = "type.googleapis.com/sf.cosmos.transform.v1.EventTypeFilter"; #[derive(Clone, Debug, Default)] pub struct TriggerFilter { pub(crate) event_type_filter: CosmosEventTypeFilter, pub(crate) block_filter: CosmosBlockFilter, } impl bc::TriggerFilter<Chain> for TriggerFilter { fn extend<'a>(&mut self, data_sources: impl Iterator<Item = &'a DataSource> + Clone) { self.event_type_filter .extend_from_data_sources(data_sources.clone()); self.block_filter.extend_from_data_sources(data_sources); } fn node_capabilities(&self) -> NodeCapabilities { NodeCapabilities {} } fn extend_with_template( &mut self, _data_source: impl Iterator<Item = <Chain as bc::Blockchain>::DataSourceTemplate>, ) { } fn to_firehose_filter(self) -> Vec<prost_types::Any> { if self.block_filter.trigger_every_block { return vec![]; } if self.event_type_filter.event_types.is_empty() { return vec![]; } let filter = EventTypeFilter { event_types: Vec::from_iter(self.event_type_filter.event_types), }; vec![Any { type_url: EVENT_TYPE_FILTER_TYPE_URL.to_string(), value: filter.encode_to_vec(), }] } } pub type EventType = String; #[derive(Clone, Debug, Default)] pub(crate) struct CosmosEventTypeFilter { pub event_types: HashSet<EventType>, } impl CosmosEventTypeFilter { pub(crate) fn matches(&self, event_type: &EventType) -> bool { self.event_types.contains(event_type) } fn extend_from_data_sources<'a>(&mut self, data_sources: impl Iterator<Item = &'a DataSource>) { self.event_types.extend( data_sources.flat_map(|data_source| data_source.events().map(ToString::to_string)), ); } } #[derive(Clone, Debug, Default)] pub(crate) struct CosmosBlockFilter { pub trigger_every_block: bool, } impl CosmosBlockFilter { fn 
extend_from_data_sources<'a>( &mut self, mut data_sources: impl Iterator<Item = &'a DataSource>, ) { if !self.trigger_every_block { self.trigger_every_block = data_sources.any(DataSource::has_block_handler); } } } #[cfg(test)] mod test { use graph::blockchain::TriggerFilter as _; use super::*; #[test] fn test_trigger_filters() { let cases = [ (TriggerFilter::test_new(false, &[]), None), (TriggerFilter::test_new(true, &[]), None),
vent_types: event_types.iter().map(ToString::to_string).collect(), }, block_filter: CosmosBlockFilter { trigger_every_block, }, } } } fn event_type_filter_with(event_types: &[&str]) -> EventTypeFilter { EventTypeFilter { event_types: event_types.iter().map(ToString::to_string).collect(), } } fn decode_filter(proto_filters: Vec<Any>) -> Option<EventTypeFilter> { assert!(proto_filters.len() <= 1); let proto_filter = proto_filters.get(0)?; assert_eq!(proto_filter.type_url, EVENT_TYPE_FILTER_TYPE_URL); let firehose_filter = EventTypeFilter::decode(&*proto_filter.value) .expect("Could not decode EventTypeFilter from protobuf Any"); Some(firehose_filter) } }
(TriggerFilter::test_new(true, &["event_1", "event_2"]), None), ( TriggerFilter::test_new(false, &["event_1", "event_2", "event_3"]), Some(event_type_filter_with(&["event_1", "event_2", "event_3"])), ), ]; for (trigger_filter, expected_filter) in cases { let firehose_filter = trigger_filter.to_firehose_filter(); let decoded_filter = decode_filter(firehose_filter); assert_eq!(decoded_filter.is_some(), expected_filter.is_some()); if let (Some(mut expected_filter), Some(mut decoded_filter)) = (expected_filter, decoded_filter) { expected_filter.event_types.sort(); decoded_filter.event_types.sort(); assert_eq!(decoded_filter, expected_filter); } } } impl TriggerFilter { pub(crate) fn test_new(trigger_every_block: bool, event_types: &[&str]) -> TriggerFilter { TriggerFilter { event_type_filter: CosmosEventTypeFilter { e
random
[ { "content": "pub fn place(name: &str) -> Result<Option<(Vec<Shard>, Vec<NodeId>)>, String> {\n\n CONFIG.deployment.place(name, NETWORK_NAME)\n\n}\n\n\n\npub async fn create_subgraph(\n\n subgraph_id: &DeploymentHash,\n\n schema: &str,\n\n base: Option<(DeploymentHash, BlockPtr)>,\n\n) -> Result<Dep...
Rust
sudachi/src/dic/build/conn.rs
bignumorg/sudachi.rs
df9997ed6b95af8dc9f9cc77c60c359b78a7105b
/* * Copyright (c) 2021 Works Applications Co., Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use std::fs::File; use std::io::{BufReader, Write}; use std::path::Path; use lazy_static::lazy_static; use regex::Regex; use crate::dic::build::error::{BuildFailure, DicBuildError, DicCompilationCtx, DicWriteResult}; use crate::dic::build::parse::{it_next, parse_i16}; use crate::error::SudachiResult; pub struct ConnBuffer { matrix: Vec<u8>, ctx: DicCompilationCtx, line: String, num_left: i16, num_right: i16, } lazy_static! 
{ static ref SPLIT_REGEX: Regex = Regex::new(r"\s+").unwrap(); static ref EMPTY_LINE: Regex = Regex::new(r"^\s*$").unwrap(); } impl ConnBuffer { pub fn new() -> Self { Self { matrix: Vec::new(), ctx: DicCompilationCtx::default(), line: String::new(), num_left: 0, num_right: 0, } } #[allow(unused)] pub fn matrix(&self) -> &[u8] { &self.matrix } #[allow(unused)] pub fn left(&self) -> i16 { self.num_left } #[allow(unused)] pub fn right(&self) -> i16 { self.num_right } pub fn write_to<W: Write>(&self, writer: &mut W) -> SudachiResult<usize> { if self.num_left < 0 { return num_error("left", self.num_left); } if self.num_right < 0 { return num_error("right", self.num_right); } writer.write_all(&i16::to_le_bytes(self.num_left))?; writer.write_all(&i16::to_le_bytes(self.num_right))?; writer.write_all(&self.matrix)?; Ok(4 + self.matrix.len()) } pub fn read_file(&mut self, path: &Path) -> SudachiResult<()> { let file = File::open(path)?; let bufrd = BufReader::with_capacity(32 * 1024, file); let filename = path.to_str().unwrap_or("unknown").to_owned(); let old = self.ctx.set_filename(filename); let status = self.read(bufrd); self.ctx.set_filename(old); status } pub fn read<R: std::io::BufRead>(&mut self, mut reader: R) -> SudachiResult<()> { self.ctx.set_line(0); loop { let nread = reader.read_line(&mut self.line)?; if nread == 0 { todo!() } self.ctx.add_line(1); if !EMPTY_LINE.is_match(&self.line) { break; } } let result = self.parse_header(); let (left, right) = self.ctx.transform(result)?; if left < 0 { return num_error("left", left); } if right < 0 { return num_error("right", right); } let size = left as usize * right as usize * 2; self.matrix.resize(size, 0); self.num_left = left; self.num_right = right; loop { self.line.clear(); let nread = reader.read_line(&mut self.line)?; if nread == 0 { break; } self.ctx.add_line(1); if EMPTY_LINE.is_match(&self.line) { continue; } let status = self.parse_line(); self.ctx.transform(status)?; } Ok(()) } fn parse_header(&mut self) -> 
DicWriteResult<(i16, i16)> { let mut items = SPLIT_REGEX.splitn(&self.line.trim(), 2); let left = it_next(&self.line, &mut items, "left_num", parse_i16)?; let right = it_next(&self.line, &mut items, "right_num", parse_i16)?; Ok((left, right)) } fn parse_line(&mut self) -> DicWriteResult<()> { let mut items = SPLIT_REGEX.splitn(&self.line.trim(), 3); let left = it_next(&self.line, &mut items, "left", parse_i16)?; let right = it_next(&self.line, &mut items, "right", parse_i16)?; let cost = it_next(&self.line, &mut items, "cost", parse_i16)?; self.write_elem(left, right, cost) } fn write_elem(&mut self, left: i16, right: i16, cost: i16) -> DicWriteResult<()> { let index = right as usize * self.num_left as usize + left as usize; let index = index * 2; let bytes = cost.to_le_bytes(); self.matrix[index] = bytes[0]; self.matrix[index + 1] = bytes[1]; Ok(()) } } fn num_error<T>(part: &'static str, value: i16) -> SudachiResult<T> { return Err(DicBuildError { file: "<connection>".to_owned(), line: 0, cause: BuildFailure::InvalidConnSize(part, value), } .into()); } #[cfg(test)] mod test { use crate::dic::build::conn::ConnBuffer; use crate::dic::connect::ConnectionMatrix; #[test] fn parse_simple2x2() { let data = " 2 2 0 0 0 0 1 1 1 0 2 1 1 3"; let mut parser = ConnBuffer::new(); parser.read(data.as_bytes()).unwrap(); let cost = ConnectionMatrix::from_offset_size( parser.matrix(), 0, parser.left() as _, parser.right() as _, ) .unwrap(); assert_eq!(cost.cost(0, 0), 0); assert_eq!(cost.cost(0, 1), 1); assert_eq!(cost.cost(1, 0), 2); assert_eq!(cost.cost(1, 1), 3); } }
/* * Copyright (c) 2021 Works Applications Co., Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use std::fs::File; use std::io::{BufReader, Write}; use std::path::Path; use lazy_static::lazy_static; use regex::Regex; use crate::dic::build::error::{BuildFailure, DicBuildError, DicCompilationCtx, DicWriteResult}; use crate::dic::build::parse::{it_next, parse_i16}; use crate::error::SudachiResult; pub struct ConnBuffer { matrix: Vec<u8>, ctx: DicCompilationCtx, line: String, num_left: i16, num_right: i16, } lazy_static! 
{ static ref SPLIT_REGEX: Regex = Regex::new(r"\s+").unwrap(); static ref EMPTY_LINE: Regex = Regex::new(r"^\s*$").unwrap(); } impl ConnBuffer { pub fn new() -> Self { Self { matrix: Vec::new(), ctx: DicCompilationCtx::default(), line: String::new(), num_left: 0, num_right: 0, } } #[allow(unused)] pub fn matrix(&self) -> &[u8] { &self.matrix } #[allow(unused)] pub fn left(&self) -> i16 { self.num_left } #[allow(unused)] pub fn right(&self) -> i16 { self.num_right } pub fn write_to<W: Write>(&self, writer: &mut W) -> SudachiResult<usize> { if self.num_left < 0 { return num_error("left", self.num_left); } if self.num_right < 0 { return num_error("right", self.num_right); } writer.write_all(&i16::to_le_bytes(self.num_left))?; writer.write_all(&i16::to_le_bytes(self.num_right))?; writer.write_all(&self.matrix)?; Ok(4 + self.matrix.len()) } pub fn read_file(&mut self, path: &Path) -> SudachiResult<()> { let file = File::open(path)?; let bufrd = BufReader::with_capacity(32 * 1024, file); let filename = path.to_str().unwrap_or("unknown").to_owned(); let old = self.ctx.set_filename(filename); let status = self.read(bufrd); self.ctx.set_filename(old); status } pub fn read<R: std::io::BufRead>(&mut self, mut reader: R) -> SudachiResult<()> { self.ctx.set_line(0); loop { let nread = reader.read_line(&mut self.line)?; if nread == 0 { todo!() } self.ctx.add_line(1); if !EMPTY_LINE.is_match(&self.line) { break; } } let result = self.parse_header(); let (left, right) = self.ctx.transform(result)?; if left < 0 { return num_error("left", left); } if right < 0 { return num_error("right", right); } let size = left as usize * right as usize * 2; self.matrix.resize(size, 0); self.num_left = left; self.num_right = right; loop { self.line.clear(); let nread = reader.read_line(&mut self.line)?; if nread == 0 { break; } self.ctx.add_line(1); if EMPTY_LINE.is_match(&self.line) { continue; } let status = self.parse_line(); self.ctx.transform(status)?; } Ok(()) } fn parse_header(&mut self) -> 
DicWriteResult<(i16, i16)> { let mut items = SPLIT_REGEX.splitn(&self.line.trim(), 2); let left = it_next(&self.line, &mut items, "left_num", parse_i16)?; let right = it_next(&self.line, &mut items, "right_num", parse_i16)?; Ok((left, right)) } fn parse_line(&mut self) -> DicWriteResult<()> { let mut items = SPLIT_REGEX.splitn(&self.line.trim(), 3); let left = it_next(&self.line, &mut items, "left", parse_i16)?; let right = it_next(&self.line, &mut items, "right", parse_i16)?; let cost = it_next(&self.line, &mut items, "cost", parse_i16)?; self.write_elem(left, right, cost) } fn write_elem(&mut self, left: i16, right: i16, cost: i16) -> DicWriteResult<()> { le
self.matrix[index] = bytes[0]; self.matrix[index + 1] = bytes[1]; Ok(()) } } fn num_error<T>(part: &'static str, value: i16) -> SudachiResult<T> { return Err(DicBuildError { file: "<connection>".to_owned(), line: 0, cause: BuildFailure::InvalidConnSize(part, value), } .into()); } #[cfg(test)] mod test { use crate::dic::build::conn::ConnBuffer; use crate::dic::connect::ConnectionMatrix; #[test] fn parse_simple2x2() { let data = " 2 2 0 0 0 0 1 1 1 0 2 1 1 3"; let mut parser = ConnBuffer::new(); parser.read(data.as_bytes()).unwrap(); let cost = ConnectionMatrix::from_offset_size( parser.matrix(), 0, parser.left() as _, parser.right() as _, ) .unwrap(); assert_eq!(cost.cost(0, 0), 0); assert_eq!(cost.cost(0, 1), 1); assert_eq!(cost.cost(1, 0), 2); assert_eq!(cost.cost(1, 1), 3); } }
t index = right as usize * self.num_left as usize + left as usize; let index = index * 2; let bytes = cost.to_le_bytes();
function_block-random_span
[ { "content": "pub fn dictionary_bytes_from_path<P: AsRef<Path>>(dictionary_path: P) -> SudachiResult<Vec<u8>> {\n\n let dictionary_path = dictionary_path.as_ref();\n\n let dictionary_stat = fs::metadata(&dictionary_path)?;\n\n let mut dictionary_file = File::open(dictionary_path)?;\n\n let mut dicti...
Rust
miri/bin/miri.rs
oli-obk/miri
4654d6d04e2b2bf1fc72f6fe5a3c4353c560d2c6
#![feature(rustc_private, i128_type)] extern crate getopts; extern crate miri; extern crate rustc; extern crate rustc_driver; extern crate rustc_errors; extern crate env_logger; extern crate log_settings; extern crate syntax; extern crate log; use rustc::session::Session; use rustc::middle::cstore::CrateStore; use rustc_driver::{Compilation, CompilerCalls, RustcDefaultCalls}; use rustc_driver::driver::{CompileState, CompileController}; use rustc::session::config::{self, Input, ErrorOutputType}; use rustc::hir::{self, itemlikevisit}; use rustc::ty::TyCtxt; use syntax::ast::{self, MetaItemKind, NestedMetaItemKind}; use std::path::PathBuf; struct MiriCompilerCalls { default: RustcDefaultCalls, } impl<'a> CompilerCalls<'a> for MiriCompilerCalls { fn early_callback( &mut self, matches: &getopts::Matches, sopts: &config::Options, cfg: &ast::CrateConfig, descriptions: &rustc_errors::registry::Registry, output: ErrorOutputType, ) -> Compilation { self.default.early_callback( matches, sopts, cfg, descriptions, output, ) } fn no_input( &mut self, matches: &getopts::Matches, sopts: &config::Options, cfg: &ast::CrateConfig, odir: &Option<PathBuf>, ofile: &Option<PathBuf>, descriptions: &rustc_errors::registry::Registry, ) -> Option<(Input, Option<PathBuf>)> { self.default.no_input( matches, sopts, cfg, odir, ofile, descriptions, ) } fn late_callback( &mut self, matches: &getopts::Matches, sess: &Session, cstore: &CrateStore, input: &Input, odir: &Option<PathBuf>, ofile: &Option<PathBuf>, ) -> Compilation { self.default.late_callback(matches, sess, cstore, input, odir, ofile) } fn build_controller( &mut self, sess: &Session, matches: &getopts::Matches, ) -> CompileController<'a> { let mut control = self.default.build_controller(sess, matches); control.after_hir_lowering.callback = Box::new(after_hir_lowering); control.after_analysis.callback = Box::new(after_analysis); if sess.target.target != sess.host { control.after_analysis.stop = Compilation::Stop; } control } } fn 
after_hir_lowering(state: &mut CompileState) { let attr = ( String::from("miri"), syntax::feature_gate::AttributeType::Whitelisted, ); state.session.plugin_attributes.borrow_mut().push(attr); } fn after_analysis<'a, 'tcx>(state: &mut CompileState<'a, 'tcx>) { state.session.abort_if_errors(); let tcx = state.tcx.unwrap(); let limits = resource_limits_from_attributes(state); if std::env::args().any(|arg| arg == "--test") { struct Visitor<'a, 'tcx: 'a>( miri::ResourceLimits, TyCtxt<'a, 'tcx, 'tcx>, &'a CompileState<'a, 'tcx> ); impl<'a, 'tcx: 'a, 'hir> itemlikevisit::ItemLikeVisitor<'hir> for Visitor<'a, 'tcx> { fn visit_item(&mut self, i: &'hir hir::Item) { if let hir::Item_::ItemFn(_, _, _, _, _, body_id) = i.node { if i.attrs.iter().any(|attr| { attr.name().map_or(false, |n| n == "test") }) { let did = self.1.hir.body_owner_def_id(body_id); println!( "running test: {}", self.1.def_path_debug_str(did), ); miri::eval_main(self.1, did, None, self.0); self.2.session.abort_if_errors(); } } } fn visit_trait_item(&mut self, _trait_item: &'hir hir::TraitItem) {} fn visit_impl_item(&mut self, _impl_item: &'hir hir::ImplItem) {} } state.hir_crate.unwrap().visit_all_item_likes( &mut Visitor(limits, tcx, state), ); } else if let Some((entry_node_id, _)) = *state.session.entry_fn.borrow() { let entry_def_id = tcx.hir.local_def_id(entry_node_id); let start_wrapper = tcx.lang_items().start_fn().and_then(|start_fn| { if tcx.is_mir_available(start_fn) { Some(start_fn) } else { None } }); miri::eval_main(tcx, entry_def_id, start_wrapper, limits); state.session.abort_if_errors(); } else { println!("no main function found, assuming auxiliary build"); } } fn resource_limits_from_attributes(state: &CompileState) -> miri::ResourceLimits { let mut limits = miri::ResourceLimits::default(); let krate = state.hir_crate.as_ref().unwrap(); let err_msg = "miri attributes need to be in the form `miri(key = value)`"; let extract_int = |lit: &syntax::ast::Lit| -> u128 { match lit.node { 
syntax::ast::LitKind::Int(i, _) => i, _ => { state.session.span_fatal( lit.span, "expected an integer literal", ) } } }; for attr in krate.attrs.iter().filter(|a| { a.name().map_or(false, |n| n == "miri") }) { if let Some(items) = attr.meta_item_list() { for item in items { if let NestedMetaItemKind::MetaItem(ref inner) = item.node { if let MetaItemKind::NameValue(ref value) = inner.node { match &inner.name().as_str()[..] { "memory_size" => limits.memory_size = extract_int(value) as u64, "step_limit" => limits.step_limit = extract_int(value) as u64, "stack_limit" => limits.stack_limit = extract_int(value) as usize, _ => state.session.span_err(item.span, "unknown miri attribute"), } } else { state.session.span_err(inner.span, err_msg); } } else { state.session.span_err(item.span, err_msg); } } } else { state.session.span_err(attr.span, err_msg); } } limits } fn init_logger() { let format = |record: &log::LogRecord| { if record.level() == log::LogLevel::Trace { let indentation = log_settings::settings().indentation; format!( "{indentation}:{lvl}:{module}: {text}", lvl = record.level(), module = record.location().module_path(), indentation = indentation, text = record.args(), ) } else { format!( "{lvl}:{module}: {text}", lvl = record.level(), module = record.location().module_path(), text = record.args(), ) } }; let mut builder = env_logger::LogBuilder::new(); builder.format(format).filter( None, log::LogLevelFilter::Info, ); if std::env::var("MIRI_LOG").is_ok() { builder.parse(&std::env::var("MIRI_LOG").unwrap()); } builder.init().unwrap(); } fn find_sysroot() -> String { if let Ok(sysroot) = std::env::var("MIRI_SYSROOT") { return sysroot; } let home = option_env!("RUSTUP_HOME").or(option_env!("MULTIRUST_HOME")); let toolchain = option_env!("RUSTUP_TOOLCHAIN").or(option_env!("MULTIRUST_TOOLCHAIN")); match (home, toolchain) { (Some(home), Some(toolchain)) => format!("{}/toolchains/{}", home, toolchain), _ => { option_env!("RUST_SYSROOT") .expect( "need to specify 
RUST_SYSROOT env var or use rustup or multirust", ) .to_owned() } } } fn main() { init_logger(); let mut args: Vec<String> = std::env::args().collect(); let sysroot_flag = String::from("--sysroot"); if !args.contains(&sysroot_flag) { args.push(sysroot_flag); args.push(find_sysroot()); } args.push("-Zalways-encode-mir".to_owned()); rustc_driver::run_compiler(&args, &mut MiriCompilerCalls { default: RustcDefaultCalls, }, None, None); }
#![feature(rustc_private, i128_type)] extern crate getopts; extern crate miri; extern crate rustc; extern crate rustc_driver; extern crate rustc_errors; extern crate env_logger; extern crate log_settings; extern crate syntax; extern crate log; use rustc::session::Session; use rustc::middle::cstore::CrateStore; use rustc_driver::{Compilation, CompilerCalls, RustcDefaultCalls}; use rustc_driver::driver::{CompileState, CompileController}; use rustc::session::config::{self, Input, ErrorOutputType}; use rustc::hir::{self, itemlikevisit}; use rustc::ty::TyCtxt; use syntax::ast::{self, MetaItemKind, NestedMetaItemKind}; use std::path::PathBuf; struct MiriCompilerCalls { default: RustcDefaultCalls, } impl<'a> CompilerCalls<'a> for MiriCompilerCalls { fn early_callback( &mut self, matches: &getopts::Matches, sopts: &config::Options, cfg: &ast::CrateConfig, descriptions: &rustc_errors::registry::Registry, output: ErrorOutputType, ) -> Compilation { self.default.early_callback( matches, sopts, cfg, descriptions, output, ) } fn no_input( &mut self, matches: &getopts::Matches, sopts: &config::Options, cfg: &ast::CrateConfig, odir: &Option<PathBuf>, ofile: &Option<PathBuf>, descriptions: &rustc_errors::registry::Registry, ) -> Option<(Input, Option<PathBuf>)> { self.default.no_input( matches, sopts, cfg, odir, ofile, descriptions, ) } fn late_callback( &mut self, matches: &getopts::Matches, sess: &Session, cstore: &CrateStore, input: &Input, odir: &Option<PathBuf>, ofile: &Option<PathBuf>, ) -> Compilation { self.default.late_callback(matches, sess, cstore, input, odir, ofile) } fn build_controller( &mut self, sess: &Session, matches: &getopts::Matches, ) -> CompileController<'a> { let mut control = self.default.build_controller(sess, matches); control.after_hir_lowering.callback = Box::new(after_hir_lowering); control.after_analysis.callback = Box::new(after_analysis); if sess.target.target != sess.host { control.after_analysis.stop = Compilation::Stop; } control } } fn 
after_hir_lowering(state: &mut CompileState) { let attr = ( String::from("miri"), syntax::feature_gate::AttributeType::Whitelisted, ); state.session.plugin_attributes.borrow_mut().push(attr); } fn after_analysis<'a, 'tcx>(state: &mut CompileState<'a, 'tcx>) { state.session.abort_if_errors(); let tcx = state.tcx.unwrap(); let limits = resource_limits_from_attributes(state); if std::env::args().any(|arg| arg == "--test") { struct Visitor<'a, 'tcx: 'a>( miri::ResourceLimits, TyCtxt<'a, 'tcx, 'tcx>, &'a CompileState<'a, 'tcx> ); impl<'a, 'tcx: 'a, 'hir> itemlikevisit::ItemLikeVisitor<'hir> for Visitor<'a, 'tcx> { fn visit_item(&mut self, i: &'hir hir::Item) { if let hir::Item_::ItemFn(_, _, _, _, _, body_id) = i.node { if i.attrs.iter().any(|attr| { attr.name().map_or(false, |n| n == "test") }) { let did = self.1.hir.body_owner_def_id(body_id); println!( "running test: {}", self.1.def_path_debug_str(did), ); miri::eval_main(self.1, did, None, self.0); self.2.session.abort_if_errors(); } } } fn visit_trait_item(&mut self, _trait_item: &'hir hir::TraitItem) {} fn visit_impl_item(&mut self, _impl_item: &'hir hir::ImplItem) {} } state.hir_crate.unwrap().visit_all_item_likes( &mut Visitor(limits, tcx, state), ); } else if let Some((entry_node_id, _)) = *state.session.entry_fn.borrow() { let entry_def_id = tcx.hir.local_def_id(entry_node_id); let start_wrapper = tcx.lang_items().start_fn().and_then(|start_fn| { if tcx.is_mir_available(start_fn) { Some(start_fn) } else { None } }); miri::eval_main(tcx, entry_def_id, start_wrapper, limits); state.session.abort_if_errors(); } else { println!("no main function found, assuming auxiliary build"); } } fn resource_limits_from_attributes(state: &CompileState) -> miri::ResourceLimits { let mut limits = miri::ResourceLimits::default(); let krate = state.hir_crate.as_ref().unwrap(); let err_msg = "miri attributes need to be in the form `miri(key = value)`"; let extract_int = |lit: &syntax::ast::Lit| -> u128 { match lit.node { 
syntax::ast::LitKind::Int(i, _) => i, _ => { state.session.span_fatal( lit.span, "expected an integer literal", ) } } }; for attr in krate.attrs.iter().filter(|a| { a.name().map_or(false, |n| n == "miri") }) { if let Some(items) = attr.meta_item_list() { for item in items { if let NestedMetaItemKind::MetaItem(ref inner) = item.node { if let MetaItemKind::NameValue(ref value) = inner.node { match &inner.name().as_str()[..] { "memory_size" => limits.memory_size = extract_int(value) as u64, "step_limit" => limits.step_limit = extract_int(value) as u64, "stack_limit" => limits.stack_limit = extract_int(value) as usize, _ => state.session.span_err(item.span, "unknown miri attribute"), } } else { state.session.span_err(inner.span, err_msg); } } else { state.session.span_err(item.span, err_msg); } } } else { state.session.span_err(attr.span, err_msg); } } limits } fn init_logger() { let format = |record: &log::LogRecord| { if record.level() == log::LogLevel::Trace { let indentation = log_settings::settings().indentation; format!( "{indentation}:{lvl}:{module}: {text}", lvl = record.level(), module = record.location().module_path(), indentation = indentation, text = record.args(), ) } else { format!( "{lvl}:{module}: {text}", lvl = record.level(), module = record.location().module_path(), text = record.args(), ) } }; let mut builder = env_logger::LogBuilder::new(); builder.format(format).filter( None, log::LogLevelFilter::Info, ); if std::env::var("MIRI_LOG").is_ok() { builder.parse(&std::env::var("MIRI_LOG").unwrap()); } builder.init().unwrap(); } fn find_sysroot() -> String { if let Ok(sysroot) = std::env::var("MIRI_SYSRO
up or multirust", ) .to_owned() } } } fn main() { init_logger(); let mut args: Vec<String> = std::env::args().collect(); let sysroot_flag = String::from("--sysroot"); if !args.contains(&sysroot_flag) { args.push(sysroot_flag); args.push(find_sysroot()); } args.push("-Zalways-encode-mir".to_owned()); rustc_driver::run_compiler(&args, &mut MiriCompilerCalls { default: RustcDefaultCalls, }, None, None); }
OT") { return sysroot; } let home = option_env!("RUSTUP_HOME").or(option_env!("MULTIRUST_HOME")); let toolchain = option_env!("RUSTUP_TOOLCHAIN").or(option_env!("MULTIRUST_TOOLCHAIN")); match (home, toolchain) { (Some(home), Some(toolchain)) => format!("{}/toolchains/{}", home, toolchain), _ => { option_env!("RUST_SYSROOT") .expect( "need to specify RUST_SYSROOT env var or use rust
function_block-random_span
[ { "content": "fn after_analysis<'a, 'tcx>(state: &mut CompileState<'a, 'tcx>) {\n\n state.session.abort_if_errors();\n\n\n\n let tcx = state.tcx.unwrap();\n\n let limits = Default::default();\n\n\n\n if std::env::args().any(|arg| arg == \"--test\") {\n\n struct Visitor<'a, 'tcx: 'a>(miri::Res...
Rust
src/profiles.rs
rafamatias/rogcat
d90d1e02784b965c49e6ea67815e60b5c11a0e76
use clap::ArgMatches; use failure::{err_msg, Error}; use std::collections::HashMap; use std::convert::Into; use std::env::var; use std::fs::File; use std::io::prelude::*; use std::ops::AddAssign; use std::path::PathBuf; use toml::{from_str, to_string}; const EXTEND_LIMIT: u32 = 1000; #[derive(Clone, Debug, Default, Deserialize, Serialize)] pub struct ProfileFile { extends: Option<Vec<String>>, comment: Option<String>, highlight: Option<Vec<String>>, message: Option<Vec<String>>, tag: Option<Vec<String>>, } impl Into<Profile> for ProfileFile { fn into(self) -> Profile { Profile { comment: self.comment, extends: self.extends.unwrap_or_else(|| vec![]), highlight: self.highlight.unwrap_or_else(|| vec![]), message: self.message.unwrap_or_else(|| vec![]), tag: self.tag.unwrap_or_else(|| vec![]), } } } #[derive(Debug, Default, Deserialize, Serialize)] struct ConfigurationFile { profile: HashMap<String, ProfileFile>, } #[derive(Clone, Debug, Default, PartialEq)] pub struct Profile { comment: Option<String>, extends: Vec<String>, highlight: Vec<String>, message: Vec<String>, tag: Vec<String>, } impl Profile { pub fn comment(&self) -> &Option<String> { &self.comment } pub fn highlight(&self) -> &Vec<String> { &self.highlight } pub fn message(&self) -> &Vec<String> { &self.message } pub fn tag(&self) -> &Vec<String> { &self.tag } } impl AddAssign for Profile { fn add_assign(&mut self, other: Profile) { macro_rules! 
vec_extend { ($x:expr, $y:expr) => { $x.extend($y); $x.sort(); $x.dedup(); }; } vec_extend!(self.extends, other.extends); vec_extend!(self.highlight, other.highlight); vec_extend!(self.message, other.message); vec_extend!(self.tag, other.tag); } } #[derive(Debug, Default)] pub struct Profiles { file: PathBuf, profile: Profile, profiles: HashMap<String, Profile>, } impl Profiles { pub fn new(args: &ArgMatches) -> Result<Self, Error> { let file = Self::file(Some(args))?; if !file.exists() { Ok(Profiles { file, ..Default::default() }) } else { let mut config = String::new(); File::open(file.clone()) .map_err(|e| format_err!("Failed to open {}: {}", file.display(), e))? .read_to_string(&mut config)?; let mut config_file: ConfigurationFile = from_str(&config) .map_err(|e| format_err!("Failed to parse {}: {}", file.display(), e))?; let profiles: HashMap<String, Profile> = config_file .profile .drain() .map(|(k, v)| (k, v.into())) .collect(); let mut profile = Profile::default(); if let Some(n) = args.value_of("profile") { profile = profiles .get(n) .ok_or_else(|| format_err!("Unknown profile {}", n))? 
.clone(); Self::expand(n, &mut profile, &profiles)?; } Ok(Profiles { file, profile, profiles, }) } } fn expand(n: &str, p: &mut Profile, a: &HashMap<String, Profile>) -> Result<(), Error> { let mut loops = EXTEND_LIMIT; while !p.extends.is_empty() { let extends = p.extends.clone(); p.extends.clear(); for e in &extends { let f = a.get(e).ok_or_else(|| { format_err!("Unknown extend profile name {} used in {}", e, n) })?; *p += f.clone(); } loops -= 1; if loops == 0 { return Err(format_err!( "Reached recursion limit while resolving profile {} extends", n )); } } Ok(()) } pub fn profile(&self) -> Profile { self.profile.clone() } pub fn subcommand(self, args: &ArgMatches) -> Result<i32, Error> { if args.is_present("list") { if self.profiles.is_empty() { println!("No profiles present in \"{}\".", self.file.display()); } else { println!("Available profiles in \"{}\":", self.file.display()); for (k, v) in self.profiles { println!( " * {}{}", k, v.comment() .clone() .map(|c| format!(": {}", c)) .unwrap_or_else(|| "".into()) ); } } Ok(0) } else if args.is_present("examples") { let mut example = ConfigurationFile::default(); example.profile.insert( "W hitespace".into(), ProfileFile { comment: Some( "Profile names can contain whitespaces. 
Quote on command line...".into(), ), ..Default::default() }, ); example.profile.insert( "rogcat".into(), ProfileFile { comment: Some("Only tag \"rogcat\"".into()), tag: Some(vec!["^rogcat$".into()]), ..Default::default() }, ); example.profile.insert( "Comments are optional".into(), ProfileFile { tag: Some(vec!["rogcat".into()]), ..Default::default() }, ); example.profile.insert( "A".into(), ProfileFile { comment: Some("Messages starting with A".into()), message: Some(vec!["^A.*".into()]), ..Default::default() }, ); example.profile.insert( "B".into(), ProfileFile { comment: Some("Messages starting with B".into()), message: Some(vec!["^B.*".into()]), ..Default::default() }, ); example.profile.insert( "ABC".into(), ProfileFile { extends: Some(vec!["A".into(), "B".into()]), comment: Some("Profiles A, B plus the following filter (^C.*)".into()), message: Some(vec!["^C.*".into()]), ..Default::default() }, ); example.profile.insert( "complex".into(), ProfileFile { comment: Some( "Profiles can be complex. This one is probably very useless.".into(), ), tag: Some(vec!["b*".into(), "!adb".into()]), message: Some(vec!["^R.*".into(), "!^A.*".into(), "!^A.*".into()]), highlight: Some(vec!["blah".into()]), ..Default::default() }, ); to_string(&example) .map_err(|e| format_err!("Internal example serialization error: {}", e)) .map(|s| { println!("Example profiles:"); println!(); println!("{}", s); 0 }) } else { Err(err_msg("Missing option for profiles subcommand!")) } } pub fn file(args: Option<&ArgMatches>) -> Result<PathBuf, Error> { if let Some(args) = args { if args.is_present("profiles_path") { let f = PathBuf::from(value_t!(args, "profiles_path", String)?); if f.exists() { return Ok(f); } else { return Err(format_err!( "Cannot find {}. 
Use --profiles_path to specify the path manually!", f.display() )); } } } if let Ok(f) = var("ROGCAT_PROFILES").map(PathBuf::from) { if f.exists() { return Ok(f); } else { Err(format_err!( "Cannot find {} set in ROGCAT_PROFILES!", f.display() )) } } else { Ok(::config_dir().join("profiles.toml")) } } }
use clap::ArgMatches; use failure::{err_msg, Error}; use std::collections::HashMap; use std::convert::Into; use std::env::var; use std::fs::File; use std::io::prelude::*; use std::ops::AddAssign; use std::path::PathBuf; use toml::{from_str, to_string}; const EXTEND_LIMIT: u32 = 1000; #[derive(Clone, Debug, Default, Deserialize, Serialize)] pub struct ProfileFile { extends: Option<Vec<String>>, comment: Option<String>, highlight: Option<Vec<String>>, message: Option<Vec<String>>, tag: Option<Vec<String>>, } impl Into<Profile> for ProfileFile { fn into(self) -> Profile { Profile { comment: self.comment, extends: self.extends.unwrap_or_else(|| vec![]), highlight: self.highlight.unwrap_or_else(|| vec![]), message: self.message.unwrap_or_else(|| vec![]), tag: self.tag.unwrap_or_else(|| vec![]), } } } #[derive(Debug, Default, Deserialize, Serialize)] struct ConfigurationFile { profile: HashMap<String, ProfileFile>, } #[derive(Clone, Debug, Default, PartialEq)] pub struct Profile { comment: Option<String>, extends: Vec<String>, highlight: Vec<String>, message: Vec<String>, tag: Vec<String>, } impl Profile { pub fn comment(&self) -> &Option<String> { &self.comment } pub fn highlight(&self) -> &Vec<String> { &self.highlight } pub fn message(&self) -> &Vec<String> { &self.message } pub fn tag(&self) -> &Vec<String> { &self.tag } } impl AddAssign for Profile { fn add_assign(&mut self, other: Profile) { macro_rules! 
vec_extend { ($x:expr, $y:expr) => { $x.extend($y); $x.sort(); $x.dedup(); }; } vec_extend!(self.extends, other.extends); vec_extend!(self.highlight, other.highlight); vec_extend!(self.message, other.message); vec_extend!(self.tag, other.tag); } } #[derive(Debug, Default)] pub struct Profiles { file: PathBuf, profile: Profile, profiles: HashMap<String, Profile>, } impl Profiles { pub fn new(args: &ArgMatches) -> Result<Self, Error> { let file = Self::file(Some(args))?; if !file.exists() { Ok(Profiles { file, ..Default::default() }) } else { let mut config = String::new(); File::open(file.clone()) .map_err(|e| format_err!("Failed to open {}: {}", file.display(), e))? .read_to_string(&mut config)?; let mut config_file: ConfigurationFile = from_str(&config) .map_err(|e| format_err!("Failed to parse {}: {}", file.display(), e))?; let profiles: HashMap<String, Profile> = config_file .profile .drain() .map(|(k, v)| (k, v.into())) .collect(); let mut profile = Profile::default();
Ok(Profiles { file, profile, profiles, }) } } fn expand(n: &str, p: &mut Profile, a: &HashMap<String, Profile>) -> Result<(), Error> { let mut loops = EXTEND_LIMIT; while !p.extends.is_empty() { let extends = p.extends.clone(); p.extends.clear(); for e in &extends { let f = a.get(e).ok_or_else(|| { format_err!("Unknown extend profile name {} used in {}", e, n) })?; *p += f.clone(); } loops -= 1; if loops == 0 { return Err(format_err!( "Reached recursion limit while resolving profile {} extends", n )); } } Ok(()) } pub fn profile(&self) -> Profile { self.profile.clone() } pub fn subcommand(self, args: &ArgMatches) -> Result<i32, Error> { if args.is_present("list") { if self.profiles.is_empty() { println!("No profiles present in \"{}\".", self.file.display()); } else { println!("Available profiles in \"{}\":", self.file.display()); for (k, v) in self.profiles { println!( " * {}{}", k, v.comment() .clone() .map(|c| format!(": {}", c)) .unwrap_or_else(|| "".into()) ); } } Ok(0) } else if args.is_present("examples") { let mut example = ConfigurationFile::default(); example.profile.insert( "W hitespace".into(), ProfileFile { comment: Some( "Profile names can contain whitespaces. 
Quote on command line...".into(), ), ..Default::default() }, ); example.profile.insert( "rogcat".into(), ProfileFile { comment: Some("Only tag \"rogcat\"".into()), tag: Some(vec!["^rogcat$".into()]), ..Default::default() }, ); example.profile.insert( "Comments are optional".into(), ProfileFile { tag: Some(vec!["rogcat".into()]), ..Default::default() }, ); example.profile.insert( "A".into(), ProfileFile { comment: Some("Messages starting with A".into()), message: Some(vec!["^A.*".into()]), ..Default::default() }, ); example.profile.insert( "B".into(), ProfileFile { comment: Some("Messages starting with B".into()), message: Some(vec!["^B.*".into()]), ..Default::default() }, ); example.profile.insert( "ABC".into(), ProfileFile { extends: Some(vec!["A".into(), "B".into()]), comment: Some("Profiles A, B plus the following filter (^C.*)".into()), message: Some(vec!["^C.*".into()]), ..Default::default() }, ); example.profile.insert( "complex".into(), ProfileFile { comment: Some( "Profiles can be complex. This one is probably very useless.".into(), ), tag: Some(vec!["b*".into(), "!adb".into()]), message: Some(vec!["^R.*".into(), "!^A.*".into(), "!^A.*".into()]), highlight: Some(vec!["blah".into()]), ..Default::default() }, ); to_string(&example) .map_err(|e| format_err!("Internal example serialization error: {}", e)) .map(|s| { println!("Example profiles:"); println!(); println!("{}", s); 0 }) } else { Err(err_msg("Missing option for profiles subcommand!")) } } pub fn file(args: Option<&ArgMatches>) -> Result<PathBuf, Error> { if let Some(args) = args { if args.is_present("profiles_path") { let f = PathBuf::from(value_t!(args, "profiles_path", String)?); if f.exists() { return Ok(f); } else { return Err(format_err!( "Cannot find {}. 
Use --profiles_path to specify the path manually!", f.display() )); } } } if let Ok(f) = var("ROGCAT_PROFILES").map(PathBuf::from) { if f.exists() { return Ok(f); } else { Err(format_err!( "Cannot find {} set in ROGCAT_PROFILES!", f.display() )) } } else { Ok(::config_dir().join("profiles.toml")) } } }
if let Some(n) = args.value_of("profile") { profile = profiles .get(n) .ok_or_else(|| format_err!("Unknown profile {}", n))? .clone(); Self::expand(n, &mut profile, &profiles)?; }
if_condition
[ { "content": "pub fn file_content(file: &PathBuf) -> Result<SVec, Error> {\n\n let content = BufReader::new(File::open(file)?)\n\n .lines()\n\n .map(|e| e.unwrap())\n\n .collect();\n\n Ok(content)\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 0, "score": 187593.4...
Rust
src/yuv444i/mod.rs
dunkelstern/grapho-bitplane
4db3789284fa89c85e6b1b1c972aa4d42eda9bbf
use crate::*; pub use grapho_color::DigitalYCbCrColor; pub use crate::yuv422i::YUVComponent; #[derive(Debug, PartialEq)] pub struct YUV444iPixelBuffer<'a> { width: usize, height: usize, stride: usize, fourcc: &'a str, component_order: Vec<YUVComponent>, data: Vec<u8> } impl<'a> YUV444iPixelBuffer<'a> { fn decode_component_order(fourcc:&'a str) -> Vec<YUVComponent> { match fourcc { "VUY" => vec![YUVComponent::V, YUVComponent::U, YUVComponent::Y], "YVU" => vec![YUVComponent::Y, YUVComponent::V, YUVComponent::U], "UVY" => vec![YUVComponent::U, YUVComponent::V, YUVComponent::Y], "YUV" | "YUV444" | "YUV 4:4:4" | _ => vec![YUVComponent::Y, YUVComponent::U, YUVComponent::V], } } } impl<'a> PixelBuffer<'a> for YUV444iPixelBuffer<'a> { type ColorType = DigitalYCbCrColor; fn new(width: usize, height: usize, stride: Option<usize>, fourcc: Option<&'a str>) -> Self { let f = fourcc.unwrap_or("YUV444"); let component_order = YUV444iPixelBuffer::decode_component_order(f); let line_width = stride.unwrap_or(width * 3); YUV444iPixelBuffer { width, height, data: vec![0; line_width * height], stride: line_width, fourcc: f, component_order } } fn new_with_data(width: usize, height: usize, data: Vec<u8>, stride: Option<usize>, fourcc: Option<&'a str>) -> Result<Self, PixelBufferError> { let f = fourcc.unwrap_or("YUV444"); let component_order = YUV444iPixelBuffer::decode_component_order(f); if data.len() < stride.unwrap_or(width * 3) * height { return Err(PixelBufferError::BufferTooSmall); } Ok( YUV444iPixelBuffer { width, height, data, stride: stride.unwrap_or(width * 3), fourcc: f, component_order } ) } fn new_with_background(width: usize, height: usize, color: Self::ColorType, stride: Option<usize>, fourcc: Option<&'a str>) -> Self { let f = fourcc.unwrap_or("YUV444"); let component_order = YUV444iPixelBuffer::decode_component_order(f); let rep: [u8; 3] = color.into(); let line_width = stride.unwrap_or(width * component_order.len()); let data:Vec<u8>; let representation = vec![ 
rep[component_order[0] as usize], rep[component_order[1] as usize], rep[component_order[2] as usize], ]; if line_width > width * 3 { let mut line = representation.repeat(width); line.extend([0].repeat(line_width - width * 3)); data = line.repeat(height); } else { data = representation.repeat(width * height); } YUV444iPixelBuffer { width, height, data, stride: line_width, fourcc: f, component_order } } fn get_width(&self) -> usize { self.width } fn get_height(&self) -> usize { self.height } fn get_stride(&self) -> usize { self.stride } fn get_fourcc(&self) -> &'a str { self.fourcc } fn set_pixel(&mut self, x: usize, y: usize, color: Self::ColorType) -> Result<(), PixelBufferError> { if (x >= self.width) || (y >= self.width) { return Err(PixelBufferError::RequestOutOfBounds); } let start = x * self.component_order.len() + y * self.stride; let repr: [u8; 3] = color.into(); for i in 0..self.component_order.len() { self.data[start + i] = repr[self.component_order[i] as usize]; } Ok(()) } fn get_pixel(&self, x: usize, y: usize) -> Result<Self::ColorType, PixelBufferError> { if (x >= self.width) || (y >= self.width) { return Err(PixelBufferError::RequestOutOfBounds); } let start = x * 3 + y * self.stride; let mut color: [u8; 3] = [0, 0, 0]; for i in 0..self.component_order.len() { color[self.component_order[i] as usize] = self.data[start + i]; } Ok(DigitalYCbCrColor::from(color)) } } pub mod iter; mod tests;
use crate::*; pub use grapho_color::DigitalYCbCrColor; pub use crate::yuv422i::YUVComponent; #[derive(Debug, PartialEq)] pub struct YUV444iPixelBuffer<'a> { width: usize, height: usize, stride: usize, fourcc: &'a str, component_order: Vec<YUVComponent>, data: Vec<u8> } impl<'a> YUV444iPixelBuffer<'a> { fn decode_component_order(fourcc:&'a str) -> Vec<YUVComponent> { match fourcc { "VUY" => vec![YUVComponent::V, YUVComponent::U, YUVComponent::Y], "YVU" => vec![YUVComponent::Y, YUVComponent::V, YUVComponent::U], "UVY" => vec![YUVComponent::U, YUVComponent::V, YUVComponent::Y], "YUV" | "YUV444" | "YUV 4:4:4" | _ => vec![YUVComponent::Y, YUVComponent::U, YUVComponent::V], } } } impl<'a> PixelBuffer<'a> for YUV444iPixelBuffer<'a> { type ColorType = DigitalYCbCrColor;
fn new_with_data(width: usize, height: usize, data: Vec<u8>, stride: Option<usize>, fourcc: Option<&'a str>) -> Result<Self, PixelBufferError> { let f = fourcc.unwrap_or("YUV444"); let component_order = YUV444iPixelBuffer::decode_component_order(f); if data.len() < stride.unwrap_or(width * 3) * height { return Err(PixelBufferError::BufferTooSmall); } Ok( YUV444iPixelBuffer { width, height, data, stride: stride.unwrap_or(width * 3), fourcc: f, component_order } ) } fn new_with_background(width: usize, height: usize, color: Self::ColorType, stride: Option<usize>, fourcc: Option<&'a str>) -> Self { let f = fourcc.unwrap_or("YUV444"); let component_order = YUV444iPixelBuffer::decode_component_order(f); let rep: [u8; 3] = color.into(); let line_width = stride.unwrap_or(width * component_order.len()); let data:Vec<u8>; let representation = vec![ rep[component_order[0] as usize], rep[component_order[1] as usize], rep[component_order[2] as usize], ]; if line_width > width * 3 { let mut line = representation.repeat(width); line.extend([0].repeat(line_width - width * 3)); data = line.repeat(height); } else { data = representation.repeat(width * height); } YUV444iPixelBuffer { width, height, data, stride: line_width, fourcc: f, component_order } } fn get_width(&self) -> usize { self.width } fn get_height(&self) -> usize { self.height } fn get_stride(&self) -> usize { self.stride } fn get_fourcc(&self) -> &'a str { self.fourcc } fn set_pixel(&mut self, x: usize, y: usize, color: Self::ColorType) -> Result<(), PixelBufferError> { if (x >= self.width) || (y >= self.width) { return Err(PixelBufferError::RequestOutOfBounds); } let start = x * self.component_order.len() + y * self.stride; let repr: [u8; 3] = color.into(); for i in 0..self.component_order.len() { self.data[start + i] = repr[self.component_order[i] as usize]; } Ok(()) } fn get_pixel(&self, x: usize, y: usize) -> Result<Self::ColorType, PixelBufferError> { if (x >= self.width) || (y >= self.width) { return 
Err(PixelBufferError::RequestOutOfBounds); } let start = x * 3 + y * self.stride; let mut color: [u8; 3] = [0, 0, 0]; for i in 0..self.component_order.len() { color[self.component_order[i] as usize] = self.data[start + i]; } Ok(DigitalYCbCrColor::from(color)) } } pub mod iter; mod tests;
fn new(width: usize, height: usize, stride: Option<usize>, fourcc: Option<&'a str>) -> Self { let f = fourcc.unwrap_or("YUV444"); let component_order = YUV444iPixelBuffer::decode_component_order(f); let line_width = stride.unwrap_or(width * 3); YUV444iPixelBuffer { width, height, data: vec![0; line_width * height], stride: line_width, fourcc: f, component_order } }
function_block-full_function
[ { "content": "/// Pixel buffer trait, all Pixel buffers will implement this\n\npub trait PixelBuffer<'a>: Sized + IntoIterator\n\n // + Sub + Mul + Add + Div + SubAssign + MulAssign + AddAssign + DivAssign\n\n{\n\n /// The color type this pixel buffer contains\n\n type ColorType;\n\n\n\n /// Create ...
Rust
src/main.rs
kurtbuilds/checkexec
22af898b48c2c10432762406c4ec4b714995f5f2
use std::borrow::Cow; use std::fmt::{Display}; use std::path::{Path}; use std::process::{exit, Command}; use clap::{App, AppSettings, Arg}; use std::fs; use shell_escape::escape; const VERSION: &str = env!("CARGO_PKG_VERSION"); struct Error { message: String, } impl std::fmt::Debug for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.message) } } impl std::fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.message) } } impl std::error::Error for Error {} macro_rules! err { ($($arg:tt)*) => { Error { message: format!($($arg)*), } } } fn infer_dependencies<'a>(command: &[&'a str]) -> Result<Vec<&'a str>, Error> { let inferred_deps = command.iter() .filter_map(|s| fs::metadata(s).ok().map(|_| *s)) .collect::<Vec<&str>>(); if inferred_deps.is_empty() { Err(err!("--infer must find at least one accessible file in command arguments. Command arguments are: {}", command.iter().map(|s| format!("\"{}\"", s)).collect::<Vec<String>>().join(" ") )) } else { Ok(inferred_deps) } } fn should_execute<T: AsRef<Path> + Display>(target: &str, dependencies: &[T]) -> Result<bool, Error> { match fs::metadata(target) { Ok(meta) => { let modified = meta.modified().unwrap(); for dependency in dependencies { let dep_meta = fs::metadata(&dependency) .map_err(|_| err!("{}: Could not read file metadata", &dependency))?; if dep_meta.modified().unwrap() > modified { return Ok(true); } } Ok(false) } Err(_) => Ok(true) } } fn main() -> std::result::Result<(), Error> { let args = App::new("checkexec") .version(VERSION) .about("Conditionally run a command (like `make`)") .setting(AppSettings::ArgRequiredElseHelp) .setting(AppSettings::TrailingVarArg) .arg(Arg::new("target") .help("The file created by this checkexec execution.") .required(true) ) .arg(Arg::new("verbose") .long("verbose") .short('v') .takes_value(false) ) .arg(Arg::new("infer") .long("infer") .takes_value(false) 
.conflicts_with("dependencies") .help("Infer the dependency list. The inference takes all arguments to the command, filters it for files, and uses that list. \ --infer causes checkexec to fail if it creates an empty dependency list.") ) .arg(Arg::new("dependencies").min_values(0) .help("The list of files") ) .arg(Arg::new("command").min_values(1) .last(true) .required(true) .help("The command to execute if the check passes.") ) .get_matches(); let verbose = args.is_present("verbose"); let target = args.value_of("target").unwrap(); let command_args = args.values_of("command").unwrap().into_iter().skip(1).collect::<Vec<&str>>(); let dependencies = if args.is_present("infer") { infer_dependencies(&command_args)? } else { args.values_of("dependencies").map(|d| d.collect::<Vec<&str>>()).unwrap_or_default() } .iter() .flat_map(|s| s.split('\n')) .collect::<Vec<&str>>(); if verbose { eprintln!("Found {} dependencies:\n{}", dependencies.len(), dependencies.iter().map(|d| escape(Cow::Borrowed(d))).collect::<Vec<_>>().join("\n")); } if should_execute(target, &dependencies)? 
{ let command = args.values_of("command").unwrap().collect::<Vec<&str>>(); if verbose { eprintln!("{} {}", command[0], command.iter().skip(1).map(|s| format!("\"{}\"", s)).collect::<Vec<String>>().join(" ")); } let output = Command::new(command[0]) .args(command[1..].iter()) .status() .map_err(|_| err!("{}: command not found", command[0]))?; exit(output.code().unwrap()); } Ok(()) } #[cfg(test)] mod test { use std::io::Write; use super::*; use tempfile::{TempDir, tempdir}; struct TempFiles { #[allow(dead_code)] dir: TempDir, pub files: Vec<String>, } fn touch(path: &str) -> std::io::Result<()> { let mut file = fs::File::create(path).unwrap(); file.write_all(b"") } fn touch_and_untouch(touched: usize, untouched: usize) -> TempFiles { let tempdir = tempdir().unwrap(); let dir = tempdir.path(); let mut files: Vec<String> = Vec::new(); files.extend((0..touched).map(|i| dir.join(i.to_string()).to_str().unwrap().to_string())); files.extend((touched..(touched + untouched)).map(|i| dir.join(i.to_string()).to_str().unwrap().to_string())); for file in files.iter().take(touched) { touch(file).unwrap(); std::thread::sleep(std::time::Duration::from_millis(10)); } TempFiles { dir: tempdir, files, } } #[test] fn test_infer_dependencies() { let TempFiles { dir: _dir, files } = touch_and_untouch(3, 0); let dependencies = infer_dependencies(&["cc", &files[0], &files[1]]).unwrap(); assert_eq!(dependencies, vec![&files[0], &files[1]]); } #[test] fn test_no_inferred_dependencies_errors() { let TempFiles { dir: _dir, files } = touch_and_untouch(0, 1); assert!(infer_dependencies(&["cc", &files[0]]).is_err()) } #[test] fn test_should_execute_errors_on_failed_dependency_access() { let TempFiles { dir: _dir, files } = touch_and_untouch(1, 1); assert!(should_execute(&files[0], &files[1..]).is_err(), "Should have failed to access file"); } #[test] fn test_should_execute_target_doesnt_exist() { let TempFiles { dir: _dir, files } = touch_and_untouch(1, 1); assert!(should_execute(&files[1], 
&files[0..1]).unwrap(), "Should execute because target doesn't exist"); } #[test] fn test_should_not_execute_newer_target() { let TempFiles { dir: _dir, files } = touch_and_untouch(2, 0); assert!(!should_execute(&files[1], &files[0..1]).unwrap(), "Should not execute because target is newer"); } #[test] fn test_should_execute_newer_dependencies() { let TempFiles { dir: _dir, files } = touch_and_untouch(2, 0); assert!(should_execute(&files[0], &files[1..]).unwrap()) } }
use std::borrow::Cow; use std::fmt::{Display}; use std::path::{Path}; use std::process::{exit, Command}; use clap::{App, AppSettings, Arg}; use std::fs; use shell_escape::escape; const VERSION: &str = env!("CARGO_PKG_VERSION"); struct Error { message: String, } impl std::fmt::Debug for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.message) } } impl std::fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.message) } } impl std::error::Error for Error {} macro_rules! err { ($($arg:tt)*) => { Error { message: format!($($arg)*), } } } fn infer_dependencies<'a>(command: &[&'a str]) -> Result<Vec<&'a str>, Error> { let inferred_deps = command.iter() .filter_map(|s| fs::metadata(s).ok().map(|_| *s)) .collect::<Vec<&str>>(); if inferred_deps.is_empty() { Err(err!("--infer must find at least one accessible file in command arguments. Command arguments are: {}", command.iter().map(|s| format!("\"{}\"", s)).collect::<Vec<String>>().join(" ") )) } else { Ok(inferred_deps) } } fn should_execute<T: AsRef<Path> + Display>(target: &str, dependencies: &[T]) -> Result<bool, Error> { match fs::metadata(target) { Ok(meta) => { let modified = meta.modified().unwrap(); for dependency in dependencies { let dep_meta = fs::metadata(&dependency) .map_err(|_| err!("{}: Could not read file metadata", &dependency))?; if dep_meta.modified().unwrap() > modified { return Ok(true); } } Ok(false) } Err(_) => Ok(true) } } fn main() -> std::result::Result<(), Error> { let args = App::new("checkexec") .version(VERSION) .about("Conditionally run a command (like `make`)") .setting(AppSettings::ArgRequiredElseHelp) .setting(AppSettings::TrailingVarArg) .arg(Arg::new("target") .help("The file created by this checkexec execution.") .required(true) ) .arg(Arg::new("verbose") .long("verbose") .short('v') .takes_value(false) ) .arg(Arg::new("infer") .long("infer") .takes_value(false) 
.conflicts_with("dependencies") .help("Infer the dependency list. The inference takes all arguments to the command, filters it for files, and uses that list. \ --infer causes checkexec to fail if it creates an empty dependency list.") ) .arg(Arg::new("dependencies").min_values(0) .help("The list of files") ) .arg(Arg::new("command").min_values(1) .last(true) .required(true) .help("The command to execute if the check passes.") ) .
#[cfg(test)] mod test { use std::io::Write; use super::*; use tempfile::{TempDir, tempdir}; struct TempFiles { #[allow(dead_code)] dir: TempDir, pub files: Vec<String>, } fn touch(path: &str) -> std::io::Result<()> { let mut file = fs::File::create(path).unwrap(); file.write_all(b"") } fn touch_and_untouch(touched: usize, untouched: usize) -> TempFiles { let tempdir = tempdir().unwrap(); let dir = tempdir.path(); let mut files: Vec<String> = Vec::new(); files.extend((0..touched).map(|i| dir.join(i.to_string()).to_str().unwrap().to_string())); files.extend((touched..(touched + untouched)).map(|i| dir.join(i.to_string()).to_str().unwrap().to_string())); for file in files.iter().take(touched) { touch(file).unwrap(); std::thread::sleep(std::time::Duration::from_millis(10)); } TempFiles { dir: tempdir, files, } } #[test] fn test_infer_dependencies() { let TempFiles { dir: _dir, files } = touch_and_untouch(3, 0); let dependencies = infer_dependencies(&["cc", &files[0], &files[1]]).unwrap(); assert_eq!(dependencies, vec![&files[0], &files[1]]); } #[test] fn test_no_inferred_dependencies_errors() { let TempFiles { dir: _dir, files } = touch_and_untouch(0, 1); assert!(infer_dependencies(&["cc", &files[0]]).is_err()) } #[test] fn test_should_execute_errors_on_failed_dependency_access() { let TempFiles { dir: _dir, files } = touch_and_untouch(1, 1); assert!(should_execute(&files[0], &files[1..]).is_err(), "Should have failed to access file"); } #[test] fn test_should_execute_target_doesnt_exist() { let TempFiles { dir: _dir, files } = touch_and_untouch(1, 1); assert!(should_execute(&files[1], &files[0..1]).unwrap(), "Should execute because target doesn't exist"); } #[test] fn test_should_not_execute_newer_target() { let TempFiles { dir: _dir, files } = touch_and_untouch(2, 0); assert!(!should_execute(&files[1], &files[0..1]).unwrap(), "Should not execute because target is newer"); } #[test] fn test_should_execute_newer_dependencies() { let TempFiles { dir: _dir, files } = 
touch_and_untouch(2, 0); assert!(should_execute(&files[0], &files[1..]).unwrap()) } }
get_matches(); let verbose = args.is_present("verbose"); let target = args.value_of("target").unwrap(); let command_args = args.values_of("command").unwrap().into_iter().skip(1).collect::<Vec<&str>>(); let dependencies = if args.is_present("infer") { infer_dependencies(&command_args)? } else { args.values_of("dependencies").map(|d| d.collect::<Vec<&str>>()).unwrap_or_default() } .iter() .flat_map(|s| s.split('\n')) .collect::<Vec<&str>>(); if verbose { eprintln!("Found {} dependencies:\n{}", dependencies.len(), dependencies.iter().map(|d| escape(Cow::Borrowed(d))).collect::<Vec<_>>().join("\n")); } if should_execute(target, &dependencies)? { let command = args.values_of("command").unwrap().collect::<Vec<&str>>(); if verbose { eprintln!("{} {}", command[0], command.iter().skip(1).map(|s| format!("\"{}\"", s)).collect::<Vec<String>>().join(" ")); } let output = Command::new(command[0]) .args(command[1..].iter()) .status() .map_err(|_| err!("{}: command not found", command[0]))?; exit(output.code().unwrap()); } Ok(()) }
function_block-function_prefix_line
[ { "content": "<div id=\"top\"></div>\n\n\n\n<p align=\"center\">\n\n<a href=\"https://github.com/kurtbuilds/checkexec/graphs/contributors\">\n\n <img src=\"https://img.shields.io/github/contributors/kurtbuilds/checkexec.svg?style=flat-square\" alt=\"GitHub Contributors\" />\n\n</a>\n\n<a href=\"https://githu...
Rust
shapes/src/plymesh.rs
hackmad/pbr_rust
b7ae75564bf71c4dfea8b20f49d05ac1b89e6734
#![allow(dead_code)] use super::TriangleMesh; use core::geometry::*; use core::paramset::*; use core::texture::FloatTextureMap; use ply_rs::parser::Parser; use ply_rs::ply::*; use std::fs::File; use std::io::BufReader; use std::sync::Arc; use textures::ConstantTexture; pub struct PLYMesh; impl PLYMesh { pub fn from_props( p: ( &ParamSet, ArcTransform, ArcTransform, bool, &FloatTextureMap, ), ) -> Vec<ArcShape> { let (params, o2w, w2o, reverse_orientation, float_textures) = p; let path = params.find_one_filename("filename", String::from("")); assert!(path.len() > 0, "PLY filename not provied"); let alpha_tex_name = params.find_one_texture("alpha", String::from("")); let alpha_tex = if alpha_tex_name.len() > 0 { if let Some(tex) = float_textures.get(&alpha_tex_name) { Arc::clone(&tex) } else { warn!( "Couldn't find float texture '{}' for 'alpha' parameter", alpha_tex_name ); let alpha = params.find_one_float("alpha", 1.0); Arc::new(ConstantTexture::new(alpha)) } } else { let alpha = params.find_one_float("alpha", 1.0); Arc::new(ConstantTexture::new(alpha)) }; let shadow_alpha_tex_name = params.find_one_texture("shadowalpha", String::from("")); let shadow_alpha_tex = if shadow_alpha_tex_name.len() > 0 { if let Some(tex) = float_textures.get(&shadow_alpha_tex_name) { Arc::clone(tex) } else { warn!( "Couldn't find float texture '{}' for 'shadowalpha' parameter. Using float 'shadowalpha' parameterer instead.", alpha_tex_name ); let alpha = params.find_one_float("shadowalpha", 1.0); Arc::new(ConstantTexture::new(alpha)) } } else { let alpha = params.find_one_float("shadowalpha", 1.0); Arc::new(ConstantTexture::new(alpha)) }; let file = File::open(&path).expect(format!("Unable to open PLY file '{}'", path).as_ref()); let mut reader = BufReader::new(file); let parser = Parser::<DefaultElement>::new(); let ply = match parser.read_ply(&mut reader) { Ok(p) => p, Err(e) => panic!("Unable to parse PLY file '{}'. 
{}.", path, e), }; let mut points: Vec<Point3f> = vec![]; let mut normals: Vec<Normal3f> = vec![]; let mut uvs: Vec<Point2f> = vec![]; let mut has_normals = true; let mut has_uvs = true; let mut vertex_indices: Vec<usize> = vec![]; let mut face_count = 0; for (name, list) in ply.payload.iter() { match name.as_ref() { "vertex" => { for elem in list.iter() { let vertex = Self::parse_vertex(elem); points.push(vertex.point); has_normals = has_normals && vertex.has_normal; if has_normals { normals.push(vertex.normal); } has_uvs = has_uvs && vertex.has_uv; if has_uvs { uvs.push(vertex.uv); } } } "face" => { for elem in list.iter() { Self::parse_face(elem, &mut vertex_indices); face_count += 1; } } s => warn!("Ignoring unexpected element '{}' in '{}'", s, path), } } if points.len() == 0 || face_count == 0 { error!( "PLY file '{}' is invalid! No face/vertex elements found!", path ); return vec![]; } if !has_normals { normals = vec![]; } if !has_uvs { uvs = vec![]; } TriangleMesh::create( Arc::clone(&o2w), Arc::clone(&w2o), reverse_orientation, vertex_indices.len() / 3, vertex_indices, points, normals, vec![], uvs, Some(alpha_tex), Some(shadow_alpha_tex), vec![], ) } fn parse_vertex(elem: &KeyMap<Property>) -> Vertex { let mut p = Point3f::default(); let mut n = Normal3f::default(); let mut uv = Point2f::default(); let mut nc = 0; let mut uvc = 0; for (name, value) in elem.iter() { if let Property::Float(v) = value { match name.as_ref() { "x" => { p.x = *v; } "y" => { p.y = *v; } "z" => { p.z = *v; } "nx" => { n.x = *v; nc += 1; } "ny" => { n.y = *v; nc += 1; } "nz" => { n.z = *v; nc += 1; } "u" | "s" | "texture_u" | "texture_s" => { uv.x = *v; uvc += 1; } "v" | "t" | "texture_v" | "texture_t" => { uv.y = *v; uvc += 1; } s => debug!("Ignoring unexpected vertex element '{}'", s), } } else { debug!("Ignoring unexpected vertex property type"); } } Vertex::new(p, n, uv, nc == 3, uvc == 2) } fn parse_face(elem: &KeyMap<Property>, vertex_indices: &mut Vec<usize>) { for (name, 
value) in elem.iter() { match name.as_ref() { "vertex_indices" => { if let Property::ListInt(vi) = value { if vi.len() != 3 && vi.len() != 4 { panic!("Only triangles and quads are supported!"); } if vi.len() >= 3 { vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[1] as usize); vertex_indices.push(vi[2] as usize); } if vi.len() == 4 { vertex_indices.push(vi[3] as usize); vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[2] as usize); } } else if let Property::ListUInt(vi) = value { if vi.len() != 3 && vi.len() != 4 { panic!("Only triangles and quads are supported!"); } if vi.len() >= 3 { vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[1] as usize); vertex_indices.push(vi[2] as usize); } if vi.len() == 4 { vertex_indices.push(vi[3] as usize); vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[2] as usize); } } else { debug!("Ignoring unexpected face property type"); } } s => debug!("Ignoring unexpected face element '{}'", s), } } } } struct Vertex { point: Point3f, normal: Normal3f, uv: Point2f, has_normal: bool, has_uv: bool, } impl Vertex { fn new(point: Point3f, normal: Normal3f, uv: Point2f, has_normal: bool, has_uv: bool) -> Self { Self { point, normal, uv, has_normal, has_uv, } } }
#![allow(dead_code)] use super::TriangleMesh; use core::geometry::*; use core::paramset::*; use core::texture::FloatTextureMap; use ply_rs::parser::Parser; use ply_rs::ply::*; use std::fs::File; use std::io::BufReader; use std::sync::Arc; use textures::ConstantTexture; pub struct PLYMesh; impl PLYMesh { pub fn from_props( p: ( &ParamSet, ArcTransform, ArcTransform, bool, &FloatTextureMap, ), ) -> Vec<ArcShape> { let (params, o2w, w2o, reverse_orientation, float_textures) = p; let path = params.find_one_filename("filename", String::from("")); assert!(path.len() > 0, "PLY filename not provied"); let alpha_tex_name = params.find_one_texture("alpha", String::from("")); let alpha_tex = if alpha_tex_name.len() > 0 { if let Some(tex) = float_textures.get(&alpha_tex_name) { Arc::clone(&tex) } else { warn!( "Couldn't find float texture '{}' for 'alpha' parameter", alpha_tex_name ); let alpha = params.find_one_float("alpha", 1.0); Arc::new(ConstantTexture::new(alpha)) } } else { let alpha = params.find_one_float("alpha", 1.0); Arc::new(ConstantTexture::new(alpha)) }; let shadow_alpha_tex_name = params.find_one_texture("shadowalpha", String::from("")); let shadow_alpha_tex = if shadow_alpha_tex_name.len() > 0 { if let Some(tex) = float_textures.get(&shadow_alpha_tex_name) { Arc::clone(tex) } else { warn!( "Couldn't find float texture '{}' for 'shadowalpha' parameter. Using float 'shadowalpha' parameterer instead.", alpha_tex_name ); let alpha = params.find_one_float("shadowalpha", 1.0); Arc::new(ConstantTexture::new(alpha)) } } else { let alpha = params.find_one_float("shadowalpha", 1.0); Arc::new(ConstantTexture::new(alpha)) }; let file = File::open(&path).expect(format!("Unable to open PLY file '{}'", path).as_ref()); let mut reader = BufReader::new(file); let parser = Parser::<DefaultElement>::new(); let ply = match parser.read_ply(&mut reader) { Ok(p) => p, Err(e) => panic!("Unable to parse PLY file '{}'. 
{}.", path, e), }; let mut points: Vec<Point3f> = vec![]; let mut normals: Vec<Normal3f> = vec![]; let mut uvs: Vec<Point2f> = vec![]; let mut has_normals = true; let mut has_uvs = true; let mut vertex_indices: Vec<usize> = vec![]; let mut face_count = 0; for (name, list) in ply.payload.iter() { match name.as_ref() { "vertex" => { for elem in list.iter() { let vertex = Self::parse_vertex(elem); points.push(vertex.point); has_normals = has_normals && vertex.has_normal; if has_normals { normals.push(vertex.normal); } has_uvs = has_uvs && vertex.has_uv; if has_uvs { uvs.push(vertex.uv); } } } "face" => { for elem in list.iter() { Self::parse_face(elem, &mut vertex_indices); face_count += 1; } } s => warn!("Ignoring unexpected element '{}' in '{}'", s, path), } } if points.len() == 0 || face_count == 0 { error!( "PLY file '{}' is invalid! No face/vertex elements found!", path ); return vec![]; } if !has_normals { normals = vec![]; } if !has_uvs { uvs = vec![]; } TriangleMesh::create( Arc::clone(&o2w), Arc::clone(&w2o), reverse_orientation, vertex_indices.len() / 3, vertex_indices, points, normals, vec![], uvs, Some(alpha_tex), Some(shadow_alpha_tex), vec![], ) } fn parse_vertex(elem: &KeyMap<Property>) -> Vertex { let mut p = Point3f::default(); let mut n = Normal3f::default(); let mut uv = Point2f::default(); let mut nc = 0; let mut uvc = 0; for (name, value) in elem.iter() { if let Property::Float(v) = value { match name.as_ref() { "x" => { p.x = *v; } "y" => { p.y = *v; } "z" => { p.z = *v; } "nx" => { n.x = *v; nc += 1; } "ny" => { n.y = *v; nc += 1; } "nz" => { n.z = *v; nc += 1; } "u" | "s" | "texture_u" | "texture_s" => { uv.x = *v; uvc += 1; } "v" | "t" | "texture_v" | "texture_t" => { uv.y = *v; uvc += 1; } s => debug!("Ignoring unexpected vertex element '{}'", s), } } else { debug!("Ignoring unexpected vertex property type"); } } Vertex::new(p, n, uv, nc == 3, uvc == 2) } fn parse_face(elem: &KeyMap<Property>, vertex_indices: &mut Vec<usize>) { for (name, 
value) in elem.iter() { match name.as_ref() { "vertex_indices" => { if let Property::ListInt(vi) = value { if vi.len() != 3 && vi.len() != 4 { panic!("Only triangles and quads are supported!"); } if vi.len() >= 3 { vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[1] as usize); vertex_indices.push(vi[2] as usize); } if vi.len() == 4 { vertex_indices.push(vi[3] as usize); vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[2] as usize); } }
} struct Vertex { point: Point3f, normal: Normal3f, uv: Point2f, has_normal: bool, has_uv: bool, } impl Vertex { fn new(point: Point3f, normal: Normal3f, uv: Point2f, has_normal: bool, has_uv: bool) -> Self { Self { point, normal, uv, has_normal, has_uv, } } }
else if let Property::ListUInt(vi) = value { if vi.len() != 3 && vi.len() != 4 { panic!("Only triangles and quads are supported!"); } if vi.len() >= 3 { vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[1] as usize); vertex_indices.push(vi[2] as usize); } if vi.len() == 4 { vertex_indices.push(vi[3] as usize); vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[2] as usize); } } else { debug!("Ignoring unexpected face property type"); } } s => debug!("Ignoring unexpected face element '{}'", s), } } }
function_block-function_prefix_line
[]
Rust
examples/custom_router/src/router/mod.rs
arn-the-long-beard/old_seed_archive
9aed8e64ab6ee5a2a6e9fd650eefb752fcb9144c
mod model; mod path; mod url; mod view; use seed::Url; use std::fmt::Debug; pub use {model::*, path::*, path::*, url::*, url::*, view::*}; use seed::{*, *}; struct_urls!(); impl<'a> Urls<'a> { pub fn build_url(self, segments: Vec<&str>) -> Url { self.base_url().set_path(segments) } } pub enum Move { IsNavigating, IsMovingBack, IsMovingForward, IsReady, } pub struct Router<Routes: Debug + PartialEq + ParsePath + Clone + Default + Navigation> { pub current_route: Option<Routes>, pub current_history_index: usize, pub default_route: Routes, base_url: Url, pub current_move: Move, history: Vec<Routes>, } impl<Routes: Debug + PartialEq + Default + ParsePath + Clone + Navigation> Default for Router<Routes> { fn default() -> Self { Router { current_history_index: 0, default_route: Routes::default(), history: Vec::new(), current_route: None, base_url: Url::new(), current_move: Move::IsReady, } } } impl<Routes: Debug + PartialEq + ParsePath + Default + Clone + Navigation> Router<Routes> { pub fn new() -> Router<Routes> { Router::default() } pub fn set_base_url(&mut self, url: Url) -> &mut Self { self.base_url = url; self } pub fn init_url_and_navigation(&mut self, url: Url) -> &mut Self { self.set_base_url(url.to_base_url()); self.navigate_to_url(url); self } fn push_to_history(&mut self, route: Routes) { self.history.push(route); self.current_history_index = self.history.len() - 1; } fn back(&mut self) -> bool { if let Some(next_route) = self.can_back_with_route() { self.current_route = Routes::parse_path(next_route.as_path().as_str()).ok(); self.current_history_index -= 1; true } else { false } } pub fn can_back_with_route(&self) -> Option<Routes> { if self.history.is_empty() { return None; } if self.current_history_index == 0 { return None; } let next_index = &self.current_history_index - 1; let route = self.history.get(next_index).unwrap(); Some(route.clone()) } pub fn can_back(&self) -> bool { self.can_back_with_route().is_some() } pub fn can_forward(&self) -> bool { 
self.can_forward_with_route().is_some() } pub fn can_forward_with_route(&self) -> Option<Routes> { if self.history.is_empty() { return None; } if self.current_history_index == self.history.len() - 1 { return None; } let next_index = &self.current_history_index + 1; let route = self.history.get(next_index).unwrap_or_else(|| { panic!( "We should have get route but index is failed {}", next_index ) }); Some(route.clone()) } fn forward(&mut self) -> bool { if let Some(next_route) = &self.can_forward_with_route() { let path: String = next_route.clone().as_path().to_string(); self.current_route = Routes::parse_path(&path).ok(); self.current_history_index += 1; true } else { false } } pub fn is_current_route(&self, route: &Routes) -> bool { if let Some(current_route) = &self.current_route { route.eq(&current_route) } else { false } } fn reload_without_cache() {} pub fn navigate_to_new(&mut self, route: &Routes) { self.current_route = Some(route.clone()); self.push_to_history(route.clone()); } fn navigate_to_url(&mut self, url: Url) { let path = &mut url.to_string(); path.remove(0); if let Ok(route_match) = Routes::parse_path(path) { self.navigate_to_new(&route_match); } else { self.navigate_to_new(&self.default_route.clone()); } } pub fn request_moving_back<F: FnOnce(Url) -> R, R>(&mut self, func: F) { self.current_move = Move::IsMovingBack; if let Some(next_route) = &self.can_back_with_route() { func(next_route.to_url()); } } pub fn request_moving_forward<F: FnOnce(Url) -> R, R>(&mut self, func: F) { self.current_move = Move::IsMovingForward; if let Some(next_route) = &self.can_forward_with_route() { func(next_route.to_url()); } } pub fn base_url(&self) -> &Url { &self.base_url } pub fn confirm_navigation(&mut self, url: Url) { match self.current_move { Move::IsNavigating => { self.navigate_to_url(url); } Move::IsMovingBack => { self.back(); } Move::IsMovingForward => { self.forward(); } Move::IsReady => { self.navigate_to_url(url); } } self.current_move = Move::IsReady; 
} } #[cfg(test)] mod test { use seed::{prelude::IndexMap, Url}; extern crate router_macro_derive; use super::*; use crate::router; use router::*; use router_macro_derive::*; use wasm_bindgen_test::*; wasm_bindgen_test_configure!(run_in_browser); #[derive(Debug, PartialEq, Copy, Clone, AsUrl)] pub enum DashboardAdminRoutes { Other, #[as_path = ""] Root, } #[derive(Debug, PartialEq, Clone, AsUrl)] pub enum DashboardRoutes { Admin(DashboardAdminRoutes), Profile(u32), #[as_path = ""] Root, } #[derive(Debug, PartialEq, Clone, AsUrl, Root)] enum ExampleRoutes { Login, Register, Stuff, Dashboard(DashboardRoutes), #[default_route] NotFound, #[as_path = ""] Home, } #[wasm_bindgen_test] fn test_router_default_route() { let mut router = Router::<ExampleRoutes>::new(); let url = Url::new().add_path_part("example"); router.navigate_to_url(url); assert_eq!(router.current_route.unwrap(), router.default_route); } #[wasm_bindgen_test] fn test_build_url() { let mut router: Router<ExampleRoutes> = Router::new(); let url = router.base_url().clone().add_path_part(""); router.navigate_to_url(url); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("").unwrap() ); let admin_url = router .base_url() .clone() .set_path("dashboard/admin/other".split("/")); router.navigate_to_url(admin_url); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("/dashboard/admin/other").unwrap() ); let admin_url = router .base_url() .clone() .set_path("dashboard/profile/1".split("/")); router.navigate_to_url(admin_url); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("/dashboard/profile/1").unwrap() ); } #[wasm_bindgen_test] fn test_navigation_to_route() { let mut router: Router<ExampleRoutes> = Router::new(); router.navigate_to_new(&ExampleRoutes::parse_path("/dashboard/profile/1").unwrap()); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Dashboard(DashboardRoutes::Profile(1)) ); 
assert_eq!(router.current_history_index, 0); router.navigate_to_new(&ExampleRoutes::parse_path("/dashboard/profile/55").unwrap()); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Dashboard(DashboardRoutes::Profile(55)) ); assert_eq!(router.current_history_index, 1); router.navigate_to_new(&ExampleRoutes::Home); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("").unwrap() ); assert_eq!(router.current_history_index, 2); } #[wasm_bindgen_test] fn test_backward() { let mut router: Router<ExampleRoutes> = Router::new(); let back = router.back(); assert_eq!(back, false, "We should Not have gone backwards"); assert_eq!( router.current_history_index, 0, "We should have current index 0" ); assert_eq!( router.current_route.is_none(), true, "We should not have current rou" ); router.navigate_to_new(&ExampleRoutes::parse_path("").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("register").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("dashboard/admin/other").unwrap()); assert_eq!(router.current_history_index, 2); let back = router.back(); assert_eq!(back, true, "We should have gone backwards"); assert_eq!(router.current_history_index, 1); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Register ); assert_eq!(router.is_current_route(&ExampleRoutes::Register), true); let back = router.back(); assert_eq!(back, true, "We should have gone backwards"); assert_eq!(router.current_history_index, 0); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("").unwrap() ); router.navigate_to_new(&ExampleRoutes::Dashboard(DashboardRoutes::Root)); assert_eq!( router.is_current_route(&ExampleRoutes::parse_path("dashboard/").unwrap()), true ); let back = router.back(); assert_eq!(back, true); assert_eq!(router.current_history_index, 2); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("dashboard/admin/other").unwrap() ); } #[wasm_bindgen_test] fn 
test_forward() { let mut router: Router<ExampleRoutes> = Router::new(); let back = router.forward(); assert_eq!(back, false, "We should Not have gone backwards"); assert_eq!( router.current_history_index, 0, "We should have current index 0" ); assert_eq!( router.current_route.is_none(), true, "We should not have current rou" ); router.navigate_to_new(&ExampleRoutes::parse_path("").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("register").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("/dashboard/profile/55").unwrap()); assert_eq!(router.current_history_index, 2); let back = router.back(); let back = router.back(); let forward = router.forward(); assert_eq!(forward, true, "We should have gone forward"); assert_eq!(router.current_history_index, 1); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Register ); let forward = router.forward(); assert_eq!(forward, true, "We should have gone forward"); assert_eq!(router.current_history_index, 2); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Dashboard(DashboardRoutes::Profile(55)) ); let forward = router.forward(); assert_eq!(forward, false, "We should Not have gone forward"); } }
mod model; mod path; mod url; mod view; use seed::Url; use std::fmt::Debug; pub use {model::*, path::*, path::*, url::*, url::*, view::*}; use seed::{*, *}; struct_urls!(); impl<'a> Urls<'a> { pub fn build_url(self, segments: Vec<&str>) -> Url { self.base_url().set_path(segments) } } pub enum Move { IsNavigating, IsMovingBack, IsMovingForward, IsReady, } pub struct Router<Routes: Debug + PartialEq + ParsePath + Clone + Default + Navigation> { pub current_route: Option<Routes>, pub current_history_index: usize, pub default_route: Routes, base_url: Url, pub current_move: Move, history: Vec<Routes>, } impl<Routes: Debug + PartialEq + Default + ParsePath + Clone + Navigation> Default for Router<Routes> { fn default() -> Self { Router { current_history_index: 0, default_route: Routes::default(), history: Vec::new(), current_route: None, base_url: Url::new(), current_move: Move::IsReady, } } } impl<Routes: Debug + PartialEq + ParsePath + Default + Clone + Navigation> Router<Routes> { pub fn new() -> Router<Routes> { Router::default() } pub fn set_base_url(&mut self, url: Url) -> &mut Self { self.base_url = url; self } pub fn init_url_and_navigation(&mut self, url: Url) -> &mut Self { self.set_base_url(url.to_base_url()); self.navigate_to_url(url); self } fn push_to_history(&mut self, route: Routes) { self.history.push(route); self.current_history_index = self.history.len() - 1; } fn back(&mut self) -> bool { if let Some(next_route) = self.can_back_with_route() { self.current_route = Routes::parse_path(next_route.as_path().as_str()).ok(); self.current_history_index -= 1; true } else { false } } pub fn can_back_with_route(&self) -> Option<Routes> { if self.history.is_empty() { return None; } if self.current_history_index == 0 { return None; } let next_index = &self.current_history_index - 1; let route = self.history.get(next_index).unwrap(); Some(route.clone()) } pub fn can_back(&self) -> bool { self.can_back_with_route().is_some() } pub fn can_forward(&self) -> bool { 
self.can_forward_with_route().is_some() } pub fn can_forward_with_route(&self) -> Option<Routes> { if self.history.is_empty() { return None; } if self.current_history_index == self.history.len() - 1 { return None; } let next_index = &self.current_history_index + 1; let route = self.history.get(next_index).unwrap_or_else(|| { panic!( "We should have get route but index is failed {}", next_index ) }); Some(route.clone()) } fn forward(&mut self) -> bool { if let Some(next_route) = &self.can_forward_with_route() { let path: String = next_route.clone().as_path().to_string(); self.current_route = Routes::parse_path(&path).ok(); self.current_history_index += 1; true } else { false } }
fn reload_without_cache() {} pub fn navigate_to_new(&mut self, route: &Routes) { self.current_route = Some(route.clone()); self.push_to_history(route.clone()); } fn navigate_to_url(&mut self, url: Url) { let path = &mut url.to_string(); path.remove(0); if let Ok(route_match) = Routes::parse_path(path) { self.navigate_to_new(&route_match); } else { self.navigate_to_new(&self.default_route.clone()); } } pub fn request_moving_back<F: FnOnce(Url) -> R, R>(&mut self, func: F) { self.current_move = Move::IsMovingBack; if let Some(next_route) = &self.can_back_with_route() { func(next_route.to_url()); } } pub fn request_moving_forward<F: FnOnce(Url) -> R, R>(&mut self, func: F) { self.current_move = Move::IsMovingForward; if let Some(next_route) = &self.can_forward_with_route() { func(next_route.to_url()); } } pub fn base_url(&self) -> &Url { &self.base_url } pub fn confirm_navigation(&mut self, url: Url) { match self.current_move { Move::IsNavigating => { self.navigate_to_url(url); } Move::IsMovingBack => { self.back(); } Move::IsMovingForward => { self.forward(); } Move::IsReady => { self.navigate_to_url(url); } } self.current_move = Move::IsReady; } } #[cfg(test)] mod test { use seed::{prelude::IndexMap, Url}; extern crate router_macro_derive; use super::*; use crate::router; use router::*; use router_macro_derive::*; use wasm_bindgen_test::*; wasm_bindgen_test_configure!(run_in_browser); #[derive(Debug, PartialEq, Copy, Clone, AsUrl)] pub enum DashboardAdminRoutes { Other, #[as_path = ""] Root, } #[derive(Debug, PartialEq, Clone, AsUrl)] pub enum DashboardRoutes { Admin(DashboardAdminRoutes), Profile(u32), #[as_path = ""] Root, } #[derive(Debug, PartialEq, Clone, AsUrl, Root)] enum ExampleRoutes { Login, Register, Stuff, Dashboard(DashboardRoutes), #[default_route] NotFound, #[as_path = ""] Home, } #[wasm_bindgen_test] fn test_router_default_route() { let mut router = Router::<ExampleRoutes>::new(); let url = Url::new().add_path_part("example"); 
router.navigate_to_url(url); assert_eq!(router.current_route.unwrap(), router.default_route); } #[wasm_bindgen_test] fn test_build_url() { let mut router: Router<ExampleRoutes> = Router::new(); let url = router.base_url().clone().add_path_part(""); router.navigate_to_url(url); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("").unwrap() ); let admin_url = router .base_url() .clone() .set_path("dashboard/admin/other".split("/")); router.navigate_to_url(admin_url); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("/dashboard/admin/other").unwrap() ); let admin_url = router .base_url() .clone() .set_path("dashboard/profile/1".split("/")); router.navigate_to_url(admin_url); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("/dashboard/profile/1").unwrap() ); } #[wasm_bindgen_test] fn test_navigation_to_route() { let mut router: Router<ExampleRoutes> = Router::new(); router.navigate_to_new(&ExampleRoutes::parse_path("/dashboard/profile/1").unwrap()); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Dashboard(DashboardRoutes::Profile(1)) ); assert_eq!(router.current_history_index, 0); router.navigate_to_new(&ExampleRoutes::parse_path("/dashboard/profile/55").unwrap()); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Dashboard(DashboardRoutes::Profile(55)) ); assert_eq!(router.current_history_index, 1); router.navigate_to_new(&ExampleRoutes::Home); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("").unwrap() ); assert_eq!(router.current_history_index, 2); } #[wasm_bindgen_test] fn test_backward() { let mut router: Router<ExampleRoutes> = Router::new(); let back = router.back(); assert_eq!(back, false, "We should Not have gone backwards"); assert_eq!( router.current_history_index, 0, "We should have current index 0" ); assert_eq!( router.current_route.is_none(), true, "We should not have current rou" ); 
router.navigate_to_new(&ExampleRoutes::parse_path("").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("register").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("dashboard/admin/other").unwrap()); assert_eq!(router.current_history_index, 2); let back = router.back(); assert_eq!(back, true, "We should have gone backwards"); assert_eq!(router.current_history_index, 1); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Register ); assert_eq!(router.is_current_route(&ExampleRoutes::Register), true); let back = router.back(); assert_eq!(back, true, "We should have gone backwards"); assert_eq!(router.current_history_index, 0); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("").unwrap() ); router.navigate_to_new(&ExampleRoutes::Dashboard(DashboardRoutes::Root)); assert_eq!( router.is_current_route(&ExampleRoutes::parse_path("dashboard/").unwrap()), true ); let back = router.back(); assert_eq!(back, true); assert_eq!(router.current_history_index, 2); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("dashboard/admin/other").unwrap() ); } #[wasm_bindgen_test] fn test_forward() { let mut router: Router<ExampleRoutes> = Router::new(); let back = router.forward(); assert_eq!(back, false, "We should Not have gone backwards"); assert_eq!( router.current_history_index, 0, "We should have current index 0" ); assert_eq!( router.current_route.is_none(), true, "We should not have current rou" ); router.navigate_to_new(&ExampleRoutes::parse_path("").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("register").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("/dashboard/profile/55").unwrap()); assert_eq!(router.current_history_index, 2); let back = router.back(); let back = router.back(); let forward = router.forward(); assert_eq!(forward, true, "We should have gone forward"); assert_eq!(router.current_history_index, 1); assert_eq!( 
router.current_route.clone().unwrap(), ExampleRoutes::Register ); let forward = router.forward(); assert_eq!(forward, true, "We should have gone forward"); assert_eq!(router.current_history_index, 2); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Dashboard(DashboardRoutes::Profile(55)) ); let forward = router.forward(); assert_eq!(forward, false, "We should Not have gone forward"); } }
pub fn is_current_route(&self, route: &Routes) -> bool { if let Some(current_route) = &self.current_route { route.eq(&current_route) } else { false } }
function_block-full_function
[ { "content": "pub fn init(url: Url, model: &mut Model, id: &String, orders: &mut impl Orders<Msg>) -> Model {\n\n Model {}\n\n}\n\n\n\npub struct Model {}\n\npub enum Msg {}\n", "file_path": "examples/custom_router/tests/routing_module/pages/profile.rs", "rank": 0, "score": 480232.6109669417 },...
Rust
src/ui/app.rs
xfbs/afp
0bd950504f24e2c762029b83f1c5142a6973664c
extern crate gio; extern crate gtk; use crate::ui::*; use gio::prelude::*; use gtk::prelude::*; use std::cell::RefCell; use std::env; use std::rc::Rc; const STYLE: &'static str = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/data/style.css")); #[derive(Clone)] pub struct App { app: gtk::Application, window: Rc<RefCell<Option<gtk::ApplicationWindow>>>, main: MainController, } impl App { pub fn new(name: &str) -> App { App { app: gtk::Application::new(name, gio::ApplicationFlags::FLAGS_NONE) .expect("application startup failed"), window: Rc::new(RefCell::new(None)), main: MainController::new(), } } fn startup(&self) { self.setup_accels(); self.load_css(); self.main.startup(); } fn setup_accels(&self) { self.app.set_accels_for_action("app.quit", &["<Primary>Q"]); } fn load_css(&self) { let provider = gtk::CssProvider::new(); provider .load_from_data(STYLE.as_bytes()) .expect("Failed to load CSS"); gtk::StyleContext::add_provider_for_screen( &gdk::Screen::get_default().expect("Error initializing gtk css provider."), &provider, gtk::STYLE_PROVIDER_PRIORITY_APPLICATION, ); } fn shutdown(&self) { self.main.shutdown(); } fn activate(&self) { let window = gtk::ApplicationWindow::new(&self.app); *self.window.borrow_mut() = Some(window.clone()); self.main.activate(); self.setup_menu(); self.setup_actions(); self.main.add_window(&window); } fn setup_menu(&self) { let menu = gio::Menu::new(); let menu_bar = gio::Menu::new(); menu.append("About", "app.about"); menu.append("Quit", "app.quit"); self.app.set_app_menu(&menu); self.app.set_menubar(&menu_bar); } fn setup_actions(&self) { let quit = gio::SimpleAction::new("quit", None); let app = self.clone(); quit.connect_activate(move |_, _| { if let Some(window) = app.window.borrow().clone() { window.destroy(); } }); let about = gio::SimpleAction::new("about", None); let app = self.clone(); about.connect_activate(move |_, _| { let dialog = gtk::AboutDialog::new(); dialog.set_authors(&[env!("CARGO_PKG_AUTHORS")]); 
dialog.set_website_label(Some("Webseite")); dialog.set_website(Some(env!("CARGO_PKG_REPOSITORY"))); dialog.set_license_type(gtk::License::MitX11); dialog.set_program_name("Amateurfunkprüfer"); dialog.set_version(env!("CARGO_PKG_VERSION")); dialog.set_comments(env!("CARGO_PKG_DESCRIPTION")); dialog.set_title("Über Amateurfunkprüfer"); if let Some(window) = app.window.borrow().as_ref() { dialog.set_transient_for(Some(window)); } dialog.run(); dialog.destroy(); }); self.app.add_action(&quit); self.app.add_action(&about); } pub fn init(&self) { let app = self.clone(); self.app.connect_startup(move |_| app.startup()); let app = self.clone(); self.app.connect_shutdown(move |_| { app.shutdown(); }); let app = self.clone(); self.app.connect_activate(move |_| app.activate()); } pub fn run(&self) { self.app.run(&env::args().collect::<Vec<_>>()); } }
extern crate gio; extern crate gtk; use crate::ui::*; use gio::prelude::*; use gtk::prelude::*; use std::cell::RefCell; use std::env; use std::rc::Rc; const STYLE: &'static str = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/data/style.css")); #[derive(Clone)] pub struct App { app: gtk::Application, window: Rc<RefCell<Option<gtk::ApplicationWindow>>>, main: MainController, } impl App { pub fn new(name: &str) -> App { App { app: gtk::Application::new(name, gio::ApplicationFlags::FLAGS_NONE) .expect("application startup failed"), window: Rc::new(RefCell::new(None)), main: MainController::new(), } } fn startup(&self) { self.setup_accels(); self.load_css(); self.main.startup(); } fn setup_accels(&self) { self.app.set_accels_for_action("app.quit", &["<Primary>Q"]); } fn load_css(&self) { let provider = gtk::CssProvider::new(); provider .load_from_data(STYLE.as_bytes()) .expect("Failed to load CSS"); gtk::StyleContext::add_provider_for_screen( &gdk::Screen::get_default().expect("Error initializing gtk css provider."), &provider, gtk::STYLE_PROVIDER_PRIORITY_APPLICATION, ); } fn shutdown(&self) { self.main.shutdown(); } fn activate(&self) { let window = gtk::ApplicationWindow::new(&self.app); *self.window.borrow_mut() = Some(window.clone()); self.main.activate(); self.setup_menu(); self.setup_actions(); self.main.add_window(&window); } fn setup_menu(&self) { let menu = gio::Menu::new(); let menu_bar = gio::Menu::new(); menu.append("About", "app.about"); menu.append("Quit", "app.quit"); self.app.set_app_menu(&menu); self.app.set_menubar(&menu_bar); } fn setup_actions(&self) { let quit = gio::SimpleAction::new("quit", None)
if let Some(window) = app.window.borrow().as_ref() { dialog.set_transient_for(Some(window)); } dialog.run(); dialog.destroy(); }); self.app.add_action(&quit); self.app.add_action(&about); } pub fn init(&self) { let app = self.clone(); self.app.connect_startup(move |_| app.startup()); let app = self.clone(); self.app.connect_shutdown(move |_| { app.shutdown(); }); let app = self.clone(); self.app.connect_activate(move |_| app.activate()); } pub fn run(&self) { self.app.run(&env::args().collect::<Vec<_>>()); } }
; let app = self.clone(); quit.connect_activate(move |_, _| { if let Some(window) = app.window.borrow().clone() { window.destroy(); } }); let about = gio::SimpleAction::new("about", None); let app = self.clone(); about.connect_activate(move |_, _| { let dialog = gtk::AboutDialog::new(); dialog.set_authors(&[env!("CARGO_PKG_AUTHORS")]); dialog.set_website_label(Some("Webseite")); dialog.set_website(Some(env!("CARGO_PKG_REPOSITORY"))); dialog.set_license_type(gtk::License::MitX11); dialog.set_program_name("Amateurfunkprüfer"); dialog.set_version(env!("CARGO_PKG_VERSION")); dialog.set_comments(env!("CARGO_PKG_DESCRIPTION")); dialog.set_title("Über Amateurfunkprüfer");
random
[ { "content": "#[test]\n\nfn test_load_file() {\n\n let mut d = std::path::PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n d.push(\"test/datastore.yaml\");\n\n\n\n let ds = DataStore::load(&d);\n\n assert!(ds.is_ok());\n\n let ds = ds.ok().unwrap();\n\n assert_eq!(&ds.filename, &d);\n\n}\n\n\n...
Rust
hphp/hack/src/rupro/lib/shallow_decl_provider/provider.rs
ianhoffman/hhvm
decc4e479e0e689c65f936f0828cb761d34075b1
use std::rc::Rc; use std::{fs, io}; use bumpalo::Bump; use crate::decl_defs::{ShallowClass, ShallowFun, ShallowMethod}; use crate::decl_ty_provider::DeclTyProvider; use crate::pos::{RelativePath, RelativePathCtx, Symbol}; use crate::reason::Reason; use crate::shallow_decl_provider::ShallowDeclCache; #[derive(Debug)] pub struct ShallowDeclProvider<R: Reason> { cache: Rc<dyn ShallowDeclCache<R = R>>, decl_ty_provider: Rc<DeclTyProvider<R>>, relative_path_ctx: Rc<RelativePathCtx>, } impl<R: Reason> ShallowDeclProvider<R> { pub fn new( cache: Rc<dyn ShallowDeclCache<R = R>>, decl_ty_provider: Rc<DeclTyProvider<R>>, relative_path_ctx: Rc<RelativePathCtx>, ) -> Self { Self { cache, decl_ty_provider, relative_path_ctx, } } pub fn get_decl_ty_provider(&self) -> &Rc<DeclTyProvider<R>> { &self.decl_ty_provider } pub fn get_shallow_class(&self, name: &Symbol) -> Option<Rc<ShallowClass<R>>> { self.cache.get_shallow_class(name) } pub fn add_from_oxidized_class(&self, sc: &oxidized_by_ref::shallow_decl_defs::ClassDecl<'_>) { let res = Rc::new(self.utils().mk_shallow_class(sc)); self.cache.put_shallow_class(res.sc_name.id().clone(), res); } pub fn add_from_oxidized_fun( &self, name: &str, sf: &oxidized_by_ref::shallow_decl_defs::FunDecl<'_>, ) { let res = Rc::new(self.utils().mk_shallow_fun(sf)); let name = self.decl_ty_provider.get_pos_provider().mk_symbol(name); self.cache.put_shallow_fun(name, res); } pub fn add_from_oxidized_decls(&self, decls: &oxidized_by_ref::direct_decl_parser::Decls<'_>) { for (name, decl) in decls.iter() { use oxidized_by_ref::direct_decl_parser::Decl::*; match decl { Class(sc) => drop(self.add_from_oxidized_class(sc)), Fun(sf) => drop(self.add_from_oxidized_fun(name, sf)), decl => unimplemented!("new_local_with_decls: {:?}", decl), } } } pub fn add_from_files( &self, filenames: &mut dyn Iterator<Item = &RelativePath>, ) -> io::Result<()> { for rel_fln in filenames { let arena = Bump::new(); let fln = rel_fln.to_absolute(&self.relative_path_ctx); let 
text = arena.alloc_slice_clone(fs::read_to_string(&fln)?.as_bytes()); let rel_path = oxidized::relative_path::RelativePath::make( oxidized::relative_path::Prefix::Dummy, fln, ); let parsed_file = stack_limit::with_elastic_stack(|stack_limit| { direct_decl_parser::parse_decls( oxidized_by_ref::decl_parser_options::DeclParserOptions::DEFAULT, rel_path.clone(), text, &arena, Some(stack_limit), ) }) .unwrap_or_else(|failure| { panic!( "Rust decl parser FFI exceeded maximum allowed stack of {} KiB", failure.max_stack_size_tried / stack_limit::KI ); }); self.add_from_oxidized_decls(&parsed_file.decls); } Ok(()) } fn utils(&self) -> ShallowDeclUtils<R> { ShallowDeclUtils::new(self.decl_ty_provider.clone()) } } struct ShallowDeclUtils<R: Reason> { decl_ty_provider: Rc<DeclTyProvider<R>>, } impl<R: Reason> ShallowDeclUtils<R> { fn new(decl_ty_provider: Rc<DeclTyProvider<R>>) -> Self { Self { decl_ty_provider } } fn mk_shallow_method( &self, sm: &oxidized_by_ref::shallow_decl_defs::ShallowMethod<'_>, ) -> ShallowMethod<R> { let decl_tys = &self.decl_ty_provider; ShallowMethod { sm_name: decl_tys.get_pos_provider().mk_pos_id_of_ref::<R>(sm.name), sm_type: decl_tys.mk_decl_ty_from_parsed(sm.type_), } } fn mk_shallow_class( &self, sc: &oxidized_by_ref::shallow_decl_defs::ClassDecl<'_>, ) -> ShallowClass<R> { let decl_tys = &self.decl_ty_provider; ShallowClass { sc_name: decl_tys.get_pos_provider().mk_pos_id_of_ref::<R>(sc.name), sc_extends: sc .extends .iter() .map(|ty| decl_tys.mk_decl_ty_from_parsed(ty)) .collect(), sc_methods: sc .methods .iter() .map(|sm| self.mk_shallow_method(sm)) .collect(), } } fn mk_shallow_fun( &self, sf: &oxidized_by_ref::shallow_decl_defs::FunDecl<'_>, ) -> ShallowFun<R> { let decl_tys = &self.decl_ty_provider; ShallowFun { fe_pos: decl_tys.get_pos_provider().mk_pos_of_ref::<R>(sf.pos), fe_type: decl_tys.mk_decl_ty_from_parsed(sf.type_), } } }
use std::rc::Rc; use std::{fs, io}; use bumpalo::Bump; use crate::decl_defs::{ShallowClass, ShallowFun, ShallowMethod}; use crate::decl_ty_provider::DeclTyProvider; use crate::pos::{RelativePath, RelativePathCtx, Symbol}; use crate::reason::Reason; use crate::shallow_decl_provider::ShallowDeclCache; #[derive(Debug)] pub struct ShallowDeclProvider<R: Reason> { cache: Rc<dyn ShallowDeclCache<R = R>>, decl_ty_provider: Rc<DeclTyProvider<R>>, relative_path_ctx: Rc<RelativePathCtx>, } impl<R: Reason> ShallowDeclProvider<R> { pub fn new( cache: Rc<dyn ShallowDeclCache<R = R>>, decl_ty_provider: Rc<DeclTyProvider<R>>, relative_path_ctx: Rc<RelativePathCtx>, ) -> Self { Self { cache, decl_ty_provider, relative_path_ctx, } } pub fn get_decl_ty_provider(&self) -> &Rc<DeclTyProvider<R>> { &self.decl_ty_provider } pub fn get_shallow_class(&self, name: &Symbol) -> Option<Rc<ShallowClass<R>>> { self.cache.get_shallow_class(name) } pub fn add_from_oxidized_class(&self, sc: &oxidized_by_ref::shallow_decl_defs::ClassDecl<'_>) { let res = Rc::new(self.utils().mk_shallow_class(sc)); self.cache.put_shallow_class(res.sc_name.id().clone(), res); } pub fn add_from_oxidized_fun( &self, name: &str, sf: &oxidized_by_ref::shallow_decl_defs::FunDecl<'_>, ) { let res = Rc::new(self.utils().mk_shallow_fun(sf)); let name = self.decl_ty_provider.get_pos_provider().mk_symbol(name); self.cache.put_shallow_fun(name, res); } pub fn add_from_oxidized_decls(&self, decls: &oxidized_by_ref::direct_decl_parser::Decls<'_>) { for (name, decl) in decls.iter() { use oxidized_by_ref::direct_decl_parser::Decl::*; match decl { Class(sc) => drop(self.add_from_oxidized_class(sc)), Fun(sf) => drop(self.add_from_oxidized_fun(name, sf)), decl => unimplemented!("new_local_with_decls: {:?}", decl), } } } pub fn add_from_files( &self, filenames: &mut dyn Iterator<Item = &RelativePath>, ) -> io::Result<()> { for rel_fln in filenames { let arena = Bump::new(); let fln = rel_fln.to_absolute(&self.relative_path_ctx); let 
text = arena.alloc_slice_clone(fs::read_to_string(&fln)?.as_bytes()); let rel_path = oxidized::relative_path::RelativePath::make( oxidized::relative_path::Prefix::Dummy, fln, ); let parsed_file = stack_limit::with_elastic_stack(|stack_limit| { direct_decl_parser::parse_decls( oxidized_by_ref::decl_parser_options::DeclParserOptions::DEFAULT, rel_path.clone(), text, &arena, Some(stack_limit), ) }) .unwrap_or_else(|failure| { panic!( "Rust decl parser FFI exceeded maximum allowed stack of {} KiB", failure.max_stack_size_tried / stack_limit::KI ); }); self.add_from_oxidized_decls(&parsed_file.decls); } Ok(()) } fn utils(&self) -> ShallowDeclUtils<R> { ShallowDeclUtils::new(self.decl_ty_provider.clone()) } } struct ShallowDeclUtils<R: Reason> { decl_ty_provider: Rc<DeclTyProvider<R>>, } impl<R: Reason> ShallowDeclUtils<R> { fn new(decl_ty_provider: Rc<DeclTyProvider<R>>) -> Self { Self { decl_ty_provider } } fn mk_shallow_method( &self, sm: &oxidized_by_ref::shallow_decl_defs::ShallowMethod<'_>, ) -> ShallowMethod<R> { let decl_tys = &self.decl_ty_provider; ShallowMethod { sm_name: decl_tys.get_pos_provider().mk_pos_id_of_ref::<R>(sm.name), sm_type: decl_tys.mk_decl_ty_from_parsed(sm.type_), } } fn mk_shallow_class( &self, sc: &oxidized_by_ref::shallow_decl_defs::ClassDecl<'_>, ) -> ShallowClass<R> { let decl_tys = &self.decl_ty_provider; ShallowClass { sc_name: decl_tys.get_pos_provider().mk_pos_id_of_ref::<R>(sc.name), sc_extends: sc .extends .iter() .map(|ty| decl_tys.mk_decl_ty_from_parsed(ty)) .collect(), sc_methods: sc .methods .iter() .map(|sm| self.mk_shallow_method(sm)) .collect(), } } fn mk_shallow_fun( &self, sf: &oxidized_by_ref::shallow_decl_defs::FunDecl<'_>, ) -> ShallowFun<R> { let decl_tys = &self.dec
}
l_ty_provider; ShallowFun { fe_pos: decl_tys.get_pos_provider().mk_pos_of_ref::<R>(sf.pos), fe_type: decl_tys.mk_decl_ty_from_parsed(sf.type_), } }
function_block-function_prefixed
[]
Rust
src/input/system.rs
alanpoon/crayon
ab320e4cab285e1baee802363f024235883f8aef
use std::sync::{Arc, RwLock}; use crate::application::prelude::{LifecycleListener, LifecycleListenerHandle}; use crate::window::prelude::{Event, EventListener, EventListenerHandle}; use super::events::InputEvent; use super::keyboard::{Key, Keyboard}; use super::mouse::{Mouse, MouseButton}; use super::touchpad::{GesturePan, GestureTap, TouchPad, TouchState}; use super::InputParams; use crate::utils::hash::FastHashSet; use crate::math::prelude::Vector2; pub struct InputSystem { events: EventListenerHandle, lifecycle: LifecycleListenerHandle, state: Arc<InputState>, } struct InputState { touch_emulation: bool, touch_emulation_button: RwLock<Option<MouseButton>>, mouse: RwLock<Mouse>, keyboard: RwLock<Keyboard>, touchpad: RwLock<TouchPad>, } impl EventListener for Arc<InputState> { fn on(&mut self, v: &Event) -> Result<(), failure::Error> { if let Event::InputDevice(v) = *v { match v { InputEvent::MouseMoved { position } => { if self.touch_emulation_button.read().unwrap().is_some() { self.touchpad.write().unwrap().on_touch( 255, TouchState::Move, self.mouse.read().unwrap().position(), ); } self.mouse.write().unwrap().on_move(position) } InputEvent::MousePressed { button } => { if self.touch_emulation { *self.touch_emulation_button.write().unwrap() = Some(button); self.touchpad.write().unwrap().on_touch( 255, TouchState::Start, self.mouse.read().unwrap().position(), ); } self.mouse.write().unwrap().on_button_pressed(button) } InputEvent::MouseReleased { button } => { if *self.touch_emulation_button.read().unwrap() == Some(button) { *self.touch_emulation_button.write().unwrap() = None; self.touchpad.write().unwrap().on_touch( 255, TouchState::End, self.mouse.read().unwrap().position(), ); } self.mouse.write().unwrap().on_button_released(button) } InputEvent::MouseWheel { delta } => { self.mouse.write().unwrap().on_wheel_scroll(delta) } InputEvent::KeyboardPressed { key } => { self.keyboard.write().unwrap().on_key_pressed(key) } InputEvent::KeyboardReleased { key } => { 
self.keyboard.write().unwrap().on_key_released(key) } InputEvent::ReceivedCharacter { character } => { self.keyboard.write().unwrap().on_char(character) } InputEvent::Touch { id, state, position, } => { self.touchpad.write().unwrap().on_touch(id, state, position); } } } Ok(()) } } impl LifecycleListener for Arc<InputState> { fn on_post_update(&mut self) -> Result<(), failure::Error> { self.mouse.write().unwrap().advance(); self.keyboard.write().unwrap().advance(); self.touchpad.write().unwrap().advance(); Ok(()) } } impl Drop for InputSystem { fn drop(&mut self) { crate::application::detach(self.lifecycle); crate::window::detach(self.events); } } impl InputSystem { pub fn new(setup: InputParams) -> Self { debug_assert!(crate::application::valid(), ""); let state = Arc::new(InputState { touch_emulation: setup.touch_emulation, touch_emulation_button: RwLock::new(None), mouse: RwLock::new(Mouse::new(setup.mouse)), keyboard: RwLock::new(Keyboard::new(setup.keyboard)), touchpad: RwLock::new(TouchPad::new(setup.touchpad)), }); InputSystem { state: state.clone(), lifecycle: crate::application::attach(state.clone()), events: crate::window::attach(state), } } pub fn reset(&self) { self.state.mouse.write().unwrap().reset(); self.state.keyboard.write().unwrap().reset(); self.state.touchpad.write().unwrap().reset(); *self.state.touch_emulation_button.write().unwrap() = None; } #[inline] pub fn has_keyboard_attached(&self) -> bool { true } #[inline] pub fn is_key_down(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_down(key) } #[inline] pub fn is_key_press(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_press(key) } #[inline] pub fn key_presses(&self) -> FastHashSet<Key> { self.state.keyboard.read().unwrap().key_presses() } #[inline] pub fn is_key_release(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_release(key) } #[inline] pub fn key_releases(&self) -> FastHashSet<Key> { 
self.state.keyboard.read().unwrap().key_releases() } #[inline] pub fn is_key_repeat(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_repeat(key) } #[inline] pub fn text(&self) -> String { use std::iter::FromIterator; String::from_iter(self.state.keyboard.read().unwrap().captured_chars()) } #[inline] pub fn has_mouse_attached(&self) -> bool { true } #[inline] pub fn is_mouse_down(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_down(button) } #[inline] pub fn is_mouse_press(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_press(button) } #[inline] pub fn mouse_presses(&self) -> FastHashSet<MouseButton> { self.state.mouse.read().unwrap().mouse_presses() } #[inline] pub fn is_mouse_release(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_release(button) } #[inline] pub fn mouse_releases(&self) -> FastHashSet<MouseButton> { self.state.mouse.read().unwrap().mouse_releases() } #[inline] pub fn is_mouse_click(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_click(button) } #[inline] pub fn is_mouse_double_click(&self, button: MouseButton) -> bool { self.state .mouse .read() .unwrap() .is_button_double_click(button) } #[inline] pub fn mouse_position(&self) -> Vector2<f32> { self.state.mouse.read().unwrap().position() } #[inline] pub fn mouse_movement(&self) -> Vector2<f32> { self.state.mouse.read().unwrap().movement() } #[inline] pub fn mouse_scroll(&self) -> Vector2<f32> { self.state.mouse.read().unwrap().scroll() } #[inline] pub fn has_touchpad_attached(&self) -> bool { true } #[inline] pub fn is_finger_touched(&self, n: usize) -> bool { self.state.touchpad.read().unwrap().is_touched(n) } #[inline] pub fn finger_position(&self, n: usize) -> Option<Vector2<f32>> { self.state.touchpad.read().unwrap().position(n) } #[inline] pub fn finger_tap(&self) -> GestureTap { self.state.touchpad.read().unwrap().tap() } #[inline] pub fn 
finger_double_tap(&self) -> GestureTap { self.state.touchpad.read().unwrap().double_tap() } #[inline] pub fn finger_pan(&self) -> GesturePan { self.state.touchpad.read().unwrap().pan() } }
use std::sync::{Arc, RwLock}; use crate::application::prelude::{LifecycleListener, LifecycleListenerHandle}; use crate::window::prelude::{Event, EventListener, EventListenerHandle}; use super::events::InputEvent; use super::keyboard::{Key, Keyboard}; use super::mouse::{Mouse, MouseButton}; use super::touchpad::{GesturePan, GestureTap, TouchPad, TouchState}; use super::InputParams; use crate::utils::hash::FastHashSet; use crate::math::prelude::Vector2; pub struct InputSystem { events: EventListenerHandle, lifecycle: LifecycleListenerHandle, state: Arc<InputState>, } struct InputState { touch_emulation: bool, touch_emulation_button: RwLock<Option<MouseButton>>, mouse: RwLock<Mouse>, keyboard: RwLock<Keyboard>, touchpad: RwLock<TouchPad>, } impl EventListener for Arc<InputState> { fn on(&mut self, v: &Event) -> Result<(), failure::Error> { if let Event::InputDevice(v) = *v { match v { InputEvent::MouseMoved { position } => { if self.touch_emulation_button.read().unwrap().is_some() { self.touchpad.write().unwrap().on_touch( 255, TouchState::Move, self.mouse.read().unwrap().position(), ); } self.mouse.write().unwrap().on_move(position) } InputEvent::MousePressed { button } => { if self.touch_emulation { *self.touch_emulation_button.write().unwrap() = Some(button); self.touchpad.write().unwrap().on_touch( 255, TouchState::Start, self.mouse.read().unwrap().position(), ); } self.mouse.write().unwrap().on_button_pressed(button) } InputEvent::MouseReleased { button } => { if *self.touch_emulation_button.read().unwrap() == Some(button) { *self.touch_emulation_button.write().unwrap() = None; self.touchpad.write().unwrap().on_touch( 255, TouchState::End, self.mouse.read().unwrap().position(), ); } self.mouse.write().unwrap().on_button_released(button) } InputEvent::MouseWheel { delta } => { self.mouse.write().unwrap().on_wheel_scroll(delta) } InputEvent::KeyboardPressed { key } => { self.keyboard.write().unwrap().on_key_pressed(key) } InputEvent::KeyboardReleased { key } => { 
self.keyboard.write().unwrap().on_key_released(key) } InputEvent::ReceivedCharacter { character } => { self.keyboard.write().unwrap().on_char(character) } InputEvent::Touch { id, state, position, } => { self.touchpad.write().unwrap().on_touch(id, state, position); } } } Ok(()) } } impl LifecycleListener for Arc<InputState> { fn on_post_update(&mut self) -> Result<(), failure::Error> { self.mouse.write().unwrap().advance(); self.keyboard.write().unwrap().advance(); self.touchpad.write().unwrap().advance(); Ok(()) } } impl Drop for InputSystem { fn drop(&mut self) { crate::application::detach(self.lifecycle); crate::window::detach(self.events); } } impl InputSystem { pub fn new(setup: InputParams) -> Self { debug_assert!(crate::application::valid(), ""); let state = Arc::new(InputState { touch_emulation: setup.touch_emulation, touch_emulation_button: RwLock::new(None), mouse: RwLock::new(Mouse::new(setup.mouse)), keyboard: RwLock::new(Keyboard::new(setup.keyboard)), touchpad: RwLock::new(TouchPad::new(setup.touchpad)), }); InputSystem { state: state.clone(), lifecycle: crate::application::attach(state.clone()), events: crate::window::attach(state), } } pub fn reset(&self) { self.state.mouse.write().unwrap().reset(); self.state.keyboard.write().unwrap().reset(); self.state.touchpad.write().unwrap(
b fn key_presses(&self) -> FastHashSet<Key> { self.state.keyboard.read().unwrap().key_presses() } #[inline] pub fn is_key_release(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_release(key) } #[inline] pub fn key_releases(&self) -> FastHashSet<Key> { self.state.keyboard.read().unwrap().key_releases() } #[inline] pub fn is_key_repeat(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_repeat(key) } #[inline] pub fn text(&self) -> String { use std::iter::FromIterator; String::from_iter(self.state.keyboard.read().unwrap().captured_chars()) } #[inline] pub fn has_mouse_attached(&self) -> bool { true } #[inline] pub fn is_mouse_down(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_down(button) } #[inline] pub fn is_mouse_press(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_press(button) } #[inline] pub fn mouse_presses(&self) -> FastHashSet<MouseButton> { self.state.mouse.read().unwrap().mouse_presses() } #[inline] pub fn is_mouse_release(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_release(button) } #[inline] pub fn mouse_releases(&self) -> FastHashSet<MouseButton> { self.state.mouse.read().unwrap().mouse_releases() } #[inline] pub fn is_mouse_click(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_click(button) } #[inline] pub fn is_mouse_double_click(&self, button: MouseButton) -> bool { self.state .mouse .read() .unwrap() .is_button_double_click(button) } #[inline] pub fn mouse_position(&self) -> Vector2<f32> { self.state.mouse.read().unwrap().position() } #[inline] pub fn mouse_movement(&self) -> Vector2<f32> { self.state.mouse.read().unwrap().movement() } #[inline] pub fn mouse_scroll(&self) -> Vector2<f32> { self.state.mouse.read().unwrap().scroll() } #[inline] pub fn has_touchpad_attached(&self) -> bool { true } #[inline] pub fn is_finger_touched(&self, n: usize) -> bool { 
self.state.touchpad.read().unwrap().is_touched(n) } #[inline] pub fn finger_position(&self, n: usize) -> Option<Vector2<f32>> { self.state.touchpad.read().unwrap().position(n) } #[inline] pub fn finger_tap(&self) -> GestureTap { self.state.touchpad.read().unwrap().tap() } #[inline] pub fn finger_double_tap(&self) -> GestureTap { self.state.touchpad.read().unwrap().double_tap() } #[inline] pub fn finger_pan(&self) -> GesturePan { self.state.touchpad.read().unwrap().pan() } }
).reset(); *self.state.touch_emulation_button.write().unwrap() = None; } #[inline] pub fn has_keyboard_attached(&self) -> bool { true } #[inline] pub fn is_key_down(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_down(key) } #[inline] pub fn is_key_press(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_press(key) } #[inline] pu
random
[ { "content": "#[inline]\n\npub fn is_mouse_down(button: MouseButton) -> bool {\n\n ctx().is_mouse_down(button)\n\n}\n\n\n\n/// Checks if a mouse button has been pressed during last frame.\n", "file_path": "src/input/mod.rs", "rank": 0, "score": 293791.80774219765 }, { "content": "#[inline...
Rust
tremor-script/src/ast/support.rs
0xd34b33f/tremor-runtime
73af8033509e224e4cbf078559f27bec4c12cf3d
#![cfg_attr(tarpaulin, skip)] use super::{ BinOpKind, EventPath, Invoke, InvokeAggr, InvokeAggrFn, LocalPath, MetadataPath, Segment, StatePath, UnaryOpKind, }; use std::fmt; impl<'script> fmt::Debug for InvokeAggrFn<'script> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "fn(aggr) {}::{}", self.module, self.fun) } } impl<'script> PartialEq for InvokeAggrFn<'script> { fn eq(&self, other: &Self) -> bool { self.module == other.module && self.fun == other.fun && self.args == other.args } } impl<'script> PartialEq for Segment<'script> { fn eq(&self, other: &Self) -> bool { use Segment::{Element, Id, Idx, Range}; match (self, other) { (Id { mid: id1, .. }, Id { mid: id2, .. }) => id1 == id2, (Idx { idx: idx1, .. }, Idx { idx: idx2, .. }) => idx1 == idx2, (Element { expr: expr1, .. }, Element { expr: expr2, .. }) => expr1 == expr2, ( Range { range_start: start1, range_end: end1, .. }, Range { range_start: start2, range_end: end2, .. }, ) => start1 == start2 && end1 == end2, _ => false, } } } impl<'script> PartialEq for LocalPath<'script> { fn eq(&self, other: &Self) -> bool { self.idx == other.idx && self.is_const == other.is_const && self.segments == other.segments } } impl<'script> PartialEq for MetadataPath<'script> { fn eq(&self, other: &Self) -> bool { self.segments == other.segments } } impl<'script> PartialEq for EventPath<'script> { fn eq(&self, other: &Self) -> bool { self.segments == other.segments } } impl<'script> PartialEq for StatePath<'script> { fn eq(&self, other: &Self) -> bool { self.segments == other.segments } } impl BinOpKind { fn operator_name(self) -> &'static str { match self { Self::Or => "or", Self::Xor => "xor", Self::And => "and", Self::BitOr => "|", Self::BitXor => "^", Self::BitAnd => "&", Self::Eq => "==", Self::NotEq => "!=", Self::Gte => ">=", Self::Gt => ">", Self::Lte => "<=", Self::Lt => "<", Self::RBitShiftSigned => ">>", Self::RBitShiftUnsigned => ">>>", Self::LBitShift => "<<", Self::Add => "+", Self::Sub => "-", 
Self::Mul => "*", Self::Div => "/", Self::Mod => "%", } } } impl fmt::Display for BinOpKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(self.operator_name()) } } impl UnaryOpKind { fn operator_name(self) -> &'static str { match self { Self::Plus => "+", Self::Minus => "-", Self::Not => "not", Self::BitNot => "!", } } } impl fmt::Display for UnaryOpKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(self.operator_name()) } } impl<'script> PartialEq for Invoke<'script> { fn eq(&self, other: &Self) -> bool { self.mid == other.mid && self.module == other.module && self.fun == other.fun } } impl<'script> fmt::Debug for Invoke<'script> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "fn {}::{}", self.module.join("::"), self.fun) } } impl PartialEq for InvokeAggr { fn eq(&self, other: &Self) -> bool { self.mid == other.mid && self.module == other.module && self.fun == other.fun && self.aggr_id == other.aggr_id } } impl fmt::Debug for InvokeAggr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "fn(aggr) {}::{}", self.module, self.fun) } }
#![cfg_attr(tarpaulin, skip)] use super::{ BinOpKind, EventPath, Invoke, InvokeAggr, InvokeAggrFn, LocalPath, MetadataPath, Segment, StatePath, UnaryOpKind, }; use std::fmt; impl<'script> fmt::Debug for InvokeAggrFn<'script> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "fn(aggr) {}::{}", self.module, self.fun) } } impl<'script> PartialEq for InvokeAggrFn<'script> { fn eq(&self, other: &Self) -> bool { self.module == other.module && self.fun == other.fun && self.args == other.args } } impl<'script> PartialEq for Segment<'script> { fn eq(&self, other: &Self) -> bool { use Segment::{Element, Id, Idx, Range}; match (self, other) { (Id { mid: id1, .. }, Id { mid: id2, .. }) => id1 == id2, (Idx { idx: idx1, .. }, Idx { idx: idx2, .. }) => idx1 == idx2, (Element { expr: expr1, .. }, Element { expr: expr2, .. }) => expr1 == expr2, ( Range { range_start: start1, range_end: end1, .. }, Range { range_start: start2, range_end: end2, .. }, ) => start1 == start2 && end1 == end2, _ => false, } } } impl<'script> PartialEq for LocalPath<'script> { fn eq(&self, other: &Self) -> bool { self.idx == other.idx && self.is_const == other.is_const && self.segments == other.segments } } impl<'script> PartialEq for MetadataPath<'script> { fn eq(&self, other: &Self) -> bool { self.segments == other.segments } } impl<'script> PartialEq for EventPath<'script> { fn eq(&self, other: &Self) -> bool { self.segments == other.segments } } impl<'script> PartialEq for StatePath<'script> { fn eq(&self, other: &Self) -> bool { self.segments == other.segments } } impl BinOpKind { fn operator_name(self) -> &'static str { match self { Self::Or => "or", Self::Xor => "xor", Self::And => "and", Self::BitOr => "|", Self::BitXor => "^", Self::BitAnd => "&",
} impl fmt::Display for BinOpKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(self.operator_name()) } } impl UnaryOpKind { fn operator_name(self) -> &'static str { match self { Self::Plus => "+", Self::Minus => "-", Self::Not => "not", Self::BitNot => "!", } } } impl fmt::Display for UnaryOpKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(self.operator_name()) } } impl<'script> PartialEq for Invoke<'script> { fn eq(&self, other: &Self) -> bool { self.mid == other.mid && self.module == other.module && self.fun == other.fun } } impl<'script> fmt::Debug for Invoke<'script> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "fn {}::{}", self.module.join("::"), self.fun) } } impl PartialEq for InvokeAggr { fn eq(&self, other: &Self) -> bool { self.mid == other.mid && self.module == other.module && self.fun == other.fun && self.aggr_id == other.aggr_id } } impl fmt::Debug for InvokeAggr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "fn(aggr) {}::{}", self.module, self.fun) } }
Self::Eq => "==", Self::NotEq => "!=", Self::Gte => ">=", Self::Gt => ">", Self::Lte => "<=", Self::Lt => "<", Self::RBitShiftSigned => ">>", Self::RBitShiftUnsigned => ">>>", Self::LBitShift => "<<", Self::Add => "+", Self::Sub => "-", Self::Mul => "*", Self::Div => "/", Self::Mod => "%", } }
function_block-function_prefix_line
[]
Rust
src/core/image.rs
RahulDas-dev/ndarray-vision
fddbb85f67b2e9124a8c9582ecd775ff5d60a3e7
use crate::core::colour_models::*; use crate::core::traits::PixelBound; use ndarray::prelude::*; use ndarray::{s, Data, DataMut, OwnedRepr, RawDataClone, ViewRepr}; use num_traits::cast::{FromPrimitive, NumCast}; use num_traits::Num; use std::{fmt, hash, marker::PhantomData}; pub type Image<T, C> = ImageBase<OwnedRepr<T>, C>; pub type ImageView<'a, T, C> = ImageBase<ViewRepr<&'a T>, C>; pub struct ImageBase<T, C> where C: ColourModel, T: Data, { pub data: ArrayBase<T, Ix3>, pub(crate) model: PhantomData<C>, } impl<T, U, C> ImageBase<U, C> where U: Data<Elem = T>, T: Copy + Clone + FromPrimitive + Num + NumCast + PixelBound, C: ColourModel, { pub fn into_type<T2>(self) -> Image<T2, C> where T2: Copy + Clone + FromPrimitive + Num + NumCast + PixelBound, { let rescale = |x: &T| { let scaled = normalise_pixel_value(*x) * (T2::max_pixel() - T2::min_pixel()) .to_f64() .unwrap_or_else(|| 0.0f64); T2::from_f64(scaled).unwrap_or_else(T2::zero) + T2::min_pixel() }; let data = self.data.map(rescale); Image::<_, C>::from_data(data) } } impl<S, T, C> ImageBase<S, C> where S: Data<Elem = T>, T: Clone, C: ColourModel, { pub fn to_owned(&self) -> Image<T, C> { Image { data: self.data.to_owned(), model: PhantomData, } } pub fn from_shape_data(rows: usize, cols: usize, data: Vec<T>) -> Image<T, C> { let data = Array3::from_shape_vec((rows, cols, C::channels()), data).unwrap(); Image { data, model: PhantomData, } } } impl<T, C> Image<T, C> where T: Clone + Num, C: ColourModel, { pub fn new(rows: usize, columns: usize) -> Self { Image { data: Array3::zeros((rows, columns, C::channels())), model: PhantomData, } } } impl<T, U, C> ImageBase<T, C> where T: Data<Elem = U>, C: ColourModel, { pub fn from_data(data: ArrayBase<T, Ix3>) -> Self { Self { data, model: PhantomData, } } pub fn rows(&self) -> usize { self.data.shape()[0] } pub fn cols(&self) -> usize { self.data.shape()[1] } pub fn channels(&self) -> usize { C::channels() } pub fn pixel(&self, row: usize, col: usize) -> ArrayView<U, 
Ix1> { self.data.slice(s![row, col, ..]) } pub fn into_type_raw<C2>(self) -> ImageBase<T, C2> where C2: ColourModel, { assert_eq!(C2::channels(), C::channels()); ImageBase::<T, C2>::from_data(self.data) } } impl<T, U, C> ImageBase<T, C> where T: DataMut<Elem = U>, C: ColourModel, { pub fn pixel_mut(&mut self, row: usize, col: usize) -> ArrayViewMut<U, Ix1> { self.data.slice_mut(s![row, col, ..]) } } impl<T, U, C> fmt::Debug for ImageBase<U, C> where U: Data<Elem = T>, T: fmt::Debug, C: ColourModel, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ColourModel={:?} Data={:?}", self.model, self.data)?; Ok(()) } } impl<T, U, C> PartialEq<ImageBase<U, C>> for ImageBase<U, C> where U: Data<Elem = T>, T: PartialEq, C: ColourModel, { fn eq(&self, other: &Self) -> bool { self.model == other.model && self.data == other.data } } impl<S, C> Clone for ImageBase<S, C> where S: RawDataClone + Data, C: ColourModel, { fn clone(&self) -> Self { Self { data: self.data.clone(), model: PhantomData, } } fn clone_from(&mut self, other: &Self) { self.data.clone_from(&other.data) } } impl<'a, S, C> hash::Hash for ImageBase<S, C> where S: Data, S::Elem: hash::Hash, C: ColourModel, { fn hash<H: hash::Hasher>(&self, state: &mut H) { self.model.hash(state); self.data.hash(state); } } pub fn normalise_pixel_value<T>(t: T) -> f64 where T: PixelBound + Num + NumCast, { let numerator = (t + T::min_pixel()).to_f64(); let denominator = (T::max_pixel() - T::min_pixel()).to_f64(); let numerator = numerator.unwrap_or_else(|| 0.0f64); let denominator = denominator.unwrap_or_else(|| 1.0f64); numerator / denominator } #[cfg(test)] mod tests { use super::*; use ndarray::arr1; #[test] fn image_consistency_checks() { let i = Image::<u8, RGB>::new(1, 2); assert_eq!(i.rows(), 1); assert_eq!(i.cols(), 2); assert_eq!(i.channels(), 3); assert_eq!(i.channels(), i.data.shape()[2]); } #[test] fn image_type_conversion() { let mut i = Image::<u8, RGB>::new(1, 1); i.pixel_mut(0, 0) 
.assign(&arr1(&[u8::max_value(), 0, u8::max_value() / 3])); let t: Image<u16, RGB> = i.into_type(); assert_eq!( t.pixel(0, 0), arr1(&[u16::max_value(), 0, u16::max_value() / 3]) ); } }
use crate::core::colour_models::*; use crate::core::traits::PixelBound; use ndarray::prelude::*; use ndarray::{s, Data, DataMut, OwnedRepr, RawDataClone, ViewRepr}; use num_traits::cast::{FromPrimitive, NumCast}; use num_traits::Num; use std::{fmt, hash, marker::PhantomData}; pub type Image<T, C> = ImageBase<OwnedRepr<T>, C>; pub type ImageView<'a, T, C> = ImageBase<ViewRepr<&'a T>, C>; pub struct ImageBase<T, C> where C: ColourModel, T: Data, { pub data: ArrayBase<T, Ix3>, pub(crate) model: PhantomData<C>, } impl<T, U, C> ImageBase<U, C> where U: Data<Elem = T>, T: Copy + Clone + FromPrimitive + Num + NumCast + PixelBound, C: ColourModel, { pub fn into_type<T2>(self) -> Image<T2, C> where T2: Copy + Clone + FromPrimitive + Num + NumCast + PixelBound, { let rescale = |x: &T| { let scaled = normalise_pixel_value(*x) * (T2::max_pixel() - T2::min_pixel()) .to_f64() .unwrap_or_else(|| 0.0f64); T2::from_f64(scaled).unwrap_or_else(T2::zero) + T2::min_pixel() }; let data = self.data.map(rescale); Image::<_, C>::from_data(data) } } impl<S, T, C> ImageBase<S, C> where S: Data<Elem = T>, T: Clone, C: ColourModel, { pub f
pub fn from_shape_data(rows: usize, cols: usize, data: Vec<T>) -> Image<T, C> { let data = Array3::from_shape_vec((rows, cols, C::channels()), data).unwrap(); Image { data, model: PhantomData, } } } impl<T, C> Image<T, C> where T: Clone + Num, C: ColourModel, { pub fn new(rows: usize, columns: usize) -> Self { Image { data: Array3::zeros((rows, columns, C::channels())), model: PhantomData, } } } impl<T, U, C> ImageBase<T, C> where T: Data<Elem = U>, C: ColourModel, { pub fn from_data(data: ArrayBase<T, Ix3>) -> Self { Self { data, model: PhantomData, } } pub fn rows(&self) -> usize { self.data.shape()[0] } pub fn cols(&self) -> usize { self.data.shape()[1] } pub fn channels(&self) -> usize { C::channels() } pub fn pixel(&self, row: usize, col: usize) -> ArrayView<U, Ix1> { self.data.slice(s![row, col, ..]) } pub fn into_type_raw<C2>(self) -> ImageBase<T, C2> where C2: ColourModel, { assert_eq!(C2::channels(), C::channels()); ImageBase::<T, C2>::from_data(self.data) } } impl<T, U, C> ImageBase<T, C> where T: DataMut<Elem = U>, C: ColourModel, { pub fn pixel_mut(&mut self, row: usize, col: usize) -> ArrayViewMut<U, Ix1> { self.data.slice_mut(s![row, col, ..]) } } impl<T, U, C> fmt::Debug for ImageBase<U, C> where U: Data<Elem = T>, T: fmt::Debug, C: ColourModel, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ColourModel={:?} Data={:?}", self.model, self.data)?; Ok(()) } } impl<T, U, C> PartialEq<ImageBase<U, C>> for ImageBase<U, C> where U: Data<Elem = T>, T: PartialEq, C: ColourModel, { fn eq(&self, other: &Self) -> bool { self.model == other.model && self.data == other.data } } impl<S, C> Clone for ImageBase<S, C> where S: RawDataClone + Data, C: ColourModel, { fn clone(&self) -> Self { Self { data: self.data.clone(), model: PhantomData, } } fn clone_from(&mut self, other: &Self) { self.data.clone_from(&other.data) } } impl<'a, S, C> hash::Hash for ImageBase<S, C> where S: Data, S::Elem: hash::Hash, C: ColourModel, { fn hash<H: 
hash::Hasher>(&self, state: &mut H) { self.model.hash(state); self.data.hash(state); } } pub fn normalise_pixel_value<T>(t: T) -> f64 where T: PixelBound + Num + NumCast, { let numerator = (t + T::min_pixel()).to_f64(); let denominator = (T::max_pixel() - T::min_pixel()).to_f64(); let numerator = numerator.unwrap_or_else(|| 0.0f64); let denominator = denominator.unwrap_or_else(|| 1.0f64); numerator / denominator } #[cfg(test)] mod tests { use super::*; use ndarray::arr1; #[test] fn image_consistency_checks() { let i = Image::<u8, RGB>::new(1, 2); assert_eq!(i.rows(), 1); assert_eq!(i.cols(), 2); assert_eq!(i.channels(), 3); assert_eq!(i.channels(), i.data.shape()[2]); } #[test] fn image_type_conversion() { let mut i = Image::<u8, RGB>::new(1, 1); i.pixel_mut(0, 0) .assign(&arr1(&[u8::max_value(), 0, u8::max_value() / 3])); let t: Image<u16, RGB> = i.into_type(); assert_eq!( t.pixel(0, 0), arr1(&[u16::max_value(), 0, u16::max_value() / 3]) ); } }
n to_owned(&self) -> Image<T, C> { Image { data: self.data.to_owned(), model: PhantomData, } }
function_block-function_prefixed
[]
Rust
examples3/convex_decomposition.rs
BenBergman/nphysics
11ca4d6f967c35e7f51e65295174c5b0395cbd93
extern crate rand;
extern crate kiss3d;
extern crate nalgebra as na;
extern crate ncollide;
extern crate nphysics;
extern crate nphysics_testbed3d;

use std::sync::Arc;
use std::path::Path;
use rand::random;
use na::{Pnt3, Vec3, Translation};
use kiss3d::loader::obj;
use ncollide::shape::{Plane, Compound, Convex};
use ncollide::procedural::TriMesh3;
use ncollide::transformation;
use ncollide::bounding_volume::{BoundingVolume, AABB};
use ncollide::bounding_volume;
use ncollide::inspection::Repr3;
use nphysics::world::World;
use nphysics::object::RigidBody;
use nphysics_testbed3d::Testbed;

/// Demo: loads a set of OBJ models, convex-decomposes each one with HACD,
/// and drops many copies of the resulting compound bodies into a boxed arena.
fn main() {
    /*
     * World
     */
    let mut world = World::new();
    world.set_gravity(Vec3::new(0.0, -9.81, 0.0));

    /*
     * Planes forming an open box so the bodies stay in view.
     */
    let shift = 10.0;
    let normals = [
        Vec3::new(0.0, 1.0, 0.0),
        Vec3::new(-1.0, 1.0, 0.0),
        Vec3::new(1.0, 1.0, 0.0),
        Vec3::new(0.0, 1.0, -1.0),
        Vec3::new(0.0, 1.0, 1.0),
    ];
    let poss = [
        Vec3::new(0.0, 0.0, 0.0),
        Vec3::new(shift, 0.0, 0.0),
        Vec3::new(-shift, 0.0, 0.0),
        Vec3::new(0.0, 0.0, shift),
        Vec3::new(0.0, 0.0, -shift)
    ];

    for (normal, pos) in normals.iter().zip(poss.iter()) {
        let geom = Plane::new(*normal);
        let mut rb = RigidBody::new_static(geom, 0.3, 0.6);

        rb.append_translation(pos);

        world.add_body(rb);
    }

    /*
     * Create the convex decompositions.
     */
    let geoms = models();
    let mut bodies = Vec::new();
    let ngeoms = geoms.len();

    for obj_path in geoms.into_iter() {
        let deltas = na::one();
        let mtl_path = Path::new("");
        let mut geom_data = Vec::new();

        // Models are optional downloads; silently skip any file that fails
        // to parse (a summary warning is printed below).
        let obj = obj::parse_file(&Path::new(&obj_path), &mtl_path, "");

        if let Ok(model) = obj {
            let meshes: Vec<TriMesh3<f32>> =
                model.into_iter().map(|mesh| mesh.1.to_trimesh().unwrap()).collect();

            // Merge the AABBs of every sub-mesh to find the model's overall
            // centre and extent.
            let (mins, maxs) = bounding_volume::point_cloud_aabb(&deltas, &meshes[0].coords[..]);
            let mut aabb = AABB::new(mins, maxs);

            for mesh in meshes[1 ..].iter() {
                let (mins, maxs) = bounding_volume::point_cloud_aabb(&deltas, &mesh.coords[..]);
                aabb.merge(&AABB::new(mins, maxs));
            }

            let center = aabb.translation();
            let diag = na::norm(&(*aabb.maxs() - *aabb.mins()));

            for mut trimesh in meshes.into_iter() {
                // Recentre and rescale so every model ends up roughly the
                // same size regardless of its native units.
                trimesh.translate_by(&-center);
                trimesh.scale_by_scalar(6.0 / diag);
                trimesh.split_index_buffer(true);

                let (decomp, _) = transformation::hacd(trimesh, 0.03, 1);

                for mesh in decomp.into_iter() {
                    let convex = Arc::new(Box::new(Convex::new(mesh.coords)) as Box<Repr3<f32>>);
                    geom_data.push((deltas, convex));
                }
            }

            let compound = Compound::new(geom_data);

            let mut rb = RigidBody::new_dynamic(compound, 1.0, 0.3, 0.5);
            rb.set_deactivation_threshold(Some(0.5));
            bodies.push(rb)
        }
    }

    if bodies.len() != ngeoms {
        println!("#########################################################################################");
        println!("Some models are missing. You can download them all at : http://crozet.re/nphysics/models.");
        println!("All the obj files should be put in the `./media/models` folder.");
        println!("#########################################################################################");
    }

    // Guard against a divide-by-zero panic when no model could be loaded at
    // all (the warning above has already been printed in that case).
    let nreplicats = if bodies.is_empty() { 0 } else { 100 / bodies.len() };

    for rb in bodies.iter() {
        for _ in 0 .. nreplicats {
            let mut rb = rb.clone();
            let pos = random::<Vec3<f32>>() * 30.0 + Vec3::new(-15.0, 15.0, -15.0);
            rb.append_translation(&pos);

            world.add_body(rb);
        }
    }

    /*
     * Set up the testbed.
     */
    let mut testbed = Testbed::new(world);
    testbed.look_at(Pnt3::new(-30.0, 30.0, -30.0), Pnt3::new(0.0, 0.0, 0.0));
    testbed.run();
}

/// Paths of every model the demo tries to load. The files are expected under
/// `./media/models` (downloadable from http://crozet.re/nphysics/models).
fn models() -> Vec<String> {
    vec![
        "media/models/CRATERS_F_decimated.obj",
        "media/models/DRAGON_F_decimated.obj",
        "media/models/GARGOYLE_F_decimated.obj",
        "media/models/Hand1_decimated.obj",
        "media/models/RSCREATURE_F_decimated.obj",
        "media/models/Sketched-Brunnen_decimated.obj",
        "media/models/Teapot_decimated.obj",
        "media/models/block.obj",
        "media/models/block_decimated.obj",
        "media/models/bowl.obj",
        "media/models/bunny_decimated.obj",
        "media/models/camel.obj",
        "media/models/camel_decimated.obj",
        "media/models/casting.obj",
        "media/models/casting_decimated.obj",
        "media/models/chair.obj",
        "media/models/cow1.obj",
        "media/models/cow1_decimated.obj",
        "media/models/cow2.obj",
        "media/models/cow2_decimated.obj",
        "media/models/crank_decimated.obj",
        "media/models/cup.obj",
        "media/models/cup_decimated.obj",
        "media/models/dancer2_decimated.obj",
        "media/models/deer_bound.obj",
        "media/models/dilo_decimated.obj",
        "media/models/dino_decimated.obj",
        "media/models/drum.obj",
        "media/models/egea.obj",
        "media/models/egea_decimated.obj",
        "media/models/eight.obj",
        "media/models/elephant_decimated.obj",
        "media/models/elk.obj",
        "media/models/elk_decimated.obj",
        "media/models/face-YH_decimated.obj",
        "media/models/feline_decimated.obj",
        "media/models/fish_decimated.obj",
        "media/models/foot.obj",
        "media/models/foot_decimated.obj",
        "media/models/genus3.obj",
        "media/models/genus3_decimated.obj",
        "media/models/greek_sculpture_decimated.obj",
        "media/models/hand2_decimated.obj",
        "media/models/hand_decimated.obj",
        "media/models/helix.obj",
        "media/models/helmet.obj",
        "media/models/hero.obj",
        "media/models/hero_decimated.obj",
        "media/models/homer.obj",
        "media/models/homer_decimated.obj",
        "media/models/hornbug.obj",
        "media/models/horse_decimated.obj",
        "media/models/maneki-neko_decimated.obj",
        "media/models/mannequin-devil.obj",
        "media/models/mannequin-devil_decimated.obj",
        "media/models/mannequin.obj",
        "media/models/mannequin_decimated.obj",
        "media/models/mask_decimated.obj",
        "media/models/moaimoai.obj",
        "media/models/moaimoai_decimated.obj",
        "media/models/monk_decimated.obj",
        "media/models/octopus_decimated.obj",
        "media/models/pig.obj",
        "media/models/pig_decimated.obj",
        "media/models/pinocchio_b_decimated.obj",
        "media/models/polygirl.obj",
        "media/models/polygirl_decimated.obj",
        "media/models/rabbit_decimated.obj",
        "media/models/rocker-arm.obj",
        "media/models/rocker-arm_decimated.obj",
        "media/models/screw-remeshed_decimated.obj",
        "media/models/screwdriver_decimated.obj",
        "media/models/shark_b_decimated.obj",
        "media/models/skull-original_decimated.obj",
        "media/models/sledge.obj",
        "media/models/squirrel.obj",
        "media/models/squirrel_decimated.obj",
        "media/models/sword_decimated.obj",
        "media/models/table.obj",
        "media/models/test2.obj",
        "media/models/tstTorusModel.obj",
        "media/models/tstTorusModel2.obj",
        "media/models/tstTorusModel3.obj",
        "media/models/tube1.obj",
        "media/models/venus-original_decimated.obj",
        "media/models/venus.obj",
    ].into_iter().map(|s| s.to_string()).collect()
}
extern crate rand; extern crate kiss3d; extern crate nalgebra as na; extern crate ncollide; extern crate nphysics; extern crate nphysics_testbed3d; use std::sync::Arc; use std::path::Path; use rand::random; use na::{Pnt3, Vec3, Translation}; use kiss3d::loader::obj; use ncollide::shape::{Plane, Compound, Convex}; use ncollide::procedural::TriMesh3; use ncollide::transformation; use ncollide::bounding_volume::{BoundingVolume, AABB}; use ncollide::bounding_volume; use ncollide::inspection::Repr3; use nphysics::world::World; use nphysics::object::RigidBody; use nphysics_testbed3d::Testbed; fn main() { /* * World */ let mut world = World::new(); world.set_gravity(Vec3::new(0.0, -9.81, 0.0)); /* * Planes */ let shift = 10.0; let normals = [ Vec3::new(0.0, 1.0, 0.0), Vec3::new(-1.0, 1.0, 0.0), Vec3::new(1.0, 1.0, 0.0), Vec3::new(0.0, 1.0, -1.0), Vec3::new(0.0, 1.0, 1.0), ]; let poss = [ Vec3::new(0.0, 0.0, 0.0), Vec3::new(shift, 0.0, 0.0), Vec3::new(-shift, 0.0, 0.0), Vec3::new(0.0, 0.0, shift), Vec3::new(0.0, 0.0, -shift) ]; for (normal, pos) in normals.iter().zip(poss.iter()) { let geom = Plane::new(*normal); let mut rb = RigidBody::new_static(geom, 0.3, 0.6); rb.append_translation(pos); world.add_body(rb); } /* * Create the convex decompositions. 
*/ let geoms = models(); let mut bodies = Vec::new(); let ngeoms = geoms.len(); for obj_path in geoms.into_iter() { let deltas = na::one(); let mtl_path = Path::new(""); let mut geom_data = Vec::new(); let obj = obj::parse_file(&Path::new(&obj_path), &mtl_path, ""); if let Ok(model) = obj { let meshes: Vec<TriMesh3<f32>> = model.into_iter().map(|mesh| mesh.1.to_trimesh().unwrap()).collect(); let (mins, maxs) = bounding_volume::point_cloud_aabb(&deltas, &meshes[0].coords[..]); let mut aabb = AABB::new(mins, maxs); for mesh in meshes[1 ..].iter() { let (mins, maxs) = bounding_volume::point_cloud_aabb(&deltas, &mesh.coords[..]); aabb.merge(&AABB::new(mins, maxs)); } let center = aabb.translation(); let diag = na::norm(&(*aabb.maxs() - *aabb.mins())); for mut trimesh in meshes.into_iter() { trimesh.translate_by(&-center); trimesh.scale_by_scalar(6.0 / diag); trimesh.split_index_buffer(true); let (decomp, _) = transformation::hacd(trimesh, 0.03, 1); for mesh in decomp.into_iter() { let convex = Arc::new(Box::new(Convex::new(mesh.coords)) as Box<Repr3<f32>>); geom_data.push((deltas, convex)); } } let compound = Compound::new(geom_data); let mut rb = RigidBody::new_dynamic(compound, 1.0, 0.3, 0.5); rb.set_deactivation_threshold(Some(0.5)); bodies.push(rb) } } if bodies.len() != ngeoms { println!("#########################################################################################"); println!("Some model are missing. You can download them all at : http://crozet.re/nphysics/models."); println!("All the obj files should be put on the `./media/models` folder."); println!("#########################################################################################"); } let nreplicats = 100 / bodies.len(); for rb in bodies.iter() { for _ in 0 .. nreplicats { let mut rb = rb.clone(); let pos = random::<Vec3<f32>>() * 30.0+ Vec3::new(-15.0, 15.0, -15.0); rb.append_translation(&pos); world.add_body(rb); } } /* * Set up the testbed. 
*/ let mut testbed = Testbed::new(world); testbed.look_at(Pnt3::new(-30.0, 30.0, -30.0), Pnt3::new(0.0, 0.0, 0.0)); testbed.run(); } fn models() -> Vec<String> { vec!("media/models/CRATERS_F_decimated.obj".to_string() , "media/models/DRAGON_F_decimated.obj".to_string() , "media/models/GARGOYLE_F_decimated.obj".to_string() , "media/
, "media/models/genus3.obj".to_string() , "media/models/genus3_decimated.obj".to_string() , "media/models/greek_sculpture_decimated.obj".to_string() , "media/models/hand2_decimated.obj".to_string() , "media/models/hand_decimated.obj".to_string() , "media/models/helix.obj".to_string() , "media/models/helmet.obj".to_string() , "media/models/hero.obj".to_string() , "media/models/hero_decimated.obj".to_string() , "media/models/homer.obj".to_string() , "media/models/homer_decimated.obj".to_string() , "media/models/hornbug.obj".to_string() , "media/models/horse_decimated.obj".to_string() , "media/models/maneki-neko_decimated.obj".to_string() , "media/models/mannequin-devil.obj".to_string() , "media/models/mannequin-devil_decimated.obj".to_string() , "media/models/mannequin.obj".to_string() , "media/models/mannequin_decimated.obj".to_string() , "media/models/mask_decimated.obj".to_string() , "media/models/moaimoai.obj".to_string() , "media/models/moaimoai_decimated.obj".to_string() , "media/models/monk_decimated.obj".to_string() , "media/models/octopus_decimated.obj".to_string() , "media/models/pig.obj".to_string() , "media/models/pig_decimated.obj".to_string() , "media/models/pinocchio_b_decimated.obj".to_string() , "media/models/polygirl.obj".to_string() , "media/models/polygirl_decimated.obj".to_string() , "media/models/rabbit_decimated.obj".to_string() , "media/models/rocker-arm.obj".to_string() , "media/models/rocker-arm_decimated.obj".to_string() , "media/models/screw-remeshed_decimated.obj".to_string() , "media/models/screwdriver_decimated.obj".to_string() , "media/models/shark_b_decimated.obj".to_string() , "media/models/skull-original_decimated.obj".to_string() , "media/models/sledge.obj".to_string() , "media/models/squirrel.obj".to_string() , "media/models/squirrel_decimated.obj".to_string() , "media/models/sword_decimated.obj".to_string() , "media/models/table.obj".to_string() , "media/models/test2.obj".to_string() , "media/models/tstTorusModel.obj".to_string() 
, "media/models/tstTorusModel2.obj".to_string() , "media/models/tstTorusModel3.obj".to_string() , "media/models/tube1.obj".to_string() , "media/models/venus-original_decimated.obj".to_string() , "media/models/venus.obj".to_string() ) }
models/Hand1_decimated.obj".to_string() , "media/models/RSCREATURE_F_decimated.obj".to_string() , "media/models/Sketched-Brunnen_decimated.obj".to_string() , "media/models/Teapot_decimated.obj".to_string() , "media/models/block.obj".to_string() , "media/models/block_decimated.obj".to_string() , "media/models/bowl.obj".to_string() , "media/models/bunny_decimated.obj".to_string() , "media/models/camel.obj".to_string() , "media/models/camel_decimated.obj".to_string() , "media/models/casting.obj".to_string() , "media/models/casting_decimated.obj".to_string() , "media/models/chair.obj".to_string() , "media/models/cow1.obj".to_string() , "media/models/cow1_decimated.obj".to_string() , "media/models/cow2.obj".to_string() , "media/models/cow2_decimated.obj".to_string() , "media/models/crank_decimated.obj".to_string() , "media/models/cup.obj".to_string() , "media/models/cup_decimated.obj".to_string() , "media/models/dancer2_decimated.obj".to_string() , "media/models/deer_bound.obj".to_string() , "media/models/dilo_decimated.obj".to_string() , "media/models/dino_decimated.obj".to_string() , "media/models/drum.obj".to_string() , "media/models/egea.obj".to_string() , "media/models/egea_decimated.obj".to_string() , "media/models/eight.obj".to_string() , "media/models/elephant_decimated.obj".to_string() , "media/models/elk.obj".to_string() , "media/models/elk_decimated.obj".to_string() , "media/models/face-YH_decimated.obj".to_string() , "media/models/feline_decimated.obj".to_string() , "media/models/fish_decimated.obj".to_string() , "media/models/foot.obj".to_string() , "media/models/foot_decimated.obj".to_string()
function_block-random_span
[ { "content": "fn add_ragdoll(pos: Vec3<f32>, world: &mut World) {\n\n // head\n\n let head_geom = Ball::new(0.8);\n\n let mut head = RigidBody::new_dynamic(head_geom, 1.0, 0.3, 0.5);\n\n head.append_translation(&(pos + Vec3::new(0.0, 2.4, 0.0)));\n\n\n\n // body\n\n let body_geom ...
Rust
src/content/content_encoding.rs
felippemr/http-types
f77c653d6703192430b4ba8fb016fe17ba8d457f
use crate::content::{Encoding, EncodingProposal}; use crate::headers::{HeaderName, HeaderValue, Headers, ToHeaderValues, CONTENT_ENCODING}; use std::fmt::{self, Debug}; use std::ops::{Deref, DerefMut}; use std::option; pub struct ContentEncoding { inner: Encoding, } impl ContentEncoding { pub fn new(encoding: Encoding) -> Self { Self { inner: encoding } } pub fn from_headers(headers: impl AsRef<Headers>) -> crate::Result<Option<Self>> { let headers = match headers.as_ref().get(CONTENT_ENCODING) { Some(headers) => headers, None => return Ok(None), }; let mut inner = None; for value in headers { if let Some(entry) = Encoding::from_str(value.as_str()) { inner = Some(entry); } } let inner = inner.expect("Headers instance with no entries found"); Ok(Some(Self { inner })) } pub fn apply(&self, mut headers: impl AsMut<Headers>) { headers.as_mut().insert(CONTENT_ENCODING, self.value()); } pub fn name(&self) -> HeaderName { CONTENT_ENCODING } pub fn value(&self) -> HeaderValue { self.inner.into() } pub fn encoding(&self) -> Encoding { self.inner } } impl ToHeaderValues for ContentEncoding { type Iter = option::IntoIter<HeaderValue>; fn to_header_values(&self) -> crate::Result<Self::Iter> { Ok(self.value().to_header_values().unwrap()) } } impl Deref for ContentEncoding { type Target = Encoding; fn deref(&self) -> &Self::Target { &self.inner } } impl DerefMut for ContentEncoding { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.inner } } impl PartialEq<Encoding> for ContentEncoding { fn eq(&self, other: &Encoding) -> bool { &self.inner == other } } impl PartialEq<&Encoding> for ContentEncoding { fn eq(&self, other: &&Encoding) -> bool { &&self.inner == other } } impl From<Encoding> for ContentEncoding { fn from(encoding: Encoding) -> Self { Self { inner: encoding } } } impl From<&Encoding> for ContentEncoding { fn from(encoding: &Encoding) -> Self { Self { inner: *encoding } } } impl From<EncodingProposal> for ContentEncoding { fn from(encoding: EncodingProposal) -> 
Self { Self { inner: encoding.encoding, } } } impl From<&EncodingProposal> for ContentEncoding { fn from(encoding: &EncodingProposal) -> Self { Self { inner: encoding.encoding, } } } impl Debug for ContentEncoding { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } }
use crate::content::{Encoding, EncodingProposal}; use crate::headers::{HeaderName, HeaderValue, Headers, ToHeaderValues, CONTENT_ENCODING}; use std::fmt::{self, Debug}; use std::ops::{Deref, DerefMut}; use std::option; pub struct ContentEncoding { inner: Encoding, } impl ContentEncoding { pub fn new(encoding: Encoding) -> Self { Self { inner: encoding } } pub fn from_headers(headers: impl AsRef<Headers>) -> crate::Result<Option<Self>> { let headers = match headers.as_ref().get(CONTENT_ENCODING) { Some(headers) => headers, None => return Ok(None), }; let mut inner = None; for value in headers { if let Some(entry) = Encoding::from_str(value.as_str()) { inner = Some(entry); } } let inn
TENT_ENCODING } pub fn value(&self) -> HeaderValue { self.inner.into() } pub fn encoding(&self) -> Encoding { self.inner } } impl ToHeaderValues for ContentEncoding { type Iter = option::IntoIter<HeaderValue>; fn to_header_values(&self) -> crate::Result<Self::Iter> { Ok(self.value().to_header_values().unwrap()) } } impl Deref for ContentEncoding { type Target = Encoding; fn deref(&self) -> &Self::Target { &self.inner } } impl DerefMut for ContentEncoding { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.inner } } impl PartialEq<Encoding> for ContentEncoding { fn eq(&self, other: &Encoding) -> bool { &self.inner == other } } impl PartialEq<&Encoding> for ContentEncoding { fn eq(&self, other: &&Encoding) -> bool { &&self.inner == other } } impl From<Encoding> for ContentEncoding { fn from(encoding: Encoding) -> Self { Self { inner: encoding } } } impl From<&Encoding> for ContentEncoding { fn from(encoding: &Encoding) -> Self { Self { inner: *encoding } } } impl From<EncodingProposal> for ContentEncoding { fn from(encoding: EncodingProposal) -> Self { Self { inner: encoding.encoding, } } } impl From<&EncodingProposal> for ContentEncoding { fn from(encoding: &EncodingProposal) -> Self { Self { inner: encoding.encoding, } } } impl Debug for ContentEncoding { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } }
er = inner.expect("Headers instance with no entries found"); Ok(Some(Self { inner })) } pub fn apply(&self, mut headers: impl AsMut<Headers>) { headers.as_mut().insert(CONTENT_ENCODING, self.value()); } pub fn name(&self) -> HeaderName { CON
random
[ { "content": "#[inline]\n\npub fn powered_by(mut headers: impl AsMut<Headers>, value: Option<HeaderValue>) {\n\n let name = HeaderName::from_lowercase_str(\"X-Powered-By\");\n\n match value {\n\n Some(value) => {\n\n headers.as_mut().insert(name, value);\n\n }\n\n None => {...
Rust
sflk-lang/src/parser.rs
anima-libera/sflk
973e7435ec44e5d775aad5737ad3835b6558d0f2
use crate::ast::{Chop, Comment, Expr, Node, Program, Stmt, TargetExpr}; use crate::scu::{Loc, SourceCodeUnit}; use crate::tokenizer::{BinOp, CharReadingHead, Kw, Matched, StmtBinOp, Tok, Tokenizer}; use std::{collections::VecDeque, rc::Rc}; pub struct ParsingWarning { } pub struct TokBuffer { crh: CharReadingHead, tokenizer: Tokenizer, toks_ahead: VecDeque<(Tok, Loc)>, } impl TokBuffer { pub fn from(crh: CharReadingHead) -> TokBuffer { TokBuffer { crh, tokenizer: Tokenizer::new(), toks_ahead: VecDeque::new(), } } fn prepare_max_index(&mut self, n: usize) { if self.toks_ahead.len() < n + 1 { self.toks_ahead.reserve(n - self.toks_ahead.len()); } while self.toks_ahead.len() < n + 1 { self.toks_ahead .push_back(self.tokenizer.pop_tok(&mut self.crh)); } } fn prepare_all(&mut self) { loop { self.toks_ahead .push_back(self.tokenizer.pop_tok(&mut self.crh)); if matches!(self.toks_ahead.back().map(|t| &t.0), Some(Tok::Eof)) { break; } } } fn peek(&mut self, n: usize) -> &(Tok, Loc) { self.prepare_max_index(n); &self.toks_ahead[n] } fn prepared(&self) -> &VecDeque<(Tok, Loc)> { &self.toks_ahead } fn pop(&mut self) -> (Tok, Loc) { self.peek(0); let tok_loc_opt = self.toks_ahead.pop_front(); if let Some(tok_loc) = tok_loc_opt { tok_loc } else { panic!("bug: no token to pop") } } fn disc(&mut self) { if self.toks_ahead.pop_front().is_none() { panic!("bug: token discarded but not peeked before") } } } impl TokBuffer { fn scu(&self) -> Rc<SourceCodeUnit> { self.crh.scu() } } pub struct Parser {} impl Parser { pub fn new() -> Parser { Parser {} } } impl Parser { pub fn parse_program(&mut self, tb: &mut TokBuffer) -> Node<Program> { let stmts = self.parse_all_as_stmts(tb); Node::from(Program { stmts }, Loc::total_of(tb.scu())) } fn parse_all_as_stmts(&mut self, tb: &mut TokBuffer) -> Vec<Node<Stmt>> { let mut stmts: Vec<Node<Stmt>> = Vec::new(); loop { let comments = self.parse_comments(tb); if matches!(tb.peek(0).0, Tok::Eof) { break; } let mut stmt_node = self.parse_stmt(tb); 
stmt_node.add_left_comments(comments); stmts.push(stmt_node); } stmts } fn parse_stmts(&mut self, tb: &mut TokBuffer) -> Vec<Node<Stmt>> { let mut stmts: Vec<Node<Stmt>> = Vec::new(); while let Some(stmt) = self.maybe_parse_stmt(tb) { stmts.push(stmt); } stmts } fn parse_stmt(&mut self, tb: &mut TokBuffer) -> Node<Stmt> { let left_comments = self.parse_comments(tb); let mut stmt_node = if let Some(stmt_node) = self.maybe_parse_stmt(tb) { stmt_node } else { let (_tok, loc) = tb.pop(); Node::from(Stmt::Invalid, loc) }; stmt_node.add_left_comments(left_comments); stmt_node } fn maybe_parse_stmt(&mut self, tb: &mut TokBuffer) -> Option<Node<Stmt>> { let (first_tok, first_loc) = tb.peek(0); if let Tok::Kw(kw) = first_tok { match kw { Kw::Np => { let kw_loc = first_loc.clone(); tb.disc(); Some(Node::from(Stmt::Nop, kw_loc)) } Kw::Pr => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::Print { expr: expr_node }, full_loc)) } Kw::Nl => { let kw_loc = first_loc.clone(); tb.disc(); Some(Node::from(Stmt::Newline, kw_loc)) } Kw::Ev => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::Evaluate { expr: expr_node }, full_loc)) } Kw::Do => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::Do { expr: expr_node }, full_loc)) } Kw::Dh => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::DoHere { expr: expr_node }, full_loc)) } Kw::Fh => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::DoFileHere { expr: expr_node }, full_loc)) } Kw::If => { let kw_loc = first_loc.clone(); tb.disc(); let cond_expr_node = 
self.parse_expr(tb); let th_stmt_node = self.maybe_parse_stmt_extension_stmt(tb, Kw::Th); let el_stmt_node = self.maybe_parse_stmt_extension_stmt(tb, Kw::El); let mut full_loc = kw_loc; if let Some(stmt_node) = &th_stmt_node { full_loc += stmt_node.loc(); } if let Some(stmt_node) = &el_stmt_node { full_loc += stmt_node.loc(); } Some(Node::from( Stmt::If { cond_expr: cond_expr_node, th_stmt: th_stmt_node.map(Box::new), el_stmt: el_stmt_node.map(Box::new), }, full_loc, )) } _ => { let kw_loc = first_loc.clone(); tb.disc(); Some(Node::from(Stmt::Invalid, kw_loc)) } } } else if let Some(stmt) = self.maybe_parse_assign_stmt(tb) { Some(stmt) } else { None } } fn maybe_parse_stmt_extension_stmt( &mut self, tb: &mut TokBuffer, kw: Kw, ) -> Option<Node<Stmt>> { let (tok, _) = tb.peek(0); match tok { Tok::Kw(tok_kw) if *tok_kw == kw => { tb.disc(); Some(self.parse_stmt(tb)) } _ => None, } } fn maybe_parse_assign_stmt(&mut self, tb: &mut TokBuffer) -> Option<Node<Stmt>> { tb.prepare_max_index(1); let prepared = tb.prepared(); match (&prepared[0], &prepared[1]) { ((Tok::Name { string, .. }, name_loc), (Tok::StmtBinOp(StmtBinOp::ToLeft), _)) => { let target_node = Node::from(TargetExpr::VariableName(string.clone()), name_loc.clone()); tb.disc(); tb.disc(); let expr_node = self.parse_expr(tb); let total_loc = target_node.loc() + expr_node.loc(); Some(Node::from( Stmt::Assign { target: target_node, expr: expr_node, }, total_loc, )) } _ => None, } } fn parse_expr(&mut self, tb: &mut TokBuffer) -> Node<Expr> { let expr_node = self.parse_expr_beg(tb); let mut chops: Vec<Node<Chop>> = Vec::new(); while let Some(chop_node) = self.maybe_parse_chop(tb) { chops.push(chop_node); } if chops.is_empty() { expr_node } else { let loc = expr_node.loc() + chops.last().unwrap().loc(); Node::from( Expr::Chain { init: Box::new(expr_node), chops, }, loc, ) } } fn parse_expr_beg(&mut self, tb: &mut TokBuffer) -> Node<Expr> { let (tok, left_loc) = tb.pop(); match tok { Tok::Name { string, .. 
} => Node::from(Expr::VariableName(string), left_loc), Tok::Integer(integer) => Node::from(Expr::IntegerLiteral(integer), left_loc), Tok::String { content, .. } => Node::from(Expr::StringLiteral(content), left_loc), Tok::Left(Matched::Curly) => { let stmts = self.parse_stmts(tb); let (right_tok, right_loc) = tb.pop(); match right_tok { Tok::Right(Matched::Curly) => { Node::from(Expr::BlockLiteral(stmts), left_loc + right_loc) } _ => panic!("TODO: generate an error here"), } } Tok::Left(Matched::Paren) => { let expr_node = self.parse_expr(tb); let (right_tok, right_loc) = tb.pop(); match right_tok { Tok::Right(Matched::Paren) => { Node::from(expr_node.unwrap(), left_loc + right_loc) } _ => panic!("TODO: generate an error here"), } } _ => Node::from(Expr::Invalid, left_loc), } } fn maybe_parse_chop(&mut self, tb: &mut TokBuffer) -> Option<Node<Chop>> { let (op_tok, op_loc) = tb.peek(0).clone(); if let Tok::BinOp(op) = op_tok { tb.disc(); let expr_node = self.parse_expr_beg(tb); let full_loc = &op_loc + expr_node.loc(); match op { BinOp::Plus => Some(Node::from(Chop::Plus(expr_node), full_loc)), BinOp::Minus => Some(Node::from(Chop::Minus(expr_node), full_loc)), BinOp::Star => Some(Node::from(Chop::Star(expr_node), full_loc)), BinOp::Slash => Some(Node::from(Chop::Slash(expr_node), full_loc)), BinOp::ToRight => Some(Node::from(Chop::ToRight(expr_node), full_loc)), } } else { None } } fn parse_comments(&mut self, tb: &mut TokBuffer) -> Vec<Node<Comment>> { let mut comments: Vec<Node<Comment>> = Vec::new(); while let Some(comment) = self.maybe_parse_comment(tb) { comments.push(comment); } comments } fn maybe_parse_comment(&mut self, tb: &mut TokBuffer) -> Option<Node<Comment>> { let (tok, loc) = tb.peek(0); if let Tok::Comment { content, delimitation_thickness, no_end_hash_warning, } = tok { let comment_node = Node::from( Comment::new(content.to_owned(), delimitation_thickness.to_owned()), loc.to_owned(), ); tb.disc(); Some(comment_node) } else { None } } }
use crate::ast::{Chop, Comment, Expr, Node, Program, Stmt, TargetExpr}; use crate::scu::{Loc, SourceCodeUnit}; use crate::tokenizer::{BinOp, CharReadingHead, Kw, Matched, StmtBinOp, Tok, Tokenizer}; use std::{collections::VecDeque, rc::Rc}; pub struct ParsingWarning { } pub struct TokBuffer { crh: CharReadingHead, tokenizer: Tokenizer, toks_ahead: VecDeque<(Tok, Loc)>, } impl TokBuffer { pub fn from(crh: CharReadingHead) -> TokBuffer { TokBuffer { crh, tokenizer: Tokenizer::new(), toks_ahead: VecDeque::new(), } } fn prepare_max_index(&mut self, n: usize) { if self.toks_ahead.len() < n + 1 { self.toks_ahead.reserve(n - self.toks_ahead.len()); } while self.toks_ahead.len() < n + 1 { self.toks_ahead .push_back(self.tokenizer.pop_tok(&mut self.crh)); } } fn prepare_all(&mut self) { loop { self.toks_ahead .push_back(self.tokenizer.pop_tok(&mut self.crh)); if matches!(self.toks_ahead.back().map(|t| &t.0), Some(Tok::Eof)) { break; } } } fn peek(&mut self, n: usize) -> &(Tok, Loc) { self.prepare_max_index(n); &self.toks_ahead[n] } fn prepared(&self) -> &VecDeque<(Tok, Loc)> { &self.toks_ahead } fn pop(&mut self) -> (Tok, Loc) { self.peek(0); let tok_loc_opt = self.toks_ahead.pop_front(); if let Some(tok_loc) = tok_loc_opt { tok_loc } else { panic!("bug: no token to pop") } } fn disc(&mut self) { if self.toks_ahead.pop_front().is_none() { panic!("bug: token discarded but not peeked before") } } } impl TokBuffer { fn scu(&self) -> Rc<SourceCodeUnit> { self.crh.scu() } } pub struct Parser {} impl Parser { pub fn new() -> Parser { Parser {} } } impl Parser { pub fn parse_program(&mut self, tb: &mut TokBuffer) -> Node<Program> { let stmts = self.parse_all_as_stmts(tb); Node::from(Program { stmts }, Loc::total_of(tb.scu())) } fn parse_all_as_stmts(&mut self, tb: &mut TokBuffer) -> Vec<Node<Stmt>> { let mut stmts: Vec<Node<Stmt>> = Vec::new(); loop { let comments = self.parse_comments(tb); if matches!(tb.peek(0).0, Tok::Eof) { break; } let mut stmt_node = self.parse_stmt(tb); 
stmt_node.add_left_comments(comments); stmts.push(stmt_node); } stmts } fn parse_stmts(&mut self, tb: &mut TokBuffer) -> Vec<Node<Stmt>> { let mut stmts: Vec<Node<Stmt>> = Vec::new(); while let Some(stmt) = self.maybe_parse_stmt(tb) { stmts.push(stmt); } stmts } fn parse_stmt(&mut self, tb: &mut TokBuffer) -> Node<Stmt> { let left_comments = self.parse_comments(tb); let mut stmt_node = if let Some(stmt_node) = self.maybe_parse_stmt(tb) { stmt_node } else { let (_tok, loc) = tb.pop(); Node::from(Stmt::Invalid, loc) }; stmt_node.add_left_comments(left_comments); stmt_node } fn maybe_parse_stmt(&mut self, tb: &mut TokBuffer) -> Option<Node<Stmt>> { let (first_tok, first_loc) = tb.peek(0); if let Tok::Kw(kw) = first_tok { match kw { Kw::Np => { let kw_loc = first_loc.clone(); tb.disc(); Some(Node::from(Stmt::Nop, kw_loc)) } Kw::Pr => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::Print { expr: expr_node }, full_loc)) } Kw::Nl => { let kw_loc = first_loc.clone(); tb.disc(); Some(Node::from(Stmt::Newline, kw_loc)) } Kw::Ev => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::Evaluate { expr: expr_node }, full_loc)) } Kw::Do => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::Do { expr: expr_node }, full_loc)) } Kw::Dh => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::DoHere { expr: expr_node }, full_loc)) } Kw::Fh => { let kw_loc = first_loc.clone(); tb.disc
le let Some(chop_node) = self.maybe_parse_chop(tb) { chops.push(chop_node); } if chops.is_empty() { expr_node } else { let loc = expr_node.loc() + chops.last().unwrap().loc(); Node::from( Expr::Chain { init: Box::new(expr_node), chops, }, loc, ) } } fn parse_expr_beg(&mut self, tb: &mut TokBuffer) -> Node<Expr> { let (tok, left_loc) = tb.pop(); match tok { Tok::Name { string, .. } => Node::from(Expr::VariableName(string), left_loc), Tok::Integer(integer) => Node::from(Expr::IntegerLiteral(integer), left_loc), Tok::String { content, .. } => Node::from(Expr::StringLiteral(content), left_loc), Tok::Left(Matched::Curly) => { let stmts = self.parse_stmts(tb); let (right_tok, right_loc) = tb.pop(); match right_tok { Tok::Right(Matched::Curly) => { Node::from(Expr::BlockLiteral(stmts), left_loc + right_loc) } _ => panic!("TODO: generate an error here"), } } Tok::Left(Matched::Paren) => { let expr_node = self.parse_expr(tb); let (right_tok, right_loc) = tb.pop(); match right_tok { Tok::Right(Matched::Paren) => { Node::from(expr_node.unwrap(), left_loc + right_loc) } _ => panic!("TODO: generate an error here"), } } _ => Node::from(Expr::Invalid, left_loc), } } fn maybe_parse_chop(&mut self, tb: &mut TokBuffer) -> Option<Node<Chop>> { let (op_tok, op_loc) = tb.peek(0).clone(); if let Tok::BinOp(op) = op_tok { tb.disc(); let expr_node = self.parse_expr_beg(tb); let full_loc = &op_loc + expr_node.loc(); match op { BinOp::Plus => Some(Node::from(Chop::Plus(expr_node), full_loc)), BinOp::Minus => Some(Node::from(Chop::Minus(expr_node), full_loc)), BinOp::Star => Some(Node::from(Chop::Star(expr_node), full_loc)), BinOp::Slash => Some(Node::from(Chop::Slash(expr_node), full_loc)), BinOp::ToRight => Some(Node::from(Chop::ToRight(expr_node), full_loc)), } } else { None } } fn parse_comments(&mut self, tb: &mut TokBuffer) -> Vec<Node<Comment>> { let mut comments: Vec<Node<Comment>> = Vec::new(); while let Some(comment) = self.maybe_parse_comment(tb) { comments.push(comment); } 
comments } fn maybe_parse_comment(&mut self, tb: &mut TokBuffer) -> Option<Node<Comment>> { let (tok, loc) = tb.peek(0); if let Tok::Comment { content, delimitation_thickness, no_end_hash_warning, } = tok { let comment_node = Node::from( Comment::new(content.to_owned(), delimitation_thickness.to_owned()), loc.to_owned(), ); tb.disc(); Some(comment_node) } else { None } } }
(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::DoFileHere { expr: expr_node }, full_loc)) } Kw::If => { let kw_loc = first_loc.clone(); tb.disc(); let cond_expr_node = self.parse_expr(tb); let th_stmt_node = self.maybe_parse_stmt_extension_stmt(tb, Kw::Th); let el_stmt_node = self.maybe_parse_stmt_extension_stmt(tb, Kw::El); let mut full_loc = kw_loc; if let Some(stmt_node) = &th_stmt_node { full_loc += stmt_node.loc(); } if let Some(stmt_node) = &el_stmt_node { full_loc += stmt_node.loc(); } Some(Node::from( Stmt::If { cond_expr: cond_expr_node, th_stmt: th_stmt_node.map(Box::new), el_stmt: el_stmt_node.map(Box::new), }, full_loc, )) } _ => { let kw_loc = first_loc.clone(); tb.disc(); Some(Node::from(Stmt::Invalid, kw_loc)) } } } else if let Some(stmt) = self.maybe_parse_assign_stmt(tb) { Some(stmt) } else { None } } fn maybe_parse_stmt_extension_stmt( &mut self, tb: &mut TokBuffer, kw: Kw, ) -> Option<Node<Stmt>> { let (tok, _) = tb.peek(0); match tok { Tok::Kw(tok_kw) if *tok_kw == kw => { tb.disc(); Some(self.parse_stmt(tb)) } _ => None, } } fn maybe_parse_assign_stmt(&mut self, tb: &mut TokBuffer) -> Option<Node<Stmt>> { tb.prepare_max_index(1); let prepared = tb.prepared(); match (&prepared[0], &prepared[1]) { ((Tok::Name { string, .. }, name_loc), (Tok::StmtBinOp(StmtBinOp::ToLeft), _)) => { let target_node = Node::from(TargetExpr::VariableName(string.clone()), name_loc.clone()); tb.disc(); tb.disc(); let expr_node = self.parse_expr(tb); let total_loc = target_node.loc() + expr_node.loc(); Some(Node::from( Stmt::Assign { target: target_node, expr: expr_node, }, total_loc, )) } _ => None, } } fn parse_expr(&mut self, tb: &mut TokBuffer) -> Node<Expr> { let expr_node = self.parse_expr_beg(tb); let mut chops: Vec<Node<Chop>> = Vec::new(); whi
random
[ { "content": "struct Comments {\n\n\tleft_comments: Vec<Comment>,\n\n\tinternal_comments: Vec<Comment>,\n\n}\n\n\n\nimpl Comments {\n\n\tfn new() -> Comments {\n\n\t\tComments {\n\n\t\t\tleft_comments: Vec::new(),\n\n\t\t\tinternal_comments: Vec::new(),\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl<T> Node<T> {\n\n\tpub fn ...
Rust
cubespin/src/mcube.rs
setekhid/tastes
923c3ececc02a0f6282b7507a861a4ed68b19314
#[derive(Hash, Eq, PartialEq, Debug)] pub struct StatT(pub [[i8; 9]; 6]); pub fn print(stat: &StatT) { let StatT(s) = *stat; println!(" {} {} {}", s[0][0], s[0][1], s[0][2]); println!(" {} {} {}", s[0][3], s[0][4], s[0][5]); println!(" {} {} {}", s[0][6], s[0][7], s[0][8]); println!(""); println!("{} {} {} {} {} {} {} {} {} {} {} {}", s[1][0], s[1][1], s[1][2], s[2][0], s[2][1], s[2][2], s[3][0], s[3][1], s[3][2], s[4][0], s[4][1], s[4][2]); println!("{} {} {} {} {} {} {} {} {} {} {} {}", s[1][3], s[1][4], s[1][5], s[2][3], s[2][4], s[2][5], s[3][3], s[3][4], s[3][5], s[4][3], s[4][4], s[4][5]); println!("{} {} {} {} {} {} {} {} {} {} {} {}", s[1][6], s[1][7], s[1][8], s[2][6], s[2][7], s[2][8], s[3][6], s[3][7], s[3][8], s[4][6], s[4][7], s[4][8]); println!(""); println!(" {} {} {}", s[5][0], s[5][1], s[5][2]); println!(" {} {} {}", s[5][3], s[5][4], s[5][5]); println!(" {} {} {}", s[5][6], s[5][7], s[5][8]); } pub fn spin(stat: StatT, op: i8) -> StatT { match op { 0 => stat, 1 => spin1(stat), -1 => spin1(spin1(spin1(stat))), 2 => spin2(stat), -2 => spin2(spin2(spin2(stat))), 3 => spin3(stat), -3 => spin3(spin3(spin3(stat))), 4 => spin4(stat), -4 => spin4(spin4(spin4(stat))), 5 => spin5(stat), -5 => spin5(spin5(spin5(stat))), 6 => spin6(stat), -6 => spin6(spin6(spin6(stat))), _ => stat } } fn swap_cells(stat: StatT, f1: usize, c1: usize, f2: usize, c2: usize, f3: usize, c3: usize, f4: usize, c4: usize) -> StatT { let StatT(mut s) = stat; { let swap = s[f4-1][c4]; s[f4-1][c4] = s[f3-1][c3]; s[f3-1][c3] = s[f2-1][c2]; s[f2-1][c2] = s[f1-1][c1]; s[f1-1][c1] = swap; } return StatT(s); } fn swap_arris(stat: StatT, f1: usize, a1: [usize; 3], f2: usize, a2: [usize; 3], f3: usize, a3: [usize; 3], f4: usize, a4: [usize; 3]) -> StatT { let mut s = stat; for ci in 0..3 { s = swap_cells(s, f1, a1[ci], f2, a2[ci], f3, a3[ci], f4, a4[ci]); } return s; } fn spin_face(stat: StatT, fi: usize) -> StatT { let mut s = stat; s = swap_cells(s, fi, 1, fi, 5, fi, 7, fi, 3); s = 
swap_cells(s, fi, 0, fi, 2, fi, 8, fi, 6); return s; } fn spin1(stat: StatT) -> StatT { swap_arris(spin_face(stat, 1), 5, [2, 1, 0], 4, [2, 1, 0], 3, [2, 1, 0], 2, [2, 1, 0]) } fn spin2(stat: StatT) -> StatT { swap_arris(spin_face(stat, 2), 1, [0, 3, 6], 3, [0, 3, 6], 6, [0, 3, 6], 5, [8, 5, 2]) } fn spin3(stat: StatT) -> StatT { swap_arris(spin_face(stat, 3), 1, [6, 7, 8], 4, [0, 3, 6], 6, [2, 1, 0], 2, [8, 5, 2]) } fn spin4(stat: StatT) -> StatT { swap_arris(spin_face(stat, 4), 1, [8, 5, 2], 5, [0, 3, 6], 6, [8, 5, 2], 3, [8, 5, 2]) } fn spin5(stat: StatT) -> StatT { swap_arris(spin_face(stat, 5), 1, [2, 1, 0], 2, [0, 3, 6], 6, [6, 7, 8], 4, [8, 5, 2]) } fn spin6(stat: StatT) -> StatT { swap_arris(spin_face(stat, 6), 3, [6, 7, 8], 4, [6, 7, 8], 5, [6, 7, 8], 2, [6, 7, 8]) } use std::collections::HashMap; use std::rc::Rc; struct BfsNode(Option<Rc<BfsNode>>, i8); pub fn autospin(begin_stat: &StatT, end_stat: &StatT) -> Vec<i8> { let mut lleaves: HashMap<StatT, Rc<BfsNode>> = HashMap::new(); let mut rleaves: HashMap<StatT, Rc<BfsNode>> = HashMap::new(); lleaves.insert(clone(begin_stat), Rc::new(BfsNode(None, 0))); rleaves.insert(clone(end_stat), Rc::new(BfsNode(None, 0))); match link_trees(&lleaves, &rleaves, check_linkage(&lleaves, &rleaves)) { Some(result) => return result, None => () } for _ in 0..16 { lleaves = expand_leaves(lleaves); match link_trees(&lleaves, &rleaves, check_linkage(&lleaves, &rleaves)) { Some(result) => return result, None => () } rleaves = expand_leaves(rleaves); match link_trees(&lleaves, &rleaves, check_linkage(&lleaves, &rleaves)) { Some(result) => return result, None => () } } return vec![0]; } fn clone(stat: &StatT) -> StatT { StatT(stat.0.clone()) } fn expand_leaves(leaves: HashMap<StatT, Rc<BfsNode>>) -> HashMap<StatT, Rc<BfsNode>> { let mut fresh: HashMap<StatT, Rc<BfsNode>> = HashMap::new(); for (stat, steps) in leaves { for step_op in 1..7 { fresh.insert(spin(clone(&stat), step_op), Rc::new(BfsNode(Some(steps.clone()), step_op))); 
fresh.insert(spin(clone(&stat), -step_op), Rc::new(BfsNode(Some(steps.clone()), -step_op))); } } return fresh; } fn check_linkage<'a>(lleaves: &'a HashMap<StatT, Rc<BfsNode>>, rleaves: &'a HashMap<StatT, Rc<BfsNode>>) -> Option<&'a StatT> { for stat in lleaves.keys() { if rleaves.contains_key(stat) { return Some(stat); } } return None; } fn link_trees(lleaves: &HashMap<StatT, Rc<BfsNode>>, rleaves: &HashMap<StatT, Rc<BfsNode>>, linkage: Option<&StatT>) -> Option<Vec<i8>> { match linkage { Some(key_stat) => { let mut llist = bfstep2list_hm(lleaves.get(key_stat)); let mut rlist = bfstep2list_hm(rleaves.get(key_stat)); rlist.reverse(); for rlist_step in rlist { llist.push(-rlist_step); } Some(llist) }, None => None } } fn bfstep2list_hm(steps: Option<&Rc<BfsNode>>) -> Vec<i8> { match steps { Some(thing) => bfstep2list(Some(thing.clone())), None => bfstep2list(None) } } fn bfstep2list(steps: Option<Rc<BfsNode>>) -> Vec<i8> { match steps { Some(substeps) => { let mut step_list = bfstep2list(substeps.0.clone()); step_list.push(substeps.1); step_list }, None => Vec::new() } }
#[derive(Hash, Eq, PartialEq, Debug)] pub struct StatT(pub [[i8; 9]; 6]); pub fn print(stat: &StatT) { let StatT(s) = *stat; println!(" {} {} {}", s[0][0], s[0][1], s[0][2]); println!(" {} {} {}", s[0][3], s[0][4], s[0][5]); println!(" {} {} {}", s[0][6], s[0][7], s[0][8]); println!(""); println!("{} {} {} {} {} {} {} {} {} {} {} {}", s[1][0], s[1][1], s[1][2], s[2][0], s[2][1], s[2][2], s[3][0], s[3][1], s[3][2], s[4][0], s[4][1], s[4][2]); println!("{} {} {} {} {} {} {} {} {} {} {} {}", s[1][3], s[1][4], s[1][5], s[2][3], s[2][4], s[2][5], s[3][3], s[3][4], s[3][5], s[4][3], s[4][4], s[4][5]); println!("{} {} {} {} {} {} {} {} {} {} {} {}", s[1][6], s[1][7], s[1][8], s[2][6], s[2][7], s[2][8], s[3][6], s[3][7], s[3][8], s[4][6], s[4][7], s[4][8]); println!(""); println!(" {} {} {}", s[5][0], s[5][1], s[5][2]); println!(" {} {} {}", s[5][3], s[5][4], s[5][5]); println!(" {} {} {}", s[5][6], s[5][7], s[5][8]); } pub fn spin(stat: StatT, op: i8) -> StatT { match op { 0 => stat, 1 => spin1(stat), -1 => spin1(spin1(spin1(stat))), 2 => spin2(stat), -2 => spin2(spin2(spin2(stat))), 3 => spin3(stat), -3 => spin3(spin3(spin3(stat))), 4 => spin4(stat), -4 => spin4(spin4(spin4(stat))), 5 => spin5(stat), -5 => spin5(spin5(spin5(stat))), 6 => spin6(stat), -6 => spin6(spin6(spin6(stat))), _ => stat } } fn swap_cells(stat: StatT, f1: usize, c1: usize, f2: usize, c2: usize, f3: usize, c3: usize, f4: usize, c4: usize) -> StatT { let StatT(mut s) = stat; { let swap = s[f4-1][c4]; s[f4-1][c4] = s[f3-1][c3]; s[f3-1][c3] = s[f2-1][c2]; s[f2-1][c2] = s[f1-1][c1]; s[f1-1][c1] = swap; } return StatT(s); } fn swap_arris(stat: StatT, f1: usize, a1: [usize; 3], f2: usize, a2: [usize; 3], f3: usize, a3: [usize; 3], f4: usize, a4: [usize; 3]) -> StatT { let mut s = stat; for ci in 0..3 { s = swap_cells(s, f1, a1[ci], f2, a2[ci], f3, a3[ci], f4, a4[ci]); } return s; } fn spin_face(stat: StatT, fi: usize) -> StatT { let mut s = stat; s = swap_cells(s, fi, 1, fi, 5, fi, 7, fi, 3); s = 
swap_cells(s, fi, 0, fi, 2, fi, 8, fi, 6); return s; } fn spin1(stat: StatT) -> StatT { swap_arris(spin_face(stat, 1), 5, [2, 1, 0], 4, [2, 1, 0], 3, [2, 1, 0], 2, [2, 1, 0]) } fn spin2(stat: StatT) -> StatT { swap_arris(spin_face(stat, 2), 1, [0, 3, 6], 3, [0, 3, 6], 6, [0, 3, 6], 5, [8, 5, 2]) } fn spin3(stat: StatT) -> StatT { swap_arris(spin_face(stat, 3), 1, [6, 7, 8], 4, [0, 3, 6], 6, [2, 1, 0], 2, [8, 5, 2]) } fn spin4(stat: StatT) -> StatT { swap_arris(spin_face(stat, 4), 1, [8, 5, 2], 5, [0, 3, 6], 6, [8, 5, 2], 3, [8, 5, 2]) } fn spin5(stat: StatT) -> StatT { swap_arris(spin_face(stat, 5), 1, [2, 1, 0], 2, [0, 3, 6], 6, [6, 7, 8], 4, [8, 5, 2]) } fn spin6(stat: StatT) -> StatT { swap_arris(spin_face(stat, 6), 3, [6, 7, 8], 4, [6, 7, 8], 5, [6, 7, 8], 2, [6, 7, 8]) } use std::collections::HashMap; use std::rc::Rc; struct BfsNode(Option<Rc<BfsNode>>, i8); pub fn autospin(begin_stat: &StatT, end_stat: &StatT) -> Vec<i8> { let mut lleaves: HashMap<StatT, Rc<BfsNode>> = HashMap::new(); let mut rleaves: HashMap<StatT, Rc<BfsNode>> = HashMap::new(); lleaves.insert(clone(begin_stat), Rc::new(BfsNode(None, 0))); rleaves.insert(clone(end_stat), Rc::new(BfsNode(None, 0))); match link_trees(&lleaves, &rleaves, check_linkage(&lleaves, &rleaves)) {
); rlist.reverse(); for rlist_step in rlist { llist.push(-rlist_step); } Some(llist) }, None => None } } fn bfstep2list_hm(steps: Option<&Rc<BfsNode>>) -> Vec<i8> { match steps { Some(thing) => bfstep2list(Some(thing.clone())), None => bfstep2list(None) } } fn bfstep2list(steps: Option<Rc<BfsNode>>) -> Vec<i8> { match steps { Some(substeps) => { let mut step_list = bfstep2list(substeps.0.clone()); step_list.push(substeps.1); step_list }, None => Vec::new() } }
Some(result) => return result, None => () } for _ in 0..16 { lleaves = expand_leaves(lleaves); match link_trees(&lleaves, &rleaves, check_linkage(&lleaves, &rleaves)) { Some(result) => return result, None => () } rleaves = expand_leaves(rleaves); match link_trees(&lleaves, &rleaves, check_linkage(&lleaves, &rleaves)) { Some(result) => return result, None => () } } return vec![0]; } fn clone(stat: &StatT) -> StatT { StatT(stat.0.clone()) } fn expand_leaves(leaves: HashMap<StatT, Rc<BfsNode>>) -> HashMap<StatT, Rc<BfsNode>> { let mut fresh: HashMap<StatT, Rc<BfsNode>> = HashMap::new(); for (stat, steps) in leaves { for step_op in 1..7 { fresh.insert(spin(clone(&stat), step_op), Rc::new(BfsNode(Some(steps.clone()), step_op))); fresh.insert(spin(clone(&stat), -step_op), Rc::new(BfsNode(Some(steps.clone()), -step_op))); } } return fresh; } fn check_linkage<'a>(lleaves: &'a HashMap<StatT, Rc<BfsNode>>, rleaves: &'a HashMap<StatT, Rc<BfsNode>>) -> Option<&'a StatT> { for stat in lleaves.keys() { if rleaves.contains_key(stat) { return Some(stat); } } return None; } fn link_trees(lleaves: &HashMap<StatT, Rc<BfsNode>>, rleaves: &HashMap<StatT, Rc<BfsNode>>, linkage: Option<&StatT>) -> Option<Vec<i8>> { match linkage { Some(key_stat) => { let mut llist = bfstep2list_hm(lleaves.get(key_stat)); let mut rlist = bfstep2list_hm(rleaves.get(key_stat)
random
[ { "content": "fn mcube_mix(stat: mcube::StatT, steps: &[i8]) -> mcube::StatT {\n\n let mut s = stat;\n\n for st in steps {\n\n s = mcube::spin(s, *st);\n\n }\n\n return s;\n\n}\n", "file_path": "cubespin/src/main.rs", "rank": 6, "score": 107929.81570244649 }, { "content": ...
Rust
src/sp_lib/datastore/history.rs
jrtabash/stock_portfolio
84dcb73dee43152b323159e780333d36ee23bfe3
use std::error::Error; use crate::util::datetime; use crate::util::datetime::LocalDate; use crate::util::price_type::PriceType; use crate::datastore::datastore::DataStore; pub type Price = PriceType; #[inline(always)] pub fn tag() -> &'static str { &"history" } pub struct HistoryEntry { pub date: LocalDate, pub open: Price, pub high: Price, pub low: Price, pub close: Price, pub adj_close: Price, pub volume: u64 } impl HistoryEntry { pub fn new(date: LocalDate, open: Price, high: Price, low: Price, close: Price, adj_close: Price, volume: u64) -> Self { HistoryEntry { date: date, open: open, high: high, low: low, close: close, adj_close: adj_close, volume: volume } } pub fn parse_csv(csv: &str) -> Result<Self, Box<dyn Error>> { let values: Vec<&str> = csv.split(',').map(|field| field.trim()).collect(); if values.len() == HistoryEntry::number_of_fields() { Ok(HistoryEntry { date: datetime::parse_date(&values[0])?, open: values[1].parse::<Price>()?, high: values[2].parse::<Price>()?, low: values[3].parse::<Price>()?, close: values[4].parse::<Price>()?, adj_close: values[5].parse::<Price>()?, volume: values[6].parse::<u64>()? 
}) } else { Err(format!("HistoryEntry: Invalid csv data length={} expected=7", values.len()).into()) } } #[inline(always)] pub fn number_of_fields() -> usize { return 7 } } pub struct History { symbol: String, entries: Vec<HistoryEntry> } impl History { pub fn new(symbol: &str) -> Self { History { symbol: String::from(symbol), entries: Vec::new() } } pub fn parse_csv(symbol: &str, csv: &str) -> Result<Self, Box<dyn Error>> { let mut hist = History::new(symbol); for line in csv.split('\n') { if line.is_empty() || line.starts_with(char::is_alphabetic) { continue; } hist.entries.push(HistoryEntry::parse_csv(line)?); } Ok(hist) } pub fn parse_filter_csv(symbol: &str, csv: &str, pred: impl Fn(&HistoryEntry) -> bool) -> Result<Self, Box<dyn Error>> { let mut hist = History::new(symbol); for line in csv.split('\n') { if line.is_empty() || line.starts_with(char::is_alphabetic) { continue; } let entry = HistoryEntry::parse_csv(line)?; if pred(&entry) { hist.entries.push(entry); } } Ok(hist) } pub fn ds_select_all(ds: &DataStore, symbol: &str) -> Result<Self, Box<dyn Error>> { let content = ds.select_symbol(tag(), symbol)?; History::parse_csv(symbol, &content) } pub fn ds_select_if(ds: &DataStore, symbol: &str, pred: impl Fn(&HistoryEntry) -> bool) -> Result<Self, Box<dyn Error>> { let content = ds.select_symbol(tag(), symbol)?; History::parse_filter_csv(symbol, &content, pred) } pub fn ds_select_last(ds: &DataStore, symbol: &str) -> Result<Self, Box<dyn Error>> { let content = ds.select_last(tag(), symbol)?; History::parse_csv(symbol, &content) } pub fn check_csv(csv: &str) -> Result<(), Box<dyn Error>> { let hist = History::parse_csv("history_check", csv)?; let cnt = hist.count(); if cnt > 0 { let entries = hist.entries; let mut last_date = entries[0].date; for i in 1..cnt { let curr_date = entries[i].date; datetime::check_dup_or_back_gap(&last_date, &curr_date)?; last_date = curr_date; } } Ok(()) } #[inline(always)] pub fn symbol(&self) -> &str { &self.symbol } 
#[inline(always)] pub fn entries(&self) -> &Vec<HistoryEntry> { &self.entries } #[inline(always)] pub fn count(&self) -> usize { self.entries.len() } } #[cfg(test)] mod tests { use super::*; #[test] fn test_history_entry() { let csv = "2021-02-25,26.1,31.0,22.0,24.0,24.0,9000"; let entry = HistoryEntry::parse_csv(&csv).unwrap(); check_entry(&entry, datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); } #[test] fn test_history_entry_with_whitespace() { let csv = "2021-02-25, 26.1,31.0 ,22.0, 24.0 ,24.0,9000\n"; let entry = HistoryEntry::parse_csv(&csv).unwrap(); check_entry(&entry, datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); } #[test] fn test_history_entry_error() { let csv = "2021-02-25,26.1,31.0,22.0,24.0,24.0"; assert!(HistoryEntry::parse_csv(&csv).is_err()); let csv = "2021-02-25,26.1,31.0,22.0,24.0,24.0,9000,123"; assert!(HistoryEntry::parse_csv(&csv).is_err()); } #[test] fn test_history_parse_csv() { let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let hist = History::parse_csv("AAPL", &csv).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 3); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 24), 25.0, 30.0, 20.0, 26.0, 26.0, 10000); check_entry(&entries[1], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[2], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_csv_with_header() { let csv = "Date,Open,High,Low,Close,Adj Close,Volume\n\ 2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let hist = History::parse_csv("AAPL", &csv).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 3); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 24), 25.0, 
30.0, 20.0, 26.0, 26.0, 10000); check_entry(&entries[1], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[2], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_csv_with_empty_lines() { let csv = "\n\ 2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ \n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ \n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000\n\ \n"; let hist = History::parse_csv("AAPL", &csv).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 3); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 24), 25.0, 30.0, 20.0, 26.0, 26.0, 10000); check_entry(&entries[1], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[2], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_filter_csv() { let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let hist = History::parse_filter_csv("AAPL", &csv, |entry| entry.date > datetime::make_date(2021, 2, 24)).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 2); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[1], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_filter_csv_with_header() { let csv = "Date,Open,High,Low,Close,Adj Close,Volume\n\ 2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let hist = History::parse_filter_csv("AAPL", &csv, |entry| entry.date > datetime::make_date(2021, 2, 24)).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 2); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 25), 26.1, 
31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[1], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_check_csv() { let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; assert!(History::check_csv(&csv).is_ok()); let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-24,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; match History::check_csv(&csv) { Ok(_) => assert!(false), Err(err) => assert_eq!(&format!("{}", err), "Duplicate date 2021-02-24") }; let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-23,24.9,32.0,24.0,28.0,28.0,11000"; match History::check_csv(&csv) { Ok(_) => assert!(false), Err(err) => assert_eq!(&format!("{}", err), "Earlier date 2021-02-23") }; } fn check_entry(entry: &HistoryEntry, date: LocalDate, open: Price, high: Price, low: Price, close: Price, adj_close: Price, volume: u64) { assert_eq!(entry.date, date); assert_eq!(entry.open, open); assert_eq!(entry.high, high); assert_eq!(entry.low, low); assert_eq!(entry.close, close); assert_eq!(entry.adj_close, adj_close); assert_eq!(entry.volume, volume); } }
use std::error::Error; use crate::util::datetime; use crate::util::datetime::LocalDate; use crate::util::price_type::PriceType; use crate::datastore::datastore::DataStore; pub type Price = PriceType; #[inline(always)] pub fn tag() -> &'static str { &"history" } pub struct HistoryEntry { pub date: LocalDate, pub open: Price, pub high: Price, pub low: Price, pub close: Price, pub adj_close: Price, pub volume: u64 } impl HistoryEntry { pub fn new(date: LocalDate, open: Price, high: Price, low: Price, close: Price, adj_close: Price, volume: u64) -> Self { HistoryEntry { date: date, open: open, high: high, low: low, close: close, adj_close: adj_close, volume: volume } } pub fn parse_csv(csv: &str) -> Result<Self, Box<dyn Error>> { let values: Vec<&str> = csv.split(',').map(|field| field.trim()).collect(); if values.len() == HistoryEntry::number_of_fields() { Ok(HistoryEntry { date: datetime::parse_date(&values[0])?, open: values[1].parse::<Price>()?, high: values[2].parse::<Price>()?, low: values[3].parse::<Price>()?, close: values[4].parse::<Price>()?, adj_close: values[5].parse::<Price>()?, volume: values[6].parse::<u64>()? }) } else { Err(format!("HistoryEntry: Invalid csv data length={} expected=7", values.len()).into()) } } #[inline(always)] pub fn number_of_fields() -> usize { return 7 } } pub struct History { symbol: String, entries: Vec<HistoryEntry> } impl History { pub fn new(symbol: &str) -> Self { History { symbol: String::from(symbol), entries: Vec::new() } } pub fn parse_csv(symbol: &str, csv: &str) -> Result<Self, Box<dyn Error>> { let mut hist = History::new(symbol); for line in csv.split('\n') { if line.is_empty() || line.starts_with(char::is_alphabetic) { continue; } hist.entries.push(HistoryEntry::parse_csv(line)?); } Ok(hist) } pub fn parse_filter_csv(symbol: &str, csv: &str, pred: impl Fn(&HistoryEntry) -> bool) -> Result<Self, Box<dyn Error>> { let mut hist = History::new(symbol); for line in csv.split('\n') { if line.is_empty(
hist = History::parse_filter_csv("AAPL", &csv, |entry| entry.date > datetime::make_date(2021, 2, 24)).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 2); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[1], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_check_csv() { let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; assert!(History::check_csv(&csv).is_ok()); let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-24,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; match History::check_csv(&csv) { Ok(_) => assert!(false), Err(err) => assert_eq!(&format!("{}", err), "Duplicate date 2021-02-24") }; let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-23,24.9,32.0,24.0,28.0,28.0,11000"; match History::check_csv(&csv) { Ok(_) => assert!(false), Err(err) => assert_eq!(&format!("{}", err), "Earlier date 2021-02-23") }; } fn check_entry(entry: &HistoryEntry, date: LocalDate, open: Price, high: Price, low: Price, close: Price, adj_close: Price, volume: u64) { assert_eq!(entry.date, date); assert_eq!(entry.open, open); assert_eq!(entry.high, high); assert_eq!(entry.low, low); assert_eq!(entry.close, close); assert_eq!(entry.adj_close, adj_close); assert_eq!(entry.volume, volume); } }
) || line.starts_with(char::is_alphabetic) { continue; } let entry = HistoryEntry::parse_csv(line)?; if pred(&entry) { hist.entries.push(entry); } } Ok(hist) } pub fn ds_select_all(ds: &DataStore, symbol: &str) -> Result<Self, Box<dyn Error>> { let content = ds.select_symbol(tag(), symbol)?; History::parse_csv(symbol, &content) } pub fn ds_select_if(ds: &DataStore, symbol: &str, pred: impl Fn(&HistoryEntry) -> bool) -> Result<Self, Box<dyn Error>> { let content = ds.select_symbol(tag(), symbol)?; History::parse_filter_csv(symbol, &content, pred) } pub fn ds_select_last(ds: &DataStore, symbol: &str) -> Result<Self, Box<dyn Error>> { let content = ds.select_last(tag(), symbol)?; History::parse_csv(symbol, &content) } pub fn check_csv(csv: &str) -> Result<(), Box<dyn Error>> { let hist = History::parse_csv("history_check", csv)?; let cnt = hist.count(); if cnt > 0 { let entries = hist.entries; let mut last_date = entries[0].date; for i in 1..cnt { let curr_date = entries[i].date; datetime::check_dup_or_back_gap(&last_date, &curr_date)?; last_date = curr_date; } } Ok(()) } #[inline(always)] pub fn symbol(&self) -> &str { &self.symbol } #[inline(always)] pub fn entries(&self) -> &Vec<HistoryEntry> { &self.entries } #[inline(always)] pub fn count(&self) -> usize { self.entries.len() } } #[cfg(test)] mod tests { use super::*; #[test] fn test_history_entry() { let csv = "2021-02-25,26.1,31.0,22.0,24.0,24.0,9000"; let entry = HistoryEntry::parse_csv(&csv).unwrap(); check_entry(&entry, datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); } #[test] fn test_history_entry_with_whitespace() { let csv = "2021-02-25, 26.1,31.0 ,22.0, 24.0 ,24.0,9000\n"; let entry = HistoryEntry::parse_csv(&csv).unwrap(); check_entry(&entry, datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); } #[test] fn test_history_entry_error() { let csv = "2021-02-25,26.1,31.0,22.0,24.0,24.0"; assert!(HistoryEntry::parse_csv(&csv).is_err()); let csv = 
"2021-02-25,26.1,31.0,22.0,24.0,24.0,9000,123"; assert!(HistoryEntry::parse_csv(&csv).is_err()); } #[test] fn test_history_parse_csv() { let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let hist = History::parse_csv("AAPL", &csv).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 3); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 24), 25.0, 30.0, 20.0, 26.0, 26.0, 10000); check_entry(&entries[1], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[2], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_csv_with_header() { let csv = "Date,Open,High,Low,Close,Adj Close,Volume\n\ 2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let hist = History::parse_csv("AAPL", &csv).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 3); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 24), 25.0, 30.0, 20.0, 26.0, 26.0, 10000); check_entry(&entries[1], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[2], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_csv_with_empty_lines() { let csv = "\n\ 2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ \n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ \n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000\n\ \n"; let hist = History::parse_csv("AAPL", &csv).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 3); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 24), 25.0, 30.0, 20.0, 26.0, 26.0, 10000); check_entry(&entries[1], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[2], 
datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_filter_csv() { let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let hist = History::parse_filter_csv("AAPL", &csv, |entry| entry.date > datetime::make_date(2021, 2, 24)).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 2); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[1], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_filter_csv_with_header() { let csv = "Date,Open,High,Low,Close,Adj Close,Volume\n\ 2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let
random
[ { "content": "pub fn update_stock_from_csv(stock: &mut Stock, csv: &str) -> Result<bool, Box<dyn Error>> {\n\n let hist = History::parse_csv(&stock.symbol, csv)?;\n\n if hist.count() > 0 {\n\n let latest = &hist.entries()[hist.count() - 1];\n\n if latest.adj_close > 0.0 {\n\n stoc...
Rust
src/main.rs
miyachan/lnx
a0fed83e43df6b898ff4d85dfb0f6ecd844af04c
#[macro_use] extern crate log; #[macro_use] extern crate serde_json; use std::fs::File; use std::io::BufReader; use std::sync::Arc; use anyhow::{Error, Result}; use axum::handler::{delete, get, post, Handler}; use axum::http::header; use axum::Router; use fern::colors::{Color, ColoredLevelConfig}; use hyper::http::HeaderValue; use hyper::server::conn::Http; use log::LevelFilter; use structopt::StructOpt; use tokio::net::TcpListener; use tokio_rustls::rustls::internal::pemfile::{certs, pkcs8_private_keys}; use tokio_rustls::rustls::{NoClientAuth, ServerConfig}; use tokio_rustls::TlsAcceptor; use tower::ServiceBuilder; use tower_http::auth::RequireAuthorizationLayer; use tower_http::set_header::SetResponseHeaderLayer; mod auth; mod responders; mod routes; use engine::SearchEngine; use tower::util::MapResponseLayer; use tower_http::add_extension::AddExtensionLayer; #[derive(Debug, StructOpt)] #[structopt(name = "lnx", about = "A ultra-fast, adaptable search engine.")] struct Settings { #[structopt(long, default_value = "info", env)] log_level: LevelFilter, #[structopt(long, env)] pretty_logs: Option<bool>, #[structopt(long, short, default_value = "127.0.0.1", env)] host: String, #[structopt(long, short, default_value = "8000", env)] port: u16, #[structopt(long, env)] tls_key_file: Option<String>, #[structopt(long, env)] tls_cert_file: Option<String>, #[structopt(long, short = "auth", env, hide_env_values = true)] authentication_key: Option<String>, #[structopt(long, short = "threads", env)] runtime_threads: Option<usize>, #[structopt(long, env)] log_file: Option<String>, #[structopt(long, env, takes_value = false)] enable_fast_fuzzy: bool, } fn main() { let settings = match setup() { Ok(s) => s, Err(e) => { eprintln!("error during server setup: {:?}", e); return; } }; let threads = settings.runtime_threads.unwrap_or_else(|| num_cpus::get()); info!("starting runtime with {} threads", threads); let maybe_runtime = tokio::runtime::Builder::new_multi_thread() 
.worker_threads(threads) .enable_all() .build(); let result = match maybe_runtime { Ok(runtime) => runtime.block_on(start(settings)), Err(e) => { error!("error during runtime creation: {:?}", e); return; } }; if let Err(e) = result { error!("error during server runtime: {:?}", e); } } fn setup_logger(level: LevelFilter, log_file: &Option<String>, pretty: bool) -> Result<()> { let mut colours = ColoredLevelConfig::new(); if pretty { colours = colours .info(Color::Green) .warn(Color::Yellow) .error(Color::BrightRed) .debug(Color::Magenta) .trace(Color::Cyan); } let mut builder = fern::Dispatch::new() .format(move |out, message, record| { out.finish(format_args!( "{} | {} | {:<5} - {}", chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"), record.target(), colours.color(record.level()), message, )) }) .level(level) .level_for( "sqlx", if level == LevelFilter::Info { LevelFilter::Warn } else { level }, ) .chain(std::io::stdout()); if let Some(file) = log_file { builder = builder.chain(fern::log_file(file)?); } builder.apply()?; Ok(()) } fn setup() -> Result<Settings> { let config: Settings = Settings::from_args(); setup_logger( config.log_level, &config.log_file, config.pretty_logs.unwrap_or(true), )?; Ok(config) } async fn start(settings: Settings) -> Result<()> { info!("checking tls files"); let tls = check_tls_files(&settings)?; info!("setting up the authorization manager"); let (authorization_manager, tokens) = auth::AuthManager::connect("./lnx-data/data").await?; let authorization_manager = Arc::new(authorization_manager); info!("setting up the search engine"); let engine = Arc::new(SearchEngine::create("./lnx-data/meta", settings.enable_fast_fuzzy).await?); let super_user_middleware = ServiceBuilder::new() .layer(RequireAuthorizationLayer::custom( auth::SuperUserAuthIfEnabled::bearer( settings .authentication_key .as_ref() .map(|v| v.as_str()) .unwrap_or_else(|| ""), settings.authentication_key.is_some(), &"Missing token bearer authorization header.", )?, )) 
.layer( SetResponseHeaderLayer::<HeaderValue, hyper::Body>::overriding( header::SERVER, HeaderValue::from_static("lnx"), ), ) .layer(AddExtensionLayer::new(authorization_manager)) .layer(MapResponseLayer::new(routes::map_status)) .into_inner(); let super_user_app = Router::new() .route("/tokens/revoke", post(routes::revoke_token)) .route("/tokens/permissions", post(routes::modify_permissions)) .route("/tokens/create", post(routes::create_token)) .route("/tokens/clear", delete(routes::revoke_all)) .layer(super_user_middleware); let search_auth = auth::UserAuthIfEnabled::bearer( tokens.clone(), auth::AuthFlags::SEARCH, settings.authentication_key.is_some(), &"Invalid permissions or missing token bearer", )?; let documents_auth = auth::UserAuthIfEnabled::bearer( tokens.clone(), auth::AuthFlags::MODIFY_DOCUMENTS, settings.authentication_key.is_some(), &"Invalid permissions or missing token bearer", )?; let index_auth = auth::UserAuthIfEnabled::bearer( tokens.clone(), auth::AuthFlags::MODIFY_INDEXES, settings.authentication_key.is_some(), &"Invalid permissions or missing token bearer", )?; let index_middleware = ServiceBuilder::new() .layer( SetResponseHeaderLayer::<HeaderValue, hyper::Body>::overriding( header::SERVER, HeaderValue::from_static("lnx"), ), ) .layer(AddExtensionLayer::new(engine)) .layer(MapResponseLayer::new(routes::map_status)) .into_inner(); let app = Router::new() .route( "/indexes/:index_name/search", get(routes::search_index.layer(RequireAuthorizationLayer::custom(search_auth.clone()))) .post( routes::search_index_json.layer(RequireAuthorizationLayer::custom(search_auth)), ), ) .route( "/indexes/:index_name/commit", post( routes::commit_index_changes .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .route( "/indexes/:index_name/rollback", post( routes::rollback_index_changes .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .route( "/indexes/:index_name", delete( 
routes::delete_index.layer(RequireAuthorizationLayer::custom(index_auth.clone())), ), ) .route( "/indexes", post(routes::create_index.layer(RequireAuthorizationLayer::custom(index_auth.clone()))), ) .route( "/indexes/:index_name/documents", post( routes::add_document .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ) .delete( routes::delete_documents .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .route( "/indexes/:index_name/documents/:document_id", get(routes::get_document .layer(RequireAuthorizationLayer::custom(documents_auth.clone()))), ) .route( "/indexes/:index_name/documents/clear", delete( routes::delete_all_documents .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .layer(index_middleware) .nest("/admin", super_user_app); let addr = format!("{}:{}", &settings.host, settings.port); let handle = match tls { Some(tls) => tokio::spawn(async move { info!("starting https server @ https://{}", addr); let acceptor = TlsAcceptor::from(tls); let listener = TcpListener::bind(&addr).await?; loop { let (stream, _addr) = listener.accept().await?; let acceptor = acceptor.clone(); let ap = app.clone(); tokio::spawn(async move { if let Ok(stream) = acceptor.accept(stream).await { if let Err(e) = Http::new().serve_connection(stream, ap).await { warn!("failed to serve connection: {:?}", e); }; } }); } }), None => tokio::spawn(async move { info!("starting http server @ http://{}", addr); axum::Server::bind(&addr.parse()?) 
.serve(app.into_make_service()) .await?; Ok::<(), Error>(()) }), }; tokio::signal::ctrl_c().await?; info!("shutting down server..."); handle.abort(); Ok(()) } fn check_tls_files(settings: &Settings) -> Result<Option<Arc<ServerConfig>>> { match (&settings.tls_key_file, &settings.tls_cert_file) { (Some(fp1), Some(fp2)) => Ok(Some(tls_server_config(fp1, fp2)?)), (None, None) => Ok(None), _ => { return Err(Error::msg( "missing a required TLS field, both key and cert must be provided.", )) } } } fn tls_server_config(key: &str, cert: &str) -> Result<Arc<ServerConfig>> { let mut config = ServerConfig::new(NoClientAuth::new()); let mut key_reader = BufReader::new(File::open(key)?); let mut cert_reader = BufReader::new(File::open(cert)?); let key = pkcs8_private_keys(&mut key_reader) .map_err(|_| Error::msg("failed to extract private keys"))? .remove(0); let certs = certs(&mut cert_reader).map_err(|_| Error::msg("failed to extract certificates"))?; config.set_single_cert(certs, key)?; config.set_protocols(&[b"h2".to_vec(), b"http/1.1".to_vec()]); Ok(Arc::new(config)) }
#[macro_use] extern crate log; #[macro_use] extern crate serde_json; use std::fs::File; use std::io::BufReader; use std::sync::Arc; use anyhow::{Error, Result}; use axum::handler::{delete, get, post, Handler}; use axum::http::header; use axum::Router; use fern::colors::{Color, ColoredLevelConfig}; use hyper::http::HeaderValue; use hyper::server::conn::Http; use log::LevelFilter; use structopt::StructOpt; use tokio::net::TcpListener; use tokio_rustls::rustls::internal::pemfile::{certs, pkcs8_private_keys}; use tokio_rustls::rustls::{NoClientAuth, ServerConfig}; use tokio_rustls::TlsAcceptor; use tower::ServiceBuilder; use tower_http::auth::RequireAuthorizationLayer; use tower_http::set_header::SetResponseHeaderLayer; mod auth; mod responders; mod routes; use engine::SearchEngine; use tower::util::MapResponseLayer; use tower_http::add_extension::AddExtensionLayer; #[derive(Debug, StructOpt)] #[structopt(name = "lnx", about = "A ultra-fast, adaptable search engine.")] struct Settings { #[structopt(long, default_value = "info", env)] log_level: LevelFilter, #[structopt(long, env)] pretty_logs: Option<bool>, #[structopt(long, short, default_value = "127.0.0.1", env)] host: String, #[structopt(long, short, default_value = "8000", env)] port: u16, #[structopt(long, env)] tls_key_file: Option<String>, #[structopt(long, env)] tls_cert_file: Option<String>, #[structopt(long, short = "auth", env, hide_env_values = true)] authentication_key: Option<String>, #[structopt(long, short = "threads", env)] runtime_threads: Option<usize>, #[structopt(long, env)] log_file: Option<String>, #[structopt(long, env, takes_value = false)] enable_fast_fuzzy: bool, } fn main() { let settings = match setup() { Ok(s) => s, Err(e) => { eprintln!("error during server setup: {:?}", e); return; } }; let threads = settings.runtime_threads.unwrap_or_else(|| num_cpus::get()); info!("starting runtime with {} threads", threads); let maybe_runtime = tokio::runtime::Builder::new_multi_thread() 
.worker_threads(threads) .enable_all() .build(); let result = match maybe_runtime { Ok(runtime) => runtime.block_on(start(settings)), Err(e) => { error!("error during runtime creation: {:?}", e); return; } }; if let Err(e) = result { error!("error during server runtime: {:?}", e); } } fn setup_logger(level: LevelFilter, log_file: &Option<String>, pretty: bool) -> Result<()> { let mut colours = ColoredLevelConfig::new(); if pretty { colours = colours .info(Color::Green) .warn(Color::Yellow) .error(Color::BrightRed) .debug(Color::Magenta) .trace(Color::Cyan); } let mut builder = fern::Dispatch::new() .format(move |out, message, record| { out.finish(format_args!( "{} | {} | {:<5} - {}", chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"), record.target(), colours.color(record.level()), message, )) }) .level(level) .level_for( "sqlx", if level == LevelFilter::Info { LevelFilter::Warn } else { level }, ) .chain(std::io::stdout()); if let Some(file) = log_file { builder = builder.chain(fern::log_file(file)?); } builder.apply()?; Ok(()) } fn setup() -> Result<Settings> { let config: Settings = Settings::from_args(); setup_logger( config.log_level, &config.log_file, config.pretty_logs.unwrap_or(true), )?; Ok(config) } async fn start(settings: Settings) -> Result<()> { info!("checking tls files"); let tls = check_tls_files(&settings)?; info!("setting up the authorization manager"); let (authorization_manager, tokens) = auth::AuthManager::connect("./lnx-data/data").await?; let authorization_manager = Arc::new(authorization_manager); info!("setting up the search engine"); let engine = Arc::new(SearchEngine::create("./lnx-data/meta", settings.enable_fast_fuzzy).await?); let super_user_middleware = ServiceBuilder::new() .layer(RequireAuthorizationLayer::custom( auth::SuperUserAuthIfEnabled::bearer( settings .authentication_key .as_ref() .map(|v| v.as_str()) .unwrap_or_else(|| ""), settings.authentication_key.is_some(), &"Missing token bearer authorization header.", )?, )) 
.layer( SetResponseHeaderLayer::<HeaderValue, hyper::Body>::overriding( header::SERVER, HeaderValue::from_static("lnx"), ), ) .layer(AddExtensionLayer::new(authorization_manager)) .layer(MapResponseLayer::new(routes::map_status)) .into_inner(); let super_user_app = Router::new() .route("/tokens/revoke", post(routes::revoke_token)) .route("/tokens/permissions", post(routes::modify_permissions)) .route("/tokens/create", post(routes::create_token)) .route("/tokens/clear", delete(routes::revoke_all)) .layer(super_user_middleware); let search_auth = auth::UserAuthIfEnabled::bearer( tokens.clone(), auth::AuthFlags::SEARCH, settings.authentication_key.is_some(), &"Invalid permissions or missing token bearer", )?; let documents_auth = auth::UserAuthIfEnabled::bearer( tokens.clone(), auth::AuthFlags::MODIFY_DOCUMENTS, settings.authentication_key.is_some(), &"Invalid permissions or missing token bearer", )?; let index_auth = auth::UserAuthIfEnabled::bearer( tokens.clone(), auth::AuthFlags::MODIFY_INDEXES, settings.authentication_key.is_some(), &"Invalid permissions or missing token bearer", )?; let index_middleware = ServiceBuilder::new() .layer( SetResponseHeaderLayer::<HeaderValue, hyper::Body>::overriding( header::SERVER, HeaderValue::from_static("lnx"), ), ) .layer(AddExtensionLayer::new(engine)) .layer(MapResponseLayer::new(routes::map_status)) .into_inner(); let app = Router::new() .route( "/indexes/:index_name/search", get(routes::search_index.layer(RequireAuthorizationLayer::custom(search_auth.clone()))) .post( routes::search_index_json.layer(RequireAuthorizationLayer::custom(search_auth)), ), ) .route( "/indexes/:index_name/commit", post( routes::commit_index_changes .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .route( "/indexes/:index_name/rollback", post( routes::rollback_index_changes .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .route( "/indexes/:index_name", delete( 
routes::delete_index.layer(RequireAuthorizationLayer::custom(index_auth.clone())), ), ) .route( "/indexes", post(routes::create_index.layer(RequireAuthorizationLayer::custom(index_auth.clone()))), ) .route( "/indexes/:index_name/documents", post( routes::add_document .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ) .delete( routes::delete_documents .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .route( "/indexes/:index_name/documents/:document_id", get(routes::get_document .layer(RequireAuthorizationLayer::custom(documents_auth.clone()))), ) .route( "/indexes/:index_name/documents/clear", delete( routes::delete_all_documents .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .layer(index_middleware) .nest("/admin", super_user_app); let addr = format!("{}:{}", &settings.host, settings.port); let handle = match tls { Some(tls) => tokio::spawn(async move { info!("starting https server @ https://{}", addr); let acceptor = TlsAcceptor::from(tls); let listener = TcpListener::bind(&addr).await?; loop { let (stream, _addr) = listener.accept().await?; let acceptor = acceptor.clone(); let ap = app.clone(); tokio::spawn(async move { if let Ok(stream) = acceptor.accept(stream).await { if let Err(e) = Http::new().serve_connection(stream, ap).await { warn!("failed to serve connection: {:?}", e); }; } }); } }), None => tokio::spawn(async move { info!("starting http server @ http://{}", addr); axum::Server::bind(&addr.parse()?) .serve(app.into_make_service()) .await?; Ok::<(), Error>(()) }), }; tokio::signal::ctrl_c().await?; info!("shutting down server..."); handle.abort(); Ok(()) } fn check_tls_
fn tls_server_config(key: &str, cert: &str) -> Result<Arc<ServerConfig>> { let mut config = ServerConfig::new(NoClientAuth::new()); let mut key_reader = BufReader::new(File::open(key)?); let mut cert_reader = BufReader::new(File::open(cert)?); let key = pkcs8_private_keys(&mut key_reader) .map_err(|_| Error::msg("failed to extract private keys"))? .remove(0); let certs = certs(&mut cert_reader).map_err(|_| Error::msg("failed to extract certificates"))?; config.set_single_cert(certs, key)?; config.set_protocols(&[b"h2".to_vec(), b"http/1.1".to_vec()]); Ok(Arc::new(config)) }
files(settings: &Settings) -> Result<Option<Arc<ServerConfig>>> { match (&settings.tls_key_file, &settings.tls_cert_file) { (Some(fp1), Some(fp2)) => Ok(Some(tls_server_config(fp1, fp2)?)), (None, None) => Ok(None), _ => { return Err(Error::msg( "missing a required TLS field, both key and cert must be provided.", )) } } }
function_block-function_prefixed
[ { "content": "fn main() -> Result<()> {\n\n // Tell Cargo that if the given file changes, to rerun this build script.\n\n println!(\"cargo:rerun-if-changed=./datasets\");\n\n\n\n let _ = fs::remove_dir_all(\"./_dist\");\n\n fs::create_dir_all(\"./_dist\")?;\n\n\n\n compress_frequency_dicts()?;\n\...
Rust
src/forex.rs
iamsauravsharma/alpha_vantage
80fbfb6ea798c4f80b0151d8c6b85ff4793cbe0d
use std::collections::HashMap; use serde::Deserialize; use crate::{ api::{ApiClient, OutputSize, TimeSeriesInterval}, deserialize::from_str, error::{detect_common_helper_error, Error, Result}, }; #[derive(Debug, Clone, Default)] struct MetaData { information: String, from_symbol: String, to_symbol: String, last_refreshed: String, interval: Option<String>, output_size: Option<String>, time_zone: String, } #[derive(Default, Debug, Clone)] pub struct Entry { time: String, open: f64, high: f64, low: f64, close: f64, } impl Entry { #[must_use] pub fn time(&self) -> &str { &self.time } #[must_use] pub fn open(&self) -> f64 { self.open } #[must_use] pub fn high(&self) -> f64 { self.high } #[must_use] pub fn low(&self) -> f64 { self.low } #[must_use] pub fn close(&self) -> f64 { self.close } } #[derive(Debug, Default)] pub struct Forex { meta_data: MetaData, forex: Vec<Entry>, } impl Forex { #[must_use] pub fn information(&self) -> &str { self.return_meta_string("information") } #[must_use] pub fn symbol_from(&self) -> &str { self.return_meta_string("from symbol") } #[must_use] pub fn symbol_to(&self) -> &str { self.return_meta_string("to symbol") } #[must_use] pub fn last_refreshed(&self) -> &str { self.return_meta_string("last refreshed") } #[must_use] pub fn time_zone(&self) -> &str { self.return_meta_string("time zone") } #[must_use] pub fn interval(&self) -> Option<&str> { self.operate_option_meta_value("interval") } #[must_use] pub fn output_size(&self) -> Option<&str> { self.operate_option_meta_value("output size") } #[must_use] pub fn entry(&self) -> &Vec<Entry> { &self.forex } fn return_meta_string(&self, which_val: &str) -> &str { match which_val { "information" => &self.meta_data.information, "from symbol" => &self.meta_data.from_symbol, "to symbol" => &self.meta_data.to_symbol, "time zone" => &self.meta_data.time_zone, "last refreshed" => &self.meta_data.last_refreshed, _ => "", } } fn operate_option_meta_value(&self, which_val: &str) -> Option<&str> { let 
value = match which_val { "interval" => &self.meta_data.interval, "output size" => &self.meta_data.output_size, _ => &None, }; value.as_deref() } } #[derive(Clone, Debug, Deserialize)] struct EntryHelper { #[serde(rename = "1. open", deserialize_with = "from_str")] open: f64, #[serde(rename = "2. high", deserialize_with = "from_str")] high: f64, #[serde(rename = "3. low", deserialize_with = "from_str")] low: f64, #[serde(rename = "4. close", deserialize_with = "from_str")] close: f64, } #[derive(Debug, Deserialize)] pub(crate) struct ForexHelper { #[serde(rename = "Error Message")] error_message: Option<String>, #[serde(rename = "Information")] information: Option<String>, #[serde(rename = "Note")] note: Option<String>, #[serde(rename = "Meta Data")] meta_data: Option<HashMap<String, String>>, #[serde(flatten)] forex: Option<HashMap<String, HashMap<String, EntryHelper>>>, } impl ForexHelper { pub(crate) fn convert(self) -> Result<Forex> { let mut forex_struct = Forex::default(); detect_common_helper_error(self.information, self.error_message, self.note)?; if self.meta_data.is_none() || self.forex.is_none() { return Err(Error::EmptyResponse); } let meta_data = self.meta_data.unwrap(); let information = &meta_data["1. Information"]; let from_symbol = &meta_data["2. From Symbol"]; let to_symbol = &meta_data["3. To Symbol"]; let last_refreshed = meta_data.get("4. Last Refreshed"); let mut last_refreshed_value = last_refreshed.cloned(); if last_refreshed_value.is_none() { let last_refreshed = meta_data.get("5. Last Refreshed"); last_refreshed_value = last_refreshed.cloned(); } let last_refreshed_value = last_refreshed_value.expect("Last refreshed value contains None"); let time_zone = meta_data.get("5. Time Zone"); let mut time_zone_value = time_zone.cloned(); if time_zone_value.is_none() { let time_zone = meta_data.get("6. Time Zone"); time_zone_value = time_zone.cloned(); } if time_zone_value.is_none() { let time_zone = meta_data.get("7. 
Time Zone"); time_zone_value = time_zone.cloned(); } let time_zone_value = time_zone_value.expect("Time zone contains None value"); let output_size = meta_data.get("4. Output Size"); let mut output_size_value = output_size.cloned(); if output_size_value.is_none() { let output_size = meta_data.get("6. Output Size"); output_size_value = output_size.cloned(); } let interval = meta_data.get("5. Interval"); let interval_value = interval.cloned(); forex_struct.meta_data = MetaData { information: information.to_string(), from_symbol: from_symbol.to_string(), to_symbol: to_symbol.to_string(), last_refreshed: last_refreshed_value, interval: interval_value, output_size: output_size_value, time_zone: time_zone_value, }; let mut forex_entries: Vec<Entry> = Vec::new(); for hash in self.forex.unwrap().values() { for val in hash.keys() { let mut entry = Entry { time: val.to_string(), ..Entry::default() }; let entry_helper = hash .get(val) .expect("Cannot get a val from hash map") .clone(); entry.open = entry_helper.open; entry.high = entry_helper.high; entry.low = entry_helper.low; entry.close = entry_helper.close; forex_entries.push(entry); } } forex_struct.forex = forex_entries; Ok(forex_struct) } } pub trait VecEntry { fn find(&self, time: &str) -> Option<Entry>; fn latest(&self) -> Entry; fn latestn(&self, n: usize) -> Result<Vec<Entry>>; } impl VecEntry for Vec<Entry> { #[must_use] fn find(&self, time: &str) -> Option<Entry> { for entry in self { if entry.time == time { return Some(entry.clone()); } } None } #[must_use] fn latest(&self) -> Entry { let mut latest = Entry::default(); let mut new_time = String::new(); for entry in self { if new_time < entry.time { latest = entry.clone(); new_time = entry.time.clone(); } } latest } fn latestn(&self, n: usize) -> Result<Vec<Entry>> { let mut time_list = Vec::new(); for entry in self { time_list.push(entry.time.clone()); } time_list.sort(); time_list.reverse(); let time_list_count: usize = time_list.len(); let mut full_list = 
Self::new(); for i in 0..n { let time = time_list.get(i); if let Some(time) = time { let entry = self .find(time) .unwrap_or_else(|| panic!("Failed to find time value for index {}", i)); full_list.push(entry); } else { return Err(Error::DesiredNumberOfEntryNotPresent(time_list_count)); } } Ok(full_list) } } pub struct ForexBuilder<'a> { api_client: &'a ApiClient, function: ForexFunction, from_symbol: &'a str, to_symbol: &'a str, interval: Option<TimeSeriesInterval>, output_size: Option<OutputSize>, } impl<'a> ForexBuilder<'a> { #[must_use] pub fn new( api_client: &'a ApiClient, function: ForexFunction, from_symbol: &'a str, to_symbol: &'a str, ) -> Self { Self { api_client, function, from_symbol, to_symbol, interval: None, output_size: None, } } #[must_use] pub fn interval(mut self, interval: TimeSeriesInterval) -> Self { self.interval = Some(interval); self } #[must_use] pub fn output_size(mut self, output_size: OutputSize) -> Self { self.output_size = Some(output_size); self } fn create_url(&self) -> String { let function = match self.function { ForexFunction::IntraDay => "FX_INTRADAY", ForexFunction::Daily => "FX_DAILY", ForexFunction::Weekly => "FX_WEEKLY", ForexFunction::Monthly => "FX_MONTHLY", }; let mut url = format!( "query?function={}&from_symbol={}&to_symbol={}", function, self.from_symbol, self.to_symbol ); if let Some(forex_interval) = self.interval { let interval = match forex_interval { TimeSeriesInterval::OneMin => "1min", TimeSeriesInterval::FiveMin => "5min", TimeSeriesInterval::FifteenMin => "15min", TimeSeriesInterval::ThirtyMin => "30min", TimeSeriesInterval::SixtyMin => "60min", }; url.push_str(&format!("&interval={}", interval)); }; if let Some(forex_output_size) = self.output_size { let size = match forex_output_size { OutputSize::Full => "full", OutputSize::Compact => "compact", }; url.push_str(&format!("&outputsize={}", size)); } url } pub async fn json(&self) -> Result<Forex> { let url = self.create_url(); let forex_helper: ForexHelper = 
self.api_client.get_json(url).await?; forex_helper.convert() } } #[derive(Copy, Clone)] pub enum ForexFunction { IntraDay, Daily, Weekly, Monthly, }
use std::collections::HashMap; use serde::Deserialize; use crate::{ api::{ApiClient, OutputSize, TimeSeriesInterval}, deserialize::from_str, error::{detect_common_helper_error, Error, Result}, }; #[derive(Debug, Clone, Default)] struct MetaData { information: String, from_symbol: String, to_symbol: String, last_refreshed: String, interval: Option<String>, output_size: Option<String>, time_zone: String, } #[derive(Default, Debug, Clone)] pub struct Entry { time: String, open: f64, high: f64, low: f64, close: f64, } impl Entry { #[must_use] pub fn time(&self) -> &str { &self.time } #[must_use] pub fn open(&self) -> f64 { self.open } #[must_use] pub fn high(&self) -> f64 { self.high } #[must_use] pub fn low(&self) -> f64 { self.low } #[must_use] pub fn close(&self) -> f64 { self.close } } #[derive(Debug, Default)] pub struct Forex { meta_data: MetaData, forex: Vec<Entry>, } impl Forex { #[must_use] pub fn information(&self) -> &str { self.return_meta_string("information") } #[must_use] pub fn symbol_from(&self) -> &str { self.return_meta_string("from symbol") } #[must_use] pub fn symbol_to(&self) -> &str { self.return_meta_string("to symbol") } #[must_use] pub fn last_refreshed(&self) -> &str { self.return_meta_string("last refreshed") } #[must_use] pub fn time_zone(&self) -> &str { self.return_meta_string("time zone") } #[must_use] pub fn interval(&self) -> Option<&str> { self.operate_option_meta_value("interval") } #[must_use] pub fn output_size(&self) -> Option<&str> { self.operate_option_meta_value("output size") } #[must_use] pub fn entry(&self) -> &Vec<Entry> { &self.forex } fn return_meta_string(&self, which_val: &str) -> &str { match which_val { "information" => &self.meta_data.information, "from symbol" => &self.meta_data.from_symbol, "to symbol" => &self.meta_data.to_symbol, "time zone" => &self.meta_data.time_zone, "last refreshed" => &self.meta_data.last_refreshed, _ => "", } } fn operate_option_meta_value(&self, which_val: &str) -> Option<&str> { let 
value = match which_val { "interval" => &self.meta_data.interval, "output size" => &self.meta_data.output_size, _ => &None, }; value.as_deref() } } #[derive(Clone, Debug, Deserialize)] struct EntryHelper { #[serde(rename = "1. open", deserialize_with = "from_str")] open: f64, #[serde(rename = "2. high", deserialize_with = "from_str")] high: f64, #[serde(rename = "3. low", deserialize_with = "from_str")] low: f64, #[serde(rename = "4. close", deserialize_with = "from_str")] close: f64, } #[derive(Debug, Deserialize)] pub(crate) struct ForexHelper { #[serde(rename = "Error Message")] error_message: Option<String>, #[serde(rename = "Information")] information: Option<String>, #[serde(rename = "Note")] note: Option<String>, #[serde(rename = "Meta Data")] meta_data: Option<HashMap<String, String>>, #[serde(flatten)] forex: Option<HashMap<String, HashMap<String, EntryHelper>>>, } impl ForexHelper { pub(crate) fn convert(self) -> Result<Forex> { let mut forex_struct = Forex::default(); detect_common_helper_error(self.information, self.error_message, self.note)?; if self.meta_data.is_none() || self.forex.is_none() { return Err(Error::EmptyResponse); } let meta_data = self.meta_data.unwrap(); let information = &meta_data["1. Information"]; let from_symbol = &meta_data["2. From Symbol"]; let to_symbol = &meta_data["3. To Symbol"]; let last_refreshed = meta_data.get("4. Last Refreshed"); let mut last_refreshed_value = last_refreshed.cloned(); if last_refreshed_value.is_none() { let last_refreshed = meta_data.get("5. Last Refreshed"); last_refreshed_value = last_refreshed.cloned(); } let last_refreshed_value = last_refreshed_value.expect("Last refreshed value contains None"); let time_zone = meta_data.get("5. Time Zone"); let mut time_zone_value = time_zone.cloned(); if time_zone_value.is_none() { let time_zone = meta_data.get("6. Time Zone"); time_zone_value = time_zone.cloned(); } if time_zone_value.is_none() { let time_zone = meta_data.get("7. 
Time Zone"); time_zone_value = time_zone.cloned(); } let time_zone_value = time_zone_value.expect("Time zone contains None value"); let output_size = meta_data.get("4. Output Size"); let mut output_size_value = output_size.cloned(); if output_size_value.is_none() { let output_size = meta_data.get("6. Output Size"); output_size_value = output_size.cloned(); } let interval = meta_data.get("5. Interval"); let interval_value = interval.cloned(); forex_struct.meta_data = MetaData { information: information.to_string(), from_symbol: from_symbol.to_string(), to_symbol: to_symbol.to_string(), last_refreshed: last_refreshed_value, interval: interval_value, output_size: output_size_value, time_zone: time_zone_value, }; let mut forex_entries: Vec<Entry> = Vec::new(); for hash in self.forex.unwrap().values() { for val in hash.keys() { let mut entry = Entry { time: val.to_string(), ..Entry::default() }; let entry_helper = hash .get(val) .expect("Cannot get a val from hash map") .clone(); entry.open = entry_helper.open; entry.high = entry_helper.high; entry.low = entry_helper.low; entry.close = entry_helper.close; forex_entries.push(entry); } } forex_struct.forex = forex_entries; Ok(forex_struct) } } pub trait VecEntry { fn find(&self, time: &str) -> Option<Entry>; fn latest(&self) -> Entry; fn latestn(&self, n: usize) -> Result<Vec<Entry>>; } impl VecEntry for Vec<Entry> { #[must_use] fn find(&self, time: &str) -> Option<Entry> { for entry in self { if entry.time == time { return Some(entry.clone()); } } None } #[must_use] fn latest(&self) -> Entry {
fn latestn(&self, n: usize) -> Result<Vec<Entry>> { let mut time_list = Vec::new(); for entry in self { time_list.push(entry.time.clone()); } time_list.sort(); time_list.reverse(); let time_list_count: usize = time_list.len(); let mut full_list = Self::new(); for i in 0..n { let time = time_list.get(i); if let Some(time) = time { let entry = self .find(time) .unwrap_or_else(|| panic!("Failed to find time value for index {}", i)); full_list.push(entry); } else { return Err(Error::DesiredNumberOfEntryNotPresent(time_list_count)); } } Ok(full_list) } } pub struct ForexBuilder<'a> { api_client: &'a ApiClient, function: ForexFunction, from_symbol: &'a str, to_symbol: &'a str, interval: Option<TimeSeriesInterval>, output_size: Option<OutputSize>, } impl<'a> ForexBuilder<'a> { #[must_use] pub fn new( api_client: &'a ApiClient, function: ForexFunction, from_symbol: &'a str, to_symbol: &'a str, ) -> Self { Self { api_client, function, from_symbol, to_symbol, interval: None, output_size: None, } } #[must_use] pub fn interval(mut self, interval: TimeSeriesInterval) -> Self { self.interval = Some(interval); self } #[must_use] pub fn output_size(mut self, output_size: OutputSize) -> Self { self.output_size = Some(output_size); self } fn create_url(&self) -> String { let function = match self.function { ForexFunction::IntraDay => "FX_INTRADAY", ForexFunction::Daily => "FX_DAILY", ForexFunction::Weekly => "FX_WEEKLY", ForexFunction::Monthly => "FX_MONTHLY", }; let mut url = format!( "query?function={}&from_symbol={}&to_symbol={}", function, self.from_symbol, self.to_symbol ); if let Some(forex_interval) = self.interval { let interval = match forex_interval { TimeSeriesInterval::OneMin => "1min", TimeSeriesInterval::FiveMin => "5min", TimeSeriesInterval::FifteenMin => "15min", TimeSeriesInterval::ThirtyMin => "30min", TimeSeriesInterval::SixtyMin => "60min", }; url.push_str(&format!("&interval={}", interval)); }; if let Some(forex_output_size) = self.output_size { let size = match 
forex_output_size { OutputSize::Full => "full", OutputSize::Compact => "compact", }; url.push_str(&format!("&outputsize={}", size)); } url } pub async fn json(&self) -> Result<Forex> { let url = self.create_url(); let forex_helper: ForexHelper = self.api_client.get_json(url).await?; forex_helper.convert() } } #[derive(Copy, Clone)] pub enum ForexFunction { IntraDay, Daily, Weekly, Monthly, }
let mut latest = Entry::default(); let mut new_time = String::new(); for entry in self { if new_time < entry.time { latest = entry.clone(); new_time = entry.time.clone(); } } latest }
function_block-function_prefix_line
[ { "content": "// convert str which has percent form to f64 val\n\nfn convert_str_percent_f64(val: &str) -> f64 {\n\n let mut s = val.to_owned();\n\n s.pop();\n\n s.trim().parse::<f64>().unwrap()\n\n}\n\n\n\n/// Builder to create new Sector\n\npub struct SectorBuilder<'a> {\n\n api_client: &'a ApiCli...
Rust
src/connectivity/wlan/lib/common/rust/src/test_utils/fake_stas.rs
DamieFC/fuchsia
f78a4a1326f4a4bb5834500918756173c01bab4f
use { crate::{ ie::{self, IeType}, mac, test_utils::fake_frames::{ fake_eap_rsne, fake_wpa1_ie, fake_wpa2_enterprise_rsne, fake_wpa2_legacy_rsne, fake_wpa2_mixed_rsne, fake_wpa2_rsne, fake_wpa2_wpa3_rsne, fake_wpa3_enterprise_192_bit_rsne, fake_wpa3_rsne, fake_wpa3_transition_rsne, }, }, anyhow::Context, fidl_fuchsia_wlan_common as fidl_common, fidl_fuchsia_wlan_internal as fidl_internal, }; #[rustfmt::skip] const DEFAULT_MOCK_IES: &'static [u8] = &[ 0x03, 0x01, 0x8c, 0x05, 0x04, 0x00, 0x01, 0x00, 0x02, 0x07, 0x10, 0x55, 0x53, 0x20, 0x24, 0x04, 0x24, 0x34, 0x04, 0x1e, 0x64, 0x0c, 0x1e, 0x95, 0x05, 0x24, 0x00, 0x20, 0x01, 0x00, 0x23, 0x02, 0x09, 0x00, 0x2d, 0x1a, 0xef, 0x09, 0x17, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3d, 0x16, 0x8c, 0x0d, 0x16, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x08, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x40, 0xbf, 0x0c, 0x91, 0x59, 0x82, 0x0f, 0xea, 0xff, 0x00, 0x00, 0xea, 0xff, 0x00, 0x00, 0xc0, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc3, 0x03, 0x01, 0x24, 0x24, 0xdd, 0x07, 0x00, 0x0b, 0x86, 0x01, 0x04, 0x08, 0x09, 0xdd, 0x18, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x01, 0x80, 0x00, 0x03, 0xa4, 0x00, 0x00, 0x27, 0xa4, 0x00, 0x00, 0x42, 0x43, 0x5e, 0x00, 0x62, 0x32, 0x2f, 0x00, ]; pub struct BssCreator { pub bssid: [u8; 6], pub bss_type: fidl_internal::BssTypes, pub beacon_period: u16, pub timestamp: u64, pub local_time: u64, pub cap: u16, pub chan: fidl_fuchsia_wlan_common::WlanChan, pub rssi_dbm: i8, pub snr_db: i8, pub protection_cfg: FakeProtectionCfg, pub ssid: Vec<u8>, pub rates: Vec<u8>, pub ies_overrides: IesOverrides, } impl BssCreator { pub fn create_bss(self) -> Result<fidl_internal::BssDescription, anyhow::Error> { let mut ies_updater = ie::IesUpdater::new(DEFAULT_MOCK_IES.to_vec()); ies_updater.set(IeType::SSID, &self.ssid[..]).context("set SSID")?; let 
rates_writer = ie::RatesWriter::try_new(&self.rates[..]).context("set rates")?; let mut rates_buf = vec![]; rates_writer.write_supported_rates(&mut rates_buf); ies_updater.set_raw(&rates_buf[..]).context("set rates")?; let mut ext_rates_buf = vec![]; rates_writer.write_ext_supported_rates(&mut ext_rates_buf); ies_updater.set_raw(&ext_rates_buf[..]).context("set extended rates")?; if let Some(rsne) = derive_rsne(self.protection_cfg) { ies_updater.set_raw(&rsne[..]).context("set RSNE")?; } if let Some(wpa1_vendor_ie) = derive_wpa1_vendor_ies(self.protection_cfg) { ies_updater.set_raw(&wpa1_vendor_ie[..]).context("set WPA1 vendor IE")?; } for ovr in self.ies_overrides.overrides { match ovr { IeOverride::Remove(ie_type) => ies_updater.remove(&ie_type), IeOverride::Set(ie_type, bytes) => { ies_updater .set(ie_type, &bytes[..]) .with_context(|| format!("set IE type: {:?}", ie_type))?; } } } Ok(fidl_internal::BssDescription { bssid: self.bssid, bss_type: self.bss_type, beacon_period: self.beacon_period, timestamp: self.timestamp, local_time: self.local_time, cap: self.cap, chan: self.chan, rssi_dbm: self.rssi_dbm, snr_db: self.snr_db, ies: ies_updater.finalize(), }) } } pub struct IesOverrides { overrides: Vec<IeOverride>, } impl IesOverrides { pub fn new() -> Self { Self { overrides: vec![] } } pub fn remove(mut self, ie_type: IeType) -> Self { self.overrides.push(IeOverride::Remove(ie_type)); self } pub fn set(mut self, ie_type: IeType, bytes: Vec<u8>) -> Self { self.overrides.push(IeOverride::Set(ie_type, bytes)); self } } enum IeOverride { Remove(IeType), Set(IeType, Vec<u8>), } #[derive(Debug, Copy, Clone, PartialEq)] pub enum FakeProtectionCfg { Open, Wep, Wpa1, Wpa1Enhanced, Wpa2Legacy, Wpa1Wpa2, Wpa2Mixed, Wpa2Enterprise, Wpa2, Wpa2Wpa3, Wpa3Transition, Wpa3, Wpa3Enterprise, Wpa2NoPrivacy, Eap, } pub fn build_fake_bss_creator__(protection_cfg: FakeProtectionCfg) -> BssCreator { BssCreator { bssid: [7, 1, 2, 77, 53, 8], bss_type: 
fidl_internal::BssTypes::Infrastructure, beacon_period: 100, timestamp: 0, local_time: 0, chan: fidl_common::WlanChan { primary: 3, secondary80: 0, cbw: fidl_common::Cbw::Cbw40 }, rssi_dbm: 0, snr_db: 0, cap: mac::CapabilityInfo(0) .with_privacy(match protection_cfg { FakeProtectionCfg::Open | FakeProtectionCfg::Wpa2NoPrivacy => false, _ => true, }) .0, protection_cfg, ssid: b"fake-ssid".to_vec(), rates: vec![0x82, 0x84, 0x8b, 0x96, 0x0c, 0x12, 0x18, 0x24, 0x30, 0x48, 0x60, 0x6c], ies_overrides: IesOverrides::new(), } } fn derive_rsne(protection_cfg: FakeProtectionCfg) -> Option<Vec<u8>> { match protection_cfg { FakeProtectionCfg::Wpa3Enterprise => Some(fake_wpa3_enterprise_192_bit_rsne()), FakeProtectionCfg::Wpa2Enterprise => Some(fake_wpa2_enterprise_rsne()), FakeProtectionCfg::Wpa3 => Some(fake_wpa3_rsne()), FakeProtectionCfg::Wpa3Transition => Some(fake_wpa3_transition_rsne()), FakeProtectionCfg::Wpa2Wpa3 => Some(fake_wpa2_wpa3_rsne()), FakeProtectionCfg::Wpa2Mixed => Some(fake_wpa2_mixed_rsne()), FakeProtectionCfg::Wpa2Legacy => Some(fake_wpa2_legacy_rsne()), FakeProtectionCfg::Wpa1Wpa2 | FakeProtectionCfg::Wpa2 | FakeProtectionCfg::Wpa2NoPrivacy => Some(fake_wpa2_rsne()), FakeProtectionCfg::Eap => Some(fake_eap_rsne()), _ => None, } } fn derive_wpa1_vendor_ies(protection_cfg: FakeProtectionCfg) -> Option<Vec<u8>> { match protection_cfg { FakeProtectionCfg::Wpa1 | FakeProtectionCfg::Wpa1Wpa2 => Some(fake_wpa1_ie(false)), FakeProtectionCfg::Wpa1Enhanced => Some(fake_wpa1_ie(true)), _ => None, } } #[macro_export] macro_rules! fake_fidl_bss { ($protection_type:ident $(, $bss_key:ident: $bss_value:expr)* $(,)?) => {{ let bss_creator = $crate::test_utils::fake_stas::BssCreator { $( $bss_key: $bss_value, )* ..$crate::test_utils::fake_stas::build_fake_bss_creator__($crate::test_utils::fake_stas::FakeProtectionCfg::$protection_type) }; let fidl_bss = bss_creator.create_bss().expect("expect creating BSS to succeed"); fidl_bss }} } #[macro_export] macro_rules! 
fake_bss { ($protection_type:ident $(, $bss_key:ident: $bss_value:expr)* $(,)?) => {{ let fidl_bss = $crate::fake_fidl_bss!($protection_type $(, $bss_key: $bss_value)*); let bss = $crate::bss::BssDescription::from_fidl(fidl_bss) .expect("expect BSS conversion to succeed"); bss }} } #[cfg(tests)] mod tests { use super::*; #[test] fn test_fake_bss_macro_ies() { let bss = fake_bss!(Wpa1Wpa2, ssid: b"fuchsia".to_vec(), rates: vec![11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24], ies_overrides: IesOverrides::new() .remove(IeType::new_vendor([0x00, 0x0b, 0x86, 0x01, 0x04, 0x08])) .set(IeType::DSSS_PARAM_SET, &[136]), ); #[rustfmt::skip] let expected_ies = vec![ 0x00, 0x07, b'f', b'u', b'c', b'h', b's', b'i', b'a', 0x01, 0x08, 11, 12, 13, 14, 15, 16, 17, 18, 0x03, 0x01, 136, 0x05, 0x04, 0x00, 0x01, 0x00, 0x02, 0x07, 0x10, 0x55, 0x53, 0x20, 0x24, 0x04, 0x24, 0x34, 0x04, 0x1e, 0x64, 0x0c, 0x1e, 0x95, 0x05, 0x24, 0x00, 0x20, 0x01, 0x00, 0x23, 0x02, 0x09, 0x00, 0x2d, 0x1a, 0xef, 0x09, 0x17, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 18, 1, 0, 0x00, 0x0F, 0xAC, 4, 1, 0, 0x00, 0x0F, 0xAC, 4, 1, 0, 0x00, 0x0F, 0xAC, 2, 0x32, 0x06, 19, 20, 21, 22, 23, 24, 0x3d, 0x16, 0x8c, 0x0d, 0x16, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x08, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x40, 0xbf, 0x0c, 0x91, 0x59, 0x82, 0x0f, 0xea, 0xff, 0x00, 0x00, 0xea, 0xff, 0x00, 0x00, 0xc0, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc3, 0x03, 0x01, 0x24, 0x24, 0xdd, 0x16, 0x00, 0x50, 0xf2, 0x01, 0x01, 0x00, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x00, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x00, 0x00, 0x50, 0xf2, 0x02, 0xdd, 0x18, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x01, 0x80, 0x00, 0x03, 0xa4, 0x00, 0x00, 0x27, 0xa4, 0x00, 0x00, 0x42, 0x43, 0x5e, 0x00, 0x62, 0x32, 0x2f, 0x00, ]; assert_eq!(bss.ies, expected_ies); } }
use { crate::{ ie::{self, IeType}, mac, test_utils::fake_frames::{ fake_eap_rsne, fake_wpa1_ie, fake_wpa2_enterprise_rsne, fake_wpa2_legacy_rsne, fake_wpa2_mixed_rsne, fake_wpa2_rsne, fake_wpa2_wpa3_rsne, fake_wpa3_enterprise_192_bit_rsne, fake_wpa3_rsne, fake_wpa3_transition_rsne, }, }, anyhow::Context, fidl_fuchsia_wlan_common as fidl_common, fidl_fuchsia_wlan_internal as fidl_internal, }; #[rustfmt::skip] const DEFAULT_MOCK_IES: &'static [u8] = &[ 0x03, 0x01, 0x8c, 0x05, 0x04, 0x00, 0x01, 0x00, 0x02, 0x07, 0x10, 0x55, 0x53, 0x20, 0x24, 0x04, 0x24, 0x34, 0x04, 0x1e, 0x64, 0x0c, 0x1e, 0x95, 0x05, 0x24, 0x00, 0x20, 0x01, 0x00, 0x23, 0x02, 0x09, 0x00, 0x2d, 0x1a, 0xef, 0x09, 0x17, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3d, 0x16, 0x8c, 0x0d, 0x16, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x08, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x40, 0xbf, 0x0c, 0x91, 0x59, 0x82, 0x0f, 0xea, 0xff, 0x00, 0x00, 0xea, 0xff, 0x00, 0x00, 0xc0, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc3, 0x03, 0x01, 0x24, 0x24, 0xdd, 0x07, 0x00, 0x0b, 0x86, 0x01, 0x04, 0x08, 0x09, 0xdd, 0x18, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x01, 0x80, 0x00, 0x03, 0xa4, 0x00, 0x00, 0x27, 0xa4, 0x00, 0x00, 0x42, 0x43, 0x5e, 0x00, 0x62, 0x32, 0x2f, 0x00, ]; pub struct BssCreator { pub bssid: [u8; 6], pub bss_type: fidl_internal::BssTypes, pub beacon_period: u16, pub timestamp: u64, pub local_time: u64, pub cap: u16, pub chan: fidl_fuchsia_wlan_common::WlanChan, pub rssi_dbm: i8, pub snr_db: i8, pub protection_cfg: FakeProtectionCfg, pub ssid: Vec<u8>, pub rates: Vec<u8>, pub ies_overrides: IesOverrides, } impl BssCreator { pub fn create_bss(self) -> Result<fidl_internal::BssDescription, anyhow::Error> { let mut ies_updater = ie::IesUpdater::new(DEFAULT_MOCK_IES.to_vec()); ies_updater.set(IeType::SSID, &self.ssid[..]).context("set SSID")?; let 
rates_writer = ie::RatesWriter::try_new(&self.rates[..]).context("set rates")?; let mut rates_buf = vec![]; rates_writer.write_supported_rates(&mut rates_buf); ies_updater.set_raw(&rates_buf[..]).context("set rates")?; let mut ext_rates_buf = vec![]; rates_writer.write_ext_supported_rates(&mut ext_rates_buf); ies_updater.set_raw(&ext_rates_buf[..]).context("set extended rates")?; if let Some(rsne) = derive_rsne(self.protection_cfg) { ies_updater.set_raw(&rsne[..]).context("set RSNE")?; } if let Some(wpa1_vendor_ie) = derive_wpa1_vendor_ies(self.protection_cfg) { ies_updater.set_raw(&wpa1_vendor_ie[..]).context("set WPA1 vendor IE")?; } for ovr in self.ies_overrides.overrides { match ovr { IeOverride::Remove(ie_type) => ies_updater.remove(&ie_type), IeOverride::Set(ie_type, bytes) => { ies_updater .set(ie_type, &bytes[..]) .with_context(|| format!("set IE type: {:?}", ie_type))?; } } } Ok(fidl_internal::BssDescription { bssid: self.bssid, bss_type: self.bss_type, beacon_period: self.beacon_period, timestamp: self.timestamp, local_time: self.local_time, cap: self.cap, chan: self.chan, rssi_dbm: self.rssi_dbm, snr_db: self.snr_db, ies: ies_updater.finalize(), }) } } pub struct IesOverrides { overrides: Vec<IeOverride>, } impl IesOverrides { pub fn new() -> Self { Self { overrides: vec![] } } pub fn remove(mut self, ie_type: IeType) -> Self { self.overrides.push(IeOverride::Remove(ie_type)); self } pub fn set(mut self, ie_type: IeType, bytes: Vec<u8>) -> Self { self.overrides.push(IeOverride::Set(ie_type, bytes)); self } } enum IeOverride { Remove(IeType), Set(IeType, Vec<u8>), } #[derive(Debug, Copy, Clone, PartialEq)] pub enum FakeProtectionCfg { Open, Wep, Wpa1, Wpa1Enhanced, Wpa2Legacy, Wpa1Wpa2, Wpa2Mixed, Wpa2Enterprise, Wpa2, Wpa2Wpa3, Wpa3Transition, Wpa3, Wpa3Enterprise, Wpa2NoPrivacy, Eap, } pub fn build_fake_bss_creator__(protection_cfg: FakeProtectionCfg) -> BssCreator { BssCreator { bssid: [7, 1, 2, 77, 53, 8], bss_type: 
fidl_internal::BssTypes::Infrastructure, beacon_period: 100, timestamp: 0, local_time: 0, chan: fidl_common::WlanChan { primary: 3, secondary80: 0, cbw: fidl_common::Cbw::Cbw40 }, rssi_dbm: 0, snr_db: 0, cap: mac::CapabilityInfo(0) .with_privacy(match protection_cfg { FakeProtectionCfg::Open | FakeProtectionCfg::Wpa2NoPrivacy => false, _ => true, }) .0, protection_cfg, ssid: b"fake-ssid".to_vec(), rates: vec![0x82, 0x84, 0x8b, 0x96, 0x0c, 0x12, 0x18, 0x24, 0x30, 0x48, 0x60, 0x6c], ies_overrides: IesOverrides::new(), } } fn derive_rsne(protection_cfg: FakeProtectionCfg) -> Option<Vec<u8>> { match protection_cfg { FakeProtectionCfg::Wpa3Enterprise => Some(fake_wpa3_enterprise_192_bit_rsne()), FakeProtectionCfg::Wpa2Enterprise => Some(fake_wpa2_enterprise_rsne()), FakeProtectionCfg::Wpa3 => Some(fake_wpa3_rsne()), FakeProtectionCfg::Wpa3Transition => Some(fake_wpa3_transition_rsne()), FakeProtectionCfg::Wpa2Wpa3 => Some(fake_wpa2_wpa3_rsne()), FakeProtectionCfg::Wpa2Mixed => Some(fake_wpa2_mixed_rsne()), FakeProtectionCfg::Wpa2Legacy => Some(fake_wpa2_legacy_rsne()), FakeProtectionCfg::Wpa1Wpa2 | FakeProtectionCfg::Wpa2 | FakeProtectionCfg::Wpa2NoPrivacy => Some(fake_wpa2_rsne()), FakeProtectionCfg::Eap => Some(fake_eap_rsne()), _ => None, } } fn derive_wpa1_vendor_ies(protection_cfg: FakeProtectionCfg) -> Option<Vec<u8>> { match protection_cfg { FakeProtectionCfg::Wpa1 | FakeProtectionCfg::Wpa1Wpa2 => Some(fake_wpa1_ie(false)), FakeProtectionCfg::Wpa1Enhanced => Some(fake_wpa1_ie(true)), _ => None, } } #[macro_export] macro_rules! fake_fidl_bss { ($protection_type:ident $(, $bss_key:ident: $bss_value:expr)* $(,)?) => {{ let bss_creator = $crate::test_utils::fake_stas::BssCreator { $( $bss_key: $bss_value, )* ..$crate::test_utils::fake_stas::build_fake_bss_creator__($crate::test_utils::fake_stas::FakeProtectionCfg::$protection_type) }; let fidl_bss = bss_creator.create_bss().expect("expect creating BSS to succeed"); fidl_bss }} } #[macro_export] macro_rules! 
fake_bss { ($protection_type
0x16, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x08, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x40, 0xbf, 0x0c, 0x91, 0x59, 0x82, 0x0f, 0xea, 0xff, 0x00, 0x00, 0xea, 0xff, 0x00, 0x00, 0xc0, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc3, 0x03, 0x01, 0x24, 0x24, 0xdd, 0x16, 0x00, 0x50, 0xf2, 0x01, 0x01, 0x00, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x00, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x00, 0x00, 0x50, 0xf2, 0x02, 0xdd, 0x18, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x01, 0x80, 0x00, 0x03, 0xa4, 0x00, 0x00, 0x27, 0xa4, 0x00, 0x00, 0x42, 0x43, 0x5e, 0x00, 0x62, 0x32, 0x2f, 0x00, ]; assert_eq!(bss.ies, expected_ies); } }
:ident $(, $bss_key:ident: $bss_value:expr)* $(,)?) => {{ let fidl_bss = $crate::fake_fidl_bss!($protection_type $(, $bss_key: $bss_value)*); let bss = $crate::bss::BssDescription::from_fidl(fidl_bss) .expect("expect BSS conversion to succeed"); bss }} } #[cfg(tests)] mod tests { use super::*; #[test] fn test_fake_bss_macro_ies() { let bss = fake_bss!(Wpa1Wpa2, ssid: b"fuchsia".to_vec(), rates: vec![11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24], ies_overrides: IesOverrides::new() .remove(IeType::new_vendor([0x00, 0x0b, 0x86, 0x01, 0x04, 0x08])) .set(IeType::DSSS_PARAM_SET, &[136]), ); #[rustfmt::skip] let expected_ies = vec![ 0x00, 0x07, b'f', b'u', b'c', b'h', b's', b'i', b'a', 0x01, 0x08, 11, 12, 13, 14, 15, 16, 17, 18, 0x03, 0x01, 136, 0x05, 0x04, 0x00, 0x01, 0x00, 0x02, 0x07, 0x10, 0x55, 0x53, 0x20, 0x24, 0x04, 0x24, 0x34, 0x04, 0x1e, 0x64, 0x0c, 0x1e, 0x95, 0x05, 0x24, 0x00, 0x20, 0x01, 0x00, 0x23, 0x02, 0x09, 0x00, 0x2d, 0x1a, 0xef, 0x09, 0x17, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 18, 1, 0, 0x00, 0x0F, 0xAC, 4, 1, 0, 0x00, 0x0F, 0xAC, 4, 1, 0, 0x00, 0x0F, 0xAC, 2, 0x32, 0x06, 19, 20, 21, 22, 23, 24, 0x3d, 0x16, 0x8c, 0x0d,
random
[]
Rust
src/port.rs
carlosmn/gphoto-rs
faf9dbb972c5d66f77d1265cd959314e6283fae1
use std::borrow::Cow; use std::ffi::{CStr, CString}; use std::marker::PhantomData; use std::mem; use ::libc::c_void; #[derive(Debug,PartialEq,Eq,Clone,Copy,Hash)] pub enum PortType { Serial, USB, Disk, PTPIP, Direct, SCSI, Other, } pub struct Port<'a> { inner: ::gphoto2::GPPortInfo, __phantom: PhantomData<&'a c_void>, } impl<'a> Port<'a> { pub fn port_type(&self) -> PortType { let mut port_type = mem::MaybeUninit::uninit(); unsafe { assert_eq!(::gphoto2::GP_OK, ::gphoto2::gp_port_info_get_type(self.inner, port_type.as_mut_ptr())); } match unsafe { port_type.assume_init() } { ::gphoto2::GP_PORT_SERIAL => PortType::Serial, ::gphoto2::GP_PORT_USB => PortType::USB, ::gphoto2::GP_PORT_DISK => PortType::Disk, ::gphoto2::GP_PORT_PTPIP => PortType::PTPIP, ::gphoto2::GP_PORT_USB_DISK_DIRECT => PortType::Direct, ::gphoto2::GP_PORT_USB_SCSI => PortType::SCSI, ::gphoto2::GP_PORT_NONE | _ => PortType::Other, } } pub fn name(&self) -> Cow<str> { let mut name = mem::MaybeUninit::uninit(); unsafe { assert_eq!(::gphoto2::GP_OK, ::gphoto2::gp_port_info_get_name(self.inner, name.as_mut_ptr())); String::from_utf8_lossy(CStr::from_ptr(name.assume_init()).to_bytes()) } } pub fn path(&self) -> Cow<str> { let mut path = mem::MaybeUninit::uninit(); unsafe { assert_eq!(::gphoto2::GP_OK, ::gphoto2::gp_port_info_get_path(self.inner, path.as_mut_ptr())); String::from_utf8_lossy(CStr::from_ptr(path.assume_init()).to_bytes()) } } #[doc(hidden)] pub fn as_ptr(&self) -> ::gphoto2::GPPortInfo { self.inner } } #[doc(hidden)] pub fn from_libgphoto2(_camera: & ::camera::Camera, ptr: ::gphoto2::GPPortInfo) -> Port { Port { inner: ptr, __phantom: PhantomData, } } #[doc(hidden)] pub fn from_libgphoto2_list(_camera: & PortList, ptr: ::gphoto2::GPPortInfo) -> Port { Port { inner: ptr, __phantom: PhantomData, } } #[repr(transparent)] pub struct PortList(*mut ::gphoto2::GPPortInfoList); impl Drop for PortList { fn drop(&mut self) { unsafe { ::gphoto2::gp_port_info_list_free(self.0); } } } impl PortList { pub 
fn new() -> ::Result<Self> { let mut list = mem::MaybeUninit::uninit(); try_unsafe!(::gphoto2::gp_port_info_list_new(list.as_mut_ptr())); let list = unsafe { list.assume_init() }; Ok(PortList(list as *mut _)) } pub fn load(&mut self) -> ::Result<()> { try_unsafe!(::gphoto2::gp_port_info_list_load(self.as_mut_ptr())); Ok(()) } pub fn lookup_name(&mut self, name: &str) -> ::Result<usize> { let cname = match CString::new(name) { Ok(s) => s, Err(_) => return Err(::error::from_libgphoto2(::gphoto2::GP_ERROR_BAD_PARAMETERS)), }; let idx = match unsafe { ::gphoto2::gp_port_info_list_lookup_name(self.as_mut_ptr(), cname.as_ptr()) } { idx if idx >= 0 => idx, err => return Err(::error::from_libgphoto2(err)), }; Ok(idx as usize) } pub fn lookup_path(&mut self, path: &str) -> ::Result<usize> { let cpath = match CString::new(path) { Ok(s) => s, Err(_) => return Err(::error::from_libgphoto2(::gphoto2::GP_ERROR_BAD_PARAMETERS)), }; let idx = match unsafe { ::gphoto2::gp_port_info_list_lookup_path(self.as_mut_ptr(), cpath.as_ptr()) } { idx if idx >= 0 => idx, err => return Err(::error::from_libgphoto2(err)), }; Ok(idx as usize) } pub fn get(&mut self, i: usize) -> ::Result<Port> { let i = i as libc::c_int; let mut cport = mem::MaybeUninit::uninit(); try_unsafe! { ::gphoto2::gp_port_info_list_get_info(self.0, i, cport.as_mut_ptr()) }; let cport = unsafe { cport.assume_init() }; Ok(from_libgphoto2_list(self, cport)) } fn as_mut_ptr(&mut self) -> *mut ::gphoto2::GPPortInfoList { self.0 } pub fn len(&mut self) -> usize { let l = unsafe { ::gphoto2::gp_port_info_list_count(self.0) }; if l < 0 { panic!(); } l as usize } }
use std::borrow::Cow; use std::ffi::{CStr, CString}; use std::marker::PhantomData; use std::mem; use ::libc::c_void; #[derive(Debug,PartialEq,Eq,Clone,Copy,Hash)] pub enum PortType { Serial, USB, Disk, PTPIP, Direct, SCSI, Other, } pub struct Port<'a> { inner: ::gphoto2::GPPortInfo, __phantom: PhantomData<&'a c_void>, } impl<'a> Port<'a> { pub fn port_type(&self) -> PortType { let mut port_type = mem::MaybeUninit::uninit(); unsafe { assert_eq!(::gphoto2::GP_OK, ::gphoto2::gp_port_info_get_type(self.inner, port_type.as_mut_ptr())); } match unsafe { port_type.assume_init() } { ::gphoto2::GP_PORT_SERIAL => PortType::Serial, ::gphoto2::GP_PORT_USB => PortType::USB, ::gphoto2::GP_PORT_DISK => PortType::Disk, ::gphoto2::GP_PORT_PTPIP => PortType::PTPIP, ::gphoto2::GP_PORT_USB_DISK_DIRECT => PortType::Direct, ::gphoto2::GP_PORT_USB_SCSI => PortType::SCSI, ::gphoto2::GP_PORT_NONE | _ => PortType::Other, } } pub fn name(&self) -> Cow<str> { let mut name = mem::MaybeUninit::uninit(); unsafe { assert_eq!(::gphoto2::GP_OK, ::gphoto2::gp_port_info_get_name(self.inner, name.as_mut_ptr())); String::from_utf8_lossy(CStr::from_ptr(name.assume_init()).to_bytes()) } } pub fn path(&self) -> Cow<str> { let mut path = mem::MaybeUninit::uninit(); unsafe { assert_eq!(::gphoto2::GP_OK, ::gphoto2::gp_port_info_get_path(self.inner, path.as_mut_ptr())); String::from_utf8_lossy(CStr::from_ptr(path.assume_init()).to_bytes()) } } #[doc(hidden)] pub fn as_ptr(&self) -> ::gphoto2::GPPortInfo { self.inner } } #[doc(hidden)] pub fn from_libgphoto2(_camera: & ::camera::Camera, ptr: ::gphoto2::GPPortInfo) -> Port { Port { inner: ptr, __phantom: PhantomData, } } #[doc(hidden)] pub fn from_libgphoto2_list(_camera: & PortList, ptr: ::gphoto2::GPPortInfo) -> Port { Port { inner: ptr, __phantom: PhantomData, } } #[repr(transparent)] pub struct PortList(*mut ::gphoto2::GPPortInfoList); impl Drop for PortList { fn drop(&mut self) { unsafe { ::gphoto2::gp_port_info_list_free(self.0); } } } impl PortList { pub 
fn new() -> ::Result<Self> { let mut list = mem::MaybeUninit::uninit(); try_unsafe!(::gphoto2::gp_port_info_list_new(list.as_mut_ptr())); let list = unsafe { list.assume_init() }; Ok(PortList(list as *mut _)) } pub fn load(&mut self) -> ::Result<()> { try_unsafe!(::gphoto2::gp_port_info_list_load(self.as_mut_ptr())); Ok(()) } pub fn lookup_name(&mut self, name: &str) -> ::Result<usize> { let cname = match CString::new(name) { Ok(s) => s, Err(_) => return Err(::error::from_libgphoto2(::gphoto2::GP_ERROR_BAD_PARAMETERS)), }; let idx =
; Ok(idx as usize) } pub fn lookup_path(&mut self, path: &str) -> ::Result<usize> { let cpath = match CString::new(path) { Ok(s) => s, Err(_) => return Err(::error::from_libgphoto2(::gphoto2::GP_ERROR_BAD_PARAMETERS)), }; let idx = match unsafe { ::gphoto2::gp_port_info_list_lookup_path(self.as_mut_ptr(), cpath.as_ptr()) } { idx if idx >= 0 => idx, err => return Err(::error::from_libgphoto2(err)), }; Ok(idx as usize) } pub fn get(&mut self, i: usize) -> ::Result<Port> { let i = i as libc::c_int; let mut cport = mem::MaybeUninit::uninit(); try_unsafe! { ::gphoto2::gp_port_info_list_get_info(self.0, i, cport.as_mut_ptr()) }; let cport = unsafe { cport.assume_init() }; Ok(from_libgphoto2_list(self, cport)) } fn as_mut_ptr(&mut self) -> *mut ::gphoto2::GPPortInfoList { self.0 } pub fn len(&mut self) -> usize { let l = unsafe { ::gphoto2::gp_port_info_list_count(self.0) }; if l < 0 { panic!(); } l as usize } }
match unsafe { ::gphoto2::gp_port_info_list_lookup_name(self.as_mut_ptr(), cname.as_ptr()) } { idx if idx >= 0 => idx, err => return Err(::error::from_libgphoto2(err)), }
if_condition
[ { "content": "/// Returns a structure with the version of the `libgphoto2` C library.\n\npub fn libgphoto2_version() -> LibraryVersion {\n\n LibraryVersion::new()\n\n}\n", "file_path": "src/version.rs", "rank": 2, "score": 62897.67397201661 }, { "content": "fn main() {\n\n let mut cont...
Rust
src/cai/uciv.rs
provotum/rust-crypto
3f284c3e6f3e3412a44d3cea11cd6487862c0d9f
use std::vec::Vec; use num::pow::Pow; use num::Zero; use std::ops::{Mul, Div, Sub, Add, Neg}; use ::arithmetic::mod_int::From; use ::arithmetic::mod_int::RandModInt; use ::arithmetic::mod_int::ModInt; use ::el_gamal::encryption::PublicKey; use ::el_gamal::ciphertext::CipherText; use ::el_gamal::serializer::Serializer; #[derive(Clone, Serialize, Deserialize)] pub struct PreImageSet { pub pre_images: Vec<ModInt> } #[derive(Clone, Serialize, Deserialize)] pub struct ImageSet { pub images: Vec<ModInt> } impl ImageSet { pub fn new(generator: ModInt, pre_image_set: PreImageSet) -> Self { let mut vec = vec![]; for pre_image in pre_image_set.pre_images.iter() { vec.push(generator.clone().pow(pre_image.clone())); } ImageSet { images: vec } } } #[derive(Eq, PartialEq, Debug, Clone, Hash, Serialize, Deserialize)] pub struct CaiProof { s1_options: Vec<ModInt>, s2_options: Vec<ModInt>, h1_options: Vec<ModInt>, h2_options: Vec<ModInt>, h: ModInt } impl CaiProof { pub fn new(public_key: PublicKey, cipher_text: CipherText, pre_image_set: PreImageSet, image_set: ImageSet, chosen_vote_idx: usize, voting_options: Vec<ModInt>) -> Self { assert_eq!(pre_image_set.pre_images.len(), image_set.images.len(), "The amount of pre-images and images must be equal"); assert_eq!(pre_image_set.pre_images.len(), voting_options.len(), "The amount of pre-images must be equal to the amount of voting options"); assert!(chosen_vote_idx < pre_image_set.pre_images.len(), "The chosen vote index must refer to a voting option for which a pre-image exists"); let mut s1_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut s2_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut h1_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut h2_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut a_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut b_options: 
Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let j = chosen_vote_idx; let c1 = cipher_text.big_g; let c2 = cipher_text.big_h; let mut string_to_hash = String::new(); string_to_hash += &c1.to_string(); string_to_hash += &c2.to_string(); for i in 0..pre_image_set.pre_images.len() { if i != j { let s1_i = ModInt::gen_modint(public_key.q.clone()); let h1_i = ModInt::gen_modint(public_key.q.clone()); s1_options[i] = s1_i.clone(); h1_options[i] = h1_i.clone(); let c1_i = public_key.g.clone().pow(s1_i.clone()).mul(c1.clone().pow(h1_i.clone().neg())); let c2_i = public_key.h.clone().pow(s1_i.clone()).mul((c2.clone().div(public_key.g.clone().pow(voting_options[i].clone()))).pow(h1_i.clone().neg())); let a_i = ModInt::gen_modint(public_key.q.clone()); a_options[i] = a_i.clone(); let r_i = public_key.g.clone().pow(a_i); string_to_hash += &c1_i.to_string(); string_to_hash += &c2_i.to_string(); string_to_hash += &r_i.to_string(); } else { let s2_j = ModInt::gen_modint(public_key.q.clone()); let h2_j = ModInt::gen_modint(public_key.q.clone()); s2_options[j] = s2_j.clone(); h2_options[j] = h2_j.clone(); let b_j = ModInt::gen_modint(public_key.q.clone()); b_options[j] = b_j.clone(); let c1_j = public_key.g.clone().pow(b_j.clone()); let c2_j = public_key.h.clone().pow(b_j.clone()); let r_j = public_key.g.clone().pow(s2_j).mul(image_set.images[j].clone().pow(h2_j.clone().neg())); string_to_hash += &c1_j.to_string(); string_to_hash += &c2_j.to_string(); string_to_hash += &r_j.to_string(); } } let h_hash = Serializer::string_to_sha512(string_to_hash); let h = ModInt::from_hex_string(h_hash, public_key.q.value.clone()); for i in 0..pre_image_set.pre_images.len() { if i != j { let h2_i = h.clone().sub(h1_options[i].clone()); h2_options[i] = h2_i.clone(); let s2_i = a_options[i].clone().add(pre_image_set.pre_images[i].clone().mul(h2_i.clone())); s2_options[i] = s2_i; } else { let h1_j = h.clone().sub(h2_options[j].clone()); h1_options[j] = h1_j.clone(); let s1_j = 
b_options[j].clone().add(cipher_text.random.clone().mul(h1_j.clone())); s1_options[j] = s1_j.clone(); } } CaiProof { s1_options, s2_options, h1_options, h2_options, h } } pub fn verify(&self, public_key: PublicKey, cipher_text: CipherText, image_set: ImageSet, voting_options: Vec<ModInt>) -> bool { let c1 = cipher_text.big_g; let c2 = cipher_text.big_h; let mut string_to_hash = String::new(); string_to_hash += &c1.to_string(); string_to_hash += &c2.to_string(); for i in 0..self.s1_options.len() { let c1_i = public_key.g.clone().pow(self.s1_options[i].clone()).mul(c1.clone().pow(self.h1_options[i].clone().neg())); let c2_i = public_key.h.clone().pow(self.s1_options[i].clone()).mul((c2.clone().div(public_key.g.clone().pow(voting_options[i].clone()))).pow(self.h1_options[i].clone().neg())); let r_i = public_key.g.clone().pow(self.s2_options[i].clone()).mul(image_set.images[i].clone().pow(self.h2_options[i].clone().neg())); string_to_hash += &c1_i.to_string(); string_to_hash += &c2_i.to_string(); string_to_hash += &r_i.to_string(); } let h_hash = Serializer::string_to_sha512(string_to_hash); let h = ModInt::from_hex_string(h_hash, public_key.q.value.clone()); self.h == h } } #[cfg(test)] mod uciv_proof_test { use ::el_gamal::encryption::PublicKey; use ::el_gamal::encryption::{encrypt}; use ::el_gamal::ciphertext::CipherText; use ::arithmetic::mod_int::ModInt; use arithmetic::mod_int::From; use ::num::bigint::BigInt; use ::num::Zero; use ::num::One; use std::vec::Vec; use std::clone::Clone; use ::cai::uciv::{CaiProof, ImageSet, PreImageSet}; #[test] pub fn test_valid_proof() { let pub_key: PublicKey = PublicKey { p: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero()), q: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()), h: ModInt::from_value_modulus(BigInt::from(32), BigInt::from(5)), g: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(5)) }; let mut voting_options = Vec::new(); voting_options.push(ModInt::zero()); 
voting_options.push(ModInt::one()); let message: ModInt = ModInt { value: BigInt::one(), modulus: BigInt::from(5) }; let cipher_text = encrypt(&pub_key, message.clone()); let chosen_vote_idx = 1; let pre_image_set = PreImageSet { pre_images: vec![ ModInt::from_value_modulus(BigInt::from(1), BigInt::from(5)), ModInt::from_value_modulus(BigInt::from(0), BigInt::from(5)) ] }; let image_set = ImageSet::new(pub_key.g.clone(), pre_image_set.clone()); let proof = CaiProof::new( pub_key.clone(), cipher_text.clone(), pre_image_set.clone(), image_set.clone(), chosen_vote_idx, voting_options.clone() ); let is_proven = proof.verify( pub_key.clone(), cipher_text.clone(), image_set.clone(), voting_options.clone() ); assert!(is_proven); } #[test] pub fn test_invalid_proof() { let pub_key: PublicKey = PublicKey { p: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero()), q: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()), h: ModInt::from_value_modulus(BigInt::from(32), BigInt::from(5)), g: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(5)) }; let mut voting_options = Vec::new(); voting_options.push(ModInt::zero()); voting_options.push(ModInt::one()); let message: ModInt = ModInt { value: BigInt::one(), modulus: BigInt::from(5) }; let cipher_text = encrypt(&pub_key, message.clone()); let chosen_vote_idx = 1; let pre_image_set = PreImageSet { pre_images: vec![ ModInt::from_value_modulus(BigInt::from(1), BigInt::from(5)), ModInt::from_value_modulus(BigInt::from(0), BigInt::from(5)) ] }; let image_set = ImageSet::new(pub_key.g.clone(), pre_image_set.clone()); let proof = CaiProof::new( pub_key.clone(), cipher_text.clone(), pre_image_set.clone(), image_set.clone(), chosen_vote_idx, voting_options.clone() ); let fake_cipher_text = CipherText { big_g: ModInt::from_value_modulus(BigInt::from(1), BigInt::from(0)), big_h: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(0)), random: ModInt::from_value_modulus(BigInt::from(3), BigInt::from(0)) }; let 
is_proven = proof.verify( pub_key.clone(), fake_cipher_text.clone(), image_set.clone(), voting_options.clone() ); assert!(!is_proven); } }
use std::vec::Vec; use num::pow::Pow; use num::Zero; use std::ops::{Mul, Div, Sub, Add, Neg}; use ::arithmetic::mod_int::From; use ::arithmetic::mod_int::RandModInt; use ::arithmetic::mod_int::ModInt; use ::el_gamal::encryption::PublicKey; use ::el_gamal::ciphertext::CipherText; use ::el_gamal::serializer::Serializer; #[derive(Clone, Serialize, Deserialize)] pub struct PreImageSet { pub pre_images: Vec<ModInt> } #[derive(Clone, Serialize, Deserialize)] pub struct ImageSet { pub images: Vec<ModInt> } impl ImageSet { pub fn new(generator: ModInt, pre_image_set: PreImageSet) -> Self { let mut vec = vec![]; for pre_image in pre_image_set.pre_images.iter() { vec.push(generator.clone().pow(pre_image.clone())); } ImageSet { images: vec } } } #[derive(Eq, PartialEq, Debug, Clone, Hash, Serialize, Deserialize)] pub struct CaiProof { s1_options: Vec<ModInt>, s2_options: Vec<ModInt>, h1_options: Vec<ModInt>, h2_options: Vec<ModInt>, h: ModInt } impl CaiProof { pub fn new(public_key: PublicKey, cipher_text: CipherText, pre_image_set: PreImageSet, image_set: ImageSet, chosen_vote_idx: usize, voting_options: Vec<ModInt>) -> Self { assert_eq!(pre_image_set.pre_images.len(), image_set.images.len(), "The amount of pre-images and images must be equal"); assert_eq!(pre_image_set.pre_images.len(), voting_options.len(), "The amount of pre-images must be equal to the amount of voting options"); assert!(chosen_vote_idx < pre_image_set.pre_images.len(), "The chosen vote index must refer to a voting option for which a pre-image exists"); let mut s1_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut s2_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut h1_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut h2_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut a_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut b_options: 
Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let j = chosen_vote_idx; let c1 = cipher_text.big_g; let c2 = cipher_text.big_h; let mut string_to_hash = String::new(); string_to_hash += &c1.to_string(); string_to_hash += &c2.to_string(); for i in 0..pre_image_set.pre_images.len() { if i != j { let s1_i = ModInt::gen_modint(public_key.q.clone()); let h1_i = ModInt::gen_modint(public_key.q.clone()); s1_options[i] = s1_i.clone(); h1_options[i] = h1_i.clone(); let c1_i = public_key.g.clone().pow(s1_i.clone()).mul(c1.clone().pow(h1_i.clone().neg())); let c2_i = public_key.h.clone().pow(s1_i.clone()).mul((c2.clone().div(public_key.g.clone().pow(voting_options[i].clone()))).pow(h1_i.clone().neg())); let a_i = ModInt::gen_modint(public_key.q.clone()); a_options[i] = a_i.clone(); let r_i = public_key.g.clone().pow(a_i); string_to_hash += &c1_i.to_string(); string_to_hash += &c2_i.to_string(); string_to_hash += &r_i.to_string(); } else { let s2_j = ModInt::gen_modint(public_key.q.clone()); let h2_j = ModInt::gen_modint(public_key.q.clone()); s2_options[j] = s2_j.clone(); h2_options[j] = h2_j.clone(); let b_j = ModInt::gen_modint(public_key.q.clone()); b_options[j] = b_j.clone(); let c1_j = public_key.g.clone().pow(b_j.clone()); let c2_j = public_key.h.clone().pow(b_j.clone()); let r_j = public_key.g.clone().pow(s2_j).mul(image_set.images[j].clone().pow(h2_j.clone().neg())); string_to_hash += &c1_j.to_string(); string_to_hash += &c2_j.to_string(); string_to_hash += &r_j.to_string(); } } let h_hash = Serializer::string_to_sha512(string_to_hash); let h = ModInt::from_hex_string(h_hash, public_key.q.value.clone()); for i in 0..pre_image_set.pre_images.len() { if i != j { let h2_i = h.clone().sub(h1_options[i].clone()); h2_options[i] = h2_i.clone(); let s2_i = a_options[i].clone().add(pre_image_set.pre_images[i].clone().mul(h2_i.clone())); s2_options[i] = s2_i; } else { let h1_j = h.clone().sub(h2_options[j].clone()); h1_options[j] = h1_j.clone(); let s1_j = 
b_options[j].clone().add(cipher_text.random.clone().mul(h1_j.clone())); s1_options[j] = s1_j.clone(); } } CaiProof { s1_options, s2_options, h1_options, h2_options, h } } pub fn verify(&self, public_key: PublicKey, cipher_text: CipherText, image_set: ImageSet, voting_options: Vec<ModInt>) -> bool { let c1 = cipher_text.big_g; let c2 = cipher_text.big_h; let mut string_to_hash = String::new(); string_to_hash += &c1.to_string(); string_to_hash += &c2.to_string(); for i in 0..self.s1_options.len() { let c1_i = public_key.g.clone().pow(self.s1_options[i].clone()).mul(c1.clone().pow(self.h1_options[i].clone().neg())); let c2_i = public_key.h.clone().pow(self.s1_options[i].clone()).mul((c2.clone().div(public_key.g.clone().pow(voting_options[i].clone()))).pow(self.h1_options[i].clone().neg())); let r_i = public_key.g.clone().pow(self.s2_options[i].clone()).mul(image_set.images[i].clone().pow(self.h2_options[i].clone().neg())); string_to_hash += &c1_i.to_string(); string_to_hash += &c2_i.to_string(); string_to_hash += &r_i.to_string(); } let h_hash = Serializer::string_to_sha512(string_to_hash); let h = ModInt::from_hex_string(h_hash, public_key.q.value.clone()); self.h == h } } #[cfg(test)] mod uciv_proof_test { use ::el_gamal::encryption::PublicKey; use ::el_gamal::encryption::{encrypt}; use ::el_gamal::ciphertext::CipherText; use ::arithmetic::mod_int::ModInt; use arithmetic::mod_int::From; use ::num::bigint::BigInt; use ::num::Zero; use ::num::One; use std::vec::Vec; use std::clone::Clone; use ::cai::uciv::{CaiProof, ImageSet, PreImageSet}; #[test] pub fn test_valid_proof() { let pub_key: PublicKey = PublicKey { p: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero()), q: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()), h: ModInt::from_value_modulus(BigInt::from(32), BigInt::from(5)), g: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(5)) }; let mut voting_options = Vec::new(); voting_options.push(ModInt::zero()); 
voting_options.push(ModInt::one()); let message: ModInt = ModInt { value: BigInt::one(), modulus: BigInt::from(5) }; let cipher_text = encrypt(&pub_key, message.clone()); let chosen_vote_idx = 1; let pre_image_set = PreImageSet { pre_images: vec![ ModInt::from_value_modulus(BigInt::from(1), BigInt::from(5)), ModInt::from_value_modulus(BigInt::from(0), BigInt::from(5)) ] }; let image_set = ImageSet::new(pub_key.g.clone(), pre_image_set.clone()); let proof = CaiProof::new( pub_key.clone(), cipher_text.clone(), pre_image_set.clone(), image_set.clone(), chosen_vote_idx, voting_options.clone() ); let is_proven = proof.verify( pub_key.clone(), cipher_text.clone(), image_set.clone(), voting_options.clone() ); assert!(is_proven); } #[test] pub fn test_invalid_proof() { let pub_key: PublicKey = PublicKey { p: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero()), q: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()), h: ModInt::from_value_modulus(BigInt::from(32), BigInt::from(5)), g: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(5)) }; let m
}
ut voting_options = Vec::new(); voting_options.push(ModInt::zero()); voting_options.push(ModInt::one()); let message: ModInt = ModInt { value: BigInt::one(), modulus: BigInt::from(5) }; let cipher_text = encrypt(&pub_key, message.clone()); let chosen_vote_idx = 1; let pre_image_set = PreImageSet { pre_images: vec![ ModInt::from_value_modulus(BigInt::from(1), BigInt::from(5)), ModInt::from_value_modulus(BigInt::from(0), BigInt::from(5)) ] }; let image_set = ImageSet::new(pub_key.g.clone(), pre_image_set.clone()); let proof = CaiProof::new( pub_key.clone(), cipher_text.clone(), pre_image_set.clone(), image_set.clone(), chosen_vote_idx, voting_options.clone() ); let fake_cipher_text = CipherText { big_g: ModInt::from_value_modulus(BigInt::from(1), BigInt::from(0)), big_h: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(0)), random: ModInt::from_value_modulus(BigInt::from(3), BigInt::from(0)) }; let is_proven = proof.verify( pub_key.clone(), fake_cipher_text.clone(), image_set.clone(), voting_options.clone() ); assert!(!is_proven); }
function_block-function_prefixed
[ { "content": "pub fn encrypt(public_key: &PublicKey, message: ModInt) -> CipherText {\n\n let random: ModInt = ModInt::gen_modint(public_key.q.clone());\n\n\n\n let g = public_key.g.clone();\n\n let h = public_key.h.clone();\n\n\n\n let big_g = g.clone().pow(random.clone());\n\n let big_h1= h.clo...
Rust
third-party/stringprep/tests/nameprep_tests.rs
capyloon/api-daemon
ab4e4b60aa9bb617734c64655c0b8940fff098bc
extern crate stringprep; use stringprep::{Error, nameprep}; fn assert_prohibited_character<T>(result: Result<T, Error>) { assert!(result.is_err()); } fn assert_prohibited_bidirectional_text<T>(result: Result<T, Error>) { assert!(result.is_err()); } #[test] fn test_nameprep() { assert_eq!("安室奈美恵-with-super-monkeys", nameprep("安室奈美恵-with-SUPER-MONKEYS").unwrap()); assert_eq!("미술", nameprep("미술").unwrap()); assert_eq!("ليهمابتكلموشعربي؟", nameprep("ليهمابتكلموشعربي؟").unwrap()); assert_eq!("他们为什么不说中文", nameprep("他们为什么不说中文").unwrap()); assert_eq!("למההםפשוטלאמדבריםעברית", nameprep("למההםפשוטלאמדבריםעברית").unwrap()); assert_eq!("почемужеонинеговорятпорусски", nameprep("почемужеонинеговорятпорусски").unwrap()); assert_eq!("tạisaohọkhôngthểchỉnóitiếngviệt", nameprep("TạisaohọkhôngthểchỉnóitiếngViệt").unwrap()); assert_eq!("ひとつ屋根の下2", nameprep("ひとつ屋根の下2").unwrap()); assert_eq!("pročprostěnemluvíčesky", nameprep("Pročprostěnemluvíčesky").unwrap()); assert_eq!("यहलोगहिन्दीक्योंनहींबोलसकतेहैं", nameprep("यहलोगहिन्दीक्योंनहींबोलसकतेहैं").unwrap()); assert_eq!("ພາສາລາວ", nameprep("ພາສາລາວ").unwrap()); assert_eq!("bonġusaħħa", nameprep("bonġusaħħa").unwrap()); assert_eq!("ελληνικά", nameprep("ελληνικά").unwrap()); } #[test] fn should_map_to_nothing() { let input = "foo\u{00ad}\u{034f}\u{1806}\u{180b}bar\u{200b}\u{2060}baz\u{fe00}\u{fe08}\u{fe0f}\u{feff}"; assert_eq!("foobarbaz", nameprep(input).unwrap()); } #[test] fn should_case_fold_ascii() { assert_eq!("cafe", nameprep("CAFE").unwrap()); } #[test] fn should_case_fold_8bit() { assert_eq!("ss", nameprep("\u{00df}").unwrap()); } #[test] fn should_case_fold_16bit() { assert_eq!("\u{0069}\u{0307}", nameprep("\u{0130}").unwrap()); } #[test] fn should_case_fold_multibyte() { let input = "\u{0143}\u{037a}"; let output = "\u{0144} \u{03b9}"; assert_eq!(output, nameprep(input).unwrap()); } #[test] fn should_case_fold() { let input = "\u{2121}\u{33c6}\u{1d7bb}"; let output = "telc\u{2215}\u{006b}\u{0067}\u{03c3}"; assert_eq!(output, 
nameprep(input).unwrap()); } #[test] fn should_normalize() { let input = "j\u{030c}\u{00a0}\u{00aa}"; let output = "\u{01f0} a"; assert_eq!(output, nameprep(input).unwrap()); } #[test] fn should_case_fold_and_normalize() { let input = "\u{1fb7}"; let output = "\u{1fb6}\u{03b9}"; assert_eq!(output, nameprep(input).unwrap()); } #[test] fn should_revert_case_fold_and_normalization() { let inputs = ["\u{01f0}", "\u{0390}", "\u{03b0}", "\u{1e96}", "\u{1f56}"]; for input in inputs.iter() { assert_eq!(input.clone(), nameprep(input).unwrap()); } } #[test] fn should_permit_ascii_space() { assert_eq!(" ", nameprep(" ").unwrap()); } #[test] fn should_map_8bit_space() { assert_eq!(" ", nameprep("\u{00a0}").unwrap()); } #[test] fn should_prohibit_multibyte_space() { assert_prohibited_character(nameprep("\u{1680}")); } #[test] fn should_map_multibyte_space1() { assert_eq!(" ", nameprep("\u{2000}").unwrap()); } #[test] fn should_drop_zero_width_space() { assert_eq!("", nameprep("\u{200b}").unwrap()); } #[test] fn should_map_multibyte_space2() { assert_eq!(" ", nameprep("\u{3000}").unwrap()); } #[test] fn should_permit_ascii_control() { assert_eq!("\u{0010}\u{007f}", nameprep("\u{0010}\u{007f}").unwrap()); } #[test] fn should_prohibit_8bit_control() { assert_prohibited_character(nameprep("\u{0085}")); } #[test] fn should_prohibit_multibyte_control() { assert_prohibited_character(nameprep("\u{180e}")); } #[test] fn should_drop_zero_width_no_break_space() { assert_eq!("", nameprep("\u{feff}").unwrap()); } #[test] fn should_prohibit_non_ascii_control() { assert_prohibited_character(nameprep("\u{1d175}")); } #[test] fn should_prohibit_plane0_private_use() { assert_prohibited_character(nameprep("\u{f123}")); } #[test] fn should_prohibit_plane15_private_use() { assert_prohibited_character(nameprep("\u{f1234}")); } #[test] fn should_prohibit_plane16_private_use() { assert_prohibited_character(nameprep("\u{10f234}")); } #[test] fn should_prohibit_non_character1() { 
assert_prohibited_character(nameprep("\u{8fffe}")); } #[test] fn should_prohibit_non_character2() { assert_prohibited_character(nameprep("\u{10ffff}")); } #[test] fn should_prohibit_non_plain_text() { assert_prohibited_character(nameprep("\u{fffd}")); } #[test] fn should_prohibit_ideographic_description() { assert_prohibited_character(nameprep("\u{2ff5}")); } #[test] fn should_normalize_display_property() { assert_eq!("\u{0301}", nameprep("\u{0341}").unwrap()); } #[test] fn should_prohibit_left_to_right_mark() { assert_prohibited_character(nameprep("\u{200e}")); } #[test] fn should_prohibit_deprecated() { assert_prohibited_character(nameprep("\u{202a}")); } #[test] fn should_prohibit_language_tagging1() { assert_prohibited_character(nameprep("\u{e0001}")); } #[test] fn should_prohibit_language_tagging2() { assert_prohibited_character(nameprep("\u{e0042}")); } #[test] fn should_prohibit_randalcat_with_lcat1() { assert_prohibited_bidirectional_text(nameprep("foo\u{05be}bar")); } #[test] fn should_prohibit_randalcat_with_lcat2() { assert_prohibited_bidirectional_text(nameprep("foo\u{fd50}bar")); } #[test] fn should_permit_randalcat1() { assert_eq!("foo \u{064e}bar", nameprep("foo\u{fe76}bar").unwrap()); } #[test] fn should_prohibit_mixed_randalcat() { assert_prohibited_bidirectional_text(nameprep("\u{0672}\u{0031}")); } #[test] fn should_permit_randalcat2() { assert_eq!("\u{0627}\u{0031}\u{0628}", nameprep("\u{0627}\u{0031}\u{0628}").unwrap()); } #[test] fn should_prohibit_unassigned_code_point() { assert_prohibited_character(nameprep("\u{e0002}")); } #[test] fn should_shrink() { let input = "X\u{00ad}\u{00df}\u{0130}\u{2121}j\u{030c}\u{00a0}\u{00aa}\u{03b0}\u{2000}"; let output = "xssi\u{0307}tel\u{01f0} a\u{03b0}\u{0020}"; assert_eq!(output, nameprep(input).unwrap()); } #[test] fn should_expand() { let input = "X\u{00df}\u{3316}\u{0130}\u{2121}\u{249f}\u{3300}"; let output = 
"xss\u{30ad}\u{30ed}\u{30e1}\u{30fc}\u{30c8}\u{30eb}\u{0069}\u{0307}\u{0074}\u{0065}\u{006c}\u{0028}\u{0064}\u{0029}\u{30a2}\u{30d1}\u{30fc}\u{30c8}"; assert_eq!(output, nameprep(input).unwrap()); }
extern crate stringprep; use stringprep::{Error, nameprep}; fn assert_prohibited_character<T>(result: Result<T, Error>) { assert!(result.is_err()); } fn assert_prohibited_bidirectional_text<T>(result: Result<T, Error>) { assert!(result.is_err()); } #[test] fn test_nameprep() { assert_eq!("安室奈美恵-with-super-monkeys", nameprep("安室奈美恵-with-SUPER-MONKEYS").unwrap()); assert_eq!("미술", nameprep("미술").unwrap()); assert_eq!("ليهمابتكلموشعربي؟", nameprep("ليهمابتكلموشعربي؟").unwrap()); assert_eq!("他们为什么不说中文", nameprep("他们为什么不说中文").unwrap()); assert_eq!("למההםפשוטלאמדבריםעברית", nameprep("למההםפשוטלאמדבריםעברית").unwrap()); assert_eq!("почемужеонинеговорятпорусски", nameprep("почемужеонинеговорятпорусски").unwrap()); assert_eq!("tạisaohọkhôngthểchỉnóitiếngviệt", nameprep("TạisaohọkhôngthểchỉnóitiếngViệt").unwrap()); assert_eq!("ひとつ屋根の下2", nameprep("ひとつ屋根の下2").unwrap()); assert_eq!("pročprostěnemluvíčesky", nameprep("Pročprostěnemluvíčesky").unwrap()); assert_eq!("यहलोगहिन्दीक्योंनहींबोलसकतेहैं", nameprep("यहलोगहिन्दीक्योंनहींबोलसकतेहैं").unwrap()); assert_eq!("ພາສາລາວ", nameprep("ພາສາລາວ").unwrap()); assert_eq!("bonġusaħħa", nameprep("bonġusaħħa").unwrap()); assert_eq!("ελληνικά", nameprep("ελληνικά").unwrap()); } #[test] fn should_map_to_nothing() { let input = "foo\u{00ad}\u{034f}\u{1806}\u{180b}bar\u{200b}\u{2060}baz\u{fe00}\u{fe08}\u{fe0f}\u{feff}"; assert_eq!("foobarbaz", nameprep(input).unwrap()); } #[test] fn should_case_fold_ascii() { assert_eq!("cafe", nameprep("CAFE").unwrap()); } #[test] fn should_case_fold_8bit() { assert_eq!("ss", nameprep("\u{00df}").unwrap()); } #[test] fn should_case_fold_16bit() { assert_eq!("\u{0069}\u{0307}", nameprep("\u{0130}").unwrap()); } #[test] fn should_case_fold_multibyte() { let input = "\u{0143}\u{037a}"; let output = "\u{0144} \u{03b9}"; assert_eq!(output, nameprep(input).unwrap()); } #[test] fn should_case_fold() { let input = "\u{2121}\u{33c6}\u{1d7bb}"; let output = "telc\u{2215}\u{006b}\u{0067}\u{03c3}"; assert_eq!(output, 
nameprep(input).unwrap()); } #[test] fn should_normalize() { let input = "j\u{030c}\u{00a0}\u{00aa}"; let output = "\u{01f0} a"; assert_eq!(output, nameprep(input).unwrap()); } #[test] fn should_case_fold_and_normalize() { let input = "\u{1fb7}"; let output = "\u{1fb6}\u{03b9}"; assert_eq!(output, nameprep(input).unwrap()); } #[test]
#[test] fn should_permit_ascii_space() { assert_eq!(" ", nameprep(" ").unwrap()); } #[test] fn should_map_8bit_space() { assert_eq!(" ", nameprep("\u{00a0}").unwrap()); } #[test] fn should_prohibit_multibyte_space() { assert_prohibited_character(nameprep("\u{1680}")); } #[test] fn should_map_multibyte_space1() { assert_eq!(" ", nameprep("\u{2000}").unwrap()); } #[test] fn should_drop_zero_width_space() { assert_eq!("", nameprep("\u{200b}").unwrap()); } #[test] fn should_map_multibyte_space2() { assert_eq!(" ", nameprep("\u{3000}").unwrap()); } #[test] fn should_permit_ascii_control() { assert_eq!("\u{0010}\u{007f}", nameprep("\u{0010}\u{007f}").unwrap()); } #[test] fn should_prohibit_8bit_control() { assert_prohibited_character(nameprep("\u{0085}")); } #[test] fn should_prohibit_multibyte_control() { assert_prohibited_character(nameprep("\u{180e}")); } #[test] fn should_drop_zero_width_no_break_space() { assert_eq!("", nameprep("\u{feff}").unwrap()); } #[test] fn should_prohibit_non_ascii_control() { assert_prohibited_character(nameprep("\u{1d175}")); } #[test] fn should_prohibit_plane0_private_use() { assert_prohibited_character(nameprep("\u{f123}")); } #[test] fn should_prohibit_plane15_private_use() { assert_prohibited_character(nameprep("\u{f1234}")); } #[test] fn should_prohibit_plane16_private_use() { assert_prohibited_character(nameprep("\u{10f234}")); } #[test] fn should_prohibit_non_character1() { assert_prohibited_character(nameprep("\u{8fffe}")); } #[test] fn should_prohibit_non_character2() { assert_prohibited_character(nameprep("\u{10ffff}")); } #[test] fn should_prohibit_non_plain_text() { assert_prohibited_character(nameprep("\u{fffd}")); } #[test] fn should_prohibit_ideographic_description() { assert_prohibited_character(nameprep("\u{2ff5}")); } #[test] fn should_normalize_display_property() { assert_eq!("\u{0301}", nameprep("\u{0341}").unwrap()); } #[test] fn should_prohibit_left_to_right_mark() { assert_prohibited_character(nameprep("\u{200e}")); 
} #[test] fn should_prohibit_deprecated() { assert_prohibited_character(nameprep("\u{202a}")); } #[test] fn should_prohibit_language_tagging1() { assert_prohibited_character(nameprep("\u{e0001}")); } #[test] fn should_prohibit_language_tagging2() { assert_prohibited_character(nameprep("\u{e0042}")); } #[test] fn should_prohibit_randalcat_with_lcat1() { assert_prohibited_bidirectional_text(nameprep("foo\u{05be}bar")); } #[test] fn should_prohibit_randalcat_with_lcat2() { assert_prohibited_bidirectional_text(nameprep("foo\u{fd50}bar")); } #[test] fn should_permit_randalcat1() { assert_eq!("foo \u{064e}bar", nameprep("foo\u{fe76}bar").unwrap()); } #[test] fn should_prohibit_mixed_randalcat() { assert_prohibited_bidirectional_text(nameprep("\u{0672}\u{0031}")); } #[test] fn should_permit_randalcat2() { assert_eq!("\u{0627}\u{0031}\u{0628}", nameprep("\u{0627}\u{0031}\u{0628}").unwrap()); } #[test] fn should_prohibit_unassigned_code_point() { assert_prohibited_character(nameprep("\u{e0002}")); } #[test] fn should_shrink() { let input = "X\u{00ad}\u{00df}\u{0130}\u{2121}j\u{030c}\u{00a0}\u{00aa}\u{03b0}\u{2000}"; let output = "xssi\u{0307}tel\u{01f0} a\u{03b0}\u{0020}"; assert_eq!(output, nameprep(input).unwrap()); } #[test] fn should_expand() { let input = "X\u{00df}\u{3316}\u{0130}\u{2121}\u{249f}\u{3300}"; let output = "xss\u{30ad}\u{30ed}\u{30e1}\u{30fc}\u{30c8}\u{30eb}\u{0069}\u{0307}\u{0074}\u{0065}\u{006c}\u{0028}\u{0064}\u{0029}\u{30a2}\u{30d1}\u{30fc}\u{30c8}"; assert_eq!(output, nameprep(input).unwrap()); }
fn should_revert_case_fold_and_normalization() { let inputs = ["\u{01f0}", "\u{0390}", "\u{03b0}", "\u{1e96}", "\u{1f56}"]; for input in inputs.iter() { assert_eq!(input.clone(), nameprep(input).unwrap()); } }
function_block-full_function
[]
Rust
src/airport_gates.rs
DarrenTsung/interview-rs
aa195501f35ba8d0d1be1c681de29145a8c3c054
use binary_heap_plus::*; /* At an airport you have a timetable for arrivals and departures. You need to determine the minimum number of gates you'd need to provide so that all the planes can be placed at a gate as per their schedule. The arrival and departure times for each plane are presented in two arrays, sorted by arrival time, and you're told the total number of flights for the day. Assume that no planes remain overnight at the airport; all fly in and back out on the same day. Assume that if a plane departs in the same minute as another plane arrives, the arriving plane takes priority (i.e. you'll still need the gate for the departing plane). Write a function that returns the minimum number of gates needed for the schedules you're given. Example: arrQ = {900, 940, 950,1100,1500,1800} depQ = {910,1200,1120,1130,1900,2000} flights = 6 */ #[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq)] pub struct Schedule { start: u32, end: u32, } impl From<(u32, u32)> for Schedule { fn from(v: (u32, u32)) -> Self { Self { start: v.0, end: v.1, } } } pub trait AirportGatesSolution { fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32; } pub struct AirportGatesSolutionNaive; impl AirportGatesSolution for AirportGatesSolutionNaive { fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32 { let mut gates: Vec<Schedule> = vec![]; for schedule in schedules_sorted_by_arrival { let schedule: Schedule = (*schedule).into(); let mut found_gate = false; for gate in &mut gates { if gate.end < schedule.start { gate.end = schedule.end; found_gate = true; break; } } if found_gate { continue; } gates.push(schedule); } gates.len() as u32 } } pub struct AirportGatesSolutionMoreEfficient; impl AirportGatesSolution for AirportGatesSolutionMoreEfficient { fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32 { let mut gate_end_times: BinaryHeap<u32, MinComparator> = BinaryHeap::from_vec(vec![]); for schedule in 
schedules_sorted_by_arrival { let schedule: Schedule = (*schedule).into(); if let Some(mut min_end_time) = gate_end_times.peek_mut() { if *min_end_time < schedule.start { *min_end_time = schedule.end; continue; } } gate_end_times.push(schedule.end); } gate_end_times.len() as u32 } } pub struct AirportGatesSolutionCounter; impl AirportGatesSolution for AirportGatesSolutionCounter { fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32 { #[derive(PartialEq, Eq, PartialOrd, Ord)] enum EventType { Arrival, Departure, } let sorted_events = { let mut events = vec![]; for (arrival, departure) in schedules_sorted_by_arrival { events.push((arrival, EventType::Arrival)); events.push((departure, EventType::Departure)); } events.sort(); events }; let mut max_gate_count = 0; let mut gate_count = 0; for (_time, event_type) in sorted_events { match event_type { EventType::Arrival => gate_count += 1, EventType::Departure => gate_count -= 1, } max_gate_count = std::cmp::max(gate_count, max_gate_count); } max_gate_count } } #[cfg(test)] mod tests { use super::*; fn check_correctness_for_all_solutions(assertions: impl Fn(&dyn AirportGatesSolution)) { assertions(&AirportGatesSolutionNaive); assertions(&AirportGatesSolutionMoreEfficient); assertions(&AirportGatesSolutionCounter); } #[test] fn trivial() { check_correctness_for_all_solutions(|s| { assert_eq!(s.airport_gates(&[(0, 10)]), 1); assert_eq!(s.airport_gates(&[(0, 10), (11, 30)]), 1); }) } #[test] fn arriving_at_same_time_as_departing() { check_correctness_for_all_solutions(|s| { assert_eq!(s.airport_gates(&[(0, 10), (10, 20)]), 2); }) } #[test] fn example() { check_correctness_for_all_solutions(|s| { assert_eq!( s.airport_gates(&[ (900, 910), (940, 1200), (950, 1120), (1100, 1130), (1500, 1900), (1800, 2000), ]), 3 ); }) } }
use binary_heap_plus::*; /* At an airport you have a timetable for arrivals and departures. You need to determine the minimum number of gates you'd need to provide so that all the planes can be placed at a gate as per their schedule. The arrival and departure times for each plane are presented in two arrays, sorted by arrival time, and you're told the total number of flights for the day. Assume that no planes remain overnight at the airport; all fly in and back out on the same day. Assume that if a plane departs in the same minute as another plane arrives, the arriving plane takes priority (i.e. you'll still need the gate for the departing plane). Write a function that returns the minimum number of gates needed for the schedules you're given. Example: arrQ = {900, 940, 950,1100,1500,1800} depQ = {910,1200,1120,1130,1900,2000} flights = 6 */ #[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq)] pub struct Schedule { start: u32, end: u32, } impl From<(u32, u32)> for Schedule { fn from(v: (u32, u32)) -> Self { Self { start: v.0, end: v.1, } } } pub trait AirportGatesSolution { fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32; } pub struct AirportGatesSolutionNaive; impl AirportGatesSolution for AirportGatesSolutionNaive { fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32 { let mut gates: Vec<Schedule> = vec![]; for schedule in schedules_sorted_by_arrival { let schedule: Schedule = (*schedule).into(); let mut found_gate = false; for gate in &mut gates { if gate.end < schedule.start { gate.end = schedule.end; found_gate = true; break; } } if found_gate { continue; } gates.push(schedule); } gates.len() as u32 } } pub struct AirportGatesSolutionMoreEfficient; impl AirportGatesSolution for AirportGatesSolutionMoreEfficient { fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32 { let mut gate_end_times: BinaryHeap<u32, MinComparator> = BinaryHeap::from_vec(vec![]); for schedule in 
schedules_sorted_by_arrival { let schedule: Schedule = (*schedule).into(); if let Some(mut min_end_time) = gate_end_times.peek_mut() { if *min_end_time < schedule.start { *min_end_time = schedule.end; continue; } } gate_end_times.push(schedule.end); } gate_end_times.len() as u32 } } pub struct AirportGatesSolutionCounter; impl AirportGatesSolution for AirportGatesSolutionCounter {
} #[cfg(test)] mod tests { use super::*; fn check_correctness_for_all_solutions(assertions: impl Fn(&dyn AirportGatesSolution)) { assertions(&AirportGatesSolutionNaive); assertions(&AirportGatesSolutionMoreEfficient); assertions(&AirportGatesSolutionCounter); } #[test] fn trivial() { check_correctness_for_all_solutions(|s| { assert_eq!(s.airport_gates(&[(0, 10)]), 1); assert_eq!(s.airport_gates(&[(0, 10), (11, 30)]), 1); }) } #[test] fn arriving_at_same_time_as_departing() { check_correctness_for_all_solutions(|s| { assert_eq!(s.airport_gates(&[(0, 10), (10, 20)]), 2); }) } #[test] fn example() { check_correctness_for_all_solutions(|s| { assert_eq!( s.airport_gates(&[ (900, 910), (940, 1200), (950, 1120), (1100, 1130), (1500, 1900), (1800, 2000), ]), 3 ); }) } }
fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32 { #[derive(PartialEq, Eq, PartialOrd, Ord)] enum EventType { Arrival, Departure, } let sorted_events = { let mut events = vec![]; for (arrival, departure) in schedules_sorted_by_arrival { events.push((arrival, EventType::Arrival)); events.push((departure, EventType::Departure)); } events.sort(); events }; let mut max_gate_count = 0; let mut gate_count = 0; for (_time, event_type) in sorted_events { match event_type { EventType::Arrival => gate_count += 1, EventType::Departure => gate_count -= 1, } max_gate_count = std::cmp::max(gate_count, max_gate_count); } max_gate_count }
function_block-full_function
[ { "content": "pub fn has_two_movies_for_flight(flight_length: u32, movie_lengths: Vec<u32>) -> bool {\n\n let mut complement_movie_lengths = HashSet::new();\n\n\n\n for movie_length in movie_lengths {\n\n // If movie is not valid, ignore.\n\n if movie_length > flight_length {\n\n ...
Rust
src/server/entry_api.rs
bingryan/quake
be1aae0ff36a22d47bdef5c99797d95293792a33
use std::collections::HashMap; use std::fs; use std::fs::File; use std::path::PathBuf; use rocket::fs::NamedFile; use rocket::response::status::NotFound; use rocket::response::Redirect; use rocket::serde::json::Json; use rocket::serde::{Deserialize, Serialize}; use rocket::tokio::task::spawn_blocking; use rocket::State; use rocket::{get, post}; use quake_core::entry::entry_file::EntryFile; use quake_core::entry::entry_paths::EntryPaths; use quake_core::helper::file_filter; use quake_core::usecases::entry_usecases; use quake_core::QuakeConfig; use crate::server::helper::csv_to_json::csv_to_json; use crate::server::ApiError; #[get("/<entry_type>")] pub(crate) async fn get_entries(entry_type: &str, config: &State<QuakeConfig>) -> Redirect { let request_url = format!("{:}/indexes/{:}/search", &config.search_url, entry_type); Redirect::to(request_url) } #[get("/<entry_type>/from_csv")] pub(crate) async fn get_entries_from_csv( entry_type: String, config: &State<QuakeConfig>, ) -> Result<Json<String>, NotFound<Json<ApiError>>> { let path = PathBuf::from(config.workspace.clone()) .join(entry_type) .join(EntryPaths::entries_csv()); let content = spawn_blocking(|| { let mut rdr = csv::Reader::from_reader(File::open(path).unwrap()); csv_to_json(&mut rdr).unwrap().to_string() }) .await .map_err(|err| ApiError { msg: err.to_string(), }) .unwrap(); Ok(Json(content)) } #[get("/<entry_type>/csv")] pub(crate) async fn get_entries_csv( entry_type: &str, config: &State<QuakeConfig>, ) -> Option<NamedFile> { let paths = EntryPaths::init(&config.workspace, &entry_type.to_string()); let file = NamedFile::open(paths.entries_csv); file.await.ok() } #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(crate = "rocket::serde")] struct EntryResponse { content: String, } #[post("/<entry_type>/new?<text>")] pub(crate) async fn create_entry( entry_type: String, text: String, config: &State<QuakeConfig>, ) -> Result<Json<EntryFile>, NotFound<Json<ApiError>>> { let workspace = 
config.workspace.to_string(); return match entry_usecases::create_entry(&workspace, &entry_type, &text) { Ok((_path, file)) => Ok(Json(file)), Err(err) => Err(NotFound(Json(ApiError { msg: err.to_string(), }))), }; } #[get("/<entry_type>/<id>")] pub(crate) async fn get_entry( entry_type: &str, id: usize, config: &State<QuakeConfig>, ) -> Result<Json<EntryFile>, NotFound<Json<ApiError>>> { let base_path = PathBuf::from(&config.workspace).join(entry_type); let index = id; let prefix = EntryFile::file_prefix(index); let vec = file_filter::filter_by_prefix(base_path, prefix); if vec.len() == 0 { return Err(NotFound(Json(ApiError { msg: "file not found".to_string(), }))); } let file_path = vec[0].clone(); let str = fs::read_to_string(file_path).expect("cannot read entry type"); let file = EntryFile::from(str.as_str(), id).unwrap(); return Ok(Json(file)); } #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(crate = "rocket::serde")] pub struct EntryUpdate { fields: HashMap<String, String>, } #[post("/<entry_type>/<id>", data = "<entry>")] pub(crate) async fn update_entry( entry_type: &str, id: usize, entry: Json<EntryUpdate>, config: &State<QuakeConfig>, ) -> Result<Json<EntryFile>, NotFound<Json<ApiError>>> { let path = PathBuf::from(&config.workspace).join(entry_type); return match entry_usecases::update_entry_fields(path, entry_type, id, &entry.fields) { Ok(file) => Ok(Json(file)), Err(err) => Err(NotFound(Json(ApiError { msg: err.to_string(), }))), }; }
use std::collections::HashMap; use std::fs; use std::fs::File; use std::path::PathBuf; use rocket::fs::NamedFile; use rocket::response::status::NotFound; use rocket::response::Redirect; use rocket::serde::json::Json; use rocket::serde::{Deserialize, Serialize}; use rocket::tokio::task::spawn_blocking; use rocket::State; use rocket::{get, post}; use quake_core::entry::entry_file::EntryFile; use quake_core::entry::entry_paths::EntryPaths; use quake_core::helper::file_filter; use quake_core::usecases::entry_usecases; use quake_core::QuakeConfig; use crate::server::helper::csv_to_json::csv_to_json; use crate::server::ApiError; #[get("/<entry_type>")] pub(crate) async fn get_entries(entry_type: &str, config: &State<QuakeConfig>) -> Redirect { let request_url = format!("{:}/indexes/{:}/search", &config.search_url, entry_type); Redirect::to(request_url) } #[get("/<entry_type>/from_csv")] pub(crate) async fn get_entries_from_csv( entry_type: String, config: &State<QuakeConfig>, ) -> Result<Json<String>, NotFound<Json<ApiError>>> { let path = PathBuf::from(config.workspace.clone()) .join(entry_type) .join(EntryPaths::entries_csv()); let content = spawn_blocking(|| { let mut rdr = csv::Reader::from_reader(File::open(path).unwrap()); csv_to_json(&mut rdr).unwrap().to_string() }) .await .map_err(|err| ApiError { msg: err.to_string(), }) .unwrap(); Ok(Json(content)) } #[get("/<entry_type>/csv")] pub(crate) async fn get_entries_csv( entry_type: &str, config: &State<QuakeConfig>, ) -> Option<NamedFile> { let paths = EntryPaths::init(&config.workspace, &entry_type.to_string()); let file = NamedFile::open(paths.entries_csv); file.await.ok() } #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(crate = "rocket::serde")] struct EntryResponse { content: String, } #[post("/<entry_type>/new?<text>")] p
#[get("/<entry_type>/<id>")] pub(crate) async fn get_entry( entry_type: &str, id: usize, config: &State<QuakeConfig>, ) -> Result<Json<EntryFile>, NotFound<Json<ApiError>>> { let base_path = PathBuf::from(&config.workspace).join(entry_type); let index = id; let prefix = EntryFile::file_prefix(index); let vec = file_filter::filter_by_prefix(base_path, prefix); if vec.len() == 0 { return Err(NotFound(Json(ApiError { msg: "file not found".to_string(), }))); } let file_path = vec[0].clone(); let str = fs::read_to_string(file_path).expect("cannot read entry type"); let file = EntryFile::from(str.as_str(), id).unwrap(); return Ok(Json(file)); } #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(crate = "rocket::serde")] pub struct EntryUpdate { fields: HashMap<String, String>, } #[post("/<entry_type>/<id>", data = "<entry>")] pub(crate) async fn update_entry( entry_type: &str, id: usize, entry: Json<EntryUpdate>, config: &State<QuakeConfig>, ) -> Result<Json<EntryFile>, NotFound<Json<ApiError>>> { let path = PathBuf::from(&config.workspace).join(entry_type); return match entry_usecases::update_entry_fields(path, entry_type, id, &entry.fields) { Ok(file) => Ok(Json(file)), Err(err) => Err(NotFound(Json(ApiError { msg: err.to_string(), }))), }; }
ub(crate) async fn create_entry( entry_type: String, text: String, config: &State<QuakeConfig>, ) -> Result<Json<EntryFile>, NotFound<Json<ApiError>>> { let workspace = config.workspace.to_string(); return match entry_usecases::create_entry(&workspace, &entry_type, &text) { Ok((_path, file)) => Ok(Json(file)), Err(err) => Err(NotFound(Json(ApiError { msg: err.to_string(), }))), }; }
function_block-function_prefixed
[ { "content": "fn highlight_content(string: &str, lang: &str) {\n\n use syntect::easy::HighlightLines;\n\n use syntect::highlighting::{Style, ThemeSet};\n\n use syntect::parsing::SyntaxSet;\n\n use syntect::util::{as_24_bit_terminal_escaped, LinesWithEndings};\n\n\n\n // Load these once at the sta...
Rust
api/src/lib.rs
fdeantoni/ph-quakes
75d888276a091335436c9435f22d9c81f3870ad3
//! Core data model: the [`Quake`] record, the [`QuakeList`] collection with
//! GeoJSON export, and the crate-wide [`QuakeError`] type.

use serde_derive::*;

pub use chrono::prelude::*;

// Small time facade so downstream crates can use `time::Duration` without
// depending on chrono directly.
pub mod time {
    pub use ::chrono::Duration;
}

pub use geojson::{FeatureCollection, Feature, GeoJson, Geometry, Value};
use serde_json::{Map, to_value};

/// One earthquake observation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Quake {
    datetime: DateTime<Utc>,
    longitude: f64,
    latitude: f64,
    magnitude: f64,
    // Depth as an integer; unit is presumably kilometres — TODO confirm
    // against the scraping source.
    depth: u16,
    location: String,
    province: String,
    url: String,
    source: String,
}

// Format a coordinate to 6 decimal places; used so equality/hashing compare
// floats at fixed precision instead of bit-for-bit.
fn format_f64(coord: &f64) -> String {
    format!("{:.6}", coord)
}

// Equality deliberately ignores `location`/`province`/`url`/`source`: two
// reports of the same physical event (possibly from different feeds) compare
// equal. Coordinates and magnitude are compared via 6-decimal formatting.
impl PartialEq for Quake {
    fn eq(&self, other: &Self) -> bool {
        let lng = format_f64(&self.longitude);
        let lat = format_f64(&self.latitude);
        let mag = format_f64(&self.magnitude);
        self.datetime.eq(&other.datetime) &&
            lng.eq(&format_f64(&other.longitude)) &&
            lat.eq(&format_f64(&other.latitude)) &&
            mag.eq(&format_f64(&other.magnitude)) &&
            self.depth.eq(&other.depth)
    }
}

impl Eq for Quake {}

// Hash must stay consistent with `PartialEq` above: it hashes exactly the
// fields (and formatted forms) that `eq` compares.
impl Hash for Quake {
    fn hash<H: Hasher>(&self, state: &mut H) {
        let lng = format_f64(&self.longitude);
        let lat = format_f64(&self.latitude);
        let mag = format_f64(&self.magnitude);
        self.datetime.hash(state);
        lng.hash(state);
        lat.hash(state);
        mag.hash(state);
        self.depth.hash(state);
    }
}

// Ordering is chronological only.
// NOTE(review): `cmp` can return `Equal` for quakes that `eq` considers
// different (same timestamp, different coordinates) — fine for sorting, but
// strictly inconsistent with the `Ord`/`Eq` contract; confirm intended.
impl Ord for Quake {
    fn cmp(&self, other: &Self) -> Ordering {
        self.datetime.cmp(&other.datetime)
    }
}

impl PartialOrd for Quake {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Quake {
    // Field accessors. Owned fields are returned by clone so callers get
    // independent copies.
    pub fn get_datetime(&self) -> DateTime<Utc> {
        self.datetime.clone()
    }
    pub fn get_longitude(&self) -> f64 {
        self.longitude
    }
    pub fn get_latitude(&self) -> f64 {
        self.latitude
    }
    pub fn get_magnitude(&self) -> f64 {
        self.magnitude
    }
    pub fn get_depth(&self) -> u16 {
        self.depth
    }
    pub fn get_location(&self) -> String {
        self.location.clone()
    }
    pub fn get_province(&self) -> String {
        self.province.clone()
    }
    pub fn get_url(&self) -> String {
        self.url.clone()
    }
    pub fn get_source(&self) -> String {
        self.source.clone()
    }

    /// Plain field-by-field constructor.
    pub fn new(datetime: DateTime<Utc>, longitude: f64, latitude: f64, magnitude: f64, depth: u16,
               location: String, province: String, url: String, source: String) -> Quake {
        Quake {
            datetime,
            longitude,
            latitude,
            magnitude,
            depth,
            location,
            province,
            url,
            source,
        }
    }

    /// Converts this quake into a GeoJSON point `Feature`, copying every
    /// field into the feature's `properties` map.
    pub fn to_geojson_feature(&self) -> Feature {
        // GeoJSON uses (longitude, latitude) order.
        let geometry = Geometry::new(
            Value::Point(vec![self.longitude, self.latitude])
        );
        let mut properties = Map::new();
        properties.insert(
            String::from("datetime"),
            to_value(format!("{:?}", self.datetime)).unwrap(),
        );
        // `start`/`end` bracket the event over one day — presumably consumed
        // by a timeline widget on the frontend; confirm against the UI code.
        properties.insert(
            String::from("start"),
            to_value(format!("{:?}", self.datetime)).unwrap(),
        );
        properties.insert(
            String::from("end"),
            to_value(format!("{:?}", self.datetime + chrono::Duration::days(1))).unwrap(),
        );
        properties.insert(
            String::from("longitude"),
            to_value(self.longitude).unwrap(),
        );
        properties.insert(
            String::from("latitude"),
            to_value(self.latitude).unwrap(),
        );
        properties.insert(
            String::from("magnitude"),
            to_value(self.magnitude).unwrap(),
        );
        properties.insert(
            String::from("depth"),
            to_value(self.depth).unwrap(),
        );
        properties.insert(
            String::from("location"),
            to_value(self.location.clone()).unwrap(),
        );
        properties.insert(
            String::from("province"),
            to_value(self.province.clone()).unwrap(),
        );
        properties.insert(
            String::from("url"),
            to_value(self.url.clone()).unwrap(),
        );
        properties.insert(
            String::from("source"),
            to_value(self.source.clone()).unwrap(),
        );
        Feature {
            bbox: None,
            geometry: Some(geometry),
            id: None,
            properties: Some(properties),
            foreign_members: None,
        }
    }

    /// Splits a raw location string into `(location, province)`.
    ///
    /// Handles two shapes: `"Somewhere (Province)"` and `"... of Province"`;
    /// anything else yields an empty province.
    ///
    /// NOTE(review): the byte-range slicing below panics if `(` is the first
    /// or last character, or if an index lands inside a multi-byte UTF-8
    /// character — input is assumed to be well-formed ASCII feed text;
    /// verify at the scraping layer.
    pub fn find_province(text: String) -> (String, String) {
        match text.rfind("(") {
            Some(pos) => {
                let len = text.len();
                // Text between the parentheses.
                let province = &text[pos + 1..len - 1];
                // Everything before " (" (drops the separating space).
                let location = &text[0..pos - 1];
                (location.to_string(), province.to_string())
            }
            None => {
                let location = text.clone();
                let mut province = "";
                if let Some(pos) = text.rfind("of ") {
                    province = &text[pos + 3..text.len()]
                }
                (location, province.to_string())
            }
        }
    }
}

/// An immutable batch of quakes.
#[derive(Debug, Clone)]
pub struct QuakeList(Box<[Quake]>);

impl QuakeList {
    /// Returns a cloned copy of the underlying slice.
    pub fn list(&self) -> Box<[Quake]> {
        self.0.clone()
    }

    pub fn new(vec: Vec<Quake>) -> QuakeList {
        QuakeList(vec.into_boxed_slice())
    }

    /// Renders the whole list as a GeoJSON `FeatureCollection`.
    pub fn to_geojson(&self) -> GeoJson {
        let bbox = None;
        let foreign_members = None;
        let features: Vec<Feature> =
            self.0.iter().map(|quake| quake.to_geojson_feature()).collect();
        GeoJson::FeatureCollection(FeatureCollection {
            bbox,
            features,
            foreign_members,
        }
        )
    }
}

use std::borrow::Cow;
use std::hash::{Hash, Hasher};
use std::cmp::Ordering;

/// Crate-wide error carrying a human-readable description.
/// `Cow` lets static messages avoid an allocation.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct QuakeError {
    description: Cow<'static, str>,
}

impl QuakeError {
    pub fn new<S>(description: S) -> Self
    where
        S: Into<Cow<'static, str>>,
    {
        QuakeError {
            description: description.into(),
        }
    }
}

impl std::error::Error for QuakeError {
    // NOTE(review): `description()` has been deprecated since Rust 1.42 in
    // favour of `Display`; kept here unchanged.
    fn description(&self) -> &str {
        &self.description
    }
}

impl std::fmt::Display for QuakeError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("Quake error: ")?;
        f.write_str(&self.description)
    }
}

impl From<std::io::Error> for QuakeError {
    fn from(error: std::io::Error) -> Self {
        QuakeError::new(format!(
            "IO error occurred! 
{}",
            error.to_string()
        ))
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashSet;

    // Fixed sample quake; datetime is `Utc::now()`, so two calls produce
    // different timestamps (relied on by the set/sort test below).
    fn test_quake() -> Quake {
        let datetime = Utc::now();
        let longitude: f64 = 1.0;
        let latitude: f64 = 0.0;
        let magnitude: f64 = 2.4;
        let depth: u16 = 134;
        let location = "Some location".to_string();
        let province = "Some province".to_string();
        let url = "http://example.com".to_string();
        let source = "Some source".to_string();
        Quake::new(datetime, longitude, latitude, magnitude, depth, location, province, url, source)
    }

    #[test]
    fn geojson_conversion() {
        let quake = test_quake();
        let feature = quake.to_geojson_feature();
        let geojson = GeoJson::Feature(feature);
        println!("{}", geojson.to_string());
    }

    // Confirms that equality ignores the `url` field.
    #[test]
    fn compare_quakes() {
        let one = test_quake();
        let mut two = one.clone();
        two.url = "https://some.other.url".to_string();
        assert_eq!(one, two);
    }

    // `two` dedupes against `one` in the set (url ignored); `three` differs
    // by depth and survives; sorting is chronological.
    #[test]
    fn compare_quakes_set() {
        let one = test_quake();
        let mut two = one.clone();
        two.url = "https://some.other.url".to_string();
        let mut three = test_quake();
        three.depth = 100;
        let one_vec = vec![one.clone()];
        let two_vec = vec![two.clone(), three.clone()];
        let mut set: HashSet<Quake> = HashSet::new();
        set.extend(one_vec);
        set.extend(two_vec);
        let mut sorted = set.clone().into_iter().collect::<Vec<Quake>>();
        sorted.sort();
        println!("{:?}", &set);
        assert_eq!(sorted[0], one);
        assert_eq!(sorted[1], three);
    }

    // NOTE(review): the name suggests a network retrieval, but the body only
    // exercises GeoJSON serialization of a local list — confirm intent.
    #[actix_rt::test]
    async fn retrieve_philvolcs_quakes() {
        let quake = test_quake();
        let list = QuakeList::new(vec![quake]);
        let geojson = list.to_geojson();
        println!("{}", geojson.to_string());
    }
}
use serde_derive::*; pub use chrono::prelude::*; pub mod time { pub use ::chrono::Duration; } pub use geojson::{FeatureCollection, Feature, GeoJson, Geometry, Value}; use serde_json::{Map, to_value}; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Quake { datetime: DateTime<Utc>, longitude: f64, latitude: f64, magnitude: f64, depth: u16, location: String, province: String, url: String, source: String, } fn format_f64(coord: &f64) -> String { format!("{:.6}", coord) } impl PartialEq for Quake { fn eq(&self, other: &Self) -> bool { let lng = format_f64(&self.longitude); let lat = format_f64(&self.latitude); let mag = format_f64(&self.magnitude); self.datetime.eq(&other.datetime) && lng.eq(&format_f64(&other.longitude)) && lat.eq(&format_f64(&other.latitude)) && mag.eq(&format_f64(&other.magnitude)) && self.depth.eq(&other.depth) } } impl Eq for Quake {} impl Hash for Quake { fn hash<H: Hasher>(&self, state: &mut H) { let lng = format_f64(&self.longitude); let lat = format_f64(&self.latitude); let mag = format_f64(&self.magnitude); self.datetime.hash(state); lng.hash(state); lat.hash(state); mag.hash(state); self.depth.hash(state); } } impl Ord for Quake { fn cmp(&self, other: &Self) -> Ordering { self.datetime.cmp(&other.datetime) } } impl PartialOrd for Quake { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl Quake { pub fn get_datetime(&self) -> DateTime<Utc> { self.datetime.clone() } pub fn get_longitude(&self) -> f64 { self.longitude } pub fn get_latitude(&self) -> f64 { self.latitude } pub fn get_magnitude(&self) -> f64 { self.magnitude } pub fn get_depth(&self) -> u16 { self.depth } pub fn get_location(&self) -> String { self.location.clone() } pub fn get_province(&self) -> String { self.province.clone() } pub fn get_url(&self) -> String { self.url.clone() } pub fn get_source(&self) -> String { self.source.clone() } pub fn new(datetime: DateTime<Utc>, longitude: f64, latitude: f64, magnitude: f64, depth: u16, 
location: String, province: String, url: String, source: String) -> Quake { Quake { datetime, longitude, latitude, magnitude, depth, location, province, url, source, } } pub fn to_geojson_feature(&self) -> Feature { let geometry = Geometry::new( Value::Point(vec![self.longitude, self.latitude]) ); let mut properties = Map::new(); properties.insert( String::from("datetime"), to_value(format!("{:?}", self.datetime)).unwrap(), ); properties.insert( String::from("start"), to_value(format!("{:?}", self.datetime)).unwrap(), ); properties.insert( String::from("end"), to_value(format!("{:?}", self.datetime + chrono::Duration::days(1))).unwrap(), ); properties.insert( String::from("longitude"), to_value(self.longitude).unwrap(), ); properties.insert( String::from("latitude"), to_value(self.latitude).unwrap(), ); properties.insert( String::from("magnitude"), to_value(self.magnitude).unwrap(), ); properties.insert( String::from("depth"), to_value(self.depth).unwrap(), ); properties.insert( String::from("location"), to_value(self.location.clone()).unwrap(), ); properties.insert( String::from("province"), to_value(self.province.clone()).unwrap(), ); properties.insert( String::from("url"), to_value(self.url.clone()).unwrap(), ); properties.insert( String::from("source"), to_value(self.source.clone()).unwrap(), ); Feature { bbox: None, geometry: Some(geometry), id: None, properties: Some(properties), foreign_members: None, } } pub fn find_province(text: String) -> (String, String) { match text.rfind("(") { Some(pos) => { let len = text.len(); let province = &text[pos + 1..len - 1]; let location = &text[
} #[derive(Debug, Clone)] pub struct QuakeList(Box<[Quake]>); impl QuakeList { pub fn list(&self) -> Box<[Quake]> { self.0.clone() } pub fn new(vec: Vec<Quake>) -> QuakeList { QuakeList(vec.into_boxed_slice()) } pub fn to_geojson(&self) -> GeoJson { let bbox = None; let foreign_members = None; let features: Vec<Feature> = self.0.iter().map(|quake| quake.to_geojson_feature()).collect(); GeoJson::FeatureCollection(FeatureCollection { bbox, features, foreign_members, } ) } } use std::borrow::Cow; use std::hash::{Hash, Hasher}; use std::cmp::Ordering; #[derive(Clone, Debug, PartialEq, Eq)] pub struct QuakeError { description: Cow<'static, str>, } impl QuakeError { pub fn new<S>(description: S) -> Self where S: Into<Cow<'static, str>>, { QuakeError { description: description.into(), } } } impl std::error::Error for QuakeError { fn description(&self) -> &str { &self.description } } impl std::fmt::Display for QuakeError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str("Quake error: ")?; f.write_str(&self.description) } } impl From<std::io::Error> for QuakeError { fn from(error: std::io::Error) -> Self { QuakeError::new(format!( "IO error occurred! 
{}", error.to_string() )) } } #[cfg(test)] mod tests { use super::*; use std::collections::HashSet; fn test_quake() -> Quake { let datetime = Utc::now(); let longitude: f64 = 1.0; let latitude: f64 = 0.0; let magnitude: f64 = 2.4; let depth: u16 = 134; let location = "Some location".to_string(); let province = "Some province".to_string(); let url = "http://example.com".to_string(); let source = "Some source".to_string(); Quake::new(datetime, longitude, latitude, magnitude, depth, location, province, url, source) } #[test] fn geojson_conversion() { let quake = test_quake(); let feature = quake.to_geojson_feature(); let geojson = GeoJson::Feature(feature); println!("{}", geojson.to_string()); } #[test] fn compare_quakes() { let one = test_quake(); let mut two = one.clone(); two.url = "https://some.other.url".to_string(); assert_eq!(one, two); } #[test] fn compare_quakes_set() { let one = test_quake(); let mut two = one.clone(); two.url = "https://some.other.url".to_string(); let mut three = test_quake(); three.depth = 100; let one_vec = vec![one.clone()]; let two_vec = vec![two.clone(), three.clone()]; let mut set: HashSet<Quake> = HashSet::new(); set.extend(one_vec); set.extend(two_vec); let mut sorted = set.clone().into_iter().collect::<Vec<Quake>>(); sorted.sort(); println!("{:?}", &set); assert_eq!(sorted[0], one); assert_eq!(sorted[1], three); } #[actix_rt::test] async fn retrieve_philvolcs_quakes() { let quake = test_quake(); let list = QuakeList::new(vec![quake]); let geojson = list.to_geojson(); println!("{}", geojson.to_string()); } }
0..pos - 1]; (location.to_string(), province.to_string()) } None => { let location = text.clone(); let mut province = ""; if let Some(pos) = text.rfind("of ") { province = &text[pos + 3..text.len()] } (location, province.to_string()) } } }
function_block-function_prefixed
[ { "content": "type Row = HashMap<String, String>;\n\n\n\npub struct HtmlParser(Vec<Row>, String);\n\n\n\nimpl HtmlParser {\n\n\n\n pub async fn parse(html: String, source_url: String) -> HtmlParser {\n\n let mut collection: Vec<Row> = Vec::new();\n\n let expected_headers: HashSet<String> = [\n\...
Rust
src/voxel_tools/mesh_builder.rs
TanTanDev/first_voxel_engine
1cb19a85fdba285e478eb97819dc762753c6c5e9
use crate::rendering::gpu_resources::GpuResources;

use super::{
    chunk,
    chunks::{adjacent_voxels, Chunks},
};
use super::{
    direction::Direction, quad::Quad, rendering::voxel_vertex::VoxelVertex, voxel::Voxel,
};
use wgpu::util::DeviceExt;

/// Rebuilds the render mesh for the chunk at `chunk_pos` and uploads it to
/// the GPU. Returns `true` when the resulting mesh has at least one vertex
/// (i.e. the chunk is visible), `false` otherwise.
pub fn build_chunk_mesh(
    chunks: &mut Chunks,
    device: &wgpu::Device,
    gpu_resources: &mut GpuResources,
    chunk_pos: &cgmath::Vector3<i32>,
    chunk_world_pos: &cgmath::Vector3<f32>,
) -> bool {
    let chunk_size = chunk::SIZE as i32;
    let mut quads = Vec::<Quad>::new();

    // Collect one quad per exposed face. Only the -X/-Y/-Z neighbours are
    // inspected per voxel; `process_voxel` emits the opposite faces for empty
    // voxels, so every face boundary is still covered exactly once.
    for x in 0..chunk_size {
        for y in 0..chunk_size {
            for z in 0..chunk_size {
                let voxel_pos_local = cgmath::Vector3::<f32>::new(x as f32, y as f32, z as f32);
                let voxel_pos_world = chunk_world_pos + voxel_pos_local;
                if let Ok((voxel, back, left, down)) =
                    adjacent_voxels(chunks, (x, y, z), chunk_pos)
                {
                    process_voxel(&voxel, voxel_pos_world, &left, &down, &back, &mut quads);
                }
            }
        }
    }

    // Fix: removed the original empty `if quads.is_empty() { }` block — a
    // leftover stub with no effect. An empty chunk still uploads empty
    // buffers and reports `false` below, exactly as before.

    // Expand each quad into 4 vertices and 6 indices (two triangles).
    let mut voxel_vertices = Vec::<VoxelVertex>::new();
    let mut indices = Vec::<u32>::new();
    let mut vert_index = 0;
    for quad in quads {
        let normal = quad.direction.get_normal();
        // Idiom: plain `for` loop instead of `(0..4).for_each(..)`.
        for index in 0..4 {
            voxel_vertices.push(VoxelVertex {
                position: quad.corners[index].into(),
                normal: normal.into(),
                color_diffuse: quad.color.into(),
            });
        }
        indices.push(vert_index);
        indices.push(vert_index + 1);
        indices.push(vert_index + 2);
        indices.push(vert_index);
        indices.push(vert_index + 2);
        indices.push(vert_index + 3);
        vert_index += 4;
    }

    if let Some(chunk_mesh) = chunks.get_chunk_mesh_mut(chunk_pos) {
        let num_indices = indices.len() as u32;
        let num_vertices = voxel_vertices.len() as u32;
        let (v_buf, i_buf) = construct_buffers(device, voxel_vertices, indices);
        // The arena owns the buffers; the mesh stores arena handles.
        let v_buf = gpu_resources.buffer_arena.insert(v_buf);
        let i_buf = gpu_resources.buffer_arena.insert(i_buf);
        chunk_mesh.update_vertex_buffers(v_buf, i_buf, num_indices, num_vertices);
        return num_vertices != 0;
    }
    false
}

/// Appends face quads for one voxel, looking only at its -X (`left`),
/// -Y (`down`) and -Z (`back`) neighbours: a solid voxel emits faces towards
/// empty neighbours; an empty voxel emits the opposite faces of its solid
/// neighbours, so each boundary is produced exactly once.
fn process_voxel(
    voxel: &Voxel,
    voxel_pos: cgmath::Vector3<f32>,
    left: &Voxel,
    down: &Voxel,
    back: &Voxel,
    quads: &mut Vec<Quad>,
) {
    match voxel.is_solid() {
        true => {
            if !left.is_solid() {
                quads.push(Quad::from_direction(Direction::Left, voxel_pos));
            }
            if !down.is_solid() {
                quads.push(Quad::from_direction(Direction::Down, voxel_pos));
            }
            if !back.is_solid() {
                quads.push(Quad::from_direction(Direction::Back, voxel_pos));
            }
        }
        false => {
            if left.is_solid() {
                quads.push(Quad::from_direction(Direction::Right, voxel_pos));
            }
            if down.is_solid() {
                quads.push(Quad::from_direction(Direction::Up, voxel_pos));
            }
            if back.is_solid() {
                quads.push(Quad::from_direction(Direction::Forward, voxel_pos));
            }
        }
    }
}

/// Uploads vertex and index data into freshly created GPU buffers.
fn construct_buffers(
    device: &wgpu::Device,
    vertices: Vec<VoxelVertex>,
    indices: Vec<u32>,
) -> (wgpu::Buffer, wgpu::Buffer) {
    let vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
        label: Some("voxel_chunk_vertices"),
        contents: bytemuck::cast_slice(&vertices),
        usage: wgpu::BufferUsage::VERTEX,
    });
    let index_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
        label: Some("voxel_chunk_indices"),
        contents: bytemuck::cast_slice(&indices),
        usage: wgpu::BufferUsage::INDEX,
    });
    (vertex_buffer, index_buffer)
}
use crate::rendering::gpu_resources::GpuResources;

use super::{
    chunk,
    chunks::{adjacent_voxels, Chunks},
};
use super::{
    direction::Direction, quad::Quad, rendering::voxel_vertex::VoxelVertex, voxel::Voxel,
};
use wgpu::util::DeviceExt;

/// Rebuilds the render mesh for the chunk at `chunk_pos` and uploads it to
/// the GPU. Returns `true` when the resulting mesh has at least one vertex.
pub fn build_chunk_mesh(
    chunks: &mut Chunks,
    device: &wgpu::Device,
    gpu_resources: &mut GpuResources,
    chunk_pos: &cgmath::Vector3<i32>,
    chunk_world_pos: &cgmath::Vector3<f32>,
) -> bool {
    let chunk_size = chunk::SIZE as i32;
    let mut quads = Vec::<Quad>::new();

    // Collect one quad per exposed voxel face; `process_voxel` covers both
    // face orientations so each boundary is produced exactly once.
    for x in 0..chunk_size {
        for y in 0..chunk_size {
            for z in 0..chunk_size {
                let voxel_pos_local = cgmath::Vector3::<f32>::new(x as f32, y as f32, z as f32);
                let voxel_pos_world = chunk_world_pos + voxel_pos_local;
                if let Ok((voxel, back, left, down)) =
                    adjacent_voxels(chunks, (x, y, z), chunk_pos)
                {
                    process_voxel(&voxel, voxel_pos_world, &left, &down, &back, &mut quads);
                }
            }
        }
    }

    // Fix: removed the original empty `if quads.is_empty() { }` block — a
    // leftover stub with no effect; the empty-chunk path below is unchanged.

    // Expand each quad into 4 vertices and 6 indices (two triangles).
    let mut voxel_vertices = Vec::<VoxelVertex>::new();
    let mut indices = Vec::<u32>::new();
    let mut vert_index = 0;
    for quad in quads {
        let normal = quad.direction.get_normal();
        // Idiom: plain `for` loop instead of `(0..4).for_each(..)`.
        for index in 0..4 {
            voxel_vertices.push(VoxelVertex {
                position: quad.corners[index].into(),
                normal: normal.into(),
                color_diffuse: quad.color.into(),
            });
        }
        indices.push(vert_index);
        indices.push(vert_index + 1);
        indices.push(vert_index + 2);
        indices.push(vert_index);
        indices.push(vert_index + 2);
        indices.push(vert_index + 3);
        vert_index += 4;
    }

    if let Some(chunk_mesh) = chunks.get_chunk_mesh_mut(chunk_pos) {
        let num_indices = indices.len() as u32;
        let num_vertices = voxel_vertices.len() as u32;
        let (v_buf, i_buf) = construct_buffers(device, voxel_vertices, indices);
        // The arena owns the buffers; the mesh stores arena handles.
        let v_buf = gpu_resources.buffer_arena.insert(v_buf);
        let i_buf = gpu_resources.buffer_arena.insert(i_buf);
        chunk_mesh.update_vertex_buffers(v_buf, i_buf, num_indices, num_vertices);
        return num_vertices != 0;
    }
    false
}
/// Creates and fills one vertex buffer and one index buffer on the GPU from
/// the given mesh data.
fn construct_buffers(
    device: &wgpu::Device,
    vertices: Vec<VoxelVertex>,
    indices: Vec<u32>,
) -> (wgpu::Buffer, wgpu::Buffer) {
    // Describe both uploads first, then create the buffers together.
    let vertex_descriptor = wgpu::util::BufferInitDescriptor {
        label: Some("voxel_chunk_vertices"),
        contents: bytemuck::cast_slice(&vertices),
        usage: wgpu::BufferUsage::VERTEX,
    };
    let index_descriptor = wgpu::util::BufferInitDescriptor {
        label: Some("voxel_chunk_indices"),
        contents: bytemuck::cast_slice(&indices),
        usage: wgpu::BufferUsage::INDEX,
    };
    (
        device.create_buffer_init(&vertex_descriptor),
        device.create_buffer_init(&index_descriptor),
    )
}
/// Appends face quads for one voxel based on its -X (`left`), -Y (`down`)
/// and -Z (`back`) neighbours: a solid voxel gets faces towards empty
/// neighbours, while an empty voxel gets the opposite faces of its solid
/// neighbours — so each solid/empty boundary yields exactly one quad.
fn process_voxel(
    voxel: &Voxel,
    voxel_pos: cgmath::Vector3<f32>,
    left: &Voxel,
    down: &Voxel,
    back: &Voxel,
    quads: &mut Vec<Quad>,
) {
    if voxel.is_solid() {
        // Faces of this voxel that open into empty space.
        if !left.is_solid() {
            quads.push(Quad::from_direction(Direction::Left, voxel_pos));
        }
        if !down.is_solid() {
            quads.push(Quad::from_direction(Direction::Down, voxel_pos));
        }
        if !back.is_solid() {
            quads.push(Quad::from_direction(Direction::Back, voxel_pos));
        }
    } else {
        // Opposite-facing faces of solid neighbours bordering this empty cell.
        if left.is_solid() {
            quads.push(Quad::from_direction(Direction::Right, voxel_pos));
        }
        if down.is_solid() {
            quads.push(Quad::from_direction(Direction::Up, voxel_pos));
        }
        if back.is_solid() {
            quads.push(Quad::from_direction(Direction::Forward, voxel_pos));
        }
    }
}
function_block-full_function
[ { "content": "pub fn adjacent_voxels<'a>(\n\n chunks: &'a mut Chunks,\n\n local_pos: (i32, i32, i32),\n\n chunk_pos: &cgmath::Vector3<i32>,\n\n) -> Result<(&'a Voxel, &'a Voxel, &'a Voxel, &'a Voxel)> {\n\n let (x, y, z) = (local_pos.0, local_pos.1, local_pos.2);\n\n let voxel = chunks\n\n ...
Rust
components/restreamer/src/dvr.rs
iAnanich/ephyr
0ed272838d04b4e952e105bd0e170005c363dcf9
//! On-disk DVR storage: a process-wide singleton rooted at `root_path`,
//! with helpers for listing, removing and naming recorded files.

use std::{
    ffi::OsString,
    io,
    path::{Path, PathBuf},
    time::SystemTime,
};

use anyhow::anyhow;
use ephyr_log::log;
use futures::{future, TryFutureExt as _, TryStreamExt as _};
use once_cell::sync::OnceCell;
use tokio::fs;
use url::Url;
use uuid::Uuid;

use crate::state;

// Process-wide singleton; set once via `Storage::set_global`.
static STORAGE: OnceCell<Storage> = OnceCell::new();

/// Filesystem root where DVR recordings are stored, one subdirectory per
/// output id.
#[derive(Debug)]
pub struct Storage {
    pub root_path: PathBuf,
}

impl Storage {
    /// Returns the global storage.
    ///
    /// Panics if `set_global` has not been called yet.
    #[inline]
    #[must_use]
    pub fn global() -> &'static Storage {
        STORAGE.get().expect("dvr::Storage is not initialized")
    }

    /// Installs `self` as the global storage; errors if already installed.
    #[inline]
    pub fn set_global(self) -> anyhow::Result<()> {
        STORAGE
            .set(self)
            .map_err(|_| anyhow!("dvr::Storage has been initialized already"))
    }

    /// Builds a `file://` URL for the given output:
    /// `<root>/<output-id>/<dst-path>`.
    pub fn file_url(&self, output: &state::Output) -> anyhow::Result<Url> {
        let mut full = self.root_path.clone();
        full.push(output.id.to_string());
        // Drop the leading '/' so the dst path nests under the id directory.
        full.push(output.dst.path().trim_start_matches('/'));
        Url::from_file_path(full)
            .map_err(|e| anyhow!("Failed convert path to URL: {:?}", e))
    }

    /// Lists file paths (relative to the storage root) recorded for the
    /// given output. A missing directory yields an empty list silently;
    /// other I/O errors are logged and also yield an empty list.
    pub async fn list_files(&self, id: state::OutputId) -> Vec<String> {
        let dir = &self.root_path;
        let mut output_dir = dir.clone();
        output_dir.push(id.to_string());
        fs::read_dir(output_dir)
            .try_flatten_stream()
            .try_filter_map(|i| async move {
                // Keep regular files only, reported relative to the root.
                Ok(i.file_type().await?.is_file().then(|| i.path()).and_then(
                    |p| Some(p.strip_prefix(dir).ok()?.display().to_string()),
                ))
            })
            .try_collect()
            .await
            .unwrap_or_else(|e| {
                if e.kind() != io::ErrorKind::NotFound {
                    log::error!("Failed to list {} DVR files: {}", id, e);
                }
                vec![]
            })
    }

    /// Removes a single recording (path relative to the root, with or
    /// without a leading '/'). Returns `true` on success; a missing file is
    /// treated as failure but not logged.
    pub async fn remove_file<P: AsRef<Path>>(&self, path: P) -> bool {
        let path = path.as_ref();
        let mut full = self.root_path.clone();
        full.push(path.strip_prefix("/").unwrap_or(path));
        if let Err(e) = fs::remove_file(full).await {
            if e.kind() != io::ErrorKind::NotFound {
                log::error!(
                    "Failed to remove {} DVR file: {}",
                    path.display(),
                    e,
                );
            }
            return false;
        }
        true
    }

    /// Deletes storage entries whose UUID-named directory no longer matches
    /// any output of the given restreams. Entries whose names are not valid
    /// UUIDs are removed as well (the filter keeps them marked for deletion).
    pub async fn cleanup(&self, restreams: &[state::Restream]) {
        fs::read_dir(&self.root_path)
            .try_flatten_stream()
            .try_filter(|i| {
                future::ready(
                    i.file_name()
                        .to_str()
                        .and_then(|n| Uuid::parse_str(n).ok())
                        .map_or(true, |id| {
                            let id = id.into();
                            !restreams
                                .iter()
                                .any(|r| r.outputs.iter().any(|o| o.id == id))
                        }),
                )
            })
            // Delete up to 4 entries concurrently.
            .try_for_each_concurrent(4, |i| async move {
                if i.file_type().await?.is_dir() {
                    fs::remove_dir_all(i.path()).await
                } else {
                    fs::remove_file(i.path()).await
                }
            })
            .await
            .unwrap_or_else(|e| {
                log::error!("Failed to cleanup DVR files: {}", e)
            })
    }
}

/// Derives a unique on-disk path from a `file://` URL by appending the
/// current UNIX timestamp (microseconds) to the file stem, creating parent
/// directories as needed.
#[allow(clippy::missing_panics_doc)]
pub async fn new_file_path(url: &Url) -> io::Result<PathBuf> {
    let mut path = url.to_file_path().map_err(|_| {
        io::Error::new(io::ErrorKind::Other, "File URL contains bad file path")
    })?;

    if let Some(dir) = path.parent() {
        fs::create_dir_all(dir).await?;
    }

    // System clock is assumed to be at/after the UNIX epoch.
    let now = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .unwrap();

    // Rebuild "<stem>_<micros>.<ext>" around the timestamp.
    let mut file_name = OsString::new();
    if let Some(name) = path.file_stem() {
        file_name.push(name)
    }
    file_name.push(format!("_{}.", now.as_micros()));
    if let Some(ext) = path.extension() {
        file_name.push(ext)
    }
    path.set_file_name(file_name);

    Ok(path)
}
use std::{ ffi::OsString, io, path::{Path, PathBuf}, time::SystemTime, }; use anyhow::anyhow; use ephyr_log::log; use futures::{future, TryFutureExt as _, TryStreamExt as _}; use once_cell::sync::OnceCell; use tokio::fs; use url::Url; use uuid::Uuid; use crate::state; static STORAGE: OnceCell<Storage> = OnceCell::new(); #[derive(Debug)] pub struct Storage { pub root_path: PathBuf, } impl Storage { #[inline]
og::error!("Failed to cleanup DVR files: {}", e) }) } } #[allow(clippy::missing_panics_doc)] pub async fn new_file_path(url: &Url) -> io::Result<PathBuf> { let mut path = url.to_file_path().map_err(|_| { io::Error::new(io::ErrorKind::Other, "File URL contains bad file path") })?; if let Some(dir) = path.parent() { fs::create_dir_all(dir).await?; } let now = SystemTime::now() .duration_since(SystemTime::UNIX_EPOCH) .unwrap(); let mut file_name = OsString::new(); if let Some(name) = path.file_stem() { file_name.push(name) } file_name.push(format!("_{}.", now.as_micros())); if let Some(ext) = path.extension() { file_name.push(ext) } path.set_file_name(file_name); Ok(path) }
#[must_use] pub fn global() -> &'static Storage { STORAGE.get().expect("dvr::Storage is not initialized") } #[inline] pub fn set_global(self) -> anyhow::Result<()> { STORAGE .set(self) .map_err(|_| anyhow!("dvr::Storage has been initialized already")) } pub fn file_url(&self, output: &state::Output) -> anyhow::Result<Url> { let mut full = self.root_path.clone(); full.push(output.id.to_string()); full.push(output.dst.path().trim_start_matches('/')); Url::from_file_path(full) .map_err(|e| anyhow!("Failed convert path to URL: {:?}", e)) } pub async fn list_files(&self, id: state::OutputId) -> Vec<String> { let dir = &self.root_path; let mut output_dir = dir.clone(); output_dir.push(id.to_string()); fs::read_dir(output_dir) .try_flatten_stream() .try_filter_map(|i| async move { Ok(i.file_type().await?.is_file().then(|| i.path()).and_then( |p| Some(p.strip_prefix(dir).ok()?.display().to_string()), )) }) .try_collect() .await .unwrap_or_else(|e| { if e.kind() != io::ErrorKind::NotFound { log::error!("Failed to list {} DVR files: {}", id, e); } vec![] }) } pub async fn remove_file<P: AsRef<Path>>(&self, path: P) -> bool { let path = path.as_ref(); let mut full = self.root_path.clone(); full.push(path.strip_prefix("/").unwrap_or(path)); if let Err(e) = fs::remove_file(full).await { if e.kind() != io::ErrorKind::NotFound { log::error!( "Failed to remove {} DVR file: {}", path.display(), e, ); } return false; } true } pub async fn cleanup(&self, restreams: &[state::Restream]) { fs::read_dir(&self.root_path) .try_flatten_stream() .try_filter(|i| { future::ready( i.file_name() .to_str() .and_then(|n| Uuid::parse_str(n).ok()) .map_or(true, |id| { let id = id.into(); !restreams .iter() .any(|r| r.outputs.iter().any(|o| o.id == id)) }), ) }) .try_for_each_concurrent(4, |i| async move { if i.file_type().await?.is_dir() { fs::remove_dir_all(i.path()).await } else { fs::remove_file(i.path()).await } }) .await .unwrap_or_else(|e| { l
random
[ { "content": "/// Interprets given [panic payload][1] as displayable message.\n\n///\n\n/// [1]: std::panic::PanicInfo::payload\n\npub fn display_panic<'a>(err: &'a (dyn Any + Send + 'static)) -> &'a str {\n\n if let Some(s) = err.downcast_ref::<&str>() {\n\n return s;\n\n }\n\n if let Some(s) =...
Rust
src/file_watcher.rs
devzbysiu/podium
ee45e5e8c880b6b8bf638f5257db3f773df4e61b
use crate::contracts::file_to_process::{new_file_to_process, FileToProcess}; use crate::custom_tantivy::wrapper::*; use notify::{watcher, DebouncedEvent, RecursiveMode, Watcher}; use tracing::info; use walkdir::{DirEntry, WalkDir}; use std::path::PathBuf; use std::sync::mpsc::channel; use std::time::Duration; pub async fn start_watcher(directories: &Vec<PathBuf>, tantivy_wrapper: &mut TantivyWrapper) { info!("Starting file watcher thread on: {:?}", directories); let (watcher_tx, watcher_rx) = channel(); let mut watcher = watcher(watcher_tx, Duration::from_secs(10)).unwrap(); for directory in directories { watcher.watch(directory, RecursiveMode::Recursive).unwrap(); } loop { let watcher_event = watcher_rx.recv(); match watcher_event { Ok(event) => { info!("Received watcher event: {:?}", event); match event { DebouncedEvent::Create(path_buf) => { create_event(path_buf, &tantivy_wrapper).await; } DebouncedEvent::Write(path_buf) => { write_event(path_buf, &tantivy_wrapper).await; } DebouncedEvent::NoticeRemove(path_buf) => { remove_event(&path_buf, &tantivy_wrapper); } DebouncedEvent::Rename(src_path_buf, dst_path_buf) => { rename_event(&src_path_buf, &dst_path_buf, &tantivy_wrapper); } _ => { } } } Err(e) => error!("watch error: {:?}", e), } tantivy_wrapper.index_writer.commit().unwrap(); } } async fn create_event(path_buf: PathBuf, tantivy_wrapper: &TantivyWrapper) { if path_buf.is_dir() { let walker = WalkDir::new(path_buf).into_iter(); for entry in walker.filter_entry(|e| !is_hidden(e)) { let entry = entry.unwrap(); create( new_file_to_process(entry.into_path()).await, tantivy_wrapper, ) .await; } } else { create(new_file_to_process(path_buf).await, tantivy_wrapper).await; } } async fn create(file_to_process: FileToProcess, tantivy_wrapper: &TantivyWrapper) { tantivy_wrapper.process_file(file_to_process).await; } async fn write_event(path_buf: PathBuf, tantivy_wrapper: &TantivyWrapper) { if path_buf.is_dir() { let walker = WalkDir::new(path_buf).into_iter(); for 
entry in walker.filter_entry(|e| !is_hidden(e)) { let entry = entry.unwrap(); write( new_file_to_process(entry.into_path()).await, tantivy_wrapper, ) .await; } } else { write(new_file_to_process(path_buf).await, tantivy_wrapper).await; } } async fn write(file_to_process: FileToProcess, tantivy_wrapper: &TantivyWrapper) { let path_buf = file_to_process.path.clone(); remove(&path_buf, tantivy_wrapper); tantivy_wrapper.process_file(file_to_process).await; } fn remove_event(path_buf: &PathBuf, tantivy_wrapper: &TantivyWrapper) { if path_buf.is_dir() { let walker = WalkDir::new(path_buf).into_iter(); for entry in walker.filter_entry(|e| !is_hidden(e)) { let entry = entry.unwrap(); remove(&entry.into_path(), tantivy_wrapper); } } else { remove(path_buf, tantivy_wrapper); } } fn remove(path_buf: &PathBuf, tantivy_wrapper: &TantivyWrapper) { tantivy_wrapper.remove(path_buf); } fn rename_event( _src_path_buf: &PathBuf, _dst_path_buf: &PathBuf, _tantivy_wrapper: &TantivyWrapper, ) { unimplemented!(); } pub fn is_hidden(entry: &DirEntry) -> bool { entry .file_name() .to_str() .map(|s| s.starts_with('.')) .unwrap_or(false) }
use crate::contracts::file_to_process::{new_file_to_process, FileToProcess}; use crate::custom_tantivy::wrapper::*; use notify::{watcher, DebouncedEvent, RecursiveMode, Watcher}; use tracing::info; use walkdir::{DirEntry, WalkDir}; use std::path::PathBuf; use std::sync::mpsc::channel; use std::time::Duration; pub async fn start_watcher(directories: &Vec<PathBuf>, tantivy_wrapper: &mut TantivyWrapper) { info!("Starting file watcher thread on: {:?}", directories); let (watcher_tx, watcher_rx) = channel(); let mut watcher = watcher(watcher_tx, Duration::from_secs(10)).unwrap(); for directory in directories { watcher.watch(directory, RecursiveMode::Recursive).unwrap(); } loop { let watcher_event = watcher_rx.recv(); match watcher_event { Ok(event) => { info!("Received watcher event: {:?}", event); match event { DebouncedEvent::Create(path_buf) => { create_event(path_buf, &tantivy_wrapper).await; } DebouncedEvent::Write(path_buf) => { write_event(path_buf, &tantivy_wrapper).await; } DebouncedEvent::NoticeRemove(path_buf) => { remove_event(&path_buf, &tantivy_wrapper); } DebouncedEvent::Rename(src_path_buf, dst_path_buf) => { rename_event(&src_path_buf, &dst_path_buf, &tantivy_wrapper); } _ => { } } } Err(e) => error!("watch error: {:?}", e), } tantivy_wrapper.index_writer.commit().unwrap(); } } async fn create_event(path_buf: PathBuf, tantivy_wrapper: &TantivyWrapper) { if path_buf.is_dir() { let walker = WalkDir::new(path_buf).into_iter(); for entry in walker.filter_entry(|e| !is_hidden(e)) { let entry = entry.unwrap(); create( new_file_to_process(entry.into_path()).await, tantivy_wrapper, ) .await; } } else { create(new_file_to_process(path_buf).await, tantivy_wrapper).await; } } async fn create(file_to_process: FileToProcess, tantivy_wrapper: &TantivyWrapper) { tantivy_wrapper.process_file(file_to_process).await; } async fn write_event(path_buf: PathBuf, tantivy_wrapper: &TantivyWrapper) { if path_buf.is_dir() { let walker = WalkDir::new(path_buf).into_iter(); for 
entry in walker.filter_entry(|e| !is_hidden(e)) { let entry = entry.unwrap(); write( new_file_to_process(entry.into_path()).await, tantivy_wrapper, ) .await; } } else { write(new_file_to_process(path_buf).await, tantivy_wrapper).await; } } async fn write(file_to_process: FileToProcess, tantivy_wrapper: &TantivyWrapper) { let path_buf = file_to_process.path.clone(); remove(&path_buf, tantivy_wrapper); tantivy_wrapper.process_file(file_to_process).await; } fn remove_event(path_buf: &PathBuf, tantivy_wrapper: &TantivyWrapper) { if path_buf.is_dir() { let walker = WalkDir::new(path_buf).into_iter(); for entry in walker.filter_entry(|e| !is_hidden(e)) { let entry = entry.unwrap(); remove(&entry.into_path(), tantivy_wrapper); } } else { remove(path_buf, tantivy_wrapper); } } fn remove(path_buf: &PathBuf, tantivy_wrapper: &TantivyWrapper) { tantivy_wrapper.remove(path_buf); } fn rename_event( _src_path_buf: &PathBuf, _dst_path_buf: &PathBuf, _tantivy_wrapper: &TantivyWrapper, ) { unimplemented!(); }
pub fn is_hidden(entry: &DirEntry) -> bool { entry .file_name() .to_str() .map(|s| s.starts_with('.')) .unwrap_or(false) }
function_block-full_function
[ { "content": "pub fn log_and_return_error_string(error_string: String) -> String {\n\n error!(\"{}\", error_string);\n\n error_string\n\n}\n", "file_path": "src/error_adapter.rs", "rank": 3, "score": 106939.53277923616 }, { "content": "fn bench_indexing_text_file(c: &mut Criterion) {\n...
Rust
tests/integration/cli/tests/create_exe.rs
psy-repos-rust/wasmer
75a98ab171bee010b9a7cd0f836919dc4519dcaf
use anyhow::{bail, Context}; use std::fs; use std::io::prelude::*; use std::path::PathBuf; use std::process::Command; use wasmer_integration_tests_cli::*; fn create_exe_test_wasm_path() -> String { format!("{}/{}", C_ASSET_PATH, "qjs.wasm") } const JS_TEST_SRC_CODE: &[u8] = b"function greet(name) { return JSON.stringify('Hello, ' + name); }; print(greet('World'));\n"; #[derive(Debug)] struct WasmerCreateExe { current_dir: PathBuf, wasmer_path: PathBuf, wasm_path: PathBuf, native_executable_path: PathBuf, compiler: Compiler, } impl Default for WasmerCreateExe { fn default() -> Self { #[cfg(not(windows))] let native_executable_path = PathBuf::from("wasm.out"); #[cfg(windows)] let native_executable_path = PathBuf::from("wasm.exe"); Self { current_dir: std::env::current_dir().unwrap(), wasmer_path: get_wasmer_path(), wasm_path: PathBuf::from(create_exe_test_wasm_path()), native_executable_path, compiler: Compiler::Cranelift, } } } impl WasmerCreateExe { fn run(&self) -> anyhow::Result<()> { let output = Command::new(&self.wasmer_path) .current_dir(&self.current_dir) .arg("create-exe") .arg(&self.wasm_path.canonicalize()?) .arg(&self.compiler.to_flag()) .arg("-o") .arg(&self.native_executable_path) .output()?; if !output.status.success() { bail!( "wasmer create-exe failed with: stdout: {}\n\nstderr: {}", std::str::from_utf8(&output.stdout) .expect("stdout is not utf8! need to handle arbitrary bytes"), std::str::from_utf8(&output.stderr) .expect("stderr is not utf8! 
need to handle arbitrary bytes") ); } Ok(()) } } #[test] fn create_exe_works() -> anyhow::Result<()> { let temp_dir = tempfile::tempdir()?; let operating_dir: PathBuf = temp_dir.path().to_owned(); let wasm_path = operating_dir.join(create_exe_test_wasm_path()); #[cfg(not(windows))] let executable_path = operating_dir.join("wasm.out"); #[cfg(windows)] let executable_path = operating_dir.join("wasm.exe"); WasmerCreateExe { current_dir: operating_dir.clone(), wasm_path, native_executable_path: executable_path.clone(), compiler: Compiler::Cranelift, ..Default::default() } .run() .context("Failed to create-exe wasm with Wasmer")?; let result = run_code( &operating_dir, &executable_path, &["--eval".to_string(), "function greet(name) { return JSON.stringify('Hello, ' + name); }; print(greet('World'));".to_string()], ) .context("Failed to run generated executable")?; let result_lines = result.lines().collect::<Vec<&str>>(); assert_eq!(result_lines, vec!["\"Hello, World\""],); Ok(()) } #[test] fn create_exe_works_with_file() -> anyhow::Result<()> { let temp_dir = tempfile::tempdir()?; let operating_dir: PathBuf = temp_dir.path().to_owned(); let wasm_path = operating_dir.join(create_exe_test_wasm_path()); #[cfg(not(windows))] let executable_path = operating_dir.join("wasm.out"); #[cfg(windows)] let executable_path = operating_dir.join("wasm.exe"); WasmerCreateExe { current_dir: operating_dir.clone(), wasm_path, native_executable_path: executable_path.clone(), compiler: Compiler::Cranelift, ..Default::default() } .run() .context("Failed to create-exe wasm with Wasmer")?; { let mut f = fs::OpenOptions::new() .write(true) .create_new(true) .open(operating_dir.join("test.js"))?; f.write_all(JS_TEST_SRC_CODE)?; } let result = run_code( &operating_dir, &executable_path, &[ "--dir=.".to_string(), "--script".to_string(), "test.js".to_string(), ], ) .context("Failed to run generated executable")?; let result_lines = result.lines().collect::<Vec<&str>>(); assert_eq!(result_lines, 
vec!["\"Hello, World\""],); let result = run_code( &operating_dir, &executable_path, &[ "--mapdir=abc:.".to_string(), "--script".to_string(), "abc/test.js".to_string(), ], ) .context("Failed to run generated executable")?; let result_lines = result.lines().collect::<Vec<&str>>(); assert_eq!(result_lines, vec!["\"Hello, World\""],); Ok(()) }
use anyhow::{bail, Context}; use std::fs; use std::io::prelude::*; use std::path::PathBuf; use std::process::Command; use wasmer_integration_tests_cli::*; fn create_exe_test_wasm_path() -> String { format!("{}/{}", C_ASSET_PATH, "qjs.wasm") } const JS_TEST_SRC_CODE: &[u8] = b"function greet(name) { return JSON.stringify('Hello, ' + name); }; print(greet('World'));\n"; #[derive(Debug)] struct WasmerCreateExe { current_dir: PathBuf, wasmer_path: PathBuf, wasm_path: PathBuf, native_executable_path: PathBuf, compiler: Compiler, } impl Default for WasmerCreateExe { fn default() -> Self { #[cfg(not(windows))] let native_executable_path = PathBuf::from("wasm.out"); #[cfg(windows)] let native_executable_path = PathBuf::from("wasm.exe"); Self { current_dir: std::env::current_dir().unwrap(), wasmer_path: get_wasmer_path(), wasm_path: PathBuf::from(create_exe_test_wasm_path()), native_executable_path, compiler: Compiler::Cranelift, } } } impl WasmerCreateExe { fn run(&self) -> anyhow::Result<()> { let output = Command::new(&self.wasmer_path) .current_dir(&self.current_dir) .arg("create-exe") .arg(&self.wasm_path.canonicalize()?) .arg(&self.compiler.to_flag()) .arg("-o") .arg(&self.native_executable_path) .output()?; if !output.status.success() { bail!( "wasmer create-exe failed with: stdout: {}\n\nstderr: {}", std::str::from_utf8(&output.stdout) .expect("stdout is not utf8! need to handl
} #[test] fn create_exe_works() -> anyhow::Result<()> { let temp_dir = tempfile::tempdir()?; let operating_dir: PathBuf = temp_dir.path().to_owned(); let wasm_path = operating_dir.join(create_exe_test_wasm_path()); #[cfg(not(windows))] let executable_path = operating_dir.join("wasm.out"); #[cfg(windows)] let executable_path = operating_dir.join("wasm.exe"); WasmerCreateExe { current_dir: operating_dir.clone(), wasm_path, native_executable_path: executable_path.clone(), compiler: Compiler::Cranelift, ..Default::default() } .run() .context("Failed to create-exe wasm with Wasmer")?; let result = run_code( &operating_dir, &executable_path, &["--eval".to_string(), "function greet(name) { return JSON.stringify('Hello, ' + name); }; print(greet('World'));".to_string()], ) .context("Failed to run generated executable")?; let result_lines = result.lines().collect::<Vec<&str>>(); assert_eq!(result_lines, vec!["\"Hello, World\""],); Ok(()) } #[test] fn create_exe_works_with_file() -> anyhow::Result<()> { let temp_dir = tempfile::tempdir()?; let operating_dir: PathBuf = temp_dir.path().to_owned(); let wasm_path = operating_dir.join(create_exe_test_wasm_path()); #[cfg(not(windows))] let executable_path = operating_dir.join("wasm.out"); #[cfg(windows)] let executable_path = operating_dir.join("wasm.exe"); WasmerCreateExe { current_dir: operating_dir.clone(), wasm_path, native_executable_path: executable_path.clone(), compiler: Compiler::Cranelift, ..Default::default() } .run() .context("Failed to create-exe wasm with Wasmer")?; { let mut f = fs::OpenOptions::new() .write(true) .create_new(true) .open(operating_dir.join("test.js"))?; f.write_all(JS_TEST_SRC_CODE)?; } let result = run_code( &operating_dir, &executable_path, &[ "--dir=.".to_string(), "--script".to_string(), "test.js".to_string(), ], ) .context("Failed to run generated executable")?; let result_lines = result.lines().collect::<Vec<&str>>(); assert_eq!(result_lines, vec!["\"Hello, World\""],); let result = run_code( 
&operating_dir, &executable_path, &[ "--mapdir=abc:.".to_string(), "--script".to_string(), "abc/test.js".to_string(), ], ) .context("Failed to run generated executable")?; let result_lines = result.lines().collect::<Vec<&str>>(); assert_eq!(result_lines, vec!["\"Hello, World\""],); Ok(()) }
e arbitrary bytes"), std::str::from_utf8(&output.stderr) .expect("stderr is not utf8! need to handle arbitrary bytes") ); } Ok(()) }
function_block-function_prefixed
[ { "content": "fn compile_and_compare(name: &str, engine: impl Engine, wasm: &[u8]) {\n\n let store = Store::new(&engine);\n\n\n\n // compile for first time\n\n let module = Module::new(&store, wasm).unwrap();\n\n let first = module.serialize().unwrap();\n\n\n\n // compile for second time\n\n l...
Rust
src/scd30/mod.rs
joemclo/knurling-sessions-cabon-sensor
893ffccb58166f273c02f3951bdce793f2fbd5fd
use crc_all::Crc; use embedded_hal::blocking::i2c::{Read, Write}; pub struct SensorData { pub co2: f32, pub temperature: f32, pub humidity: f32, } const DEFAULT_ADDRESS: u8 = 0x61; enum Command { StartContinuousMeasurement = 0x0010, StopContinuousMeasurement = 0x0104, MeasurementInterval = 0x4600, GetDataReadyStatus = 0x0202, ReadMeasurement = 0x0300, ASC = 0x5306, TemperatureOffset = 0x5403, ReadFirmwareVersion = 0xd100, SoftReset = 0xd304, } pub struct SCD30<T>(T); impl<T, E> SCD30<T> where T: Read<Error = E> + Write<Error = E>, { pub fn init(i2c2: T) -> Self { SCD30(i2c2) } pub fn read_firmware_version(&mut self) -> Result<[u8; 2], E> { let mut rd_buffer = [0u8; 2]; self.0.write( DEFAULT_ADDRESS, &(Command::ReadFirmwareVersion as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; let major = u8::from_be(rd_buffer[0]); let minor = u8::from_be(rd_buffer[1]); Ok([major, minor]) } pub fn soft_reset(&mut self) -> Result<(), E> { self.0 .write(DEFAULT_ADDRESS, &(Command::SoftReset as u16).to_be_bytes())?; Ok(()) } fn get_crc(&mut self) -> Crc<u8> { let crc = Crc::<u8>::new(0x31, 8, 0xFF, 0x00, false); crc } pub fn set_temperature_offset(&mut self, temperature_offset: u16) -> Result<(), E> { let temperature_offset_bytes: &[u8; 2] = &temperature_offset.to_be_bytes(); let command: [u8; 2] = (Command::TemperatureOffset as u16).to_be_bytes(); let mut command: [u8; 5] = [ command[0], command[1], temperature_offset_bytes[0], temperature_offset_bytes[1], 0x00, ]; let mut crc = self.get_crc(); crc.update(temperature_offset_bytes); command[4] = crc.finish(); self.0.write(DEFAULT_ADDRESS, &command)?; Ok(()) } pub fn read_temperature_offset(&mut self) -> Result<u16, E> { let mut rd_buffer = [0u8; 3]; self.0.write( DEFAULT_ADDRESS, &(Command::TemperatureOffset as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]])) } pub fn start_continuous_measurement(&mut self, pressure: &u16) -> 
Result<(), E> { let argument_bytes = &pressure.to_be_bytes(); let mut crc = self.get_crc(); crc.update(argument_bytes); let command = (Command::StartContinuousMeasurement as u16).to_be_bytes(); let command: [u8; 5] = [ command[0], command[1], argument_bytes[0], argument_bytes[1], crc.finish(), ]; self.0.write(DEFAULT_ADDRESS, &command)?; Ok(()) } pub fn stop_continuous_measurement(&mut self) -> Result<(), E> { self.0.write( DEFAULT_ADDRESS, &(Command::StopContinuousMeasurement as u16).to_be_bytes(), )?; Ok(()) } pub fn set_measurement_interval(&mut self, interval: u16) -> Result<(), E> { let argument_bytes = &interval.to_be_bytes(); let mut crc = self.get_crc(); crc.update(argument_bytes); let command = (Command::MeasurementInterval as u16).to_be_bytes(); let command: [u8; 5] = [ command[0], command[1], argument_bytes[0], argument_bytes[1], crc.finish(), ]; self.0.write(DEFAULT_ADDRESS, &command)?; Ok(()) } pub fn get_measurement_interval(&mut self) -> Result<u16, E> { let mut rd_buffer = [0u8; 3]; self.0.write( DEFAULT_ADDRESS, &(Command::MeasurementInterval as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]])) } pub fn data_ready(&mut self) -> Result<bool, E> { let mut rd_buffer = [0u8; 3]; self.0.write( DEFAULT_ADDRESS, &(Command::GetDataReadyStatus as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]]) == 1) } pub fn read_measurement(&mut self) -> Result<SensorData, E> { let mut rd_buffer = [0u8; 18]; self.0.write( DEFAULT_ADDRESS, &(Command::ReadMeasurement as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; let sensor_data = SensorData { co2: f32::from_be_bytes([rd_buffer[0], rd_buffer[1], rd_buffer[3], rd_buffer[4]]), temperature: f32::from_be_bytes([ rd_buffer[6], rd_buffer[7], rd_buffer[9], rd_buffer[10], ]), humidity: f32::from_be_bytes([ rd_buffer[12], rd_buffer[13], rd_buffer[15], 
rd_buffer[16], ]), }; Ok(sensor_data) } pub fn activate_auto_self_calibration(&mut self) -> Result<bool, E> { let argument_bytes: [u8; 2] = [0x00, 0x01]; let mut crc = self.get_crc(); crc.update(&argument_bytes); let command = (Command::ASC as u16).to_be_bytes(); let command: [u8; 5] = [ command[0], command[1], argument_bytes[0], argument_bytes[1], crc.finish(), ]; self.0.write(DEFAULT_ADDRESS, &command)?; self.0 .write(DEFAULT_ADDRESS, &(Command::ASC as u16).to_be_bytes())?; let mut rd_buffer = [0u8; 3]; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]]) == 1) } }
use crc_all::Crc; use embedded_hal::blocking::i2c::{Read, Write}; pub struct SensorData { pub co2: f32, pub temperature: f32, pub humidity: f32, } const DEFAULT_ADDRESS: u8 = 0x61; enum Command { StartContinuousMeasurement = 0x0010, StopContinuousMeasurement = 0x0104, MeasurementInterval = 0x4600, GetDataReadyStatus = 0x0202, ReadMeasurement = 0x0300, ASC = 0x5306, TemperatureOffset = 0x5403, ReadFirmwareVersion = 0xd100, SoftReset = 0xd304, } pub struct SCD30<T>(T); impl<T, E> SCD30<T> where T: Read<Error = E> + Write<Error = E>, { pub fn init(i2c2: T) -> Self { SCD30(i2c2) } pub fn read_firmware_version(&mut self) -> Result<[u8; 2], E> { let mut rd_buffer = [0u8; 2]; self.0.write( DEFAULT_ADDRESS, &(Command::ReadFirmwareVersion as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; let major = u8::from_be(rd_buffer[0]); let minor = u8::from_be(rd_buffer[1]); Ok([major, minor]) } pub fn soft_reset(&mut self) -> Result<(), E> { self.0 .write(DEFAULT_ADDRESS, &(Command::SoftReset as u16).to_be_bytes())?; Ok(()) } fn get_crc(&mut self) -> Crc<u8> { let crc = Crc::<u8>::new(0x31, 8, 0xFF, 0x00, false); crc } pub fn set_temperature_offset(&mut self, temperature_offset: u16) -> Result<(), E> { let temperature_offset_bytes: &[u8; 2] = &temperature_offset.to_be_bytes(); let command: [u8; 2] = (Command::TemperatureOffset as u16).to_be_bytes(); let mut command: [u8; 5] = [ command[0], command[1], temperature_offset_bytes[0], temperature_offset_bytes[1], 0x00, ]; let mut crc = self.get_crc(); crc.update(temperature_offset_bytes); command[4] = crc.finish(); self.0.write(DEFAULT_ADDRESS, &command)?; Ok(()) } pub fn read_temperature_offset(&mut self) -> Result<u16, E> { let mut rd_buffer = [0u8; 3]; self.0.write( DEFAULT_ADDRESS, &(Command::TemperatureOffset as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]])) } pub fn start_continuous_measurement(&mut self, pressure: &u16) -> 
Result<(), E> { let argument_bytes = &pressure.to_be_bytes(); let mut crc = self.get_crc(); crc.update(argument_bytes); let command = (Command::StartContinuousMeasurement as u16).to_be_bytes(); let command: [u8; 5] = [ command[0], command[1], argument_bytes[0], argument_bytes[1], crc.finish(), ]; self.0.write(DEFAULT_ADDRESS, &command)?; Ok(()) } pub fn stop_continuous_measurement(&mut self) -> Result<(), E> { self.0.write( DEFAULT_ADDRESS, &(Command::StopContinuousMeasurement as u16).to_be_bytes(), )?; Ok(()) } pub fn set_measurement_interval(&mut self, interval: u16) -> Result<(), E> { let argument_bytes = &interval.to_be_bytes(); let mut crc = self.get_crc(); crc.update(argument_bytes); let command = (Command::MeasurementInterval as u16).to_be_bytes(); let command: [u8; 5] = [ command[0], command[1], argument_bytes[0], argument_bytes[1], crc.finish(), ]; self.0.write(DEFAULT_ADDRESS, &command)?; Ok(()) } pub fn get_measurement_interval(&mut self) -> Result<u16, E> { let mut rd_buffer = [0u8; 3]; self.0.write( DEFAULT_ADDRESS, &(Command::MeasurementInterval as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]])) } pub fn data_ready(&mut self) -> Result<bool, E> { let mut rd_buffer = [0u8; 3]; self.0.write( DEFAULT_ADDRESS, &(Command::GetDataReadyStatus as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]]) == 1) } pub fn read_measurement(&mut self) -> Result<SensorData, E> { let mut rd_buffer = [0u8; 18]; self.0.write( DEFAULT_ADDRESS, &(Command::ReadMeasurement as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; let sensor_data
_bytes([ rd_buffer[6], rd_buffer[7], rd_buffer[9], rd_buffer[10], ]), humidity: f32::from_be_bytes([ rd_buffer[12], rd_buffer[13], rd_buffer[15], rd_buffer[16], ]), }; Ok(sensor_data) } pub fn activate_auto_self_calibration(&mut self) -> Result<bool, E> { let argument_bytes: [u8; 2] = [0x00, 0x01]; let mut crc = self.get_crc(); crc.update(&argument_bytes); let command = (Command::ASC as u16).to_be_bytes(); let command: [u8; 5] = [ command[0], command[1], argument_bytes[0], argument_bytes[1], crc.finish(), ]; self.0.write(DEFAULT_ADDRESS, &command)?; self.0 .write(DEFAULT_ADDRESS, &(Command::ASC as u16).to_be_bytes())?; let mut rd_buffer = [0u8; 3]; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]]) == 1) } }
= SensorData { co2: f32::from_be_bytes([rd_buffer[0], rd_buffer[1], rd_buffer[3], rd_buffer[4]]), temperature: f32::from_be
function_block-random_span
[ { "content": "pub fn draw_titles(mut display: Display4in2) -> Display4in2 {\n\n draw_large_text(&mut display, \"Air Quality\", (20, 30));\n\n\n\n draw_mid_text(&mut display, \"Carbon Dioxide:\", (20, 90));\n\n draw_mid_text(&mut display, \"Temperature:\", (20, 130));\n\n draw_mid_text(&mut display, ...
Rust
src/block_renderer.rs
AnthonyTornetta/bevy_testing
9f8a8e6bda66b2ac6b8caea72dded2e4740bdab3
use crate::blocks::block; use bevy::prelude::*; use bevy::render::mesh::Indices; use bevy::render::render_resource::PrimitiveTopology; use crate::base_renderable; use crate::base_renderable::CanCreateSubMesh; use crate::blocks::block::Side; pub const U_WIDTH: f32 = 0.5; pub const V_HEIGHT: f32 = 0.5; const DEFAULT_FRONT_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([-0.5, -0.5, 0.5], [0., 0., 1.0], [0., 1.0]), ([0.5, -0.5, 0.5], [0., 0., 1.0], [1.0, 1.0]), ([0.5, 0.5, 0.5], [0., 0., 1.0], [1.0, 0.0]), ([-0.5, 0.5, 0.5], [0., 0., 1.0], [0., 0.0]), ]; const DEFAULT_FRONT_INDICES: [u32; 6] = [0, 1, 2, 2, 3, 0]; const DEFAULT_BACK_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([-0.5, 0.5, -0.5], [0., 0., -1.0], [0., 1.0]), ([0.5, 0.5, -0.5], [0., 0., -1.0], [1.0, 1.0]), ([0.5, -0.5, -0.5], [0., 0., -1.0], [1.0, 0.0]), ([-0.5, -0.5, -0.5], [0., 0., -1.0], [0., 0.0]), ]; const DEFAULT_BACK_INDICES: [u32; 6] = [4, 5, 6, 6, 7, 4]; const DEFAULT_RIGHT_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([0.5, -0.5, -0.5], [1.0, 0., 0.], [1.0, 1.0]), ([0.5, 0.5, -0.5], [1.0, 0., 0.], [1.0, 0.0]), ([0.5, 0.5, 0.5], [1.0, 0., 0.], [0.0, 0.0]), ([0.5, -0.5, 0.5], [1.0, 0., 0.], [0.0, 1.0]), ]; const DEFAULT_RIGHT_INDICES: [u32; 6] = [8, 9, 10, 10, 11, 8]; const DEFAULT_LEFT_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([-0.5, -0.5, 0.5], [-1.0, 0., 0.], [1.0, 1.0]), ([-0.5, 0.5, 0.5], [-1.0, 0., 0.], [1.0, 0.0]), ([-0.5, 0.5, -0.5], [-1.0, 0., 0.], [0.0, 0.0]), ([-0.5, -0.5, -0.5], [-1.0, 0., 0.], [0.0, 1.0]), ]; const DEFAULT_LEFT_INDICES: [u32; 6] = [12, 13, 14, 14, 15, 12]; const DEFAULT_TOP_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([0.5, 0.5, -0.5], [0., 1.0, 0.], [1.0, 0.]), ([-0.5, 0.5, -0.5], [0., 1.0, 0.], [0., 0.]), ([-0.5, 0.5, 0.5], [0., 1.0, 0.], [0., 1.0]), ([0.5, 0.5, 0.5], [0., 1.0, 0.], [1.0, 1.0]), ]; const DEFAULT_TOP_INDICES: [u32; 6] = [16, 17, 18, 18, 19, 16]; const DEFAULT_BOTTOM_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([0.5, -0.5, 0.5], [0., -1.0, 
0.], [1.0, 0.]), ([-0.5, -0.5, 0.5], [0., -1.0, 0.], [0., 0.]), ([-0.5, -0.5, -0.5], [0., -1.0, 0.], [0., 1.0]), ([0.5, -0.5, -0.5], [0., -1.0, 0.], [1.0, 1.0]), ]; const DEFAULT_BOTTOM_INDICES: [u32; 6] = [20, 21, 22, 22, 23, 20]; #[inline] fn apply_offset<T: HasUVs>( default_info: [([f32; 3], [f32; 3], [f32; 2]); 4], offset: &Vec3, has_uvs: &T, side: Side, ) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { let mut res = Vec::with_capacity(default_info.len()); for mut item in default_info { item.0[0] += offset.x; item.0[1] += offset.y; item.0[2] += offset.z; item.2[0] = has_uvs.u_width(side) * item.2[0] + has_uvs.u_min(side); item.2[1] = has_uvs.v_height(side) * item.2[1] + has_uvs.v_min(side); res.push(item); } res } pub trait HasUVs { fn u_min(&self, side: block::Side) -> f32; fn u_width(&self, _side: block::Side) -> f32 { U_WIDTH } fn v_min(&self, side: block::Side) -> f32; fn v_height(&self, _side: block::Side) -> f32 { V_HEIGHT } } pub struct DefaultBlockMeshCreator<T: HasUVs> { uv_chooser: T, } impl<T: HasUVs> DefaultBlockMeshCreator<T> { pub fn new(uv_chooser: T) -> Self { DefaultBlockMeshCreator { uv_chooser } } } impl<T: HasUVs> base_renderable::CanCreateSubMesh for DefaultBlockMeshCreator<T> { fn right_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_RIGHT_INFO, offset, &self.uv_chooser, Side::RIGHT) } fn right_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn left_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_LEFT_INFO, offset, &self.uv_chooser, Side::LEFT) } fn left_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn top_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_TOP_INFO, offset, &self.uv_chooser, Side::TOP) } fn top_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn bottom_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { 
apply_offset(DEFAULT_BOTTOM_INFO, offset, &self.uv_chooser, Side::BOTTOM) } fn bottom_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn front_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_FRONT_INFO, offset, &self.uv_chooser, Side::FRONT) } fn front_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn back_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_BACK_INFO, offset, &self.uv_chooser, Side::BACK) } fn back_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } } pub struct EmptyMeshCreator; impl EmptyMeshCreator { pub fn new() -> Self { EmptyMeshCreator {} } } impl base_renderable::CanCreateSubMesh for EmptyMeshCreator { fn right_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn right_indices(&self) -> Vec<u32> { Vec::new() } fn left_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn left_indices(&self) -> Vec<u32> { Vec::new() } fn top_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn top_indices(&self) -> Vec<u32> { Vec::new() } fn bottom_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn bottom_indices(&self) -> Vec<u32> { Vec::new() } fn front_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn front_indices(&self) -> Vec<u32> { Vec::new() } fn back_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn back_indices(&self) -> Vec<u32> { Vec::new() } }
use crate::blocks::block; use bevy::prelude::*; use bevy::render::mesh::Indices; use bevy::render::render_resource::PrimitiveTopology; use crate::base_renderable; use crate::base_renderable::CanCreateSubMesh; use crate::blocks::block::Side; pub const U_WIDTH: f32 = 0.5; pub const V_HEIGHT: f32 = 0.5; const DEFAULT_FRONT_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([-0.5, -0.5, 0.5], [0., 0., 1.0], [0., 1.0]), ([0.5, -0.5, 0.5], [0., 0., 1.0], [1.0, 1.0]), ([0.5, 0.5, 0.5], [0., 0., 1.0], [1.0, 0.0]), ([-0.5, 0.5, 0.5], [0., 0., 1.0], [0., 0.0]), ]; const DEFAULT_FRONT_INDICES: [u32; 6] = [0, 1, 2, 2, 3, 0]; const DEFAULT_BACK_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([-0.5, 0.5, -0.5], [0., 0., -1.0], [0., 1.0]), ([0.5, 0.5, -0.5], [0., 0., -1.0], [1.0, 1.0]), ([0.5, -0.5, -0.5], [0., 0., -1.0], [1.0, 0.0]), ([-0.5, -0.5, -0.5], [0., 0., -1.0], [0., 0.0]), ]; const DEFAULT_BACK_INDICES: [u32; 6] = [4, 5, 6, 6, 7, 4]; const DEFAULT_RIGHT_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([0.5, -0.5, -0.5], [1.0, 0., 0.], [1.0, 1.0]), ([0.5, 0.5, -0.5], [1.0, 0., 0.], [1.0, 0.0]), ([0.5, 0.5, 0.5], [1.0, 0., 0.], [0.0, 0.0]), ([0.5, -0.5, 0.5], [1.0, 0., 0.], [0.0, 1.0]), ]; const DEFAULT_RIGHT_INDICES: [u32; 6] = [8, 9, 10, 10, 11, 8]; const DEFAULT_LEFT_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([-0.5, -0.5, 0.5], [-1.0, 0., 0.], [1.0, 1.0]), ([-0.5, 0.5, 0.5], [-1.0, 0., 0.], [1.0, 0.0]), ([-0.5, 0.5, -0.5], [-1.0, 0., 0.], [0.0, 0.0]), ([-0.5, -0.5, -0.5], [-1.0, 0., 0.], [0.0, 1.0]), ]; const DEFAULT_LEFT_INDICES: [u32; 6] = [12, 13, 14, 14, 15, 12]; const DEFAULT_TOP_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([0.5, 0.5, -0.5], [0., 1.0, 0.], [1.0, 0.]), ([-0.5, 0.5, -0.5], [0., 1.0, 0.], [0., 0.]), ([-0.5, 0.5, 0.5], [0., 1.0, 0.], [0., 1.0]), ([0.5, 0.5, 0.5], [0., 1.0, 0.], [1.0, 1.0]), ]; const DEFAULT_TOP_INDICES: [u32; 6] = [16, 17, 18, 18, 19, 16]; const DEFAULT_BOTTOM_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([0.5, -0.5, 0.5], [0., -1.0, 
0.], [1.0, 0.]), ([-0.5, -0.5, 0.5], [0., -1.0, 0.], [0., 0.]), ([-0.5, -0.5, -0.5], [0., -1.0, 0.], [0., 1.0]), ([0.5, -0.5, -0.5], [0., -1.0, 0.], [1.0, 1.0]), ]; const DEFAULT_BOTTOM_INDICES: [u32; 6] = [20, 21, 22, 22, 23, 20]; #[inline] fn apply_offset<T: HasUVs>( default_i
n(side); item.2[1] = has_uvs.v_height(side) * item.2[1] + has_uvs.v_min(side); res.push(item); } res } pub trait HasUVs { fn u_min(&self, side: block::Side) -> f32; fn u_width(&self, _side: block::Side) -> f32 { U_WIDTH } fn v_min(&self, side: block::Side) -> f32; fn v_height(&self, _side: block::Side) -> f32 { V_HEIGHT } } pub struct DefaultBlockMeshCreator<T: HasUVs> { uv_chooser: T, } impl<T: HasUVs> DefaultBlockMeshCreator<T> { pub fn new(uv_chooser: T) -> Self { DefaultBlockMeshCreator { uv_chooser } } } impl<T: HasUVs> base_renderable::CanCreateSubMesh for DefaultBlockMeshCreator<T> { fn right_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_RIGHT_INFO, offset, &self.uv_chooser, Side::RIGHT) } fn right_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn left_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_LEFT_INFO, offset, &self.uv_chooser, Side::LEFT) } fn left_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn top_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_TOP_INFO, offset, &self.uv_chooser, Side::TOP) } fn top_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn bottom_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_BOTTOM_INFO, offset, &self.uv_chooser, Side::BOTTOM) } fn bottom_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn front_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_FRONT_INFO, offset, &self.uv_chooser, Side::FRONT) } fn front_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn back_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_BACK_INFO, offset, &self.uv_chooser, Side::BACK) } fn back_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } } pub struct EmptyMeshCreator; impl 
EmptyMeshCreator { pub fn new() -> Self { EmptyMeshCreator {} } } impl base_renderable::CanCreateSubMesh for EmptyMeshCreator { fn right_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn right_indices(&self) -> Vec<u32> { Vec::new() } fn left_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn left_indices(&self) -> Vec<u32> { Vec::new() } fn top_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn top_indices(&self) -> Vec<u32> { Vec::new() } fn bottom_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn bottom_indices(&self) -> Vec<u32> { Vec::new() } fn front_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn front_indices(&self) -> Vec<u32> { Vec::new() } fn back_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn back_indices(&self) -> Vec<u32> { Vec::new() } }
nfo: [([f32; 3], [f32; 3], [f32; 2]); 4], offset: &Vec3, has_uvs: &T, side: Side, ) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { let mut res = Vec::with_capacity(default_info.len()); for mut item in default_info { item.0[0] += offset.x; item.0[1] += offset.y; item.0[2] += offset.z; item.2[0] = has_uvs.u_width(side) * item.2[0] + has_uvs.u_mi
function_block-random_span
[ { "content": "pub fn camera_movement_system(\n\n mut query: Query<(&Camera, &mut Transform)>,\n\n mut ev_motion: EventReader<MouseMotion>,\n\n keys: Res<Input<KeyCode>>,\n\n)\n\n{\n\n let (_cam, mut transform) = query.single_mut();\n\n\n\n let speed = (keys.pressed(KeyCode::LShift) as i32 * 5 + 1...