lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
src/web/websocket.rs
alecdwm/webtron
ba90a9e5d7d388dbe93228eceb1cfc656016288c
use futures::sink::{Sink, SinkExt}; use futures::stream::{Stream, StreamExt}; use log::{debug, error, trace, warn}; use std::net::SocketAddr; use std::sync::Arc; use std::time::Duration; use tokio::select; use tokio::sync::mpsc; use tokio::sync::mpsc::{Receiver, Sender}; use tokio::sync::Mutex; use tokio::time; use warp::reply::Reply; use warp::ws::{Message, WebSocket, Ws}; use crate::server::{ClientId, MessageIn, MessageOut}; const PING_RATE_SECONDS: u64 = 15; pub fn websocket( ws: Ws, ip_address: Option<SocketAddr>, server_tx: Sender<MessageIn>, ) -> impl Reply { let ip_address = ip_address.map(|ip_address| format!("{}", ip_address)); ws.on_upgrade(|websocket| handle_websocket(websocket, ip_address, server_tx)) } async fn handle_websocket( websocket: WebSocket, ip_address: Option<String>, mut server_tx: Sender<MessageIn>, ) { let id = ClientId::default(); let (messages_tx, messages_rx) = mpsc::channel::<MessageOut>(100); let (ws_tx, ws_rx) = websocket.split(); let ws_tx = Arc::new(Mutex::new(ws_tx)); if let Err(error) = server_tx .send(MessageIn::connect(id, ip_address, messages_tx)) .await { error!("Failed to send new client to server: {}", error); return; } let in_task = tokio::spawn(handle_in(id, ws_rx, server_tx.clone())); let out_task = tokio::spawn(handle_out(messages_rx, ws_tx.clone())); let ping_task = tokio::spawn(handle_ping(ws_tx)); if let Err(error) = select! 
{ out = in_task => out, out = out_task => out, out = ping_task => out, } { error!("Failure occurred while handling websocket: {}", error); } if let Err(error) = server_tx.send(MessageIn::disconnect(id)).await { error!("Failed to send client disconnect to server: {}", error); } } async fn handle_in( id: ClientId, mut rx: impl Stream<Item = Result<Message, warp::Error>> + Unpin, mut tx: Sender<MessageIn>, ) { debug!("Websocket handler (in) created"); while let Some(message) = rx.next().await { let message = match message { Ok(message) => message, Err(error) => { error!("Error occurred in incoming message: {}", error); break; } }; if message.is_close() { trace!("Close received: {:?}", message); break; } let text = match message.to_str() { Ok(text) => text, Err(()) => { trace!("Non-text message received: {:?}", message); continue; } }; trace!("Text message received: {}", text); let message = match MessageIn::from_json(id, text) { Ok(message) => message, Err(error) => { warn!("Failed to parse incoming message ({}): {}", text, error); continue; } }; tx.send(message) .await .unwrap_or_else(|error| error!("Failed to send incoming message to server: {}", error)) } debug!("Websocket handler (in) closed"); } async fn handle_out(mut rx: Receiver<MessageOut>, tx: Arc<Mutex<impl Sink<Message> + Unpin>>) { debug!("Websocket handler (out) created"); while let Some(message) = rx.recv().await { let text = match message.to_json() { Ok(text) => text, Err(error) => { error!( "Failed to serialize outgoing message: ({:?}): {}", message, error ); continue; } }; if tx.lock().await.send(Message::text(text)).await.is_err() { error!("Failed to send outgoing message") } } debug!("Websocket handler (out) closed"); } async fn handle_ping(tx: Arc<Mutex<impl Sink<Message> + Unpin>>) { debug!("Websocket handler (ping) created"); let mut interval = time::interval(Duration::from_secs(PING_RATE_SECONDS)); loop { interval.tick().await; if tx .lock() .await .send(Message::ping(Vec::new())) .await 
.is_err() { error!("Failed to send outgoing ping"); break; } } debug!("Websocket handler (ping) closed"); }
use futures::sink::{Sink, SinkExt}; use futures::stream::{Stream, StreamExt}; use log::{debug, error, trace, warn}; use std::net::SocketAddr; use std::sync::Arc; use std::time::Duration; use tokio::select; use tokio::sync::mpsc; use tokio::sync::mpsc::{Receiver, Sender}; use tokio::sync::Mutex; use tokio::time; use warp::reply::Reply; use warp::ws::{Message, WebSocket, Ws}; use crate::server::{ClientId, MessageIn, MessageOut}; const PING_RATE_SECONDS: u64 = 15; pub fn websocket( ws: Ws, ip_address: Option<SocketAddr>, server_tx: Sender<MessageIn>, ) -> impl Reply { let ip_address = ip_address.map(|ip_address| format!("{}", ip_address)); ws.on_upgrade(|websocket| handle_websocket(websocket, ip_address, server_tx)) } async fn handle_websocket( websocket: WebSocket, ip_address: Option<String>, mut server_tx: Sender<MessageIn>, ) { let id = ClientId::default(); let (messages_tx, messages_rx) = mpsc::channel::<MessageOut>(100); let (ws_tx, ws_rx) = websocket.split(); let ws_tx = Arc::new(Mutex::new(ws_tx)); if let Err(error) = server_tx .send(MessageIn::connect(id, ip_address, messages_tx)) .await { error!("Failed to send new client to server: {}", error); return; } let in_task = tokio::spawn(handle_in(id, ws_rx, server_tx.clone())); let out_task = tokio::spawn(handle_out(messages_rx, ws_tx.clone())); let ping_task = tokio::spawn(handle_ping(ws_tx)); if let Err(error) = select! 
{ out = in_task => out, out = out_task => out, out = ping_task => out, } { error!("Failure occurred while handling websocket: {}", error); } if let Err(error) = server_tx.send(MessageIn::disconnect(id)).await { error!("Failed to send client disconnect to server: {}", error); } } async fn handle_in( id: ClientId, mut rx: impl Stream<Item = Result<Message, warp::Error>> + Unpin, mut tx: Sender<MessageIn>, ) { debug!("Websocket handler (in) created"); while let Some(message) = rx.next().await { let message = match message { Ok(message) => message, Err(error) => { error!("Error occurred in incoming message: {}", error); break; } }; if message.is_close() { trace!("Close received: {:?}", message); break; } let text = match message.to_str() { Ok(text) => text, Err(()) => { trace!("Non-text message received: {:?}", message); continue; } }; trace!("Text message received: {}", text); let message = match Messag
async fn handle_out(mut rx: Receiver<MessageOut>, tx: Arc<Mutex<impl Sink<Message> + Unpin>>) { debug!("Websocket handler (out) created"); while let Some(message) = rx.recv().await { let text = match message.to_json() { Ok(text) => text, Err(error) => { error!( "Failed to serialize outgoing message: ({:?}): {}", message, error ); continue; } }; if tx.lock().await.send(Message::text(text)).await.is_err() { error!("Failed to send outgoing message") } } debug!("Websocket handler (out) closed"); } async fn handle_ping(tx: Arc<Mutex<impl Sink<Message> + Unpin>>) { debug!("Websocket handler (ping) created"); let mut interval = time::interval(Duration::from_secs(PING_RATE_SECONDS)); loop { interval.tick().await; if tx .lock() .await .send(Message::ping(Vec::new())) .await .is_err() { error!("Failed to send outgoing ping"); break; } } debug!("Websocket handler (ping) closed"); }
eIn::from_json(id, text) { Ok(message) => message, Err(error) => { warn!("Failed to parse incoming message ({}): {}", text, error); continue; } }; tx.send(message) .await .unwrap_or_else(|error| error!("Failed to send incoming message to server: {}", error)) } debug!("Websocket handler (in) closed"); }
function_block-function_prefixed
[ { "content": "pub fn embed() -> impl Filter<Extract = (impl Reply,), Error = Rejection> + Clone {\n\n let index = warp::path::end().and_then(serve_index);\n\n let path = warp::path::tail().and_then(serve_path);\n\n\n\n index.or(path)\n\n}\n\n\n\nasync fn serve_index() -> Result<impl Reply, Rejection> {...
Rust
src/stats/mod.rs
hhandika/yap
95f0b06770b958afef12105c8088274684bbdae8
mod fasta; mod fastq; mod math; mod output; mod qscores; mod sequence; use std::path::PathBuf; use std::sync::mpsc::channel; use rayon::prelude::*; use walkdir::WalkDir; use crate::stats::sequence::{FastaStats, FastqStats}; pub fn process_wildcard(entries: &[&str], iscsv: bool, fastq: bool) { let files: Vec<PathBuf> = entries.iter().map(PathBuf::from).collect(); par_process_files(&files, iscsv, fastq) } pub fn process_walkdir(path: &str, iscsv: bool, fastq: bool) { let entries = tranverse_dir(path, fastq); par_process_files(&entries, iscsv, fastq) } fn par_process_files(entries: &[PathBuf], iscsv: bool, fastq: bool) { if fastq { par_process_fastq(&entries, iscsv); } else { par_process_fasta(&entries, iscsv); } } fn tranverse_dir(path: &str, fastq: bool) -> Vec<PathBuf> { let mut entries = Vec::new(); WalkDir::new(path) .into_iter() .filter_map(|ok| ok.ok()) .filter(|e| e.file_type().is_file()) .for_each(|e| { if fastq { let files = String::from(e.path().to_string_lossy()); match_fastq(&files, &mut entries); } else { let files = String::from(e.path().to_string_lossy()); match_fasta(&files, &mut entries); } }); entries } fn match_fastq(files: &str, entries: &mut Vec<PathBuf>) { match files { s if s.ends_with(".fastq.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fq.gz") => entries.push(PathBuf::from(files)), s if s.ends_with("fastq.gzip") => entries.push(PathBuf::from(files)), s if s.ends_with("fq.gzip") => entries.push(PathBuf::from(files)), s if s.ends_with("fastq") => entries.push(PathBuf::from(files)), s if s.ends_with("fq") => entries.push(PathBuf::from(files)), _ => (), }; } fn match_fasta(files: &str, entries: &mut Vec<PathBuf>) { match files { s if s.ends_with(".fasta.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fas.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fa.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fasta.gzip") => entries.push(PathBuf::from(files)), s if s.ends_with(".fa.gzip") => 
entries.push(PathBuf::from(files)), s if s.ends_with(".fasta") => entries.push(PathBuf::from(files)), s if s.ends_with(".fas") => entries.push(PathBuf::from(files)), s if s.ends_with(".fa") => entries.push(PathBuf::from(files)), _ => (), }; } fn par_process_fastq(files: &[PathBuf], iscsv: bool) { let (sender, receiver) = channel(); files.into_par_iter().for_each_with(sender, |s, recs| { s.send(fastq::process_fastq(&recs)).unwrap(); }); let mut all_reads: Vec<FastqStats> = receiver.iter().collect(); output::write_fastq(&mut all_reads, iscsv); } fn par_process_fasta(files: &[PathBuf], iscsv: bool) { let (sender, receiver) = channel(); files.into_par_iter().for_each_with(sender, |s, recs| { s.send(fasta::process_fasta(&recs)).unwrap(); }); let mut all_reads: Vec<FastaStats> = receiver.iter().collect(); output::write_fasta(&mut all_reads, iscsv); } #[cfg(test)] mod tests { use super::*; #[test] fn tranverse_dir_test() { let input = "test_files/stats"; let files = tranverse_dir(&input, true); assert_eq!(4, files.len()) } #[test] fn match_fasta_test() { let input = vec!["test.fasta", "test.fas", "test.fa", "test.fa.gz"]; let mut entries = Vec::new(); input.iter().for_each(|e| { match_fasta(&e, &mut entries); }); assert_eq!(4, entries.len()); } #[test] fn match_fastq_test() { let input = vec!["test.fq", "test.fastq", "test.fq.gz", "test.fa.gz"]; let mut entries = Vec::new(); input.iter().for_each(|e| { match_fastq(&e, &mut entries); }); assert_eq!(3, entries.len()); } }
mod fasta; mod fastq; mod math; mod output; mod qscores; mod sequence; use std::path::PathBuf; use std::sync::mpsc::channel; use rayon::prelude::*; use walkdir::WalkDir; use crate::stats::sequence::{FastaStats, FastqStats}; pub fn process_wildcard(entries: &[&str], iscsv: bool, fastq: bool) { let files: Vec<PathBuf> = entries.iter().map(PathBuf::from).collect(); par_process_files(&files, iscsv, fastq) } pub fn process_walkdir(path: &str, iscsv: bool, fastq: bool) { let entries = tranverse_dir(path, fastq); par_process_files(&entries, iscsv, fastq) } fn par_process_files(entries: &[PathBuf], iscsv: bool, fastq: bool) { if fastq { par_process_fastq(&entries, iscsv); } else { par_process_fasta(&entries, iscsv); } } fn tranverse_dir(path: &str, fastq: bool) -> Vec<PathBuf> { let mut entries = Vec::new(); WalkDir::new(path) .into_iter() .filter_map(|ok| ok.ok()) .filter(|e| e.file_type().is_file()) .for_each(|e| { if fastq { let files = String::from(e.path().to_string_lossy()); match_fastq(&files, &mut entries); } else { let files = String::from(e.path().to_string_lossy()); match_fasta(&files, &mut entries); } }); entries } fn match_fastq(files: &str, entries: &mut Vec<PathBuf>) { match files { s if s.ends_with(".fastq.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fq.gz") => entries.push(PathBuf::from(files)), s if s.ends_with("fastq.gzip") => entries.push(PathBuf::from(files)), s if s.ends_with("fq.gzip") => entries.push(PathBuf::from(files)), s if s.ends_with("fastq") => entries.push(PathBuf::from(files)), s if s.ends_with("fq") => entries.push(PathBuf::from(files)), _ => (), }; } fn match_fasta(files: &str, entries: &mut Vec<PathBuf>) { match files { s if s.ends_with(".fasta.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fas.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fa.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fasta.gzip") => entries.push(PathBuf::from(files)), s if s.ends_with(".fa.gzip") => 
entries.push(PathBuf::from(files)), s if s.ends_with(".fasta") => entries.push(PathBuf::from(files)), s if s.ends_with(".fas") => entries.push(PathBuf::from(files)), s if s.ends_with(".fa") => entries.push(PathBuf::from(files)), _ => (), }; } fn par_process_fastq(files: &[PathBuf], iscsv: bool) { let (sender, receiver) = channel(); files.into_par_iter().for_each_with(sender, |s, recs| { s.send(fastq::process_fastq(&recs)).unwrap(); }); let mut all_reads: Vec<FastqStats> = receiver.iter().collect(); output::write_fastq(&mut all_reads, iscsv); } fn par_process_fasta(files: &[PathBuf], iscsv: bool) { let (sender, receiver) = channel();
#[cfg(test)] mod tests { use super::*; #[test] fn tranverse_dir_test() { let input = "test_files/stats"; let files = tranverse_dir(&input, true); assert_eq!(4, files.len()) } #[test] fn match_fasta_test() { let input = vec!["test.fasta", "test.fas", "test.fa", "test.fa.gz"]; let mut entries = Vec::new(); input.iter().for_each(|e| { match_fasta(&e, &mut entries); }); assert_eq!(4, entries.len()); } #[test] fn match_fastq_test() { let input = vec!["test.fq", "test.fastq", "test.fq.gz", "test.fa.gz"]; let mut entries = Vec::new(); input.iter().for_each(|e| { match_fastq(&e, &mut entries); }); assert_eq!(3, entries.len()); } }
files.into_par_iter().for_each_with(sender, |s, recs| { s.send(fasta::process_fasta(&recs)).unwrap(); }); let mut all_reads: Vec<FastaStats> = receiver.iter().collect(); output::write_fasta(&mut all_reads, iscsv); }
function_block-function_prefix_line
[ { "content": "pub fn write_fasta(stats: &mut [FastaStats], iscsv: bool) {\n\n stats.sort_by(|a, b| a.seqname.cmp(&b.seqname));\n\n\n\n println!(\"\\n\\x1b[1mResults:\\x1b[0m\");\n\n stats.iter().for_each(|recs| {\n\n write_fasta_console(&recs);\n\n });\n\n println!(\"Total files: {}\", sta...
Rust
examples/echo_server.rs
over-codes/oc-http
79051db0857225d437e499ef52782591a3ceca28
use std::{ error::Error }; use std::time::Duration; use log::{warn}; use env_logger::Env; use async_std::{ task, io::{ BufReader, BufWriter, }, net::{ TcpListener, }, }; use futures::{ prelude::*, AsyncRead, AsyncWrite, }; use oc_http::{ cookies::{Cookies, Cookie}, }; #[async_std::main] async fn main() -> Result<(), Box<dyn Error>> { env_logger::Builder::from_env(Env::default().default_filter_or("info")).init(); let listener = TcpListener::bind("127.0.0.1:8080").await?; let _local_addr = listener.local_addr()?; let mut incoming = listener.incoming(); while let Some(stream) = incoming.next().await { if let Ok(stream) = stream { task::spawn(handle_request(stream)); } } Ok(()) } async fn handle_request<S>(socket: S) where S: AsyncRead + AsyncWrite + Clone + Unpin { let mut reader = BufReader::new(socket.clone()); let mut writer = BufWriter::new(socket); let mut buf = vec![0; 65536]; let request = match oc_http::http(&mut reader, &mut buf).await { Ok(req) => req, Err(err) => { warn!("Error {}", err); return; }, }; let mut cookies = Cookies::new(&request); if request.path == "/echo" && request.method == "GET" { get_echo(&mut writer).await; } else if request.path == "/echo" && request.method == "POST" { post_echo(&mut reader, &mut writer).await; if let Some(_c) = cookies.get("Who") { writer.write(format!("You are a fool of a took!").as_bytes()).await.unwrap(); } } else { let mut res = oc_http::Response{ code: 404, reason: "NOT FOUND", headers: vec!(), }; cookies.add_cookie(Cookie::new("Who", "You fool!")); cookies.write_cookies(&mut res); oc_http::respond(&mut writer, res).await.unwrap(); } writer.flush().await.unwrap(); } async fn get_echo<S>(mut stream: &mut S) where S: AsyncWrite + Unpin { oc_http::respond(&mut stream, oc_http::Response{ code: 200, reason: "OK", headers: vec!(), }).await.unwrap(); stream.write(b" <html> <body> <form method=\"POST\"> <input name=\"input\"></inpout> <input type=\"submit\"></input> </form> </body> </html> ").await.unwrap(); } async fn 
post_echo<W, R>(reader: &mut R, mut writer: &mut W) where W: AsyncWrite + Unpin, R: AsyncRead + Unpin, { oc_http::respond(&mut writer, oc_http::Response{ code: 200, reason: "OK", headers: vec!(), }).await.unwrap(); let mut buf = vec![0; 10]; while let Ok(Ok(count)) = async_std::future::timeout(Duration::from_millis(10), reader.read(&mut buf)).await { if count == 0 { break; } writer.write_all(&buf[..count]).await.unwrap(); writer.flush().await.unwrap(); } }
use std::{ error::Error }; use std::time::Duration; use log::{warn}; use env_logger::Env; use async_std::{ task, io::{ BufReader, BufWriter, }, net::{ TcpListener, }, }; use futures::{ prelude::*, AsyncRead, AsyncWrite, }; use oc_http::{ cookies::{Cookies, Cookie}, }; #[async_std::main] async fn main() -> Result<(), Box<dyn Error>> { env_logger::Builder::from_env(Env::default().default_filter_or("info")).init(); let listener = TcpListener::bind("127.0.0.1:8080").await?; let _local_addr = listener.local_addr()?; let mut incoming = listener.incoming(); while let Some(stream) = incoming.next().await { if let Ok(stream) = stream { task::spawn(handle_request(stream)); } } Ok(()) } async fn handle_request<S>(socket: S) where S: AsyncRead + AsyncWrite + Clone + Unpin { let mut reader = BufReader::new(socket.clone()); let mut writer = BufWriter::new(socket); let mut buf = vec![0; 65536]; let request = match oc_http::http(&mut reader, &mut buf).await { Ok(req) => req, Err(err) => { warn!("Error {}", err); return; }, }; let mut cookies = Cookies::new(&request); if request.path == "/echo" && request.method == "GET" { get_echo(&mut writer).await; } else if request.path == "/echo" && request.method == "POS
async fn get_echo<S>(mut stream: &mut S) where S: AsyncWrite + Unpin { oc_http::respond(&mut stream, oc_http::Response{ code: 200, reason: "OK", headers: vec!(), }).await.unwrap(); stream.write(b" <html> <body> <form method=\"POST\"> <input name=\"input\"></inpout> <input type=\"submit\"></input> </form> </body> </html> ").await.unwrap(); } async fn post_echo<W, R>(reader: &mut R, mut writer: &mut W) where W: AsyncWrite + Unpin, R: AsyncRead + Unpin, { oc_http::respond(&mut writer, oc_http::Response{ code: 200, reason: "OK", headers: vec!(), }).await.unwrap(); let mut buf = vec![0; 10]; while let Ok(Ok(count)) = async_std::future::timeout(Duration::from_millis(10), reader.read(&mut buf)).await { if count == 0 { break; } writer.write_all(&buf[..count]).await.unwrap(); writer.flush().await.unwrap(); } }
T" { post_echo(&mut reader, &mut writer).await; if let Some(_c) = cookies.get("Who") { writer.write(format!("You are a fool of a took!").as_bytes()).await.unwrap(); } } else { let mut res = oc_http::Response{ code: 404, reason: "NOT FOUND", headers: vec!(), }; cookies.add_cookie(Cookie::new("Who", "You fool!")); cookies.write_cookies(&mut res); oc_http::respond(&mut writer, res).await.unwrap(); } writer.flush().await.unwrap(); }
function_block-function_prefixed
[ { "content": "fn main() {\n\n println!(\"Hello world!\");\n\n}\n\n\n\n/*\n\nuse std::io;\n\nuse std::error::Error;\n\nuse env_logger::Env;\n\nuse async_trait::async_trait;\n\nuse async_std::{\n\n prelude::*,\n\n sync::Arc,\n\n net::{\n\n TcpListener,\n\n },\n\n};\n\n\n\n#[async_std::main]\...
Rust
contracts/mirror_staking/src/contract.rs
jaypersanchez/shade
9b7357c366dceb108a300944d66dbf6deb735c01
use cosmwasm_std::{ from_binary, log, to_binary, Api, Binary, Decimal, Env, Extern, HandleResponse, HandleResult, HumanAddr, InitResponse, MigrateResponse, MigrateResult, Querier, StdError, StdResult, Storage, Uint128, }; use mirror_protocol::staking::{ ConfigResponse, Cw20HookMsg, HandleMsg, InitMsg, MigrateMsg, PoolInfoResponse, QueryMsg, }; use crate::migration::{migrate_config, migrate_pool_infos}; use crate::rewards::{adjust_premium, deposit_reward, query_reward_info, withdraw_reward}; use crate::staking::{ auto_stake, auto_stake_hook, bond, decrease_short_token, increase_short_token, unbond, }; use crate::state::{read_config, read_pool_info, store_config, store_pool_info, Config, PoolInfo}; use cw20::Cw20ReceiveMsg; pub fn init<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, _env: Env, msg: InitMsg, ) -> StdResult<InitResponse> { store_config( &mut deps.storage, &Config { owner: deps.api.canonical_address(&msg.owner)?, mirror_token: deps.api.canonical_address(&msg.mirror_token)?, mint_contract: deps.api.canonical_address(&msg.mint_contract)?, oracle_contract: deps.api.canonical_address(&msg.oracle_contract)?, terraswap_factory: deps.api.canonical_address(&msg.terraswap_factory)?, base_denom: msg.base_denom, premium_min_update_interval: msg.premium_min_update_interval, }, )?; Ok(InitResponse::default()) } pub fn handle<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, msg: HandleMsg, ) -> StdResult<HandleResponse> { match msg { HandleMsg::Receive(msg) => receive_cw20(deps, env, msg), HandleMsg::UpdateConfig { owner, premium_min_update_interval, } => update_config(deps, env, owner, premium_min_update_interval), HandleMsg::RegisterAsset { asset_token, staking_token, } => register_asset(deps, env, asset_token, staking_token), HandleMsg::Unbond { asset_token, amount, } => unbond(deps, env.message.sender, asset_token, amount), HandleMsg::Withdraw { asset_token } => withdraw_reward(deps, env, asset_token), HandleMsg::AdjustPremium { 
asset_tokens } => adjust_premium(deps, env, asset_tokens), HandleMsg::IncreaseShortToken { staker_addr, asset_token, amount, } => increase_short_token(deps, env, staker_addr, asset_token, amount), HandleMsg::DecreaseShortToken { staker_addr, asset_token, amount, } => decrease_short_token(deps, env, staker_addr, asset_token, amount), HandleMsg::AutoStake { assets, slippage_tolerance, } => auto_stake(deps, env, assets, slippage_tolerance), HandleMsg::AutoStakeHook { asset_token, staking_token, staker_addr, prev_staking_token_amount, } => auto_stake_hook( deps, env, asset_token, staking_token, staker_addr, prev_staking_token_amount, ), } } pub fn receive_cw20<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, cw20_msg: Cw20ReceiveMsg, ) -> HandleResult { if let Some(msg) = cw20_msg.msg { let config: Config = read_config(&deps.storage)?; match from_binary(&msg)? { Cw20HookMsg::Bond { asset_token } => { let pool_info: PoolInfo = read_pool_info(&deps.storage, &deps.api.canonical_address(&asset_token)?)?; if pool_info.staking_token != deps.api.canonical_address(&env.message.sender)? { return Err(StdError::unauthorized()); } bond(deps, env, cw20_msg.sender, asset_token, cw20_msg.amount) } Cw20HookMsg::DepositReward { rewards } => { if config.mirror_token != deps.api.canonical_address(&env.message.sender)? 
{ return Err(StdError::unauthorized()); } let mut rewards_amount = Uint128::zero(); for (_, amount) in rewards.iter() { rewards_amount += *amount; } if rewards_amount != cw20_msg.amount { return Err(StdError::generic_err("rewards amount miss matched")); } deposit_reward(deps, rewards, rewards_amount) } } } else { Err(StdError::generic_err("data should be given")) } } pub fn update_config<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, owner: Option<HumanAddr>, premium_min_update_interval: Option<u64>, ) -> StdResult<HandleResponse> { let mut config: Config = read_config(&deps.storage)?; if deps.api.canonical_address(&env.message.sender)? != config.owner { return Err(StdError::unauthorized()); } if let Some(owner) = owner { config.owner = deps.api.canonical_address(&owner)?; } if let Some(premium_min_update_interval) = premium_min_update_interval { config.premium_min_update_interval = premium_min_update_interval; } store_config(&mut deps.storage, &config)?; Ok(HandleResponse { messages: vec![], log: vec![log("action", "update_config")], data: None, }) } fn register_asset<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, asset_token: HumanAddr, staking_token: HumanAddr, ) -> HandleResult { let config: Config = read_config(&deps.storage)?; let asset_token_raw = deps.api.canonical_address(&asset_token)?; if config.owner != deps.api.canonical_address(&env.message.sender)? 
{ return Err(StdError::unauthorized()); } if read_pool_info(&deps.storage, &asset_token_raw).is_ok() { return Err(StdError::generic_err("Asset was already registered")); } store_pool_info( &mut deps.storage, &asset_token_raw, &PoolInfo { staking_token: deps.api.canonical_address(&staking_token)?, total_bond_amount: Uint128::zero(), total_short_amount: Uint128::zero(), reward_index: Decimal::zero(), short_reward_index: Decimal::zero(), pending_reward: Uint128::zero(), short_pending_reward: Uint128::zero(), premium_rate: Decimal::zero(), short_reward_weight: Decimal::zero(), premium_updated_time: 0, }, )?; Ok(HandleResponse { messages: vec![], log: vec![ log("action", "register_asset"), log("asset_token", asset_token.as_str()), ], data: None, }) } pub fn query<S: Storage, A: Api, Q: Querier>( deps: &Extern<S, A, Q>, msg: QueryMsg, ) -> StdResult<Binary> { match msg { QueryMsg::Config {} => to_binary(&query_config(deps)?), QueryMsg::PoolInfo { asset_token } => to_binary(&query_pool_info(deps, asset_token)?), QueryMsg::RewardInfo { staker_addr, asset_token, } => to_binary(&query_reward_info(deps, staker_addr, asset_token)?), } } pub fn query_config<S: Storage, A: Api, Q: Querier>( deps: &Extern<S, A, Q>, ) -> StdResult<ConfigResponse> { let state = read_config(&deps.storage)?; let resp = ConfigResponse { owner: deps.api.human_address(&state.owner)?, mirror_token: deps.api.human_address(&state.mirror_token)?, mint_contract: deps.api.human_address(&state.mint_contract)?, oracle_contract: deps.api.human_address(&state.oracle_contract)?, terraswap_factory: deps.api.human_address(&state.terraswap_factory)?, base_denom: state.base_denom, premium_min_update_interval: state.premium_min_update_interval, }; Ok(resp) } pub fn query_pool_info<S: Storage, A: Api, Q: Querier>( deps: &Extern<S, A, Q>, asset_token: HumanAddr, ) -> StdResult<PoolInfoResponse> { let asset_token_raw = deps.api.canonical_address(&asset_token)?; let pool_info: PoolInfo = read_pool_info(&deps.storage, 
&asset_token_raw)?; Ok(PoolInfoResponse { asset_token, staking_token: deps.api.human_address(&pool_info.staking_token)?, total_bond_amount: pool_info.total_bond_amount, total_short_amount: pool_info.total_short_amount, reward_index: pool_info.reward_index, short_reward_index: pool_info.short_reward_index, pending_reward: pool_info.pending_reward, short_pending_reward: pool_info.short_pending_reward, premium_rate: pool_info.premium_rate, short_reward_weight: pool_info.short_reward_weight, premium_updated_time: pool_info.premium_updated_time, }) } pub fn migrate<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, _env: Env, msg: MigrateMsg, ) -> MigrateResult { migrate_config( &mut deps.storage, deps.api.canonical_address(&msg.mint_contract)?, deps.api.canonical_address(&msg.oracle_contract)?, deps.api.canonical_address(&msg.terraswap_factory)?, msg.base_denom, msg.premium_min_update_interval, )?; migrate_pool_infos(&mut deps.storage)?; Ok(MigrateResponse::default()) }
use cosmwasm_std::{ from_binary, log, to_binary, Api, Binary, Decimal, Env, Extern, HandleResponse, HandleResult, HumanAddr, InitResponse, MigrateResponse, MigrateResult, Querier, StdError, StdResult, Storage, Uint128, }; use mirror_protocol::staking::{ ConfigResponse, Cw20HookMsg, HandleMsg, InitMsg, MigrateMsg, PoolInfoResponse, QueryMsg, }; use crate::migration::{migrate_config, migrate_pool_infos}; use crate::rewards::{adjust_premium, deposit_reward, query_reward_info, withdraw_reward}; use crate::staking::{ auto_stake, auto_stake_hook, bond, decrease_short_token, increase_short_token, unbond, }; use crate::state::{read_config, read_pool_info, store_config, store_pool_info, Config, PoolInfo}; use cw20::Cw20ReceiveMsg; pub fn init<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, _env: Env, msg: InitMsg, ) -> StdResult<InitResponse> { store_config( &mut deps.storage, &Config { owner: deps.api.canonical_address(&msg.owner)?, mirror_token: deps.api.canonical_address(&msg.mirror_token)?, mint_contract: deps.api.canonical_address(&msg.mint_contract)?, oracle_contract: deps.api.canonical_address(&msg.oracle_contract)?, terraswap_factory: deps.api.canonical_address(&msg.terraswap_factory)?, base_denom: msg.base_denom, premium_min_update_interval: msg.premium_min_update_interval, }, )?; Ok(InitResponse::default()) } pub fn handle<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, msg: HandleMsg, ) -> StdResult<HandleResponse> { match msg { HandleMsg::Receive(msg) => receive_cw20(deps, env, msg), HandleMsg::UpdateConfig { owner, premium_min_update_interval, } => update_config(deps, env, owner, premium_min_update_interval), HandleMsg::RegisterAsset { asset_token, staking_token, } => register_asset(deps, env, asset_token, staking_token), HandleMsg::Unbond { asset_token, amount, } => unbond(deps, env.message.sender, asset_token, amount), HandleMsg::Withdraw { asset_token } => withdraw_reward(deps, env, asset_token), HandleMsg::AdjustPremium { 
asset_tokens } => adjust_premium(deps, env, asset_tokens), HandleMsg::IncreaseShortToken { staker_addr, asset_token, amount, } => increase_short_token(deps, env, staker_addr, asset_token, amount), HandleMsg::DecreaseShortToken { staker_addr, asset_token, amount, } => decrease_short_token(deps, env, staker_addr, asset_token, amount), HandleMsg::AutoStake { assets, slippage_tolerance, } => auto_stake(deps, env, assets, slippage_tolerance), HandleMsg::AutoStakeHook { asset_token, staking_token, staker_addr, prev_staking_token_amount, } => auto_stake_hook( deps, env, asset_token, staking_token, staker_addr, prev_staking_token_amount, ), } } pub fn receive_cw20<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, cw20_msg: Cw20ReceiveMsg, ) -> HandleResult { if let Some(msg) = cw20_msg.msg { let config: Config = read_config(&deps.storage)?; match from_binary(&msg)? { Cw20HookMsg::Bond { asset_token } => { let pool_info: PoolInfo = read_pool_info(&deps.storage, &deps.api.canonical_address(&asset_token)?)?; if pool_info.staking_token != deps.api.canonical_address(&env.message.sender)? { return Err(StdError::unauthorized()); } bond(deps, env, cw20_msg.sender, asset_token, cw20_msg.amount) } Cw20HookMsg::DepositReward { rewards } => { if config.mirror_token != deps.api.canonical_address(&env.message.sender)? 
{ return Err(StdError::unauthorized()); } let mut rewards_amount = Uint128::zero(); for (_, amount) in rewards.iter() { rewards_amount += *amount; } if rewards_amount != cw20_msg.amount { return Err(StdError::generic_err("rewards amount miss matched")); } deposit_reward(deps, rewards, rewards_amount) } } } else { Err(StdError::generic_err("data should be given")) } } pub fn update_config<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, owner: Option<HumanAddr>, premium_min_update_interval: Option<u64>, ) -> StdResult<HandleResponse> { let mut config: Config = read_config(&deps.storage)?; if deps.api.canonical_address(&env.message.sender)? != config.owner { return Err(StdError::unauthorized()); } if let Some(owner) = owner { config.owner = deps.api.canonical_address(&owner)?; } if let Some(premium_min_update_interval) = premium_min_update_interval { config.premium_min_update_interval = premium_min_update_interval; } store_config(&mut deps.storage, &config)?; Ok(HandleResponse { messages: vec![], log: vec![log("action", "update_config")], data: None, }) } fn register_asset<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, asset_token: HumanAddr, staking_token: HumanAddr, ) -> HandleResult { let config: Config = read_config(&deps.storage)?; let asset_token_raw = deps.api.canonical_address(&asset_token)?; if config.owner != deps.api.canonical_address(&env.message.sender)? 
{ return Err(StdError::unauthorized()); } if read_pool_info(&deps.storage, &asset_token_raw).is_ok() { return Err(StdError::generic_err("Asset was already registered")); } store_pool_info( &mut deps.storage, &asset_token_raw, &PoolInfo { staking_token: deps.api.canonical_address(&staking_token)?, total_bond_amount: Uint128::zero(), total_short_amount: Uint128::zero(), reward_index: Decimal::zero(), short_reward_index: Decimal::zero(), pending_reward: Uint128::zero(), short_pending_reward: Uint128::zero(), premium_rate: Decimal::zero(), short_reward_weight: Decimal::zero(), premium_updated_time: 0, }, )?; Ok(HandleResponse { messages: vec![], log: vec![ log("action", "register_asset"), log("asset_token", asset_token.as_str()), ], data: None, }) } pub fn query<S: Storage, A: Api, Q: Querier>( deps: &Extern<S, A, Q>, msg: QueryMsg, ) -> StdResult<Binary> { match msg { QueryMsg::Config {} => to_binary(&query_config(deps)?), QueryMsg::PoolInfo { asset_token } => to_binary(&query_pool_info(deps, asset_token)?), QueryMsg::RewardInfo { staker_addr, asset_token, } => to_binary(&query_reward_info(deps, staker_addr, asset_token)?), } } pub fn query_config<S: Storage, A: Api, Q: Querier>( deps: &Extern<S, A, Q>, ) -> StdResult<ConfigResponse> { let state = read_config(&deps.storage)?; let resp = ConfigResponse { owner: deps.api.human_address(&state.owner)?,
pub fn query_pool_info<S: Storage, A: Api, Q: Querier>( deps: &Extern<S, A, Q>, asset_token: HumanAddr, ) -> StdResult<PoolInfoResponse> { let asset_token_raw = deps.api.canonical_address(&asset_token)?; let pool_info: PoolInfo = read_pool_info(&deps.storage, &asset_token_raw)?; Ok(PoolInfoResponse { asset_token, staking_token: deps.api.human_address(&pool_info.staking_token)?, total_bond_amount: pool_info.total_bond_amount, total_short_amount: pool_info.total_short_amount, reward_index: pool_info.reward_index, short_reward_index: pool_info.short_reward_index, pending_reward: pool_info.pending_reward, short_pending_reward: pool_info.short_pending_reward, premium_rate: pool_info.premium_rate, short_reward_weight: pool_info.short_reward_weight, premium_updated_time: pool_info.premium_updated_time, }) } pub fn migrate<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, _env: Env, msg: MigrateMsg, ) -> MigrateResult { migrate_config( &mut deps.storage, deps.api.canonical_address(&msg.mint_contract)?, deps.api.canonical_address(&msg.oracle_contract)?, deps.api.canonical_address(&msg.terraswap_factory)?, msg.base_denom, msg.premium_min_update_interval, )?; migrate_pool_infos(&mut deps.storage)?; Ok(MigrateResponse::default()) }
mirror_token: deps.api.human_address(&state.mirror_token)?, mint_contract: deps.api.human_address(&state.mint_contract)?, oracle_contract: deps.api.human_address(&state.oracle_contract)?, terraswap_factory: deps.api.human_address(&state.terraswap_factory)?, base_denom: state.base_denom, premium_min_update_interval: state.premium_min_update_interval, }; Ok(resp) }
function_block-function_prefix_line
[ { "content": "pub fn query_reward_info<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n staker_addr: HumanAddr,\n\n asset_token: Option<HumanAddr>,\n\n) -> StdResult<RewardInfoResponse> {\n\n let staker_addr_raw = deps.api.canonical_address(&staker_addr)?;\n\n\n\n let reward_infos...
Rust
src/message/message.rs
ddimaria/stun-server
4557238c5f05f69105c1834da09f62aad74b826e
use crate::error::{Error, Result}; use crate::message::attribute::Attribute; use crate::message::class::Class; use crate::message::method::Method; use crate::message::transaction_id::TransactionId; use bytes::{Buf, BufMut, Bytes, BytesMut}; pub(crate) const MAGIC_COOKIE: u32 = 0x2112A442; pub(crate) const MESSAGE_HEADER_LENGTH: usize = 20; #[derive(Debug, PartialEq)] pub(crate) struct Message { pub(crate) class: Class, pub(crate) method: Method, pub(crate) transaction_id: TransactionId, pub(crate) attributes: Vec<Attribute>, } impl Message { pub(crate) fn binding_request(attributes: Vec<Attribute>) -> Message { Message { class: Class::Request, method: Method::Binding, transaction_id: TransactionId::new(), attributes, } } pub(crate) fn binding_response(attributes: Vec<Attribute>) -> Message { Message { class: Class::SuccessResponse, method: Method::Binding, transaction_id: TransactionId::new(), attributes, } } pub(crate) fn encode(&self, buf: &mut BytesMut) { let transaction_id = &self.transaction_id.0; let class = self.class.encode(); let method = self.method.encode(); buf.put_u16(class + method); let mut body = BytesMut::with_capacity(256); let mut message_length: u16 = 0; for attribute in &self.attributes { message_length += attribute.encode(&mut body, &self.transaction_id); } buf.put_u16(message_length); buf.put_u32(MAGIC_COOKIE); buf.put_slice(transaction_id); buf.put_slice(body.as_ref()); } pub(crate) fn decode(buffer: &mut Bytes) -> Result<Message> { let mut attributes: Vec<Attribute> = Vec::new(); if buffer.remaining() < MESSAGE_HEADER_LENGTH { return Err(Error::Decode(format!( "Not enough bytes in the header. 
Expected {}, but got {}", 20, buffer.remaining() ))); } let message_type = buffer.get_u16(); let class = Class::decode(message_type)?; let method = Method::decode(message_type); let message_length = buffer.get_u16() as usize; let magic_cookie = buffer.get_u32(); let transaction_id = TransactionId::decode(buffer)?; if magic_cookie != MAGIC_COOKIE { return Err(Error::Decode(format!( "Invalid magic cookie. Expected {}, but got {}.", MAGIC_COOKIE, magic_cookie ))); } let attributes_length = buffer.remaining() - message_length; while buffer.remaining() > attributes_length { let attribute = Attribute::decode(buffer, &transaction_id)?; attributes.push(attribute); } let msg = Message { class, method, transaction_id, attributes, }; Ok(msg) } } #[cfg(test)] pub(crate) mod tests { use super::*; pub(crate) const BINDING_REQUEST: &[u8; 20] = b"\0\x01\0\0!\x12\xa4B\xb0\xb8?\0\xda\x0c\xa2\xc3(\xe1\xf2\x85"; pub(crate) const BINDING_RESPONSE: &[u8; 20] = b"\x01\x01\0\0!\x12\xa4B\xc3>bhW \xc0\x8e\xd8\xf1y\x88"; pub(crate) fn decode_message(buffer: &[u8; 20]) -> Message { let mut buffer = Bytes::copy_from_slice(buffer); Message::decode(&mut buffer).unwrap() } pub(crate) fn binding_request() -> Message { Message { class: Class::Request, method: Method::Binding, transaction_id: TransactionId([176, 184, 63, 0, 218, 12, 162, 195, 40, 225, 242, 133]), attributes: vec![], } } pub(crate) fn binding_response() -> Message { Message { class: Class::SuccessResponse, method: Method::Binding, transaction_id: TransactionId([195, 62, 98, 104, 87, 32, 192, 142, 216, 241, 121, 136]), attributes: vec![], } } #[test] fn it_encodes_a_binding_request() { let mut buffer = BytesMut::new(); let message = binding_request(); message.encode(&mut buffer); let mut expected_buffer = BytesMut::with_capacity(0); expected_buffer.extend_from_slice(BINDING_REQUEST); assert_eq!(buffer, expected_buffer); } #[test] fn it_encodes_a_binding_response() { let mut buffer = BytesMut::new(); let message = binding_response(); 
message.encode(&mut buffer); let mut expected_buffer = BytesMut::with_capacity(0); expected_buffer.extend_from_slice(BINDING_RESPONSE); assert_eq!(buffer, expected_buffer); } #[test] fn it_decodes_a_binding_request() { let message = decode_message(BINDING_REQUEST); let expected = binding_request(); assert_eq!(message, expected); } #[test] fn it_decodes_a_binding_response() { let message = decode_message(BINDING_RESPONSE); let expected = binding_response(); assert_eq!(message, expected); } }
use crate::error::{Error, Result}; use crate::message::attribute::Attribute; use crate::message::class::Class; use crate::message::method::Method; use crate::message::transaction_id::TransactionId; use bytes::{Buf, BufMut, Bytes, BytesMut}; pub(crate) const MAGIC_COOKIE: u32 = 0x2112A442; pub(crate) const MESSAGE_HEADER_LENGTH: usize = 20; #[derive(Debug, PartialEq)] pub(crate) struct Message { pub(crate) class: Class, pub(crate) method: Method, pub(crate) transaction_id: TransactionId, pub(crate) attributes: Vec<Attribute>, } impl Message { pub(crate) fn binding_request(attributes: Vec<Attribute>) -> Message { Message { class: Class::Request, method: Method::Binding, transaction_id: TransactionId::new(), attributes, } } pub(crate) fn binding_response(attributes: Vec<Attribute>) -> Message { Message { class: Class::SuccessResponse, method: Method::Binding, transaction_id: TransactionId::new(), attributes, } } pub(crate) fn encode(&self, buf: &mut BytesMut) { let transaction_id = &self.transaction_id.0; let class = self.class.encode(); let method = self.method.encode(); buf.put_u16(class + method); let mut body = BytesMut::with_capacity(256); let mut message_length: u16 = 0; for attribute in &self.attributes { message_length += attribute.encode(&mut body, &self.transaction_id); } buf.put_u16(message_length); buf.put_u32(MAGIC_COOKIE); buf.put_slice(transaction_id); buf.put_slice(body.as_ref()); } pub(crate) fn decode(buffer: &mut Bytes) -> Result<Message> { let mut attributes: Vec<Attribute> = Vec::new();
let message_type = buffer.get_u16(); let class = Class::decode(message_type)?; let method = Method::decode(message_type); let message_length = buffer.get_u16() as usize; let magic_cookie = buffer.get_u32(); let transaction_id = TransactionId::decode(buffer)?; if magic_cookie != MAGIC_COOKIE { return Err(Error::Decode(format!( "Invalid magic cookie. Expected {}, but got {}.", MAGIC_COOKIE, magic_cookie ))); } let attributes_length = buffer.remaining() - message_length; while buffer.remaining() > attributes_length { let attribute = Attribute::decode(buffer, &transaction_id)?; attributes.push(attribute); } let msg = Message { class, method, transaction_id, attributes, }; Ok(msg) } } #[cfg(test)] pub(crate) mod tests { use super::*; pub(crate) const BINDING_REQUEST: &[u8; 20] = b"\0\x01\0\0!\x12\xa4B\xb0\xb8?\0\xda\x0c\xa2\xc3(\xe1\xf2\x85"; pub(crate) const BINDING_RESPONSE: &[u8; 20] = b"\x01\x01\0\0!\x12\xa4B\xc3>bhW \xc0\x8e\xd8\xf1y\x88"; pub(crate) fn decode_message(buffer: &[u8; 20]) -> Message { let mut buffer = Bytes::copy_from_slice(buffer); Message::decode(&mut buffer).unwrap() } pub(crate) fn binding_request() -> Message { Message { class: Class::Request, method: Method::Binding, transaction_id: TransactionId([176, 184, 63, 0, 218, 12, 162, 195, 40, 225, 242, 133]), attributes: vec![], } } pub(crate) fn binding_response() -> Message { Message { class: Class::SuccessResponse, method: Method::Binding, transaction_id: TransactionId([195, 62, 98, 104, 87, 32, 192, 142, 216, 241, 121, 136]), attributes: vec![], } } #[test] fn it_encodes_a_binding_request() { let mut buffer = BytesMut::new(); let message = binding_request(); message.encode(&mut buffer); let mut expected_buffer = BytesMut::with_capacity(0); expected_buffer.extend_from_slice(BINDING_REQUEST); assert_eq!(buffer, expected_buffer); } #[test] fn it_encodes_a_binding_response() { let mut buffer = BytesMut::new(); let message = binding_response(); message.encode(&mut buffer); let mut expected_buffer = 
BytesMut::with_capacity(0); expected_buffer.extend_from_slice(BINDING_RESPONSE); assert_eq!(buffer, expected_buffer); } #[test] fn it_decodes_a_binding_request() { let message = decode_message(BINDING_REQUEST); let expected = binding_request(); assert_eq!(message, expected); } #[test] fn it_decodes_a_binding_response() { let message = decode_message(BINDING_RESPONSE); let expected = binding_response(); assert_eq!(message, expected); } }
if buffer.remaining() < MESSAGE_HEADER_LENGTH { return Err(Error::Decode(format!( "Not enough bytes in the header. Expected {}, but got {}", 20, buffer.remaining() ))); }
if_condition
[ { "content": " method.into()\n\n }\n\n}\n\n\n\nimpl From<u16> for Method {\n\n fn from(value: u16) -> Method {\n\n match value {\n\n 0x001 => Method::Binding,\n\n _ => unimplemented!(\"Only binding methods are allowed\"),\n\n }\n\n }\n\n}\n\n\n\nimpl Into<u16>...
Rust
src/graphics/camera.rs
yggie/mithril-examples
5dd264bfbe38a80bc52ba5923d2091bcfc3b7d4b
extern crate mithril; use std::f64; use std::num::Float; use self::mithril::math::{ Vector, Quaternion }; pub struct Camera { position: Vector, focus_point: Vector, up: Vector, field_of_view: f64, aspect_ratio: f64, far: f64, near: f64, anchor_point: Option<[f64; 2]>, control_point: [f64; 2], } impl Camera { pub fn new(position: Vector, focus_point: Vector, up: Vector) -> Camera { Camera{ position: position, focus_point: focus_point, up: up.normalize(), field_of_view: (90.0 * f64::consts::PI / 180.0), aspect_ratio: 640.0/480.0, far: 100.0, near: 1.0, anchor_point: None, control_point: [0.0; 2], } } pub fn position(&self) -> Vector { self.position } pub fn focus_point(&self) -> Vector { self.focus_point } pub fn go_to(&mut self, position: Vector) { self.position = position; } pub fn update(&mut self) { } pub fn start_control(&mut self, x: f64, y: f64) { self.anchor_point = Some([x, y]); self.control_point[0] = x; self.control_point[1] = y; } pub fn set_control_point(&mut self, x: f64, y: f64) { self.control_point[0] = x; self.control_point[1] = y; } pub fn release_controls(&mut self) { self.anchor_point = None; } pub fn is_controlled(&self) -> bool { self.anchor_point != None } pub fn view_matrix(&self) -> [f32; 16] { let mut z_view = (self.position - self.focus_point).normalize(); let mut x_view = self.up.cross(z_view).normalize(); let mut y_view = z_view.cross(x_view).normalize(); let x_trans = -self.position.dot(x_view); let y_trans = -self.position.dot(y_view); let z_trans = -self.position.dot(z_view); match self.anchor_point { Some(anchor_point) => { let diff = [ (self.control_point[1] - anchor_point[1]) as f32, (anchor_point[0] - self.control_point[0]) as f32, ]; let diff_sq = (diff[0] * diff[0] + diff[1] * diff[1]).sqrt(); if diff_sq > 0.0001 { let diff_length = diff_sq.sqrt(); let rot_axis = (x_view * diff[0] + y_view * diff[1]) / diff_length; let rot_in_radians = diff_length * 2.0; let u_quat = Quaternion::new(0.0, x_view[0], x_view[1], x_view[2]); let 
v_quat = Quaternion::new(0.0, y_view[0], y_view[1], y_view[2]); let w_quat = Quaternion::new(0.0, z_view[0], z_view[1], z_view[2]); let rot_quat = Quaternion::new_from_rotation(rot_in_radians, rot_axis[0], rot_axis[1], rot_axis[2]); let new_u_quat = rot_quat * u_quat * rot_quat.inverse(); let new_v_quat = rot_quat * v_quat * rot_quat.inverse(); let new_w_quat = rot_quat * w_quat * rot_quat.inverse(); x_view[0] = new_u_quat[1]; x_view[1] = new_u_quat[2]; x_view[2] = new_u_quat[3]; y_view[0] = new_v_quat[1]; y_view[1] = new_v_quat[2]; y_view[2] = new_v_quat[3]; z_view[0] = new_w_quat[1]; z_view[1] = new_w_quat[2]; z_view[2] = new_w_quat[3]; } } None => { } } [ x_view[0], x_view[1], x_view[2], x_trans, y_view[0], y_view[1], y_view[2], y_trans, z_view[0], z_view[1], z_view[2], z_trans, 0.0, 0.0, 0.0, 1.0, ] } pub fn projection_matrix(&self) -> [f32; 16] { let m_11 = (1.0 / (self.field_of_view / 2.0).tan()) as f32; let m_22 = m_11 * (self.aspect_ratio as f32); let m_33 = -((self.far + self.near) / (self.far - self.near)) as f32; let m_43 = -((2.0 * self.far * self.near) / (self.far - self.near)) as f32; [ m_11, 0.0, 0.0, 0.0, 0.0, m_22, 0.0, 0.0, 0.0, 0.0, m_33, m_43, 0.0, 0.0, -1.0, 0.0, ] } }
extern crate mithril; use std::f64; use std::num::Float; use self::mithril::math::{ Vector, Quaternion }; pub struct Camera { position: Vector, focus_point: Vector, up: Vector, field_of_view: f64, aspect_ratio: f64, far: f64, near: f64, anchor_point: Option<[f64; 2]>, control_point: [f64; 2], } impl Camera { pub fn new(position: Vector, focus_point: Vector, up: Vector) -> Camera { Camera{ position: position, focus_point: focus_point, up: up.normalize(), field_of_view: (90.0 * f64::consts::PI / 180.0), aspect_ratio: 640.0/480.0, far: 100.0, near: 1.0, anchor_point: None, control_point: [0.0; 2], } } pub fn position(&self) -> Vector { self.position } pub fn focus_point(&self) -> Vector { self.focus_point } pub fn go_to(&mut self, position: Vector) { self.position = position; } pub fn update(&mut self) { } pub fn start_control(&mut self, x: f64, y: f64) { self.anchor_point = Some([x, y]); self.control_point[0] = x; self.control_point[1] = y; } pub fn set_control_point(&mut self, x: f64, y: f64) { self.control_point[0] = x; self.control_point[1] = y; } pub fn release_controls(&mut self) { self.anchor_point = None; } pub fn is_controlled(&self) -> bool { self.anchor_point != None } pub fn view_matrix(&self) -> [f32; 16] { let mut z_view = (self.position - self.focus_point).normalize(); let mut x_view = self.up.cross(z_view).normalize(); let mut y_view = z_view.cross(x_view).normalize(); let x_trans = -self.position.dot(x_view); let y_trans = -self.position.dot(y_view); let z_trans = -self.position.dot(z_view); match self.anchor_point { Some(anchor_point) => { let diff = [ (self.control_point[1] - anchor_point[1]) as f32, (anchor_point[0] - self.control_point[0]) as f32, ]; let diff_sq = (diff[0] * diff[0] + diff[1] * diff[1]).sqrt(); if diff_sq > 0.0001 { let diff_length = diff_sq.sqrt(); let rot_axis = (x_view * diff[0] + y_view * diff[1]) / diff_length; let rot_in_radians = diff_length * 2.0; let u_quat = Quaternion::new(0.0, x_view[0], x_view[1], x_view[2]); let 
v_quat = Quaternion::new(0.0, y_view[0], y_view[1], y_view[2]); let w_quat = Quaternion::new(0.0, z_view[0], z_view[1], z_view[2]); let rot_quat = Quaternion::new_from_rotation(rot_in_radians, rot_a
pub fn projection_matrix(&self) -> [f32; 16] { let m_11 = (1.0 / (self.field_of_view / 2.0).tan()) as f32; let m_22 = m_11 * (self.aspect_ratio as f32); let m_33 = -((self.far + self.near) / (self.far - self.near)) as f32; let m_43 = -((2.0 * self.far * self.near) / (self.far - self.near)) as f32; [ m_11, 0.0, 0.0, 0.0, 0.0, m_22, 0.0, 0.0, 0.0, 0.0, m_33, m_43, 0.0, 0.0, -1.0, 0.0, ] } }
xis[0], rot_axis[1], rot_axis[2]); let new_u_quat = rot_quat * u_quat * rot_quat.inverse(); let new_v_quat = rot_quat * v_quat * rot_quat.inverse(); let new_w_quat = rot_quat * w_quat * rot_quat.inverse(); x_view[0] = new_u_quat[1]; x_view[1] = new_u_quat[2]; x_view[2] = new_u_quat[3]; y_view[0] = new_v_quat[1]; y_view[1] = new_v_quat[2]; y_view[2] = new_v_quat[3]; z_view[0] = new_w_quat[1]; z_view[1] = new_w_quat[2]; z_view[2] = new_w_quat[3]; } } None => { } } [ x_view[0], x_view[1], x_view[2], x_trans, y_view[0], y_view[1], y_view[2], y_trans, z_view[0], z_view[1], z_view[2], z_trans, 0.0, 0.0, 0.0, 1.0, ] }
function_block-function_prefixed
[ { "content": "pub fn import_from_obj(filepath: &str) -> (Vec<GLfloat>, Vec<GLfloat>, Vec<GLuint>) {\n\n let comments_regex = Regex::new(r\"\\A\\s*#(?s:.*)\\z\").ok().unwrap();\n\n let vertex_regex = Regex::new(r\"\\A\\s*v\\s+(\\+?-?\\d+\\.\\d+)\\s+(\\+?-?\\d+\\.\\d+)\\s+(\\+?-?\\d+\\.\\d+)\\s*\\z\").ok()....
Rust
07-rust/stm32l0x1/stm32l0x1_pac/src/adc/isr.rs
aaronhktan/stm32-exploration
dcd7674424cd17b02b85c6b3ce533456d5037d65
#[doc = "Reader of register ISR"] pub type R = crate::R<u32, super::ISR>; #[doc = "Writer for register ISR"] pub type W = crate::W<u32, super::ISR>; #[doc = "Register ISR `reset()`'s with value 0"] impl crate::ResetValue for super::ISR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `ADRDY`"] pub type ADRDY_R = crate::R<bool, bool>; #[doc = "Write proxy for field `ADRDY`"] pub struct ADRDY_W<'a> { w: &'a mut W, } impl<'a> ADRDY_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `EOSMP`"] pub type EOSMP_R = crate::R<bool, bool>; #[doc = "Write proxy for field `EOSMP`"] pub struct EOSMP_W<'a> { w: &'a mut W, } impl<'a> EOSMP_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `EOC`"] pub type EOC_R = crate::R<bool, bool>; #[doc = "Write proxy for field `EOC`"] pub struct EOC_W<'a> { w: &'a mut W, } impl<'a> EOC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value 
as u32) & 0x01) << 2); self.w } } #[doc = "Reader of field `EOS`"] pub type EOS_R = crate::R<bool, bool>; #[doc = "Write proxy for field `EOS`"] pub struct EOS_W<'a> { w: &'a mut W, } impl<'a> EOS_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Reader of field `OVR`"] pub type OVR_R = crate::R<bool, bool>; #[doc = "Write proxy for field `OVR`"] pub struct OVR_W<'a> { w: &'a mut W, } impl<'a> OVR_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Reader of field `AWD`"] pub type AWD_R = crate::R<bool, bool>; #[doc = "Write proxy for field `AWD`"] pub struct AWD_W<'a> { w: &'a mut W, } impl<'a> AWD_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "Reader of field `EOCAL`"] pub type EOCAL_R = crate::R<bool, bool>; #[doc = "Write proxy for field `EOCAL`"] pub struct EOCAL_W<'a> { w: &'a mut W, } impl<'a> EOCAL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn 
set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11); self.w } } impl R { #[doc = "Bit 0 - ADC ready"] #[inline(always)] pub fn adrdy(&self) -> ADRDY_R { ADRDY_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - End of sampling flag"] #[inline(always)] pub fn eosmp(&self) -> EOSMP_R { EOSMP_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - End of conversion flag"] #[inline(always)] pub fn eoc(&self) -> EOC_R { EOC_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - End of sequence flag"] #[inline(always)] pub fn eos(&self) -> EOS_R { EOS_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - ADC overrun"] #[inline(always)] pub fn ovr(&self) -> OVR_R { OVR_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 7 - Analog watchdog flag"] #[inline(always)] pub fn awd(&self) -> AWD_R { AWD_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bit 11 - End Of Calibration flag"] #[inline(always)] pub fn eocal(&self) -> EOCAL_R { EOCAL_R::new(((self.bits >> 11) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - ADC ready"] #[inline(always)] pub fn adrdy(&mut self) -> ADRDY_W { ADRDY_W { w: self } } #[doc = "Bit 1 - End of sampling flag"] #[inline(always)] pub fn eosmp(&mut self) -> EOSMP_W { EOSMP_W { w: self } } #[doc = "Bit 2 - End of conversion flag"] #[inline(always)] pub fn eoc(&mut self) -> EOC_W { EOC_W { w: self } } #[doc = "Bit 3 - End of sequence flag"] #[inline(always)] pub fn eos(&mut self) -> EOS_W { EOS_W { w: self } } #[doc = "Bit 4 - ADC overrun"] #[inline(always)] pub fn ovr(&mut self) -> OVR_W { OVR_W { w: self } } #[doc = "Bit 7 - Analog watchdog flag"] #[inline(always)] pub fn awd(&mut self) -> AWD_W { AWD_W { w: self } } #[doc = "Bit 11 - End Of Calibration flag"] 
#[inline(always)] pub fn eocal(&mut self) -> EOCAL_W { EOCAL_W { w: self } } }
#[doc = "Reader of register ISR"] pub type R = crate::R<u32, super::ISR>; #[doc = "Writer for register ISR"] pub type W = crate::W<u32, super::ISR>; #[doc = "Register ISR `reset()`'s with value 0"] impl crate::ResetValue for super::ISR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `ADRDY`"] pub type ADRDY_R = crate::R<bool, bool>; #[doc = "Write proxy for field `ADRDY`"] pub struct ADRDY_W<'a> { w: &'a mut W, } impl<'a> ADRDY_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `EOSMP`"] pub type EOSMP_R = crate::R<bool, bool>; #[doc = "Write proxy for field `EOSMP`"] pub struct EOSMP_W<'a> { w: &'a mut W, } impl<'a> EOSMP_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `EOC`"] pub type EOC_R = crate::R<bool, bool>; #[doc = "Write proxy for field `EOC`"] pub struct EOC_W<'a> { w: &'a mut W, } impl<'a> EOC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value 
as u32) & 0x01) << 2); self.w } } #[doc = "Reader of field `EOS`"] pub type EOS_R = crate::R<bool, bool>; #[doc = "Write proxy for field `EOS`"] pub struct EOS_W<'a> { w: &'a mut W, } impl<'a> EOS_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Reader of field `OVR`"] pub type OVR_R = crate::R<bool, bool>; #[doc = "Write proxy for field `OVR`"] pub struct OVR_W<'a> { w: &'a mut W, } impl<'a> OVR_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Reader of field `AWD`"] pub type AWD_R = crate::R<bool, bool>; #[doc = "Write proxy for field `AWD`"] pub struct AWD_W<'a> { w: &'a mut W, } impl<'a> AWD_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "Reader of field `EOCAL`"] pub type EOCAL_R = crate::R<bool, bool>; #[doc = "Write proxy fo
a mut W { self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11); self.w } } impl R { #[doc = "Bit 0 - ADC ready"] #[inline(always)] pub fn adrdy(&self) -> ADRDY_R { ADRDY_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - End of sampling flag"] #[inline(always)] pub fn eosmp(&self) -> EOSMP_R { EOSMP_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - End of conversion flag"] #[inline(always)] pub fn eoc(&self) -> EOC_R { EOC_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - End of sequence flag"] #[inline(always)] pub fn eos(&self) -> EOS_R { EOS_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - ADC overrun"] #[inline(always)] pub fn ovr(&self) -> OVR_R { OVR_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 7 - Analog watchdog flag"] #[inline(always)] pub fn awd(&self) -> AWD_R { AWD_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bit 11 - End Of Calibration flag"] #[inline(always)] pub fn eocal(&self) -> EOCAL_R { EOCAL_R::new(((self.bits >> 11) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - ADC ready"] #[inline(always)] pub fn adrdy(&mut self) -> ADRDY_W { ADRDY_W { w: self } } #[doc = "Bit 1 - End of sampling flag"] #[inline(always)] pub fn eosmp(&mut self) -> EOSMP_W { EOSMP_W { w: self } } #[doc = "Bit 2 - End of conversion flag"] #[inline(always)] pub fn eoc(&mut self) -> EOC_W { EOC_W { w: self } } #[doc = "Bit 3 - End of sequence flag"] #[inline(always)] pub fn eos(&mut self) -> EOS_W { EOS_W { w: self } } #[doc = "Bit 4 - ADC overrun"] #[inline(always)] pub fn ovr(&mut self) -> OVR_W { OVR_W { w: self } } #[doc = "Bit 7 - Analog watchdog flag"] #[inline(always)] pub fn awd(&mut self) -> AWD_W { AWD_W { w: self } } #[doc = "Bit 11 - End Of Calibration flag"] #[inline(always)] pub fn eocal(&mut self) -> EOCAL_W { EOCAL_W { w: self } } }
r field `EOCAL`"] pub struct EOCAL_W<'a> { w: &'a mut W, } impl<'a> EOCAL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Res...
Rust
day_05_puzzle_02/src/grid.rs
simonhaisz/advent_of_code_2021
cc75f0d281a5a67b8a2e552fd240896a48a6a795
use std::collections::HashSet; use crate::line::{self, Line, Point}; pub struct Grid { lines: Vec<Line>, } impl Grid { pub fn new() -> Grid { Grid { lines: vec![], } } pub fn add_line(&mut self, line: Line) { self.lines.push(line); } pub fn overlaps(&self) -> HashSet<Point> { let mut overlaps = HashSet::new(); let lines = self.lines.iter().collect::<Vec<&Line>>(); let mut compare_counter = 0; for outer in 0..(lines.len()-1) { for inner in (outer+1)..lines.len() { let a = lines[outer]; let b = lines[inner]; compare_counter += 1; let points = line::intersections_specialized(a, b); if points.len() > 0 { for p in points.into_iter() { overlaps.insert(p); } } } } let expected_comparisons = lines.len() * (lines.len() - 1) / 2; println!("Compared {} pairs of lines together (expected {}) from a total of {}", compare_counter, expected_comparisons, lines.len()); overlaps } } #[cfg(test)] mod tests { use super::*; #[test] fn test_overlaps_square() { let mut grid = Grid::new(); grid.add_line(Line::new(Point::new(1, 1), Point::new(1, 10))); grid.add_line(Line::new(Point::new(1, 1), Point::new(10, 1))); grid.add_line(Line::new(Point::new(1, 10), Point::new(10, 10))); grid.add_line(Line::new(Point::new(10, 10), Point::new(10, 1))); let overlaps = grid.overlaps(); assert_eq!(4, overlaps.len()); assert_eq!(true, overlaps.contains(&Point::new(1, 1))); assert_eq!(true, overlaps.contains(&Point::new(1, 10))); assert_eq!(true, overlaps.contains(&Point::new(10, 1))); assert_eq!(true, overlaps.contains(&Point::new(10, 10))); } #[test] fn test_overlaps_thicc_line() { let mut grid = Grid::new(); grid.add_line(Line::new(Point::new(1,1), Point::new(10, 1))); grid.add_line(Line::new(Point::new(9,1), Point::new(2, 1))); grid.add_line(Line::new(Point::new(5,1), Point::new(7, 1))); grid.add_line(Line::new(Point::new(7,1), Point::new(6, 1))); grid.add_line(Line::new(Point::new(6,1), Point::new(6, 1))); let overlaps = grid.overlaps(); assert_eq!(8, overlaps.len()); assert_eq!(true, 
overlaps.contains(&Point::new(2, 1))); assert_eq!(true, overlaps.contains(&Point::new(3, 1))); assert_eq!(true, overlaps.contains(&Point::new(4, 1))); assert_eq!(true, overlaps.contains(&Point::new(5, 1))); assert_eq!(true, overlaps.contains(&Point::new(6, 1))); assert_eq!(true, overlaps.contains(&Point::new(7, 1))); assert_eq!(true, overlaps.contains(&Point::new(8, 1))); assert_eq!(true, overlaps.contains(&Point::new(9, 1))); } #[test] fn test_demo() { let mut grid = Grid::new(); let input = r" 0,9 -> 5,9 8,0 -> 0,8 9,4 -> 3,4 2,2 -> 2,1 7,0 -> 7,4 6,4 -> 2,0 0,9 -> 2,9 3,4 -> 1,4 0,0 -> 8,8 5,5 -> 8,2 "; let lines = input.split("\n").filter(|l| !l.trim().is_empty()).collect::<Vec<&str>>(); for line in lines.iter() { let l = Line::from(line); grid.add_line(l); } let overlaps = grid.overlaps(); assert_eq!(12, overlaps.len()); assert_eq!(true, overlaps.contains(&Point::new(7, 1))); assert_eq!(true, overlaps.contains(&Point::new(2, 2))); assert_eq!(true, overlaps.contains(&Point::new(5, 3))); assert_eq!(true, overlaps.contains(&Point::new(7, 3))); assert_eq!(true, overlaps.contains(&Point::new(3, 4))); assert_eq!(true, overlaps.contains(&Point::new(4, 4))); assert_eq!(true, overlaps.contains(&Point::new(6, 4))); assert_eq!(true, overlaps.contains(&Point::new(7, 4))); assert_eq!(true, overlaps.contains(&Point::new(5, 5))); assert_eq!(true, overlaps.contains(&Point::new(0, 9))); assert_eq!(true, overlaps.contains(&Point::new(2, 9))); assert_eq!(true, overlaps.contains(&Point::new(2, 9))); } }
use std::collections::HashSet; use crate::line::{self, Line, Point}; pub struct Grid { lines: Vec<Line>, } impl Grid { pub fn new() -> Grid { Grid { lines: vec![], } } pub fn add_line(&mut self, line: Line) { self.lines.push(line); }
} #[cfg(test)] mod tests { use super::*; #[test] fn test_overlaps_square() { let mut grid = Grid::new(); grid.add_line(Line::new(Point::new(1, 1), Point::new(1, 10))); grid.add_line(Line::new(Point::new(1, 1), Point::new(10, 1))); grid.add_line(Line::new(Point::new(1, 10), Point::new(10, 10))); grid.add_line(Line::new(Point::new(10, 10), Point::new(10, 1))); let overlaps = grid.overlaps(); assert_eq!(4, overlaps.len()); assert_eq!(true, overlaps.contains(&Point::new(1, 1))); assert_eq!(true, overlaps.contains(&Point::new(1, 10))); assert_eq!(true, overlaps.contains(&Point::new(10, 1))); assert_eq!(true, overlaps.contains(&Point::new(10, 10))); } #[test] fn test_overlaps_thicc_line() { let mut grid = Grid::new(); grid.add_line(Line::new(Point::new(1,1), Point::new(10, 1))); grid.add_line(Line::new(Point::new(9,1), Point::new(2, 1))); grid.add_line(Line::new(Point::new(5,1), Point::new(7, 1))); grid.add_line(Line::new(Point::new(7,1), Point::new(6, 1))); grid.add_line(Line::new(Point::new(6,1), Point::new(6, 1))); let overlaps = grid.overlaps(); assert_eq!(8, overlaps.len()); assert_eq!(true, overlaps.contains(&Point::new(2, 1))); assert_eq!(true, overlaps.contains(&Point::new(3, 1))); assert_eq!(true, overlaps.contains(&Point::new(4, 1))); assert_eq!(true, overlaps.contains(&Point::new(5, 1))); assert_eq!(true, overlaps.contains(&Point::new(6, 1))); assert_eq!(true, overlaps.contains(&Point::new(7, 1))); assert_eq!(true, overlaps.contains(&Point::new(8, 1))); assert_eq!(true, overlaps.contains(&Point::new(9, 1))); } #[test] fn test_demo() { let mut grid = Grid::new(); let input = r" 0,9 -> 5,9 8,0 -> 0,8 9,4 -> 3,4 2,2 -> 2,1 7,0 -> 7,4 6,4 -> 2,0 0,9 -> 2,9 3,4 -> 1,4 0,0 -> 8,8 5,5 -> 8,2 "; let lines = input.split("\n").filter(|l| !l.trim().is_empty()).collect::<Vec<&str>>(); for line in lines.iter() { let l = Line::from(line); grid.add_line(l); } let overlaps = grid.overlaps(); assert_eq!(12, overlaps.len()); assert_eq!(true, overlaps.contains(&Point::new(7, 
1))); assert_eq!(true, overlaps.contains(&Point::new(2, 2))); assert_eq!(true, overlaps.contains(&Point::new(5, 3))); assert_eq!(true, overlaps.contains(&Point::new(7, 3))); assert_eq!(true, overlaps.contains(&Point::new(3, 4))); assert_eq!(true, overlaps.contains(&Point::new(4, 4))); assert_eq!(true, overlaps.contains(&Point::new(6, 4))); assert_eq!(true, overlaps.contains(&Point::new(7, 4))); assert_eq!(true, overlaps.contains(&Point::new(5, 5))); assert_eq!(true, overlaps.contains(&Point::new(0, 9))); assert_eq!(true, overlaps.contains(&Point::new(2, 9))); assert_eq!(true, overlaps.contains(&Point::new(2, 9))); } }
pub fn overlaps(&self) -> HashSet<Point> { let mut overlaps = HashSet::new(); let lines = self.lines.iter().collect::<Vec<&Line>>(); let mut compare_counter = 0; for outer in 0..(lines.len()-1) { for inner in (outer+1)..lines.len() { let a = lines[outer]; let b = lines[inner]; compare_counter += 1; let points = line::intersections_specialized(a, b); if points.len() > 0 { for p in points.into_iter() { overlaps.insert(p); } } } } let expected_comparisons = lines.len() * (lines.len() - 1) / 2; println!("Compared {} pairs of lines together (expected {}) from a total of {}", compare_counter, expected_comparisons, lines.len()); overlaps }
function_block-full_function
[ { "content": "pub fn intersections_optimized(a: &Line, b: &Line) -> Vec<Point> {\n\n let mut points = vec!();\n\n\n\n if (!a.horizontal() && !a.vertical()) || (!b.horizontal() && !b.vertical()) {\n\n panic!(\"Expected lines to be either horizontal or vertical in order to determine intersections\\nl...
Rust
backend/src/integrations/lifx/utils.rs
FruitieX/homectl-rs
fc3d8e5569ce813491e7cd234b1ad774ca81ebd6
use anyhow::{anyhow, Result}; use byteorder::{ByteOrder, LittleEndian}; use homectl_types::device::{Device, DeviceColor, DeviceId, DeviceState, Light}; use homectl_types::integration::IntegrationId; use palette::Hsv; use std::net::SocketAddr; #[derive(Clone, Debug)] pub struct LifxState { pub hue: u16, pub sat: u16, pub bri: u16, pub power: u16, pub label: String, pub addr: SocketAddr, pub transition: Option<u32>, } #[derive(Clone, Debug)] pub enum LifxMsg { Get(SocketAddr), SetColor(LifxState), State(LifxState), SetPower(LifxState), Unknown, } pub fn lifx_msg_type_to_u16(msg_type: LifxMsg) -> u16 { match msg_type { LifxMsg::Get(_) => 101, LifxMsg::SetColor(_) => 102, LifxMsg::State(_) => 107, LifxMsg::SetPower(_) => 117, LifxMsg::Unknown => panic!("Cannot convert LifxMsg::Unknown to u16"), } } fn mk_lifx_msg_payload(lifx_msg: LifxMsg) -> Option<Vec<u8>> { match lifx_msg { LifxMsg::SetPower(state) => { let mut buf: [u8; 16 + 32] = [0; 16 + 32]; LittleEndian::write_u16(&mut buf, state.power); if let Some(t) = state.transition { LittleEndian::write_u32(&mut buf[2..], t) } Some(buf.to_vec()) } LifxMsg::SetColor(state) => { let mut buf: [u8; 8 + 16 * 4 + 32] = [0; 8 + 16 * 4 + 32]; LittleEndian::write_u16(&mut buf[1..], state.hue); LittleEndian::write_u16(&mut buf[3..], state.sat); LittleEndian::write_u16(&mut buf[5..], state.bri); LittleEndian::write_u16(&mut buf[7..], 6500); let t = state.transition.unwrap_or(500); LittleEndian::write_u32(&mut buf[9..], t); Some(buf.to_vec()) } _ => None, } } pub fn mk_lifx_udp_msg(lifx_msg: LifxMsg) -> Vec<u8> { let mut frame: [u8; 8] = [0; 8]; let protocol = 1024; let origin = 0; let tagged = 1; let addressable = 1; LittleEndian::write_u16(&mut frame, 0); LittleEndian::write_u16( &mut frame[2..], protocol | (origin << 14) | (tagged << 13) | (addressable << 12), ); LittleEndian::write_u16(&mut frame[1..], 4); let mut frame_address: [u8; 16] = [0; 16]; let ack_required = 0; let res_required = match lifx_msg { LifxMsg::Get(_) => 1, _ 
=> 0, }; frame_address[14] = (ack_required << 1) | res_required; let mut protocol_header: [u8; 12] = [0; 12]; let msg_type = lifx_msg_type_to_u16(lifx_msg.clone()); LittleEndian::write_u16(&mut protocol_header[8..], msg_type); let payload = mk_lifx_msg_payload(lifx_msg); let payload_size = payload.clone().map(|p| p.len()).unwrap_or(0); let msg_size = frame.len() + frame_address.len() + protocol_header.len() + payload_size; LittleEndian::write_u16(&mut frame, msg_size as u16); let mut msg: Vec<u8> = vec![]; msg.append(&mut frame.to_vec()); msg.append(&mut frame_address.to_vec()); msg.append(&mut protocol_header.to_vec()); if let Some(payload) = payload { msg.append(&mut payload.to_vec()); }; msg } pub fn read_lifx_msg(buf: &[u8], addr: SocketAddr) -> LifxMsg { let msg_type = LittleEndian::read_u16(&buf[32..]); let payload = &buf[36..]; match msg_type { 107 => { let hue = LittleEndian::read_u16(payload); let sat = LittleEndian::read_u16(&payload[2..]); let bri = LittleEndian::read_u16(&payload[4..]); let power = LittleEndian::read_u16(&payload[10..]); let label = std::str::from_utf8(&payload[12..(12 + 32)]) .unwrap_or("Unknown") .to_owned() .replace('\0', ""); let state = LifxState { hue, sat, bri, power, label, addr, transition: None, }; LifxMsg::State(state) } _ => LifxMsg::Unknown, } } pub fn from_lifx_state(lifx_state: LifxState, integration_id: IntegrationId) -> Device { let hue = from_lifx_hue((f32::from(lifx_state.hue) / 65535.0) * 360.0); let sat = f32::from(lifx_state.sat) / 65535.0; let bri = f32::from(lifx_state.bri) / 65535.0; let power = lifx_state.power == 65535; let color = Hsv::new(hue, sat, bri); let transition_ms = lifx_state.transition.map(|transition| transition as u64); let state = DeviceState::Light(Light::new( power, None, Some(DeviceColor::Color(color)), transition_ms, )); Device { id: DeviceId::new(&lifx_state.addr.to_string()), name: lifx_state.label, integration_id, scene: None, state, } } pub fn to_lifx_state(device: &Device) -> 
Result<LifxState> { let light_state = match device.state.clone() { DeviceState::Light(Light { brightness, color, power, transition_ms, }) => Ok(Light { power, brightness, color, transition_ms, }), _ => Err(anyhow!("Unsupported device state")), }?; let power = if light_state.power { 65535 } else { 0 }; let transition = light_state .transition_ms .map(|transition_ms| transition_ms as u32); match light_state.color { Some(DeviceColor::Color(color)) => { let hue = ((to_lifx_hue(color.hue.to_positive_degrees()) / 360.0) * 65535.0).floor() as u16; let sat = (color.saturation * 65535.0).floor() as u16; let bri = (light_state.brightness.unwrap_or(1.0) * color.value * 65535.0).floor() as u16; Ok(LifxState { hue, sat, bri, power, label: device.name.clone(), addr: device.id.to_string().parse()?, transition, }) } Some(DeviceColor::Cct(_)) => Err(anyhow!( "Support for Lifx color temperature mode not implemented" )), None => Ok(LifxState { hue: 0, sat: 0, bri: 0, power, label: device.name.clone(), addr: device.id.to_string().parse()?, transition, }), } } pub fn to_lifx_hue(h: f32) -> f32 { if h > 0.0 && h < 60.0 { let p = h / 60.0; f32::powf(p, 1.0 / 2.0) * 60.0 } else { h } } pub fn from_lifx_hue(h: f32) -> f32 { if h > 0.0 && h < 60.0 { let p = h / 60.0; f32::powf(p, 2.0 / 1.0) * 60.0 } else { h } }
use anyhow::{anyhow, Result}; use byteorder::{ByteOrder, LittleEndian}; use homectl_types::device::{Device, DeviceColor, DeviceId, DeviceState, Light}; use homectl_types::integration::IntegrationId; use palette::Hsv; use std::net::SocketAddr; #[derive(Clone, Debug)] pub struct LifxState { pub hue: u16, pub sat: u16, pub bri: u16, pub power: u16, pub label: String, pub addr: SocketAddr, pub transition: Option<u32>, } #[derive(Clone, Debug)] pub enum LifxMsg { Get(SocketAddr), SetColor(LifxState), State(LifxState), SetPower(LifxState), Unknown, } pub fn lifx_msg_type_to_u16(msg_type: LifxMsg) -> u16 { match msg_type { LifxMsg::Get(_) => 101, LifxMsg::SetColor(_) => 102, LifxMsg::State(_) => 107, LifxMsg::SetPower(_) => 117, LifxMsg::Unknown => panic!("Cannot convert LifxMsg::Unknown to u16"), } } fn mk_lifx_msg_payload(lifx_msg: LifxMsg) -> Option<Vec<u8>> { match lifx_msg { LifxMsg::SetPower(state) => { let mut buf: [u8; 16 + 32] = [0; 16 + 32]; LittleEndian::write_u16(&mut buf, state.power); if let Some(t) = state.transition { LittleEndian::write_u32(&mut buf[2..], t) } Some(buf.to_vec()) } LifxMsg::SetColor(state) => { let mut buf: [u8; 8 + 16 * 4 + 32] = [0; 8 + 16 * 4 + 32]; LittleEndian::write_u16(&mut buf[1..], state.hue); LittleEndian::write_u16(&mut buf[3..], state.sat); LittleEndian::write_u16(&mut buf[5..], state.bri); LittleEndian::write_u16(&mut buf[7..], 6500); let t = state.transition.unwrap_or(500); LittleEndian::write_u32(&mut buf[9..], t); Some(buf.to_vec()) } _ => None, } } pub fn mk_lifx_udp_msg(lifx_msg: LifxMsg) -> Vec<u8> { let mut frame: [u8; 8] = [0; 8]; let protocol = 1024; let origin = 0; let tagged = 1; let addressable = 1; LittleEndian::write_u16(&mut frame, 0); LittleEndian::write_u16( &mut frame[2..], protocol | (origin << 14) | (tagged << 13) | (addressable << 12), ); LittleEndian::write_u16(&mut frame[1..], 4); let mut frame_address: [u8; 16] = [0; 16]; let ack_required = 0; let res_required = match lifx_msg { LifxMsg::Get(_) => 1, _ 
=> 0, }; frame_address[14] = (ack_required << 1) | res_required; let mut protocol_header: [u8; 12] = [0; 12]; let msg_type = lifx_msg_type_to_u16(lifx_msg.clone()); LittleEndian::write_u16(&mut protocol_header[8..], msg_type); let payload = mk_lifx_msg_payload(lifx_msg); let payload_size = payload.clone().map(|p| p.len()).unwrap_or(0); let msg_size = frame.len() + frame_address.len() + protocol_header.len() + payload_size; LittleEndian::write_u16(&mut frame, msg_size as u16); let mut msg: Vec<u8> = vec![]; msg.append(&mut frame.to_vec()); msg.append(&mut frame_address.to_vec()); msg.append(&mut protocol_header.to_vec()); if let Some(payload) = payload { msg.append(&mut payload.to_vec()); }; msg } pub fn read_lifx_msg(buf: &[u8], addr: SocketAddr) -> LifxMsg { let msg_type = LittleEndian::read_u16(&buf[32..]); let payload = &buf[36..]; match msg_type { 107 => { let hue = LittleEndian::read_u16(payload); let sat = LittleEndian::read_u16(&payload[2..]); let bri = LittleEndian::read_u16(&payload[4..]); let power = LittleEndian::read_u16(&payload[10..]); let label = std::str::from_utf8(&payload[12..(12 + 32)]) .unwrap_or("Unknown") .to_owned() .replace('\0', ""); let state = LifxState { hue, sat, bri, power, label, addr, transition: None, }; LifxMsg::State(state) } _ => LifxMsg::Unknown, } } pub fn from_lifx_state(lifx_state: LifxState, integration_id: IntegrationId) -> Device { let hue = from_lifx_hue((f32::from(lifx_state.hue) / 65535.0) * 360.0); let sat = f32::from(lifx_state.sat) / 65535.0; let bri = f32::from(lifx_state.bri) / 65535.0; let power = lifx_state.power == 65535; let color = Hsv::new(hue, sat, bri); let transition_ms = lifx_state.transition.map(|transition| transition as u64); let state = DeviceState::Light(Light::new( power, None, Some(DeviceColor::Color(color)), transition_ms, )); Device { id: DeviceId::new(&lifx_state.addr.to_string()), name: lifx_state.label, integration_id, scene: None, state, } } pub fn to_lifx_state(device: &Device) -> 
Result<LifxState> { let light_state =
?; let power = if light_state.power { 65535 } else { 0 }; let transition = light_state .transition_ms .map(|transition_ms| transition_ms as u32); match light_state.color { Some(DeviceColor::Color(color)) => { let hue = ((to_lifx_hue(color.hue.to_positive_degrees()) / 360.0) * 65535.0).floor() as u16; let sat = (color.saturation * 65535.0).floor() as u16; let bri = (light_state.brightness.unwrap_or(1.0) * color.value * 65535.0).floor() as u16; Ok(LifxState { hue, sat, bri, power, label: device.name.clone(), addr: device.id.to_string().parse()?, transition, }) } Some(DeviceColor::Cct(_)) => Err(anyhow!( "Support for Lifx color temperature mode not implemented" )), None => Ok(LifxState { hue: 0, sat: 0, bri: 0, power, label: device.name.clone(), addr: device.id.to_string().parse()?, transition, }), } } pub fn to_lifx_hue(h: f32) -> f32 { if h > 0.0 && h < 60.0 { let p = h / 60.0; f32::powf(p, 1.0 / 2.0) * 60.0 } else { h } } pub fn from_lifx_hue(h: f32) -> f32 { if h > 0.0 && h < 60.0 { let p = h / 60.0; f32::powf(p, 2.0 / 1.0) * 60.0 } else { h } }
match device.state.clone() { DeviceState::Light(Light { brightness, color, power, transition_ms, }) => Ok(Light { power, brightness, color, transition_ms, }), _ => Err(anyhow!("Unsupported device state")), }
if_condition
[ { "content": "fn default_device(device_id: DeviceId, name: String, integration_id: IntegrationId) -> Device {\n\n Device {\n\n id: device_id,\n\n name,\n\n integration_id,\n\n scene: None,\n\n state: DeviceState::Light(Light {\n\n power: false,\n\n bri...
Rust
src/options.rs
manuelsteiner/rcproxy
bef7ced754a5d15328ca20e6ff46ca3018b729b1
use ipnet::IpNet; use log::LevelFilter; use std::collections::HashMap; use std::process::exit; use std::result::Result::Ok; use structopt::clap::arg_enum; use structopt::StructOpt; lazy_static! { pub static ref OPT: Opt = { let mut opt = Opt::from_args(); opt.headers = match opt.header.clone() { Some(headers) => { match create_header_map(&headers) { Ok(headers) => Some(headers), Err(_error) => { eprintln!("Error parsing additional response headers from arguments. Exiting."); exit(1); } } } None => None, }; if let Some(blacklist) = opt.blacklist_ip.clone() { let mut blacklist = blacklist; if blacklist.len() == 1 { blacklist = split_coma_separated_list(blacklist[0].as_str()); } if !validate_ip_blacklist(&blacklist) { eprintln!("Error parsing client IP blacklist. Exiting."); exit(1); } opt.blacklist_ip = Some(blacklist) } if let Some(filters) = opt.filter_url.clone() { let mut filters = filters; if filters.len() == 1 { filters = split_coma_separated_list(filters[0].as_str()); } match validate_and_extract_filters(&filters) { Ok((allow, deny)) => { opt.filters_allow = allow; opt.filters_deny = deny; }, Err(_) => { eprintln!("Error parsing URL filters. 
Exiting."); exit(1); } } } if opt.mime.len() == 1 { opt.mime = split_coma_separated_list(opt.mime[0].as_str()); } opt.mime_regex = create_mime_regex(&opt.mime); opt }; } #[derive(Debug, StructOpt)] #[structopt(name = "rcproxy")] pub struct Opt { #[structopt(short, long, default_value = "127.0.0.1:80", env = "RCPROXY_ADDRESS")] pub address: String, #[structopt( short = "n", long, default_value = "rcproxy", env = "RCPROXY_SERVER_NAME" )] pub server_name: String, #[structopt(short = "H", long)] pub header: Option<Vec<String>>, #[structopt(skip)] pub headers: Option<HashMap<String, String>>, #[structopt(short, long, env = "RCPROXY_BLACKLIST_IP")] pub blacklist_ip: Option<Vec<String>>, #[structopt(short, long, requires_all = &["filter-default"], env = "RCPROXY_FILTER_URL")] pub filter_url: Option<Vec<String>>, #[structopt(skip)] pub filters_allow: Vec<String>, #[structopt(skip)] pub filters_deny: Vec<String>, #[structopt(short = "d", long, possible_values = &DefaultFilterRule::variants(), case_insensitive = true, default_value = "Allow", env = "RCPROXY_FILTER_DEFAULT")] pub filter_default: DefaultFilterRule, #[structopt(short, long, env = "RCPROXY_KEY")] pub key: String, #[structopt(short = "s", long)] pub allow_https: bool, #[structopt(short = "c", long, default_value = "0", env = "RCPROXY_MAX_SIZE")] pub max_size: u32, #[structopt(short = "r", long, default_value = "5", env = "RCPROXY_MAX_REDIRECTS")] pub max_redirects: u8, #[structopt(short, long, default_value = "5", env = "RCPROXY_TIMEOUT")] pub timeout: u8, #[structopt(short, long, env = "RCPROXY_PROXY")] pub proxy: Option<String>, #[structopt(long, requires_all = &["proxy", "proxy-password"], env = "RCPROXY_PROXY_USERNAME")] pub proxy_username: Option<String>, #[structopt(long, requires_all = &["proxy", "proxy-username"], env = "RCPROXY_PROXY_PASSWORD")] pub proxy_password: Option<String>, #[structopt(short, long, default_value = "image/*", env = "RCPROXY_MIME")] pub mime: Vec<String>, #[structopt(skip)] pub 
mime_regex: String, #[structopt(short, long, possible_values = &LogLevel::variants(), case_insensitive = true, default_value = "Info", env = "RCPROXY_LOGLEVEL")] pub log_level: LogLevel, } arg_enum! { #[derive(Clone, Copy, Debug)] pub enum DefaultFilterRule { Allow, Deny, } } arg_enum! { #[derive(Clone, Copy, Debug)] pub enum LogLevel { Trace, Debug, Info, Warn, Error, } } impl From<LogLevel> for LevelFilter { fn from(other: LogLevel) -> LevelFilter { match other { LogLevel::Trace => LevelFilter::Trace, LogLevel::Debug => LevelFilter::Debug, LogLevel::Info => LevelFilter::Info, LogLevel::Warn => LevelFilter::Warn, LogLevel::Error => LevelFilter::Error, } } } fn split_coma_separated_list(string: &str) -> Vec<String> { string.split(',').map(str::trim).map(String::from).collect() } fn create_header_map(headers: &[String]) -> Result<HashMap<String, String>, ()> { let mut map = HashMap::new(); for header in headers { let parts: Vec<String> = header.split(": ").map(String::from).collect(); if parts.len() != 2 { eprintln!("Error parsing additional header value: \"{}\".", header); return Err(()); } let key = parts[0].trim().to_string(); let value = parts[1].trim().to_string(); map.insert(key, value); } Ok(map) } fn validate_ip_blacklist(blocks: &[String]) -> bool { for block in blocks.iter() { if block.parse::<IpNet>().is_err() { eprintln!("Error parsing IP range: \"{}\"", block); return false; } } true } fn validate_and_extract_filters(filters: &[String]) -> Result<(Vec<String>, Vec<String>), ()> { let mut filters_allow = Vec::<String>::new(); let mut filters_deny = Vec::<String>::new(); for filter in filters.iter() { if filter.len() < 3 { return Err(()); } match filter.get(..2).unwrap() { "a:" => filters_allow.push(filter.get(2..).unwrap().to_string()), "d:" => filters_deny.push(filter.get(2..).unwrap().to_string()), _ => return Err(()), } } Ok((filters_allow, filters_deny)) } fn create_mime_regex(mimes: &[String]) -> String { mimes.join("|").replace("*", ".+") } 
#[cfg(test)] mod tests { use super::*; #[test] fn test_split_coma_separated_list() { let strings = vec!["value1", "127.0.0.0/8, 10.0.0.0/24", "image/*, audio/*"]; let results: Vec<Vec<String>> = vec![ vec!["value1".to_string()], vec!["127.0.0.0/8".to_string(), "10.0.0.0/24".to_string()], vec!["image/*".to_string(), "audio/*".to_string()], ]; for (index, string) in strings.iter().enumerate() { assert_eq!(split_coma_separated_list(&string), results[index]); } } #[test] fn test_create_mime_regex() { let mimes: Vec<String> = vec!["image/*".to_string(), "audio/*".to_string()]; let mime_regex = "image/.+|audio/.+"; assert_eq!(create_mime_regex(&mimes), mime_regex); } #[test] fn test_create_header_map() { let headers: Vec<String> = vec!["key1: value1".to_string(), "key2: value2".to_string()]; assert!(create_header_map(&headers).is_ok()); let headers = create_header_map(&headers).unwrap(); assert_eq!(headers.len(), 2); assert!(headers.contains_key("key1")); assert!(headers.contains_key("key2")); assert_eq!(headers.get("key1"), Some(&"value1".to_string())); assert_eq!(headers.get("key2"), Some(&"value2".to_string())); } #[test] fn test_create_header_map_error() { let headers: Vec<String> = vec![ "key1: value1".to_string(), "key2: value2: error".to_string(), ]; assert!(create_header_map(&headers).is_err()); } #[test] fn test_validate_ip_blacklist() { let blacklist: Vec<String> = vec!["127.0.0.0/8".to_string(), "::1/128".to_string()]; assert_eq!(validate_ip_blacklist(&blacklist), true) } #[test] fn test_validate_ip_blacklist_error() { let blacklist: Vec<String> = vec!["127.0.0.0.1/8".to_string(), "::1/128".to_string()]; assert_eq!(validate_ip_blacklist(&blacklist), false) } #[test] fn test_validate_and_extract_filters_allow_and_deny() { let filters: Vec<String> = vec![ "a:.*allow.com/.*".to_string(), "d:.*deny.com/.*".to_string(), ]; let filters_allow = vec![".*allow.com/.*".to_string()]; let filters_deny = vec![".*deny.com/.*".to_string()]; assert_eq!( 
validate_and_extract_filters(&filters), Ok((filters_allow, filters_deny)) ); } #[test] fn test_validate_and_extract_filters_allow_only() { let filters: Vec<String> = vec![ "a:.*allow.com/.*".to_string(), "a:.*allow2.com/.*".to_string(), ]; let filters_allow = vec![".*allow.com/.*".to_string(), ".*allow2.com/.*".to_string()]; let filters_deny: Vec<String> = vec![]; assert_eq!( validate_and_extract_filters(&filters), Ok((filters_allow, filters_deny)) ); } #[test] fn test_validate_and_extract_filters_deny_only() { let filters: Vec<String> = vec![ "d:.*deny.com/.*".to_string(), "d:.*deny2.com/.*".to_string(), ]; let filters_allow: Vec<String> = vec![]; let filters_deny = vec![".*deny.com/.*".to_string(), ".*deny2.com/.*".to_string()]; assert_eq!( validate_and_extract_filters(&filters), Ok((filters_allow, filters_deny)) ); } #[test] fn test_validate_and_extract_filters_error() { let filters: Vec<String> = vec!["ad:.*deny.com/.*".to_string()]; assert_eq!(validate_and_extract_filters(&filters), Err(())); } }
use ipnet::IpNet; use log::LevelFilter; use std::collections::HashMap; use std::process::exit; use std::result::Result::Ok; use structopt::clap::arg_enum; use structopt::StructOpt; lazy_static! { pub static ref OPT: Opt = { let mut opt = Opt::from_args(); opt.headers = match opt.header.clone() { Some(headers) => { match create_header_map(&headers) { Ok(headers) => Some(headers), Err(_error) => { eprintln!("Error parsing additional response headers from arguments. Exiting."); exit(1); } } } None => None, }; if let Some(blacklist) = opt.blacklist_ip.clone() { let mut blacklist = blacklist; if blacklist.len() == 1 { blacklist = split_coma_separated_list(blacklist[0].as_str()); } if !validate_ip_blacklist(&blacklist) { eprintln!("Error parsing client IP blacklist. Exiting."); exit(1); } opt.blacklist_ip = Some(blacklist) } if let Some(filters) = opt.filter_url.clone() { let mut filters = filters; if filters.len() == 1 { filters = split_coma_separated_list(filters[0].as_str()); } match validate_and_extract_filters(&filters) { Ok((allow, deny)) => { opt.filters_allow = allow; opt.filters_deny = deny; }, Err(_) => { eprintln!("Error parsing URL filters. 
Exiting."); exit(1); } } } if opt.mime.len() == 1 { opt.mime = split_coma_separated_list(opt.mime[0].as_str()); } opt.mime_regex = create_mime_regex(&opt.mime); opt }; } #[derive(Debug, StructOpt)] #[structopt(name = "rcproxy")] pub struct Opt { #[structopt(short, long, default_value = "127.0.0.1:80", env = "RCPROXY_ADDRESS")] pub address: String, #[structopt( short = "n", long, default_value = "rcproxy", env = "RCPROXY_SERVER_NAME" )] pub server_name: String, #[structopt(short = "H", long)] pub header: Option<Vec<String>>, #[structopt(skip)] pub headers: Option<HashMap<String, String>>, #[structopt(short, long, env = "RCPROXY_BLACKLIST_IP")] pub blacklist_ip: Option<Vec<String>>, #[structopt(short, long, requires_all = &["filter-default"], env = "RCPROXY_FILTER_URL")] pub filter_url: Option<Vec<String>>, #[structopt(skip)] pub filters_allow: Vec<String>, #[structopt(skip)] pub filters_deny: Vec<String>, #[structopt(short = "d", long, possible_values = &DefaultFilterRule::variants(), case_insensitive = true, default_value = "Allow", env = "RCPROXY_FILTER_DEFAULT")] pub filter_default: DefaultFilterRule, #[structopt(short, long, env = "RCPROXY_KEY")] pub key: String, #[structopt(short = "s", long)] pub allow_https: bool, #[structopt(short = "c", long, default_value = "0", env = "RCPROXY_MAX_SIZE")] pub max_size: u32, #[structopt(short = "r", long, default_value = "5", env = "RCPROXY_MAX_REDIRECTS")] pub max_redirects: u8, #[structopt(short, long, default_value = "5", env = "RCPROXY_TIMEOUT")] pub timeout: u8, #[structopt(short, long, env = "RCPROXY_PROXY")] pub proxy: Option<String>, #[structopt(long, requires_all = &["proxy", "proxy-password"], env = "RCPROXY_PROXY_USERNAME")] pub proxy_username: Option<String>, #[structopt(long, requires_all = &["proxy", "proxy-username"], env = "RCPROXY_PROXY_PASSWORD")] pub proxy_password: Option<String>, #[structopt(short, long, default_value = "image/*", env = "RCPROXY_MIME")] pub mime: Vec<String>, #[structopt(skip)] pub 
mime_regex: String, #[structopt(short, long, possible_values = &LogLevel::variants(), case_insensitive = true, default_value = "Info", env = "RCPROXY_LOGLEVEL")] pub log_level: LogLevel, } arg_enum! { #[derive(Clone, Copy, Debug)] pub enum DefaultFilterRule { Allow, Deny, } } arg_enum! { #[derive(Clone, Copy, Debug)] pub enum LogLevel { Trace, Debug, Info, Warn, Error, } } impl From<LogLevel> for LevelFilter { fn from(other: LogLevel) -> LevelFilter { match other { LogLevel::Trace => LevelFilter::Trace, LogLevel::Debug => LevelFilter::Debug, LogLevel::Info => LevelFilter::Info, LogLevel::Warn => LevelFilter::Warn, LogLevel::Error => LevelFilter::Error, } } } fn split_coma_separated_list(string: &str) -> Vec<String> { string.split(',').map(str::trim).map(String::from).collect() } fn create_header_map(headers: &[String]) -> Result<HashMap<String, String>, ()> { let mut map = HashMap::new(); for header in headers { let parts: Vec<String> = header.split(": ").map(String::from).collect(); if parts.len() != 2 { eprintln!("Error parsing additional header value: \"{}\".", header); return Err(()); } let key = parts[0].trim().to_string(); let value = parts[1].trim().to_string(); map.insert(key, value); } Ok(map) } fn validate_ip_blacklist(blocks: &[String]) -> bool { for block in blocks.iter() { if block.parse::<IpNet>().is_err() { eprintln!("Error parsing IP range: \"{}\"", block); return false; } } true } fn validate_and_extract_filters(filters: &[String]) -> Result<(Vec<String>, Vec<String>), ()> { let mut filters_allow = Vec::<String>::new(); let mut filters_deny = Vec::<String>::new(); for filter in filters.iter() { if filter.len() < 3 { return Err(()); } match filter.get(..2).unwrap() { "a:" => filters_allow.push(filter.get(2..).unwrap().to_string()), "d:" => filters_deny.push(filter.get(2..).unwrap().to_string()), _ => return Err(()), } } Ok((filters_allow, filters_deny)) } fn create_mime_regex(mimes: &[String]) -> String { mimes.join("|").replace("*", ".+") } 
#[cfg(test)] mod tests { use super::*; #[test] fn test_split_coma_separated_list() { let strings = vec!["value1", "127.0.0.0/8, 10.0.0.0/24", "image/*, audio/*"]; let results: Vec<Vec<String>> = vec![ vec!["value1".to_string()], vec!["127.0.0.0/8".to_string(), "10.0.0.0/24".to_string()], vec!["image/*".to_string(), "audio/*".to_string()], ]; for (index, string) in strings.iter().enumerate() { assert_eq!(split_coma_separated_list(&string), results[index]); } } #[test] fn test_create_mime_regex() { let mimes: Vec<String> = vec!["image/*".to_string(), "audio/*".to_string()]; let mime_regex = "image/.+|audio/.+"; assert_eq!(create_mime_regex(&mimes), mime_regex); } #[test] fn test_create_header_map() { let headers: Vec<String> = vec!["key1: value1".to_string(), "key2: value2".to_string()]; assert!(create_header_map(&headers).is_ok()); let headers = create_header_map(&headers).unwrap(); assert_eq!(headers.len(), 2); assert!(headers.contains_key("key1")); assert!(headers.contains_key("key2")); assert_eq!(headers.get("key1"), Some(&"value1".to_string())); assert_eq!(headers.get("key2"), Some(&"value2".to_string())); } #[test] fn test_create_header_map_error() { let headers: Vec<String> = vec![ "key1: value1".to_string(), "key2: value2: error".to_string(), ]; assert!(create_header_map(&headers).is_err()); } #[test] fn test_validate_ip_blacklist() { let blacklist: Vec<String> = vec!["127.0.0.0/8".to_string(), "::1/128".to_string()]; assert_eq!(validate_ip_blacklist(&blacklist), true) } #[test] fn test_validate_ip_blacklist_error() { let blacklist: Vec<String> = vec!["127.0.0.0.1/8".to_string(), "::1/128".to_string()]; assert_eq!(validate_ip_blacklist(&blacklist), false) } #[test] fn test_validate_and_extract_filters_allow_and_deny() { let filters: Vec<String> = vec![ "a:.*allow.com/.*".to_string(), "d:.*deny.com/.*".to_string(), ]; let filters_allow = vec![".*allow.com/.*".to_string()]; let filters_deny = vec![".*deny.com/.*".to_string()]; assert_eq!( 
validate_and_extract_filters(&filters), Ok((filters_allow, filters_deny)) ); } #[test] fn test_validate_and_extract_filters_allow_only() { let filters: Vec<String> = vec![ "a:.*allow.com/.*".to_string(), "a:.*allow2.com/.*".to_string(), ]; let filters_allow = vec![".*allow.com/.*".to_string(), ".*allow2.com/.*".to_string()]; let filters_deny: Vec<String> = vec![]; assert_eq!( validate_and_extract_filters(&filters), Ok((filters_allow, filters_deny)) ); } #[test] fn test_validate_and_extract_filters_deny_only() { let filters: Vec<String> = vec![ "d:.*deny.com/.*".to_string(), "d:.*deny2.com/.*".to_string(), ]; let filters_allow: Vec<String> = vec![]; let filters_deny = vec![".*deny.co
#[test] fn test_validate_and_extract_filters_error() { let filters: Vec<String> = vec!["ad:.*deny.com/.*".to_string()]; assert_eq!(validate_and_extract_filters(&filters), Err(())); } }
m/.*".to_string(), ".*deny2.com/.*".to_string()]; assert_eq!( validate_and_extract_filters(&filters), Ok((filters_allow, filters_deny)) ); }
function_block-function_prefixed
[ { "content": "fn decode_and_validate_url(encoded_url: &str, allow_https: bool) -> Result<String, ()> {\n\n let url = match hex::decode(encoded_url) {\n\n Ok(url) => url,\n\n Err(_error) => {\n\n debug!(\"URL parameter is not HEX encoded.\");\n\n\n\n match base64::decode(en...
Rust
packages/vm/src/limited.rs
venkattejaRaavi/cosmwasm
73c72d4eccd5028e18fd60e77f44d975647a13bd
use std::collections::{BTreeSet, HashSet}; use std::iter::FromIterator; pub trait LimitedDisplay { fn to_string_limited(&self, max_length: usize) -> String; } impl<E: Ord + AsRef<str>> LimitedDisplay for BTreeSet<E> { fn to_string_limited(&self, max_length: usize) -> String { collection_to_string_limited(self.iter(), max_length, "{", "}") } } impl<E: Ord + AsRef<str>> LimitedDisplay for HashSet<E> { fn to_string_limited(&self, max_length: usize) -> String { let sorted = BTreeSet::from_iter(self); sorted.to_string_limited(max_length) } } impl<E: AsRef<str>> LimitedDisplay for Vec<E> { fn to_string_limited(&self, max_length: usize) -> String { collection_to_string_limited(self.iter(), max_length, "[", "]") } } fn collection_to_string_limited<E: AsRef<str>, I: ExactSizeIterator<Item = E>>( iter: I, max_length: usize, opening: &str, closing: &str, ) -> String { let elements_count = iter.len(); let mut out = String::with_capacity(max_length * 130 / 100); let mut first = true; out.push_str(opening); let mut lengths_stack = Vec::<usize>::new(); for element in iter { lengths_stack.push(out.len()); if first { out.push('"'); first = false; } else { out.push_str(", \""); } out.push_str(element.as_ref()); out.push('"'); if out.len() > max_length { break; }; } if out.len() + closing.len() <= max_length { out.push_str(closing); out } else { loop { let previous_length = lengths_stack .pop() .expect("Cannot remove hide enough elements to fit in length limit."); let skipped = elements_count - lengths_stack.len(); let remaining = elements_count - skipped; let skipped_text = if remaining == 0 { format!("... {} elements", skipped) } else { format!(", ... 
{} more", skipped) }; if previous_length + skipped_text.len() + closing.len() <= max_length { out.truncate(previous_length); out.push_str(&skipped_text); out.push_str(closing); return out; } } } } #[cfg(test)] mod test { use super::*; #[test] fn works_for_btreeset() { let set = BTreeSet::<String>::new(); assert_eq!(set.to_string_limited(100), "{}"); assert_eq!(set.to_string_limited(20), "{}"); assert_eq!(set.to_string_limited(2), "{}"); let fruits = BTreeSet::from_iter( [ "watermelon".to_string(), "apple".to_string(), "banana".to_string(), ] .iter() .cloned(), ); assert_eq!( fruits.to_string_limited(100), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(33), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(32), "{\"apple\", \"banana\", ... 1 more}" ); assert_eq!( fruits.to_string_limited(31), "{\"apple\", \"banana\", ... 1 more}" ); assert_eq!(fruits.to_string_limited(30), "{\"apple\", ... 2 more}"); assert_eq!(fruits.to_string_limited(21), "{\"apple\", ... 2 more}"); assert_eq!(fruits.to_string_limited(20), "{... 3 elements}"); assert_eq!(fruits.to_string_limited(16), "{... 3 elements}"); } #[test] fn works_for_hashset() { let set = HashSet::<String>::new(); assert_eq!(set.to_string_limited(100), "{}"); assert_eq!(set.to_string_limited(20), "{}"); assert_eq!(set.to_string_limited(2), "{}"); let fruits = HashSet::from_iter( [ "watermelon".to_string(), "apple".to_string(), "banana".to_string(), ] .iter() .cloned(), ); assert_eq!( fruits.to_string_limited(100), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(33), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(32), "{\"apple\", \"banana\", ... 1 more}" ); assert_eq!( fruits.to_string_limited(31), "{\"apple\", \"banana\", ... 1 more}" ); assert_eq!(fruits.to_string_limited(30), "{\"apple\", ... 2 more}"); assert_eq!(fruits.to_string_limited(21), "{\"apple\", ... 
2 more}"); assert_eq!(fruits.to_string_limited(20), "{... 3 elements}"); assert_eq!(fruits.to_string_limited(16), "{... 3 elements}"); } #[test] #[should_panic(expected = "Cannot remove hide enough elements to fit in length limit.")] fn panics_if_limit_is_too_small_empty() { let set = HashSet::<String>::new(); assert_eq!(set.to_string_limited(1), "{}"); } #[test] #[should_panic(expected = "Cannot remove hide enough elements to fit in length limit.")] fn panics_if_limit_is_too_small_nonempty() { let fruits = HashSet::from_iter( [ "watermelon".to_string(), "apple".to_string(), "banana".to_string(), ] .iter() .cloned(), ); assert_eq!(fruits.to_string_limited(15), "{... 3 elements}"); } #[test] fn works_for_vectors() { let list = Vec::<String>::new(); assert_eq!(list.to_string_limited(100), "[]"); assert_eq!(list.to_string_limited(20), "[]"); assert_eq!(list.to_string_limited(2), "[]"); let fruits = vec![ "banana".to_string(), "apple".to_string(), "watermelon".to_string(), ]; assert_eq!( fruits.to_string_limited(100), "[\"banana\", \"apple\", \"watermelon\"]" ); assert_eq!( fruits.to_string_limited(33), "[\"banana\", \"apple\", \"watermelon\"]" ); assert_eq!( fruits.to_string_limited(32), "[\"banana\", \"apple\", ... 1 more]" ); assert_eq!( fruits.to_string_limited(31), "[\"banana\", \"apple\", ... 1 more]" ); assert_eq!(fruits.to_string_limited(30), "[\"banana\", ... 2 more]"); assert_eq!(fruits.to_string_limited(22), "[\"banana\", ... 2 more]"); assert_eq!(fruits.to_string_limited(21), "[... 3 elements]"); assert_eq!(fruits.to_string_limited(16), "[... 3 elements]"); } }
use std::collections::{BTreeSet, HashSet}; use std::iter::FromIterator; pub trait LimitedDisplay { fn to_string_limited(&self, max_length: usize) -> String; } impl<E: Ord + AsRef<str>> LimitedDisplay for BTreeSet<E> { fn to_string_limited(&self, max_length: usize) -> String { collection_to_string_limited(self.iter(), max_length, "{", "}") } } impl<E: Ord + AsRef<str>> LimitedDisplay for HashSet<E> { fn to_string_limited(&self, max_length: usize) -> String { let sorted = BTreeSet::from_iter(self); sorted.to_string_limited(max_length) } } impl<E: AsRef<str>> LimitedDisplay for Vec<E> { fn to_string_limited(&self, max_length: usize) -> String { collection_to_string_limited(self.iter(), max_length, "[", "]") } } fn collection_to_string_limited<E: AsRef<str>, I: ExactSizeIterator<Item = E>>( iter: I, max_length: usize, opening: &str, closing: &str, ) -> String { let elements_count = iter.len(); let mut out = String::with_capacity(max_length * 130 / 100); let mut first = true; out.push_str(opening); let mut lengths_stack = Vec::<usize>::new(); for element in iter { lengths_stack.push(out.len()); if first { out.push('"'); first = false; } else { out.push_str(", \""); } out.push_str(element.as_ref()); out.push('"'); if out.len() > max_length { break; }; } if out.len() + closing.len() <= max_length { out.push_str(closing); out } else { loop { let previous_length = lengths_stack .pop() .expect("Cannot remove hide enough elements to fit in length limit."); let skipped = elements_count - lengths_stack.len(); let remaining = elements_count - skipped; let skipped_text = if remaining == 0 { format!("... {} elements", skipped) } else { format!(", ... 
{} more", skipped) }; if previous_length + skipped_text.len() + closing.len() <= max_length { out.truncate(previous_length); out.push_str(&skipped_text); out.push_str(closing); return out; } } } } #[cfg(test)] mod test { use super::*; #[test] fn works_for_btreeset() { let set = BTreeSet::<String>::new(); assert_eq!(set.to_string_limited(100), "{}"); assert_eq!(set.to_string_limited(20), "{}"); assert_eq!(set.to_string_limited(2), "{}"); let fruits = BTreeSet::from_iter( [ "watermelon".to_string(), "apple".to_string(), "banana".to_string(), ] .iter() .
#[test] fn works_for_hashset() { let set = HashSet::<String>::new(); assert_eq!(set.to_string_limited(100), "{}"); assert_eq!(set.to_string_limited(20), "{}"); assert_eq!(set.to_string_limited(2), "{}"); let fruits = HashSet::from_iter( [ "watermelon".to_string(), "apple".to_string(), "banana".to_string(), ] .iter() .cloned(), ); assert_eq!( fruits.to_string_limited(100), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(33), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(32), "{\"apple\", \"banana\", ... 1 more}" ); assert_eq!( fruits.to_string_limited(31), "{\"apple\", \"banana\", ... 1 more}" ); assert_eq!(fruits.to_string_limited(30), "{\"apple\", ... 2 more}"); assert_eq!(fruits.to_string_limited(21), "{\"apple\", ... 2 more}"); assert_eq!(fruits.to_string_limited(20), "{... 3 elements}"); assert_eq!(fruits.to_string_limited(16), "{... 3 elements}"); } #[test] #[should_panic(expected = "Cannot remove hide enough elements to fit in length limit.")] fn panics_if_limit_is_too_small_empty() { let set = HashSet::<String>::new(); assert_eq!(set.to_string_limited(1), "{}"); } #[test] #[should_panic(expected = "Cannot remove hide enough elements to fit in length limit.")] fn panics_if_limit_is_too_small_nonempty() { let fruits = HashSet::from_iter( [ "watermelon".to_string(), "apple".to_string(), "banana".to_string(), ] .iter() .cloned(), ); assert_eq!(fruits.to_string_limited(15), "{... 
3 elements}"); } #[test] fn works_for_vectors() { let list = Vec::<String>::new(); assert_eq!(list.to_string_limited(100), "[]"); assert_eq!(list.to_string_limited(20), "[]"); assert_eq!(list.to_string_limited(2), "[]"); let fruits = vec![ "banana".to_string(), "apple".to_string(), "watermelon".to_string(), ]; assert_eq!( fruits.to_string_limited(100), "[\"banana\", \"apple\", \"watermelon\"]" ); assert_eq!( fruits.to_string_limited(33), "[\"banana\", \"apple\", \"watermelon\"]" ); assert_eq!( fruits.to_string_limited(32), "[\"banana\", \"apple\", ... 1 more]" ); assert_eq!( fruits.to_string_limited(31), "[\"banana\", \"apple\", ... 1 more]" ); assert_eq!(fruits.to_string_limited(30), "[\"banana\", ... 2 more]"); assert_eq!(fruits.to_string_limited(22), "[\"banana\", ... 2 more]"); assert_eq!(fruits.to_string_limited(21), "[... 3 elements]"); assert_eq!(fruits.to_string_limited(16), "[... 3 elements]"); } }
cloned(), ); assert_eq!( fruits.to_string_limited(100), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(33), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(32), "{\"apple\", \"banana\", ... 1 more}" ); assert_eq!( fruits.to_string_limited(31), "{\"apple\", \"banana\", ... 1 more}" ); assert_eq!(fruits.to_string_limited(30), "{\"apple\", ... 2 more}"); assert_eq!(fruits.to_string_limited(21), "{\"apple\", ... 2 more}"); assert_eq!(fruits.to_string_limited(20), "{... 3 elements}"); assert_eq!(fruits.to_string_limited(16), "{... 3 elements}"); }
function_block-function_prefix_line
[ { "content": "/// Takes a comma-separated string, splits it by commas, removes empty elements and returns a set of features.\n\n/// This can be used e.g. to initialize the cache.\n\npub fn features_from_csv(csv: &str) -> HashSet<String> {\n\n HashSet::from_iter(\n\n csv.split(',')\n\n .map(...
Rust
sync/src/relayer/tests/compact_block_verifier.rs
orangemio/ckb
5b3664e162c840f421469279e7e68fe9dcad75dd
use crate::relayer::compact_block::{CompactBlock, ShortTransactionID}; use crate::relayer::compact_block_verifier::{PrefilledVerifier, ShortIdsVerifier}; use crate::relayer::error::Error; use ckb_core::transaction::{CellOutput, IndexTransaction, TransactionBuilder}; use ckb_core::Capacity; use ckb_protocol::{short_transaction_id, short_transaction_id_keys}; fn new_index_transaction(index: usize) -> IndexTransaction { let transaction = TransactionBuilder::default() .output(CellOutput::new( Capacity::bytes(index).unwrap(), Default::default(), Default::default(), None, )) .build(); IndexTransaction { index, transaction } } #[test] fn test_unordered_prefilled() { let mut block = CompactBlock::default(); let prefilled: Vec<IndexTransaction> = vec![0, 1, 2, 4, 3] .into_iter() .map(new_index_transaction) .collect(); block.prefilled_transactions = prefilled; assert_eq!( PrefilledVerifier::new().verify(&block), Err(Error::UnorderedPrefilledTransactions), ); } #[test] fn test_ordered_prefilled() { let mut block = CompactBlock::default(); let prefilled: Vec<IndexTransaction> = (0..5).map(new_index_transaction).collect(); block.prefilled_transactions = prefilled; assert_eq!(PrefilledVerifier::new().verify(&block), Ok(()),); } #[test] fn test_overflow_prefilled() { let mut block = CompactBlock::default(); let prefilled: Vec<IndexTransaction> = vec![0, 1, 2, 5] .into_iter() .map(new_index_transaction) .collect(); block.prefilled_transactions = prefilled; assert_eq!( PrefilledVerifier::new().verify(&block), Err(Error::OverflowPrefilledTransactions), ); } #[test] fn test_cellbase_not_prefilled() { let block = CompactBlock::default(); assert_eq!( PrefilledVerifier::new().verify(&block), Err(Error::CellbaseNotPrefilled) ); let mut block = CompactBlock::default(); let prefilled: Vec<IndexTransaction> = (1..5).map(new_index_transaction).collect(); block.prefilled_transactions = prefilled; assert_eq!( PrefilledVerifier::new().verify(&block), Err(Error::CellbaseNotPrefilled), ); } 
#[test] fn test_duplicated_short_ids() { let mut block = CompactBlock::default(); let mut short_ids: Vec<ShortTransactionID> = (1..5) .map(new_index_transaction) .map(|tx| { let (key0, key1) = short_transaction_id_keys(block.header.nonce(), block.nonce); short_transaction_id(key0, key1, &tx.transaction.witness_hash()) }) .collect(); short_ids.push(short_ids[0]); block.short_ids = short_ids; assert_eq!( ShortIdsVerifier::new().verify(&block), Err(Error::DuplicatedShortIds), ); } #[test] fn test_intersected_short_ids() { let mut block = CompactBlock::default(); let prefilled: Vec<IndexTransaction> = (0..=5).map(new_index_transaction).collect(); let short_ids: Vec<ShortTransactionID> = (5..9) .map(new_index_transaction) .map(|tx| { let (key0, key1) = short_transaction_id_keys(block.header.nonce(), block.nonce); short_transaction_id(key0, key1, &tx.transaction.witness_hash()) }) .collect(); block.prefilled_transactions = prefilled; block.short_ids = short_ids; assert_eq!( ShortIdsVerifier::new().verify(&block), Err(Error::IntersectedPrefilledTransactions), ); } #[test] fn test_normal() { let mut block = CompactBlock::default(); let prefilled: Vec<IndexTransaction> = vec![1, 2, 5] .into_iter() .map(new_index_transaction) .collect(); let short_ids: Vec<ShortTransactionID> = vec![0, 3, 4] .into_iter() .map(new_index_transaction) .map(|tx| { let (key0, key1) = short_transaction_id_keys(block.header.nonce(), block.nonce); short_transaction_id(key0, key1, &tx.transaction.witness_hash()) }) .collect(); block.prefilled_transactions = prefilled; block.short_ids = short_ids; assert_eq!(ShortIdsVerifier::new().verify(&block), Ok(()),); }
use crate::relayer::compact_block::{CompactBlock, ShortTransactionID}; use crate::relayer::compact_block_verifier::{PrefilledVerifier, ShortIdsVerifier}; use crate::relayer::error::Error; use ckb_core::transaction::{CellOutput, IndexTransaction, TransactionBuilder}; use ckb_core::Capacity; use ckb_protocol::{short_transaction_id, short_transaction_id_keys}; fn new_index_transaction(index: usize) -> IndexTransaction { let transaction = TransactionBuilder::default() .output(CellOutput::new( Capacity::bytes(index).unwrap(), Default::default(), Default::default(), None, )) .build(); IndexTransaction { index, transaction } } #[test] fn test_unordered_prefilled() { let mut block = CompactBlock::default(); let prefilled: Vec<IndexTransaction> = vec![0, 1, 2, 4, 3] .into_iter() .map(new_index_transaction) .collect(); block.prefilled_transactions = prefilled; assert_eq!( PrefilledVerifier::new().verify(&block), Err(Error::UnorderedPrefilledTransactions), ); } #[test] fn test_ordered_prefilled() { let mut block = CompactBlock::default(); let prefilled: Vec<IndexTransaction> = (0..5).map(new_index_transaction).collect(); block.prefilled_transactions = prefilled; assert_eq!(PrefilledVerifier::new().verify(&block), Ok(()),); } #[test] fn test_overflow_prefilled() { let mut block = CompactBlock::default(); let prefilled: Vec<IndexTransaction> = vec![0, 1, 2, 5] .into_iter() .map(new_index_transaction) .collect(); block.prefilled_transactions = prefilled; assert_eq!( PrefilledVerifier::new().verify(&block), Err(Error::OverflowPrefilledTransactions), ); } #[test] fn test_cellbase_not_prefilled() { let block = CompactBlock::default(); assert_eq!( PrefilledVerifier::new().verify(&block), Err(Error::CellbaseNotPre
#[test] fn test_duplicated_short_ids() { let mut block = CompactBlock::default(); let mut short_ids: Vec<ShortTransactionID> = (1..5) .map(new_index_transaction) .map(|tx| { let (key0, key1) = short_transaction_id_keys(block.header.nonce(), block.nonce); short_transaction_id(key0, key1, &tx.transaction.witness_hash()) }) .collect(); short_ids.push(short_ids[0]); block.short_ids = short_ids; assert_eq!( ShortIdsVerifier::new().verify(&block), Err(Error::DuplicatedShortIds), ); } #[test] fn test_intersected_short_ids() { let mut block = CompactBlock::default(); let prefilled: Vec<IndexTransaction> = (0..=5).map(new_index_transaction).collect(); let short_ids: Vec<ShortTransactionID> = (5..9) .map(new_index_transaction) .map(|tx| { let (key0, key1) = short_transaction_id_keys(block.header.nonce(), block.nonce); short_transaction_id(key0, key1, &tx.transaction.witness_hash()) }) .collect(); block.prefilled_transactions = prefilled; block.short_ids = short_ids; assert_eq!( ShortIdsVerifier::new().verify(&block), Err(Error::IntersectedPrefilledTransactions), ); } #[test] fn test_normal() { let mut block = CompactBlock::default(); let prefilled: Vec<IndexTransaction> = vec![1, 2, 5] .into_iter() .map(new_index_transaction) .collect(); let short_ids: Vec<ShortTransactionID> = vec![0, 3, 4] .into_iter() .map(new_index_transaction) .map(|tx| { let (key0, key1) = short_transaction_id_keys(block.header.nonce(), block.nonce); short_transaction_id(key0, key1, &tx.transaction.witness_hash()) }) .collect(); block.prefilled_transactions = prefilled; block.short_ids = short_ids; assert_eq!(ShortIdsVerifier::new().verify(&block), Ok(()),); }
filled) ); let mut block = CompactBlock::default(); let prefilled: Vec<IndexTransaction> = (1..5).map(new_index_transaction).collect(); block.prefilled_transactions = prefilled; assert_eq!( PrefilledVerifier::new().verify(&block), Err(Error::CellbaseNotPrefilled), ); }
function_block-function_prefixed
[ { "content": "// Build compact block based on core block, and specific prefilled indices\n\npub fn build_compact_block_with_prefilled(block: &Block, prefilled: Vec<usize>) -> Bytes {\n\n let prefilled = prefilled.into_iter().collect();\n\n let fbb = &mut FlatBufferBuilder::new();\n\n let message = Rela...
Rust
crates/brine_voxel_v1/src/chunk_builder/plugin.rs
BGR360/brine
048656dfb3dc5c608536f14d687c6f7a9075df0a
use std::collections::hash_map::Entry; use std::{any::Any, marker::PhantomData}; use bevy::tasks::Task; use bevy::utils::{HashMap, HashSet}; use bevy::{ecs::event::Events, prelude::*, tasks::AsyncComputeTaskPool}; use futures_lite::future; use brine_asset::{api::BlockFace, MinecraftAssets}; use brine_chunk::ChunkSection; use brine_data::BlockStateId; use brine_proto::event; use crate::chunk_builder::component::PendingChunk; use crate::mesh::VoxelMesh; use crate::texture::BlockTextures; use super::component::{ChunkSection as ChunkSectionComponent, PendingMeshAtlas}; use super::{ component::{BuiltChunkBundle, BuiltChunkSectionBundle}, ChunkBuilder, }; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, SystemLabel)] pub enum System { BuilderTaskSpawn, BuilderResultAddToWorld, } pub struct ChunkBuilderPlugin<T: ChunkBuilder> { shared: bool, _phantom: PhantomData<T>, } impl<T: ChunkBuilder> ChunkBuilderPlugin<T> { pub fn shared() -> Self { Self { shared: true, ..Default::default() } } } impl<T: ChunkBuilder> Default for ChunkBuilderPlugin<T> { fn default() -> Self { Self { shared: false, _phantom: PhantomData, } } } impl<T> Plugin for ChunkBuilderPlugin<T> where T: ChunkBuilder + Default + Send + Sync + 'static, { fn build(&self, app: &mut App) { let mut systems = SystemSet::new(); systems = if self.shared { systems.with_system(Self::builder_task_spawn_shared.label(System::BuilderTaskSpawn)) } else { systems.with_system(Self::builder_task_spawn_unique.label(System::BuilderTaskSpawn)) }; systems = systems .with_system(Self::receive_built_meshes) .with_system(Self::add_built_chunks_to_world.label(System::BuilderResultAddToWorld)); app.add_system_set(systems); } } type MesherTask = Task<(brine_chunk::Chunk, Vec<VoxelMesh>)>; impl<T> ChunkBuilderPlugin<T> where T: ChunkBuilder + Default + Any + Send + Sync + 'static, { fn builder_task_spawn( chunk_event: event::clientbound::ChunkData, commands: &mut Commands, task_pool: &AsyncComputeTaskPool, ) { let chunk = 
chunk_event.chunk_data; if !chunk.is_full() { return; } let chunk_x = chunk.chunk_x; let chunk_z = chunk.chunk_z; debug!("Received chunk ({}, {}), spawning task", chunk_x, chunk_z); let task: MesherTask = task_pool.spawn(async move { let built = T::default().build_chunk(&chunk); (chunk, built) }); commands.spawn().insert_bundle(( task, PendingChunk::new(T::TYPE), Name::new(format!("Pending Chunk ({}, {})", chunk_x, chunk_z)), )); } fn build_texture_atlas_for_mesh( mesh: &VoxelMesh, chunk_section: &ChunkSection, asset_server: &AssetServer, mc_assets: &MinecraftAssets, texture_builder: &mut BlockTextures, ) -> PendingMeshAtlas { let mut texture_handles: HashSet<Handle<Image>> = Default::default(); let mut face_textures: Vec<Handle<Image>> = Vec::with_capacity(mesh.faces.len()); let mut handle_cache: HashMap<(BlockStateId, BlockFace), Handle<Image>> = Default::default(); for face in mesh.faces.iter() { let [x, y, z] = face.voxel; let face = face.axis.into(); let block_state_id = chunk_section.get_block((x, y, z)).unwrap(); let block_state_id = BlockStateId(block_state_id.0 as u16); let key = (block_state_id, face); let weak_handle = match handle_cache.entry(key) { Entry::Vacant(entry) => { let strong_handle = match mc_assets .get_texture_path_for_block_state_and_face(block_state_id, face) { Some(path) => asset_server.load(path), None => { debug!("No texture for {:?}:{:?}", block_state_id, face); texture_builder.placeholder_texture.clone() } }; if !texture_handles.contains(&strong_handle) { texture_handles.insert(strong_handle.clone()); } entry.insert(strong_handle.as_weak()).clone_weak() } Entry::Occupied(entry) => entry.get().clone_weak(), }; face_textures.push(weak_handle); } let atlas = texture_builder .create_texture_atlas_with_textures(texture_handles.into_iter(), asset_server); PendingMeshAtlas { atlas, face_textures, } } fn add_built_chunk_to_world( chunk_data: brine_chunk::Chunk, voxel_meshes: Vec<VoxelMesh>, atlases: Vec<&TextureAtlas>, face_textures: 
Vec<Vec<Handle<Image>>>, meshes: &mut Assets<Mesh>, materials: &mut Assets<StandardMaterial>, commands: &mut Commands, ) -> Entity { debug!( "Adding chunk ({}, {}) to world", chunk_data.chunk_x, chunk_data.chunk_z ); commands .spawn() .insert_bundle(BuiltChunkBundle::new( T::TYPE, chunk_data.chunk_x, chunk_data.chunk_z, )) .with_children(move |parent| { for (((section, mut mesh), atlas), face_textures) in chunk_data .sections .into_iter() .zip(voxel_meshes.into_iter()) .zip(atlases.into_iter()) .zip(face_textures.into_iter()) { mesh.adjust_tex_coords(atlas, &face_textures); parent .spawn() .insert_bundle(BuiltChunkSectionBundle::new(T::TYPE, section.chunk_y)) .insert_bundle(PbrBundle { mesh: meshes.add(mesh.to_render_mesh()), material: materials.add(StandardMaterial { base_color_texture: Some(atlas.texture.clone()), unlit: true, ..Default::default() }), ..Default::default() }) .insert(ChunkSectionComponent(section)); } }) .id() } /* ____ _ / ___| _ _ ___| |_ ___ _ __ ___ ___ \___ \| | | / __| __/ _ \ '_ ` _ \/ __| ___) | |_| \__ \ || __/ | | | | \__ \ |____/ \__, |___/\__\___|_| |_| |_|___/ |___/ */ fn builder_task_spawn_unique( mut chunk_events: ResMut<Events<event::clientbound::ChunkData>>, mut commands: Commands, task_pool: Res<AsyncComputeTaskPool>, ) { for chunk_event in chunk_events.drain() { Self::builder_task_spawn(chunk_event, &mut commands, &task_pool); } } fn builder_task_spawn_shared( mut chunk_events: EventReader<event::clientbound::ChunkData>, mut commands: Commands, task_pool: Res<AsyncComputeTaskPool>, ) { for chunk_event in chunk_events.iter() { Self::builder_task_spawn(chunk_event.clone(), &mut commands, &task_pool); } } fn receive_built_meshes( asset_server: Res<AssetServer>, mc_assets: Res<MinecraftAssets>, mut chunks_with_pending_meshes: Query<(Entity, &mut PendingChunk, &mut MesherTask)>, mut texture_builder: ResMut<BlockTextures>, mut commands: Commands, ) { const MAX_PER_FRAME: usize = 1; for (i, (entity, mut pending_chunk, mut mesher_task)) 
in chunks_with_pending_meshes.iter_mut().enumerate() { if i >= MAX_PER_FRAME { break; } if pending_chunk.builder != T::TYPE { continue; } if let Some((chunk, voxel_meshes)) = future::block_on(future::poll_once(&mut *mesher_task)) { debug!( "Received meshes for Chunk ({}, {})", chunk.chunk_x, chunk.chunk_z ); let texture_atlases = voxel_meshes .iter() .zip(chunk.sections.iter()) .map(|(mesh, chunk_section)| { Self::build_texture_atlas_for_mesh( mesh, chunk_section, &*asset_server, &*mc_assets, &mut *texture_builder, ) }) .collect(); pending_chunk.chunk_data = Some(chunk); pending_chunk.voxel_meshes = Some(voxel_meshes); pending_chunk.texture_atlases = Some(texture_atlases); commands.entity(entity).remove::<MesherTask>(); } } } fn add_built_chunks_to_world( atlases: Res<Assets<TextureAtlas>>, mut chunks_with_pending_atlases: Query<(Entity, &mut PendingChunk), Without<MesherTask>>, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, mut commands: Commands, ) { for (entity, mut pending_chunk) in chunks_with_pending_atlases.iter_mut() { if pending_chunk.builder != T::TYPE { continue; } let built_atlases: Vec<Option<&TextureAtlas>> = pending_chunk .texture_atlases .as_ref() .unwrap() .iter() .map(|pending_atlas| atlases.get(&pending_atlas.atlas)) .collect(); if built_atlases.iter().any(|atlas| atlas.is_none()) { continue; } let atlases: Vec<&TextureAtlas> = built_atlases.iter().map(|atlas| atlas.unwrap()).collect(); let face_textures: Vec<Vec<Handle<Image>>> = pending_chunk .texture_atlases .take() .unwrap() .into_iter() .map(|atlas| atlas.face_textures) .collect(); let chunk = pending_chunk.chunk_data.take().unwrap(); let voxel_meshes = pending_chunk.voxel_meshes.take().unwrap(); debug!( "Received all texture atlases for Chunk ({}, {})", chunk.chunk_x, chunk.chunk_z ); Self::add_built_chunk_to_world( chunk, voxel_meshes, atlases, face_textures, &mut *meshes, &mut *materials, &mut commands, ); commands.entity(entity).despawn(); } } }
use std::collections::hash_map::Entry; use std::{any::Any, marker::PhantomData}; use bevy::tasks::Task; use bevy::utils::{HashMap, HashSet}; use bevy::{ecs::event::Events, prelude::*, tasks::AsyncComputeTaskPool}; use futures_lite::future; use brine_asset::{api::BlockFace, MinecraftAssets}; use brine_chunk::ChunkSection; use brine_data::BlockStateId; use brine_proto::event; use crate::chunk_builder::component::PendingChunk; use crate::mesh::VoxelMesh; use crate::texture::BlockTextures; use super::component::{ChunkSection as ChunkSectionComponent, PendingMeshAtlas}; use super::{ component::{BuiltChunkBundle, BuiltChunkSectionBundle}, ChunkBuilder, }; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, SystemLabel)] pub enum System { BuilderTaskSpawn, BuilderResultAddToWorld, } pub struct ChunkBuilderPlugin<T: ChunkBuilder> { shared: bool, _phantom: PhantomData<T>, } impl<T: ChunkBuilder> ChunkBuilderPlugin<T> { pub fn shared() -> Self { Self { shared: true, ..Default::default() } } } impl<T: ChunkBuilder> Default for ChunkBuilderPlugin<T> { fn default() -> Self { Self { shared: false, _phantom: PhantomData, } } } impl<T> Plugin for ChunkBuilderPlugin<T> where T: ChunkBuilder + Default + Send + Sync + 'static, { fn build(&self, app: &mut App) { let mut systems = SystemSet::new(); systems = if self.shared { systems.with_system(Self::builder_task_spawn_shared.label(System::BuilderTaskSpawn)) } else { systems.with_system(Self::builder_task_spawn_unique.label(System::BuilderTaskSpawn)) }; systems = systems .with_system(Self::receive_built_meshes) .with_system(Self::add_built_chunks_to_world.label(System::BuilderResultAddToWorld)); app.add_system_set(systems); } } type MesherTask = Task<(brine_chunk::Chunk, Vec<VoxelMesh>)>; impl<T> ChunkBuilderPlugin<T> where T: ChunkBuilder + Default + Any + Send + Sync + 'static, { fn builder_task_spawn( chunk_event: event::clientbound::ChunkData, commands: &mut Commands, task_pool: &AsyncComputeTaskPool, ) { let chunk = 
chunk_event.chunk_data; if !chunk.is_full() { return; } let chunk_x = chunk.chunk_x; let chunk_z = chunk.chunk_z; debug!("Received chunk ({}, {}), spawning task", chunk_x, chunk_z); let task: MesherTask = task_pool.spawn(async move { let built = T::default().build_chunk(&chunk); (chunk, built) }); commands.spawn().insert_bundle(( task, PendingChunk::new(T::TYPE), Name::new(format!("Pending Chunk ({}, {})", chunk_x, chunk_z)), )); } fn build_texture_atlas_for_mesh( mesh: &VoxelMesh, chunk_section: &ChunkSection, asset_server: &AssetServer, mc_assets: &MinecraftAssets, texture_builder: &mut BlockTextures, ) -> PendingMeshAtlas { let mut texture_handles: HashSet<Handle<Image>> = Default::default(); let mut face_textures: Vec<Handle<Image>> = Vec::with_capacity(mesh.faces.len()); let mut handle_cache: HashMap<(BlockStateId, BlockFace), Handle<Image>> = Default::default(); for face in mesh.faces.iter() { let [x, y, z] = face.voxel; let face = face.axis.into(); let block_state_id = chunk_section.get_block((x, y, z)).unwrap(); let block_state_id = BlockStateId(block_state_id.0 as u16); let key = (block_state_id, face); let weak_handle = match handle_cache.entry(key) { Entry::Vacant(entry) => {
if !texture_handles.contains(&strong_handle) { texture_handles.insert(strong_handle.clone()); } entry.insert(strong_handle.as_weak()).clone_weak() } Entry::Occupied(entry) => entry.get().clone_weak(), }; face_textures.push(weak_handle); } let atlas = texture_builder .create_texture_atlas_with_textures(texture_handles.into_iter(), asset_server); PendingMeshAtlas { atlas, face_textures, } } fn add_built_chunk_to_world( chunk_data: brine_chunk::Chunk, voxel_meshes: Vec<VoxelMesh>, atlases: Vec<&TextureAtlas>, face_textures: Vec<Vec<Handle<Image>>>, meshes: &mut Assets<Mesh>, materials: &mut Assets<StandardMaterial>, commands: &mut Commands, ) -> Entity { debug!( "Adding chunk ({}, {}) to world", chunk_data.chunk_x, chunk_data.chunk_z ); commands .spawn() .insert_bundle(BuiltChunkBundle::new( T::TYPE, chunk_data.chunk_x, chunk_data.chunk_z, )) .with_children(move |parent| { for (((section, mut mesh), atlas), face_textures) in chunk_data .sections .into_iter() .zip(voxel_meshes.into_iter()) .zip(atlases.into_iter()) .zip(face_textures.into_iter()) { mesh.adjust_tex_coords(atlas, &face_textures); parent .spawn() .insert_bundle(BuiltChunkSectionBundle::new(T::TYPE, section.chunk_y)) .insert_bundle(PbrBundle { mesh: meshes.add(mesh.to_render_mesh()), material: materials.add(StandardMaterial { base_color_texture: Some(atlas.texture.clone()), unlit: true, ..Default::default() }), ..Default::default() }) .insert(ChunkSectionComponent(section)); } }) .id() } /* ____ _ / ___| _ _ ___| |_ ___ _ __ ___ ___ \___ \| | | / __| __/ _ \ '_ ` _ \/ __| ___) | |_| \__ \ || __/ | | | | \__ \ |____/ \__, |___/\__\___|_| |_| |_|___/ |___/ */ fn builder_task_spawn_unique( mut chunk_events: ResMut<Events<event::clientbound::ChunkData>>, mut commands: Commands, task_pool: Res<AsyncComputeTaskPool>, ) { for chunk_event in chunk_events.drain() { Self::builder_task_spawn(chunk_event, &mut commands, &task_pool); } } fn builder_task_spawn_shared( mut chunk_events: 
EventReader<event::clientbound::ChunkData>, mut commands: Commands, task_pool: Res<AsyncComputeTaskPool>, ) { for chunk_event in chunk_events.iter() { Self::builder_task_spawn(chunk_event.clone(), &mut commands, &task_pool); } } fn receive_built_meshes( asset_server: Res<AssetServer>, mc_assets: Res<MinecraftAssets>, mut chunks_with_pending_meshes: Query<(Entity, &mut PendingChunk, &mut MesherTask)>, mut texture_builder: ResMut<BlockTextures>, mut commands: Commands, ) { const MAX_PER_FRAME: usize = 1; for (i, (entity, mut pending_chunk, mut mesher_task)) in chunks_with_pending_meshes.iter_mut().enumerate() { if i >= MAX_PER_FRAME { break; } if pending_chunk.builder != T::TYPE { continue; } if let Some((chunk, voxel_meshes)) = future::block_on(future::poll_once(&mut *mesher_task)) { debug!( "Received meshes for Chunk ({}, {})", chunk.chunk_x, chunk.chunk_z ); let texture_atlases = voxel_meshes .iter() .zip(chunk.sections.iter()) .map(|(mesh, chunk_section)| { Self::build_texture_atlas_for_mesh( mesh, chunk_section, &*asset_server, &*mc_assets, &mut *texture_builder, ) }) .collect(); pending_chunk.chunk_data = Some(chunk); pending_chunk.voxel_meshes = Some(voxel_meshes); pending_chunk.texture_atlases = Some(texture_atlases); commands.entity(entity).remove::<MesherTask>(); } } } fn add_built_chunks_to_world( atlases: Res<Assets<TextureAtlas>>, mut chunks_with_pending_atlases: Query<(Entity, &mut PendingChunk), Without<MesherTask>>, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, mut commands: Commands, ) { for (entity, mut pending_chunk) in chunks_with_pending_atlases.iter_mut() { if pending_chunk.builder != T::TYPE { continue; } let built_atlases: Vec<Option<&TextureAtlas>> = pending_chunk .texture_atlases .as_ref() .unwrap() .iter() .map(|pending_atlas| atlases.get(&pending_atlas.atlas)) .collect(); if built_atlases.iter().any(|atlas| atlas.is_none()) { continue; } let atlases: Vec<&TextureAtlas> = built_atlases.iter().map(|atlas| 
atlas.unwrap()).collect(); let face_textures: Vec<Vec<Handle<Image>>> = pending_chunk .texture_atlases .take() .unwrap() .into_iter() .map(|atlas| atlas.face_textures) .collect(); let chunk = pending_chunk.chunk_data.take().unwrap(); let voxel_meshes = pending_chunk.voxel_meshes.take().unwrap(); debug!( "Received all texture atlases for Chunk ({}, {})", chunk.chunk_x, chunk.chunk_z ); Self::add_built_chunk_to_world( chunk, voxel_meshes, atlases, face_textures, &mut *meshes, &mut *materials, &mut commands, ); commands.entity(entity).despawn(); } } }
let strong_handle = match mc_assets .get_texture_path_for_block_state_and_face(block_state_id, face) { Some(path) => asset_server.load(path), None => { debug!("No texture for {:?}:{:?}", block_state_id, face); texture_builder.placeholder_texture.clone() } };
assignment_statement
[ { "content": "pub fn build_bevy_mesh(voxel_mesh: &VoxelMesh) -> Mesh {\n\n let num_vertices = voxel_mesh.quads.len() * 4;\n\n let num_indices = voxel_mesh.quads.len() * 6;\n\n let mut positions = Vec::with_capacity(num_vertices);\n\n let mut normals = Vec::with_capacity(num_vertices);\n\n let mut...
Rust
src/generator/painter/stroke.rs
zeh/art-generator
916ea37631dc9a0030187af06afad0914b0a39ff
use std::collections::HashMap; use image::{Pixel, Rgb, RgbImage}; use crate::generator::painter::Painter; use crate::generator::utils::color::BlendingMode; use crate::generator::utils::geom::find_target_draw_rect; use crate::generator::utils::pixel::{blend, blend_linear}; use crate::generator::utils::random::{ get_noise_value, get_random_entry_weighted, get_random_noise_sequence, get_random_range, get_random_ranges_bias_weighted, get_random_size_ranges_bias_weighted, get_rng, }; use crate::generator::utils::units::{Margins, SizeUnit, WeightedValue}; use crate::generator::utils::{image::get_pixel_interpolated, random::get_random_color}; pub struct StrokePainter { pub options: Options, } pub struct Options { pub blending_mode: Vec<WeightedValue<BlendingMode>>, pub alpha: Vec<WeightedValue<(f64, f64)>>, pub alpha_bias: f64, pub width: Vec<WeightedValue<(SizeUnit, SizeUnit)>>, pub height: Vec<WeightedValue<(SizeUnit, SizeUnit)>>, pub width_bias: f64, pub height_bias: f64, pub wave_height: Vec<WeightedValue<(SizeUnit, SizeUnit)>>, pub wave_height_bias: f64, pub wave_length: Vec<WeightedValue<(SizeUnit, SizeUnit)>>, pub wave_length_bias: f64, pub anti_alias: bool, pub color_seed: f64, pub rng_seed: u32, pub margins: Margins<SizeUnit>, } impl StrokePainter { pub fn new() -> Self { let options = Options { blending_mode: vec![WeightedValue { value: BlendingMode::default(), weight: 1.0, }], alpha: vec![WeightedValue { value: (1.0, 1.0), weight: 1.0, }], alpha_bias: 0.0, width: vec![WeightedValue { value: (SizeUnit::Fraction(0.0), SizeUnit::Fraction(1.0)), weight: 1.0, }], width_bias: 0.0, height: vec![WeightedValue { value: (SizeUnit::Fraction(0.0), SizeUnit::Fraction(1.0)), weight: 1.0, }], height_bias: 0.0, wave_height: vec![WeightedValue { value: (SizeUnit::Fraction(0.01), SizeUnit::Fraction(0.01)), weight: 1.0, }], wave_height_bias: 0.0, wave_length: vec![WeightedValue { value: (SizeUnit::Fraction(0.5), SizeUnit::Fraction(0.5)), weight: 1.0, }], wave_length_bias: 0.0, 
anti_alias: true, color_seed: 0.0, rng_seed: 0, margins: Margins::<SizeUnit> { top: SizeUnit::Pixels(0), right: SizeUnit::Pixels(0), bottom: SizeUnit::Pixels(0), left: SizeUnit::Pixels(0), }, }; StrokePainter { options, } } } impl Painter for StrokePainter { fn paint(&self, canvas: &RgbImage, iteration: u32, seed_map: &RgbImage) -> Result<RgbImage, &str> { let mut rng = get_rng(self.options.rng_seed, iteration); let image_area = canvas.dimensions(); let target_area = match find_target_draw_rect(image_area, &self.options.margins) { Ok(rect) => rect, Err(err) => return Err(err), }; let target_visible_area = (image_area.0.min(target_area.width as u32), image_area.1.min(target_area.height as u32)); let rect_w = get_random_size_ranges_bias_weighted( &mut rng, &self.options.width, self.options.width_bias, target_visible_area.0, ); let rect_h = get_random_size_ranges_bias_weighted( &mut rng, &self.options.height, self.options.height_bias, target_visible_area.1, ); let rect_x = get_random_range( &mut rng, target_area.x as f64, (target_area.x + target_area.width) as f64 - rect_w, ); let rect_y = get_random_range( &mut rng, target_area.y as f64, (target_area.y + target_area.height) as f64 - rect_h, ); let x1 = rect_x.round().max(0.0).min(image_area.0 as f64) as u32; let x2 = (rect_x + rect_w).round().max(0.0).min(image_area.0 as f64) as u32; let y1 = rect_y.round().max(0.0).min(image_area.1 as f64) as u32; let y2 = (rect_y + rect_h).round().max(0.0).min(image_area.1 as f64) as u32; let random_color = get_random_color(&mut rng); let seed_color = get_pixel_interpolated(seed_map, (x1 + x2) as f64 / 2.0, (y1 + y2) as f64 / 2.0); let color = blend_linear(&random_color, &seed_color, self.options.color_seed); let alpha = get_random_ranges_bias_weighted(&mut rng, &self.options.alpha, self.options.alpha_bias); let wave_height = get_random_size_ranges_bias_weighted( &mut rng, &self.options.wave_height, self.options.wave_height_bias, target_visible_area.0 as u32, ); let wave_length = 
get_random_size_ranges_bias_weighted( &mut rng, &self.options.wave_length, self.options.wave_length_bias, target_visible_area.1 as u32, ); let blending_mode = get_random_entry_weighted(&mut rng, &self.options.blending_mode); let mut painted_canvas = canvas.clone(); if wave_height == 0.0 || wave_length == 0.0 { for x in x1..x2 { for y in y1..y2 { let new_pixel = Rgb(blend(painted_canvas.get_pixel(x, y).channels(), &color, alpha, &blending_mode)); painted_canvas.put_pixel(x, y, new_pixel); } } } else { let margins: f64 = wave_height / 2.0; let margin_ceil: u32 = margins.ceil() as u32; let noise = get_random_noise_sequence(&mut rng, -margins, margins); let noise_freq = wave_length; let x1_safe = (x1 as i64 - margin_ceil as i64).max(0) as u32; let x2_safe = (x2 + margin_ceil).min(image_area.0 as u32); let y1_safe = (y1 as i64 - margin_ceil as i64).max(0) as u32; let y2_safe = (y2 + margin_ceil).min(image_area.1 as u32); for x in x1_safe..x2_safe { for y in y1_safe..y2_safe { let alpha_x = if x >= x1 + margin_ceil && x < x2 - margin_ceil { 1.0 } else { let noise_x = get_noise_value(noise, y as f64 / noise_freq); let offset_x1 = x as f64 - (x1 as f64 + noise_x); let alpha_x1 = if offset_x1 > 0.5 { 1.0 } else if offset_x1 < -0.5 { 0.0 } else { offset_x1 + 0.5 }; let offset_x2 = (x2 as f64 + noise_x) - x as f64; let alpha_x2 = if offset_x2 > 0.5 { 1.0 } else if offset_x2 < -0.5 { 0.0 } else { offset_x2 + 0.5 }; alpha_x1 * alpha_x2 }; let alpha_y = if y >= y1 + margin_ceil && y < y2 - margin_ceil { 1.0 } else { let noise_y = get_noise_value(noise, x as f64 / noise_freq); let offset_y1 = y as f64 - (y1 as f64 + noise_y); let alpha_y1 = if offset_y1 > 0.5 { 1.0 } else if offset_y1 < -0.5 { 0.0 } else { offset_y1 + 0.5 }; let offset_y2 = (y2 as f64 + noise_y) - y as f64; let alpha_y2 = if offset_y2 > 0.5 { 1.0 } else if offset_y2 < -0.5 { 0.0 } else { offset_y2 + 0.5 }; alpha_y1 * alpha_y2 }; let new_pixel = Rgb(blend( painted_canvas.get_pixel(x, y).channels(), &color, if 
self.options.anti_alias { alpha_x * alpha_y * alpha } else { if alpha_x * alpha_y >= 0.5 { 1.0 } else { 0.0 } }, &blending_mode, )); painted_canvas.put_pixel(x, y, new_pixel); } } } Ok(painted_canvas) } fn get_metadata(&self) -> HashMap<String, String> { let mut data = HashMap::new(); data.insert(String::from("RNG seed"), format!("{}", &self.options.rng_seed)); data } }
use std::collections::HashMap; use image::{Pixel, Rgb, RgbImage}; use crate::generator::painter::Painter; use crate::generator::utils::color::BlendingMode; use crate::generator::utils::geom::find_target_draw_rect; use crate::generator::utils::pixel::{blend, blend_linear}; use crate::generator::utils::random::{ get_noise_value, get_random_entry_weighted, get_random_noise_sequence, get_random_range, get_random_ranges_bias_weighted, get_random_size_ranges_bias_weighted, get_rng, }; use crate::generator::utils::units::{Margins, SizeUnit, WeightedValue}; use crate::generator::utils::{image::get_pixel_interpolated, random::get_random_color}; pub struct StrokePainter { pub options: Options, } pub struct Options { pub blending_mode: Vec<WeightedValue<BlendingMode>>, pub alpha: Vec<WeightedValue<(f64, f64)>>, pub alpha_bias: f64, pub width: Vec<WeightedValue<(SizeUnit, SizeUnit)>>, pub height: Vec<WeightedValue<(SizeUnit, SizeUnit)>>, pub width_bias: f64, pub height_bias: f64, pub wave_height: Vec<WeightedValue<(SizeUnit, SizeUnit)>>, pub wave_height_bias: f64, pub wave_length: Vec<WeightedValue<(SizeUnit, SizeUnit)>>, pub wave_length_bias: f64, pub anti_alias: bool, pub color_seed: f64, pub rng_seed: u32, pub margins: Margins<SizeUnit>, } impl StrokePainter { pub fn new() -> Self { let options = Options { blending_mode: vec![WeightedValue { value: BlendingMode::default(), weight: 1.0, }], alpha: vec![WeightedValue { value: (1.0, 1.0), weight: 1.0, }], alpha_bias: 0.0, width: vec![WeightedValue { value: (SizeUnit::Fraction(0.0), SizeUnit::Fraction(1.0)), weight: 1.0, }], width_bias: 0.0, height: vec![WeightedValue { value: (SizeUnit::Fraction(0.0), SizeUnit::Fraction(1.0)), weight: 1.0, }], height_bias: 0.0, wave_height: vec![WeightedValue { value: (SizeUnit::Fraction(0.01), SizeUnit::Fraction(0.01)), weight: 1.0, }], wave_height_bias: 0.0, wave_length: vec![WeightedValue { value: (SizeUnit::Fraction(0.5), SizeUnit::Fraction(0.5)), weight: 1.0, }], wave_length_bias: 0.0, 
anti_alias: true, color_seed: 0.0, rng_seed: 0, margins: Margins::<SizeUnit> { top: SizeUnit::Pixels(0), right: SizeUnit::Pixels(0), bottom: SizeUnit::Pixels(0), left: SizeUnit::Pixels(0), }, }; StrokePainter { options, } } } impl Painter for StrokePainter { fn paint(&self, canvas: &RgbImage, iteration: u32, seed_map: &RgbImage) -> Result<RgbImage, &str> { let mut rng = get_rng(self.options.rng_seed, iteration); let image_area = canvas.dimensions(); let target_area = match find_target_draw_rect(image_area, &self.options.margins) { Ok(rect) => rect, Err(err) => return Err(err), }; let target_visible_area = (image_area.0.min(target_area.width as u32), image_area.1.min(target_area.height as u32)); let rect_w = get_random_size_ranges_bias_weighted( &mut rng, &self.options.width, self.options.width_bias, target_visible_area.0, ); let rect_h = get_random_size_ranges_bias_weighted( &mut rng, &self.options.height, self.options.height_bias, target_visible_area.1, ); let rect_x = get_random_range( &mut rng, target_area.x as f64, (target_area.x + target_area.width) as f64 - rect_w, ); let rect_y = get_random_range( &mut rng, target_area.y as f64, (target_area.y + target_area.height) as f64 - rect_h, ); let x1 = rect_x.round().max(0.0).min(image_area.0 as f64) as u32; let x2 = (rect_x + rect_w).round().max(0.0).min(image_area.0 as f64) as u32; let y1 = rect_y.round().max(0.0).min(image_area.1 as f64) as u32; let y2 = (rect_y + rect_h).round().max(0.0).min(image_area.1 as f64) as u32; let random_color = get_random_color(&mut rng); let seed_color = get_pixel_interpolated(seed_map, (x1 + x2) as f64 / 2.0, (y1 + y2) as f64 / 2.0); let color = blend_linear(&random_color, &seed_color, self.options.color_seed); let alpha = get_random_ranges_bias_weighted(&mut rng, &self.options.alpha, self.options.alpha_bias); let wave_height = get_random_size_ranges_bias_weighted( &mut rng, &self.options.wave_height, self.options.wave_height_bias, target_visible_area.0 as u32, ); let wave_length = 
get_random_size_ranges_bias_weighted( &mut rng, &self.options.wave_length, self.options.wave_length_bias, target_visible_area.1 as u32, ); let blending_mode = get_random_entry_weighted(&mut rng, &self.options.blending_mode); let mut painted_canvas = canvas.clone(); if wave_height == 0.0 || wave_length == 0.0 { for x in x1..x2 { for y in y1..y2 { let new_pixel = Rgb(blend(painted_canvas.get_pixel(x, y).channels(), &color, alpha, &blending_mode)); painted_canvas.put_pixel(x, y, new_pixel); } } } else { let margins: f64 = wave_height / 2.0; let margin_ceil: u32 = margins.ceil() as u32; let noise = get_random_noise_sequence(&mut rng, -margins, margins); let noise_freq = wave_length; let x1_safe = (x1 as i64 - margin_ceil as i64).max(0) as u32; let x2_safe = (x2 + margin_ceil).min(image_area.0 as u32); let y1_safe = (y1 as i64 - margin_ceil as i64).max(0) as u32; let y2_safe = (y2 + margin_ceil).min(image_area.1 as u32); for x in x1_safe..x2_safe { for y in y1_safe..y2_safe { let alpha_x = if x >= x1 + margin_ceil && x < x2 - margin_ceil { 1.0 } else { let noise_x = get_noise_value(noise, y as f64 / noise_freq); let offset_x1 = x as f64 - (x1 as f64 + noise_x); let alpha_x1 =
; let offset_x2 = (x2 as f64 + noise_x) - x as f64; let alpha_x2 = if offset_x2 > 0.5 { 1.0 } else if offset_x2 < -0.5 { 0.0 } else { offset_x2 + 0.5 }; alpha_x1 * alpha_x2 }; let alpha_y = if y >= y1 + margin_ceil && y < y2 - margin_ceil { 1.0 } else { let noise_y = get_noise_value(noise, x as f64 / noise_freq); let offset_y1 = y as f64 - (y1 as f64 + noise_y); let alpha_y1 = if offset_y1 > 0.5 { 1.0 } else if offset_y1 < -0.5 { 0.0 } else { offset_y1 + 0.5 }; let offset_y2 = (y2 as f64 + noise_y) - y as f64; let alpha_y2 = if offset_y2 > 0.5 { 1.0 } else if offset_y2 < -0.5 { 0.0 } else { offset_y2 + 0.5 }; alpha_y1 * alpha_y2 }; let new_pixel = Rgb(blend( painted_canvas.get_pixel(x, y).channels(), &color, if self.options.anti_alias { alpha_x * alpha_y * alpha } else { if alpha_x * alpha_y >= 0.5 { 1.0 } else { 0.0 } }, &blending_mode, )); painted_canvas.put_pixel(x, y, new_pixel); } } } Ok(painted_canvas) } fn get_metadata(&self) -> HashMap<String, String> { let mut data = HashMap::new(); data.insert(String::from("RNG seed"), format!("{}", &self.options.rng_seed)); data } }
if offset_x1 > 0.5 { 1.0 } else if offset_x1 < -0.5 { 0.0 } else { offset_x1 + 0.5 }
if_condition
[ { "content": "#[inline(always)]\n\npub fn distance(x1: f64, y1: f64, x2: f64, y2: f64) -> f64 {\n\n\tlet x = x1 - x2;\n\n\tlet y = y1 - y2;\n\n\t(x * x + y * y).sqrt()\n\n}\n\n\n", "file_path": "src/generator/utils/geom.rs", "rank": 0, "score": 301141.6146164706 }, { "content": "/// Parses a...
Rust
src/main.rs
wayfair-tremor/uring
483adf33d61a767a2de9a5ed4ed5cc272cc62ac2
#![recursion_limit = "2048"] mod codec; #[allow(unused)] pub mod errors; pub mod network; mod protocol; mod pubsub; pub mod raft_node; pub mod service; pub mod storage; pub mod version; use crate::network::{ws, Network, RaftNetworkMsg}; use crate::raft_node::*; use crate::service::mring::{self, placement::continuous}; use crate::service::{kv, Service}; use crate::storage::URRocksStorage; use async_std::task; use clap::{App as ClApp, Arg}; use futures::{select, FutureExt, StreamExt}; use serde_derive::{Deserialize, Serialize}; use slog::{Drain, Logger}; use slog_json; use std::time::{Duration, Instant}; pub use uring_common::*; use ws_proto::PSURing; const CHANNEL_SIZE: usize = 64usize; #[macro_use] extern crate slog; #[derive(Deserialize, Serialize)] pub struct KV { key: String, value: serde_json::Value, } #[derive(Deserialize, Serialize, Debug)] pub struct Event { nid: Option<NodeId>, eid: EventId, sid: ServiceId, data: Vec<u8>, } #[derive(Deserialize, Serialize)] pub struct KVs { scope: u16, key: Vec<u8>, value: Vec<u8>, } async fn raft_loop<N: Network>( id: NodeId, bootstrap: bool, ring_size: Option<u64>, pubsub: pubsub::Channel, network: N, logger: Logger, ) where N: 'static, { let mut node: RaftNode<URRocksStorage, _> = if bootstrap { RaftNode::create_raft_leader(&logger, id, pubsub, network).await } else { RaftNode::create_raft_follower(&logger, id, pubsub, network).await }; node.set_raft_tick_duration(Duration::from_millis(100)); node.log().await; let kv = kv::Service::new(&logger, 0); node.add_service(kv::ID, Box::new(kv)); let mut vnode: mring::Service<continuous::Strategy> = mring::Service::new(); if let Some(size) = ring_size { if bootstrap { vnode .execute( node.raft_group.as_ref().unwrap(), &mut node.pubsub, service::mring::Event::set_size(size), ) .await .unwrap(); } } node.add_service(mring::ID, Box::new(vnode)); let version = crate::service::version::Service::new(&logger); node.add_service(crate::service::version::ID, Box::new(version)); let status 
= crate::service::status::Service::new(&logger); let status = Box::new(status); node.add_service(service::status::ID, status); node.node_loop().await.unwrap() } fn main() -> std::io::Result<()> { use version::VERSION; let matches = ClApp::new("cake") .version(VERSION) .author("The Treamor Team") .about("Uring Demo") .arg( Arg::with_name("id") .short("i") .long("id") .value_name("ID") .help("The Node ID") .takes_value(true), ) .arg( Arg::with_name("bootstrap") .short("b") .long("bootstrap") .value_name("BOOTSTRAP") .help("Sets the node to bootstrap mode and become leader") .takes_value(false), ) .arg( Arg::with_name("ring-size") .short("r") .long("ring-size") .value_name("RING_SIZE") .help("Initialized mring size, only has an effect when used together with --bootstrap") .takes_value(true), ) .arg( Arg::with_name("http-endpoint") .long("http") .value_name("HTTP") .help("http endpoint to listen to") .takes_value(true), ) .arg( Arg::with_name("no-json") .short("n") .long("no-json") .value_name("NOJSON") .help("don't log via json") .takes_value(false), ) .arg( Arg::with_name("peers") .short("p") .long("peers") .value_name("PEERS") .multiple(true) .takes_value(true) .help("Peers to connet to"), ) .arg( Arg::with_name("endpoint") .short("e") .long("endpoint") .value_name("ENDPOINT") .takes_value(true) .default_value("127.0.0.1:8080") .help("Peers to connet to"), ) .get_matches(); let logger = if matches.is_present("no-json") { let decorator = slog_term::TermDecorator::new().build(); let drain = slog_term::FullFormat::new(decorator).build().fuse(); let drain = slog_async::Async::new(drain).build().fuse(); slog::Logger::root(drain, o!()) } else { let drain = slog_json::Json::default(std::io::stderr()).map(slog::Fuse); let drain = slog_async::Async::new(drain).build().fuse(); slog::Logger::root(drain, o!()) }; let peers = matches.values_of_lossy("peers").unwrap_or(vec![]); let ring_size: Option<u64> = matches.value_of("ring-size").map(|s| s.parse().unwrap()); let bootstrap = 
matches.is_present("bootstrap"); let endpoint = matches.value_of("endpoint").unwrap_or("127.0.0.1:8080"); let id = NodeId(matches.value_of("id").unwrap_or("1").parse().unwrap()); let loop_logger = logger.clone(); let rest_endpoint = matches.value_of("http-endpoint"); let ps_tx = pubsub::start(&logger); let network = ws::Network::new(&logger, id, endpoint, rest_endpoint, peers, ps_tx.clone()); task::block_on(raft_loop( id, bootstrap, ring_size, ps_tx, network, loop_logger, )); Ok(()) }
#![recursion_limit = "2048"] mod codec; #[allow(unused)] pub mod errors; pub mod network; mod protocol; mod pubsub; pub mod raft_node; pub mod service; pub mod storage; pub mod version; use crate::network::{ws, Network, RaftNetworkMsg}; use crate::raft_node::*; use crate::service::mring::{self, placement::continuous}; use crate::service::{kv, Service}; use crate::storage::URRocksStorage; use async_std::task; use clap::{App as ClApp, Arg}; use futures::{select, FutureExt, StreamExt}; use serde_derive::{Deserialize, Serialize}; use slog::{Drain, Logger}; use slog_json; use std::time::{Duration, Instant}; pub use uring_common::*; use ws_proto::PSURing; const CHANNEL_SIZE: usize = 64usize; #[macro_use] extern crate slog; #[derive(Deserialize, Serialize)] pub struct KV { key: String, value: serde_json::Value, } #[derive(Deserialize, Serialize, Debug)] pub struct Event { nid: Option<NodeId>, eid: EventId, sid: ServiceId, data: Vec<u8>, } #[derive(Deserialize, Serialize)] pub struct KVs { scope: u16, key: Vec<u8>, value: Vec<u8>, } async fn raft_loop<N: Network>( id: NodeId, bootstrap: bool, ring_size: Option<u64>, pubsub: pubsub::Channel, network: N, logger: Logger, ) where N: 'static, { let mut node: RaftNode<URRocksStorage, _> = if bootstrap { RaftNode::create_raft_leader(&logger, id, pubsub, network).await } else { RaftNode::create_raft_follower(&logger, id, pubsub, network).await }; node.set_raft_tick_duration(Duration::from_millis(100)); node.log().await; let kv = kv::Service::new(&logger, 0); node.add_service(kv::ID, Box::new(kv)); let mut vnode: mring::Service<continuous::Strategy> = mring::Service::new(); if let Some(size) = ring_size { if bootstrap { vnode .execute( node.raft_group.as_ref().unwrap(), &mut node.pubsub, service::mring::Event::set_size(size), ) .await .unwrap(); } } node.add_service(mring::ID, Box::new(vnode)); let version = crate::service::version::Service::new(&logger); node.add_service(crate::service::version::ID, Box::new(version)); let status 
= crate::service::status::Service::new(&logger); let status = Box::new(status); node.add_service(service::status::ID, status); node.node_loop().await.unwrap() } fn main() -> std::io::Result<()> { use version::VERSION; let matches = ClApp::new("cake") .version(VERSION) .author("The Treamor Team") .about("Uring Demo") .arg( Arg::with_name("id") .short("i") .long("id") .value_name("ID")
_tx = pubsub::start(&logger); let network = ws::Network::new(&logger, id, endpoint, rest_endpoint, peers, ps_tx.clone()); task::block_on(raft_loop( id, bootstrap, ring_size, ps_tx, network, loop_logger, )); Ok(()) }
.help("The Node ID") .takes_value(true), ) .arg( Arg::with_name("bootstrap") .short("b") .long("bootstrap") .value_name("BOOTSTRAP") .help("Sets the node to bootstrap mode and become leader") .takes_value(false), ) .arg( Arg::with_name("ring-size") .short("r") .long("ring-size") .value_name("RING_SIZE") .help("Initialized mring size, only has an effect when used together with --bootstrap") .takes_value(true), ) .arg( Arg::with_name("http-endpoint") .long("http") .value_name("HTTP") .help("http endpoint to listen to") .takes_value(true), ) .arg( Arg::with_name("no-json") .short("n") .long("no-json") .value_name("NOJSON") .help("don't log via json") .takes_value(false), ) .arg( Arg::with_name("peers") .short("p") .long("peers") .value_name("PEERS") .multiple(true) .takes_value(true) .help("Peers to connet to"), ) .arg( Arg::with_name("endpoint") .short("e") .long("endpoint") .value_name("ENDPOINT") .takes_value(true) .default_value("127.0.0.1:8080") .help("Peers to connet to"), ) .get_matches(); let logger = if matches.is_present("no-json") { let decorator = slog_term::TermDecorator::new().build(); let drain = slog_term::FullFormat::new(decorator).build().fuse(); let drain = slog_async::Async::new(drain).build().fuse(); slog::Logger::root(drain, o!()) } else { let drain = slog_json::Json::default(std::io::stderr()).map(slog::Fuse); let drain = slog_async::Async::new(drain).build().fuse(); slog::Logger::root(drain, o!()) }; let peers = matches.values_of_lossy("peers").unwrap_or(vec![]); let ring_size: Option<u64> = matches.value_of("ring-size").map(|s| s.parse().unwrap()); let bootstrap = matches.is_present("bootstrap"); let endpoint = matches.value_of("endpoint").unwrap_or("127.0.0.1:8080"); let id = NodeId(matches.value_of("id").unwrap_or("1").parse().unwrap()); let loop_logger = logger.clone(); let rest_endpoint = matches.value_of("http-endpoint"); let ps
function_block-random_span
[ { "content": "pub fn log(logger: &Logger) {\n\n info!(logger, \"uring version: {}\", VERSION);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn for_coverage_only() {\n\n print();\n\n log(&slog::Logger::root(slog::Discard, o!()));\n\n }\n\n}\n", "fi...
Rust
playground/jwk/src/lib.rs
DIN-Foundation/bcs-ntnu-2021
99532334904dfce5f4c2e3fdd816be80c2f5a3c9
pub fn run(config: Config) -> Result<String, std::io::Error> { match config.cmd { CMD::Init{ path } => init(&path), CMD::Doc{ path } => doc(&path), CMD::Did{ path } => did(&path), CMD::Help => help() } } fn init(path: &str) -> Result<String, std::io::Error> { use std::io::Write; if !std::fs::metadata(root_path(path)).is_ok() { std::fs::create_dir_all(root_path(path))?; } if !std::fs::metadata(jwk_path(path)).is_ok() { use did_key::KeyMaterial; let mut csprng = rand::rngs::OsRng {}; let private_key = ed25519_dalek::SecretKey::generate(&mut csprng).to_bytes(); let did_key = did_key::Ed25519KeyPair::from_seed(&private_key); let jwk = publicprivatebytes_to_jwkstr(did_key.public_key_bytes(), did_key.private_key_bytes()); let mut file = std::fs::File::create(jwk_path(path)).unwrap(); file.write(jwk.as_bytes()).unwrap(); Ok(format!("{} is ready", path)) } else { Ok(format!("{} already exists", path)) } } fn doc(path: &str) -> Result<String, std::io::Error> { use did_key::DIDCore; let jwk = std::fs::read(jwk_path(path))?; let jwkstr = String::from_utf8(jwk).unwrap(); let (public,_) = jwkstr_to_publicprivatebytes(&jwkstr); let keypair = did_key::from_existing_key::<did_key::Ed25519KeyPair>(&public, None); let did_doc = keypair.get_did_document(did_key::CONFIG_LD_PUBLIC); let did_doc = serde_json::to_string_pretty(&did_doc).unwrap(); Ok(format!("{}", did_doc)) } fn did(path: &str) -> Result<String, std::io::Error> { use did_key::DIDCore; let jwk = std::fs::read(jwk_path(path))?; let jwkstr = String::from_utf8(jwk).unwrap(); let (public,_) = jwkstr_to_publicprivatebytes(&jwkstr); let keypair = did_key::from_existing_key::<did_key::Ed25519KeyPair>(&public, None); let did_doc = keypair.get_did_document(did_key::CONFIG_LD_PUBLIC); let did = did_doc.id; Ok(format!("{}", did)) } fn help() -> Result<String, std::io::Error> { Ok(String::from(" Usage: didchat <path> <command> didchat <path> init didchat <path> doc didchat <path> did ")) } #[derive(Debug)] enum CMD { Init{ path: 
String }, Doc{ path: String }, Did{ path: String }, Help } pub struct Config { cmd: CMD, } impl Config { pub fn new(args: &[String]) -> Result<Config, std::io::Error> { let default_cmd = String::from("help"); let path = args.get(1).unwrap_or(&default_cmd).clone(); let cmd = args.get(2).unwrap_or(&default_cmd).clone(); let cmd = if args.len() < 3 { eprintln!("Command missing!"); default_cmd.clone() } else { cmd.clone() }; let cmd: CMD = match &cmd[..] { "did" => { CMD::Did{ path } }, "doc" => { CMD::Doc{ path } }, "init" => { CMD::Init{ path } }, "help" => CMD::Help, &_ => { eprintln!("{} not a valid command!", cmd); CMD::Help }, }; Ok(Config { cmd }) } } fn root_path(path: &str) -> String { format!("{}/.didchat", path) } fn jwk_path(path: &str) -> String { format!("{}/.didchat/me.jwk", path) } fn publicprivatebytes_to_jwkstr(public: Vec<u8>, private: Vec<u8>) -> String { let jwk = ssi::jwk::JWK { params: ssi::jwk::Params::OKP(ssi::jwk::OctetParams { curve: "Ed25519".to_string(), public_key: ssi::jwk::Base64urlUInt(public), private_key: Some(ssi::jwk::Base64urlUInt(private)), }), public_key_use: None, key_operations: None, algorithm: None, key_id: None, x509_url: None, x509_certificate_chain: None, x509_thumbprint_sha1: None, x509_thumbprint_sha256: None }; let _okp = (if let ssi::jwk::Params::OKP(o) = jwk.params.clone() { Some(o) } else { None }).unwrap(); serde_json::to_string(&jwk).unwrap() } fn jwkstr_to_publicprivatebytes(jwkstr: &str) -> (Vec<u8>, Vec<u8>) { let jwk: ssi::jwk::JWK = serde_json::from_str(jwkstr).unwrap(); let okp = (if let ssi::jwk::Params::OKP(o) = jwk.params.clone() { Some(o) } else { panic!("okp == None") }).unwrap(); let privkey: Vec<u8> = if let Some(key) = okp.private_key { key.0 } else { panic!("privkey == None") }; (okp.public_key.0, privkey) }
pub fn run(config: Config) -> Result<String, std::io::Error> { match config.cmd { CMD::Init{ path } => init(&path), CMD::Doc{ path } => doc(&path), CMD::Did{ path } => did(&path), CMD::Help => help() } } fn init(path: &str) -> Result<String, std::io::Error> { use std::io::Write; if !std::fs::metadata(root_path(path)).is_ok() { std::fs::create_dir_all(root_path(path))?; } if !std::fs::metadata(jwk_path(path)).is_ok() { use did_key::KeyMaterial; let mut csprng = rand::rngs::OsRng {}; let private_key = ed25519_dalek::SecretKey::generate(&mut csprng).to_bytes(); let did_key = did_key::Ed25519KeyPair::from_seed(&private_key); let jwk = publicprivatebytes_to_jwkstr(did_key.public_key_bytes(), did_key.private_key_bytes()); let mut file = std::fs::File::create(jwk_path(path)).unwrap(); file.write(jwk.as_bytes()).unwrap(); Ok(format!("{} is ready", path)) } else { Ok(format!("{} already exists", path)) } } fn doc(path: &str) -> Result<String, std::io::Error> { use did_key::DIDCore; let jwk = std::fs::read(jwk_path(path))?; let jwkstr = String::from_utf8(jwk).unwrap(); let (public,_) = jwkstr_to_publicprivatebytes(&jwkstr); let keypair = did_key::from_existing_key::<did_key::Ed25519KeyPair>(&public, None); let did_doc = keypair.get_did_document(did_key::CONFIG_LD_PUBLIC); let did_doc = serde_json::to_string_pretty(&did_doc).unwrap(); Ok(format!("{}", did_doc)) } fn did(path: &str) -> Result<String, std::io::Error> { use did_key::DIDCore; let jwk = std::fs::read(jwk_path(path))?; let jwkstr = String::from_utf8(jwk).unwrap(); let (public,_) = jwkstr_to_publicprivatebytes(&jwkstr); let keypair = did_key::from_existing_key::<did_key::Ed25519KeyPair>(&public, None); let did_doc = keypair.get_did_document(did_key::CONFIG_LD_PUBLIC); let did = did_doc.id; Ok(format!("{}", did)) } fn help() -> Result<String, std::io::Error> { Ok(String::from(" Usage: didchat <path> <command> didchat <path> init didchat <path> doc didchat <path> did ")) } #[derive(Debug)] enum CMD { Init{ path: 
String }, Doc{ path: String }, Did{ path: String }, Help } pub struct Config { cmd: CMD, } impl Config { pub fn new(args: &[String]) -> Result<Config, std::io::Error> { let default_cmd = String::from("help"); let path = args.get(1).unwrap_or(&default_cmd).clone(); let cmd = args.get(
} fn root_path(path: &str) -> String { format!("{}/.didchat", path) } fn jwk_path(path: &str) -> String { format!("{}/.didchat/me.jwk", path) } fn publicprivatebytes_to_jwkstr(public: Vec<u8>, private: Vec<u8>) -> String { let jwk = ssi::jwk::JWK { params: ssi::jwk::Params::OKP(ssi::jwk::OctetParams { curve: "Ed25519".to_string(), public_key: ssi::jwk::Base64urlUInt(public), private_key: Some(ssi::jwk::Base64urlUInt(private)), }), public_key_use: None, key_operations: None, algorithm: None, key_id: None, x509_url: None, x509_certificate_chain: None, x509_thumbprint_sha1: None, x509_thumbprint_sha256: None }; let _okp = (if let ssi::jwk::Params::OKP(o) = jwk.params.clone() { Some(o) } else { None }).unwrap(); serde_json::to_string(&jwk).unwrap() } fn jwkstr_to_publicprivatebytes(jwkstr: &str) -> (Vec<u8>, Vec<u8>) { let jwk: ssi::jwk::JWK = serde_json::from_str(jwkstr).unwrap(); let okp = (if let ssi::jwk::Params::OKP(o) = jwk.params.clone() { Some(o) } else { panic!("okp == None") }).unwrap(); let privkey: Vec<u8> = if let Some(key) = okp.private_key { key.0 } else { panic!("privkey == None") }; (okp.public_key.0, privkey) }
2).unwrap_or(&default_cmd).clone(); let cmd = if args.len() < 3 { eprintln!("Command missing!"); default_cmd.clone() } else { cmd.clone() }; let cmd: CMD = match &cmd[..] { "did" => { CMD::Did{ path } }, "doc" => { CMD::Doc{ path } }, "init" => { CMD::Init{ path } }, "help" => CMD::Help, &_ => { eprintln!("{} not a valid command!", cmd); CMD::Help }, }; Ok(Config { cmd }) }
function_block-function_prefixed
[ { "content": "//\n\n// Commands\n\n//\n\nfn init(path: &str) -> Result<String, std::io::Error> {\n\n use std::io::Write;\n\n\n\n // 1. Create empty folders\n\n if !std::fs::metadata(root_path(path)).is_ok() {\n\n std::fs::create_dir_all(root_path(path))?;\n\n }\n\n if !std::fs::metadata(na...
Rust
src/libos/src/fs/file_ops/ioctl/mod.rs
qzheng527/ngo
635ce9ef2427fe1b602b40ec89aa3530b167169d
use super::*; use util::mem_util::from_user; pub use self::builtin::*; pub use self::non_builtin::{NonBuiltinIoctlCmd, StructuredIoctlArgType, StructuredIoctlNum}; #[macro_use] mod macros; mod builtin; mod non_builtin; impl_ioctl_nums_and_cmds! { TCGETS => (0x5401, mut KernelTermios), TCSETS => (0x5402, KernelTermios), TIOCGWINSZ => (0x5413, mut WinSize), TIOCSWINSZ => (0x5414, WinSize), FIONBIO => (0x5421, i32), TIOCNOTTY => (0x5422, ()), FIONREAD => (0x541B, mut i32), FIONCLEX => (0x5450, ()), FIOCLEX => (0x5451, ()), SIOCGIFNAME => (0x8910, mut IfReq), SIOCGIFCONF => (0x8912, mut IfConf), SIOCGIFFLAGS => (0x8913, mut IfReq), SIOCGIFADDR => (0x8915, mut IfReq), SIOCGIFDSTADDR => (0x8917, mut IfReq), SIOCGIFBRDADDR => (0x8919, mut IfReq), SIOCGIFNETMASK => (0x891B, mut IfReq), SIOCGIFMTU => (0x8921, mut IfReq), SIOCGIFHWADDR => (0x8927, mut IfReq), SIOCGIFINDEX => (0x8933, mut IfReq), SIOCGIFPFLAGS => (0x8935, mut IfReq), SIOCGIFTXQLEN => (0x8942, mut IfReq), SIOCGIFMAP => (0x8970, mut IfReq), } impl<'a> IoctlRawCmd<'a> { pub fn to_safe_ioctlcmd(&self) -> Result<Box<dyn IoctlCmd>> { Ok(match self { IoctlRawCmd::TCGETS(_) => Box::new(TcGets::new(())), IoctlRawCmd::TCSETS(termios_ref) => { let termios = **termios_ref; Box::new(TcSets::new(termios)) } IoctlRawCmd::TIOCGWINSZ(_) => Box::new(GetWinSize::new(())), IoctlRawCmd::TIOCSWINSZ(winsize_ref) => { let winsize = **winsize_ref; Box::new(SetWinSize::new(winsize)) } IoctlRawCmd::NonBuiltin(inner) => { let nonbuiltin_cmd = unsafe { NonBuiltinIoctlCmd::new(*inner.cmd_num(), inner.arg_ptr() as _)? 
}; Box::new(nonbuiltin_cmd) } IoctlRawCmd::FIONBIO(non_blocking) => Box::new(SetNonBlocking::new(**non_blocking)), IoctlRawCmd::FIONREAD(_) => Box::new(GetReadBufLen::new(())), IoctlRawCmd::FIONCLEX(_) => Box::new(SetCloseOnExec::new(false)), IoctlRawCmd::FIOCLEX(_) => Box::new(SetCloseOnExec::new(true)), IoctlRawCmd::SIOCGIFCONF(ifconf_mut) => { if !ifconf_mut.ifc_buf.is_null() { if ifconf_mut.ifc_len < 0 { return_errno!(EINVAL, "invalid ifc_len"); } from_user::check_array(ifconf_mut.ifc_buf, ifconf_mut.ifc_len as usize)?; } Box::new(GetIfConf::new(ifconf_mut)) } IoctlRawCmd::SIOCGIFFLAGS(req) | IoctlRawCmd::SIOCGIFNAME(req) | IoctlRawCmd::SIOCGIFADDR(req) | IoctlRawCmd::SIOCGIFDSTADDR(req) | IoctlRawCmd::SIOCGIFBRDADDR(req) | IoctlRawCmd::SIOCGIFNETMASK(req) | IoctlRawCmd::SIOCGIFMTU(req) | IoctlRawCmd::SIOCGIFHWADDR(req) | IoctlRawCmd::SIOCGIFINDEX(req) | IoctlRawCmd::SIOCGIFPFLAGS(req) | IoctlRawCmd::SIOCGIFTXQLEN(req) | IoctlRawCmd::SIOCGIFMAP(req) => { Box::new(GetIfReqWithRawCmd::new(self.cmd_num(), **req)) } _ => { return_errno!(EINVAL, "unsupported cmd"); } }) } pub fn copy_output_from_safe(&mut self, cmd: &dyn IoctlCmd) { match self { IoctlRawCmd::TCGETS(termios_mut) => { let cmd = cmd.downcast_ref::<TcGets>().unwrap(); **termios_mut = *cmd.output().unwrap(); } IoctlRawCmd::TIOCGWINSZ(winsize_mut) => { let cmd = cmd.downcast_ref::<GetWinSize>().unwrap(); **winsize_mut = *cmd.output().unwrap(); } IoctlRawCmd::FIONREAD(len_mut) => { let cmd = cmd.downcast_ref::<GetReadBufLen>().unwrap(); **len_mut = *cmd.output().unwrap(); } IoctlRawCmd::SIOCGIFCONF(ifconf_mut) => { let cmd = cmd.downcast_ref::<GetIfConf>().unwrap(); ifconf_mut.ifc_len = cmd.len() as i32; if !ifconf_mut.ifc_buf.is_null() { let mut raw_buf = unsafe { std::slice::from_raw_parts_mut( ifconf_mut.ifc_buf as _, ifconf_mut.ifc_len as _, ) }; raw_buf.copy_from_slice(cmd.as_slice().unwrap()); } } IoctlRawCmd::SIOCGIFNAME(ifreq_mut) | IoctlRawCmd::SIOCGIFFLAGS(ifreq_mut) | 
IoctlRawCmd::SIOCGIFADDR(ifreq_mut) | IoctlRawCmd::SIOCGIFDSTADDR(ifreq_mut) | IoctlRawCmd::SIOCGIFBRDADDR(ifreq_mut) | IoctlRawCmd::SIOCGIFNETMASK(ifreq_mut) | IoctlRawCmd::SIOCGIFMTU(ifreq_mut) | IoctlRawCmd::SIOCGIFHWADDR(ifreq_mut) | IoctlRawCmd::SIOCGIFINDEX(ifreq_mut) | IoctlRawCmd::SIOCGIFPFLAGS(ifreq_mut) | IoctlRawCmd::SIOCGIFTXQLEN(ifreq_mut) | IoctlRawCmd::SIOCGIFMAP(ifreq_mut) => { let cmd = cmd.downcast_ref::<GetIfReqWithRawCmd>().unwrap(); **ifreq_mut = *cmd.output().unwrap(); } _ => {} } } } pub fn do_ioctl(fd: FileDesc, raw_cmd: &mut IoctlRawCmd) -> Result<i32> { debug!("ioctl: fd: {}, cmd: {:?}", fd, raw_cmd); let current = current!(); let file_ref = current.file(fd)?; let mut cmd = raw_cmd.to_safe_ioctlcmd()?; if cmd.is::<SetCloseOnExec>() { let is_close_on_exec = cmd.downcast_ref::<SetCloseOnExec>().unwrap().input(); let mut file_table = current.files().lock().unwrap(); let entry = file_table.get_entry_mut(fd)?; entry.set_close_on_spawn(*is_close_on_exec); return Ok(0); } file_ref.ioctl(cmd.as_mut())?; raw_cmd.copy_output_from_safe(cmd.as_ref()); Ok(0) } extern "C" { pub fn occlum_ocall_ioctl( ret: *mut i32, fd: c_int, request: c_int, arg: *mut c_void, len: size_t, ) -> sgx_status_t; }
use super::*; use util::mem_util::from_user; pub use self::builtin::*; pub use self::non_builtin::{NonBuiltinIoctlCmd, StructuredIoctlArgType, StructuredIoctlNum}; #[macro_use] mod macros; mod builtin; mod non_builtin; impl_ioctl_nums_and_cmds! { TCGETS => (0x5401, mut KernelTermios), TCSETS => (0x5402, KernelTermios), TIOCGWINSZ => (0x5413, mut WinSize), TIOCSWINSZ => (0x5414, WinSize), FIONBIO => (0x5421, i32), TIOCNOTTY => (0x5422, ()), FIONREAD => (0x541B, mut i32), FIONCLEX => (0x5450, ()), FIOCLEX => (0x5451, ()), SIOCGIFNAME => (0x8910, mut IfReq), SIOCGIFCONF => (0x8912, mut IfConf), SIOCGIFFLAGS => (0x8913, mut IfReq), SIOCGIFADDR => (0x8915, mut IfReq), SIOCGIFDSTADDR => (0x8917, mut IfReq), SIOCGIFBRDADDR => (0x8919, mut IfReq), SIOCGIFNETMASK => (0x891B, mut IfReq), SIOCGIFMTU => (0x8921, mut IfReq), SIOCGIFHWADDR => (0x8927, mut IfReq), SIOCGIFINDEX => (0x8933, mut IfReq), SIOCGIFPFLAGS => (0x8935, mut IfReq), SIOCGIFTXQLEN => (0x8942, mut IfReq), SIOCGIFMAP => (0x8970, mut IfReq), } impl<'a> IoctlRawCmd<'a> { pub fn to_safe_ioctlcmd(&self) -> Result<Box<dyn IoctlCmd>> { Ok(match self { IoctlRawCmd::TCGETS(_) => Box::new(TcGets::new(())), IoctlRawCmd::TCSETS(termios_ref) => { let termios = **termios_ref; Box::new(TcSets::new(termios)) } IoctlRawCmd::TIOCGWINSZ(_) => Box::new(GetWinSize::new(())), IoctlRawCmd::TIOCSWINSZ(winsize_ref) => { let winsize = **winsize_ref; Box::new(SetWinSize::new(winsize)) } IoctlRawCmd::NonBuiltin(inner) => { let nonbuiltin_cmd = unsafe { NonBuiltinIoctlCmd::new(*inner.cmd_num(), inner.arg_ptr() as _)? 
}; Box::new(nonbuiltin_cmd) } IoctlRawCmd::FIONBIO(non_blocking) => Box::new(SetNonBlocking::new(**non_blocking)), IoctlRawCmd::FIONREAD(_) => Box::new(GetReadBufLen::new(())), IoctlRawCmd::FIONCLEX(_) => Box::new(SetCloseOnExec::new(false)), IoctlRawCmd::FIOCLEX(_) => Box::new(SetCloseOnExec::new(true)), IoctlRawCmd::SIOCGIFCONF(ifconf_mut) => { if !ifconf_mut.ifc_buf.is_null() { if ifconf_mut.ifc_len < 0 { return_errno!(EINVAL, "invalid ifc_len"); } from_user::check_array(ifconf_mut.ifc_buf, ifconf_mut.ifc_len as usize)?; } Box::new(GetIfConf::new(ifconf_mut)) } IoctlRawCmd::SIOCGIFFLAGS(req) | IoctlRawCmd::SIOCGIFNAME(req) | IoctlRawCmd::SIOCGIFADDR(req) | IoctlRawCmd::SIOCGIFDSTADDR(req)
| IoctlRawCmd::SIOCGIFPFLAGS(req) | IoctlRawCmd::SIOCGIFTXQLEN(req) | IoctlRawCmd::SIOCGIFMAP(req) => { Box::new(GetIfReqWithRawCmd::new(self.cmd_num(), **req)) } _ => { return_errno!(EINVAL, "unsupported cmd"); } }) } pub fn copy_output_from_safe(&mut self, cmd: &dyn IoctlCmd) { match self { IoctlRawCmd::TCGETS(termios_mut) => { let cmd = cmd.downcast_ref::<TcGets>().unwrap(); **termios_mut = *cmd.output().unwrap(); } IoctlRawCmd::TIOCGWINSZ(winsize_mut) => { let cmd = cmd.downcast_ref::<GetWinSize>().unwrap(); **winsize_mut = *cmd.output().unwrap(); } IoctlRawCmd::FIONREAD(len_mut) => { let cmd = cmd.downcast_ref::<GetReadBufLen>().unwrap(); **len_mut = *cmd.output().unwrap(); } IoctlRawCmd::SIOCGIFCONF(ifconf_mut) => { let cmd = cmd.downcast_ref::<GetIfConf>().unwrap(); ifconf_mut.ifc_len = cmd.len() as i32; if !ifconf_mut.ifc_buf.is_null() { let mut raw_buf = unsafe { std::slice::from_raw_parts_mut( ifconf_mut.ifc_buf as _, ifconf_mut.ifc_len as _, ) }; raw_buf.copy_from_slice(cmd.as_slice().unwrap()); } } IoctlRawCmd::SIOCGIFNAME(ifreq_mut) | IoctlRawCmd::SIOCGIFFLAGS(ifreq_mut) | IoctlRawCmd::SIOCGIFADDR(ifreq_mut) | IoctlRawCmd::SIOCGIFDSTADDR(ifreq_mut) | IoctlRawCmd::SIOCGIFBRDADDR(ifreq_mut) | IoctlRawCmd::SIOCGIFNETMASK(ifreq_mut) | IoctlRawCmd::SIOCGIFMTU(ifreq_mut) | IoctlRawCmd::SIOCGIFHWADDR(ifreq_mut) | IoctlRawCmd::SIOCGIFINDEX(ifreq_mut) | IoctlRawCmd::SIOCGIFPFLAGS(ifreq_mut) | IoctlRawCmd::SIOCGIFTXQLEN(ifreq_mut) | IoctlRawCmd::SIOCGIFMAP(ifreq_mut) => { let cmd = cmd.downcast_ref::<GetIfReqWithRawCmd>().unwrap(); **ifreq_mut = *cmd.output().unwrap(); } _ => {} } } } pub fn do_ioctl(fd: FileDesc, raw_cmd: &mut IoctlRawCmd) -> Result<i32> { debug!("ioctl: fd: {}, cmd: {:?}", fd, raw_cmd); let current = current!(); let file_ref = current.file(fd)?; let mut cmd = raw_cmd.to_safe_ioctlcmd()?; if cmd.is::<SetCloseOnExec>() { let is_close_on_exec = cmd.downcast_ref::<SetCloseOnExec>().unwrap().input(); let mut file_table = 
current.files().lock().unwrap(); let entry = file_table.get_entry_mut(fd)?; entry.set_close_on_spawn(*is_close_on_exec); return Ok(0); } file_ref.ioctl(cmd.as_mut())?; raw_cmd.copy_output_from_safe(cmd.as_ref()); Ok(0) } extern "C" { pub fn occlum_ocall_ioctl( ret: *mut i32, fd: c_int, request: c_int, arg: *mut c_void, len: size_t, ) -> sgx_status_t; }
| IoctlRawCmd::SIOCGIFBRDADDR(req) | IoctlRawCmd::SIOCGIFNETMASK(req) | IoctlRawCmd::SIOCGIFMTU(req) | IoctlRawCmd::SIOCGIFHWADDR(req) | IoctlRawCmd::SIOCGIFINDEX(req)
function_block-random_span
[ { "content": "// TODO: rename this to do_poll after the old version is removed\n\npub fn do_poll_new(poll_fds: &[PollFd], mut timeout: Option<&mut Duration>) -> Result<usize> {\n\n debug!(\"poll: poll_fds: {:?}, timeout: {:?}\", poll_fds, timeout);\n\n\n\n // Always clear the revents fields first\n\n f...
Rust
src/rugl.rs
micahscopes/rugl
bb7fefb08c7d648f41630fe515c0bb128d95de69
/*! An ergonomic macro for creating themetic stateless WebGL applications! # Syntax ```ignore rugl_main! { vertex: " precision mediump float; attribute vec2 position; void main() { gl_Position = vec4(position, 0, 1); } "; fragment: " precision mediump float; uniform vec3 color; void main() { gl_FragColor = color; } "; attributes: { position: [ [-1, 0], [0, -1], [1, 1] ] } uniforms: { color: [1, 0, 0, 1] }, count: 3 } */ use std::borrow::Cow; use crate::webgl::{Attribute, Uniform, WebGlContext}; #[derive(Debug)] pub struct Rugl<'a> { pub inner: RuglInner<'a>, pub context: WebGlContext, } impl Rugl<'_> { pub fn step(&mut self) -> Result<(), String> { self.context.clear_with_color([1.0, 1.0, 1.0, 1.0]); for attribute in self.inner.get_attributes() { self.context.enable_attribute(attribute.get_name())?; } self.context.draw_triangles(*self.inner.get_count()); Ok(()) } } #[derive(Debug)] pub struct RuglInner<'a> { pub vertex: Cow<'a, str>, pub fragment: Cow<'a, str>, pub attributes: Vec<Attribute>, pub uniforms: Vec<Uniform>, pub count: i32, } impl<'a> RuglInner<'a> { pub fn get_vertex_shader(&self) -> &str { &self.vertex } pub fn get_fragment_shader(&self) -> &str { &self.fragment } pub fn get_attributes(&mut self) -> &Vec<Attribute> { &self.attributes } pub fn get_mut_attributes(&mut self) -> &mut Vec<Attribute> { &mut self.attributes } pub fn get_uniforms(&self) -> &Vec<Uniform> { &self.uniforms } pub fn get_mut_uniforms(&mut self) -> &mut Vec<Uniform> { &mut self.uniforms } pub fn get_count(&self) -> &i32 { &self.count } } #[macro_export] macro_rules! 
rugl_inner { ( $( $i:ident: { $($tokens:tt)* } ),* ) => {{ #[inline] fn build_inner<'a>() -> Result<(RuglInner<'a>, WebGlContext), JsValue> { use std::borrow::Cow; let mut context = WebGlContext::new("canvas")?; let mut inner = RuglInner { $($i: rugl_type!($i: $($tokens)*),)* }; let vertex = context.compile_shader( ShaderType::Vertex( inner.get_vertex_shader(), std::marker::PhantomData ) )?; let fragment = context.compile_shader( ShaderType::Fragment( inner.get_fragment_shader(), std::marker::PhantomData ) )?; context.link_and_add_program(&[vertex, fragment])?; context.use_program()?; let count = inner.get_count().clone(); for attribute in inner.get_mut_attributes() { let mut attr_data = Vec::new(); for layer in attribute.get_qualifiers() { attr_data.extend_from_slice(&layer.to_vec()); } context.create_buffer_with_data(attribute.get_name(), &attr_data[..], count)?; context.bind_buffer_with_name(attribute.get_name())?; context.enable_attribute(attribute.get_name())?; } for uniform in inner.get_mut_uniforms() { context.create_uniform(uniform.get_name(), uniform.inner())?; context.bind_uniform(uniform.get_name())?; } Ok((inner, context)) } match build_inner() { Ok((inner, context)) => Ok(Rugl { inner, context }), Err(err) => { log!("There was an error! {}", err.as_string().unwrap()); Err("There was a problem!!!".to_owned()) } } }} } #[doc(hidden)] #[macro_export] macro_rules! rugl_type { (vertex: $($tokens:tt)+) => { Cow::Borrowed($($tokens)*) }; (fragment: $($tokens:tt)+) => { Cow::Borrowed($($tokens)*) }; (attributes: $($tokens:tt)+) => { parse_ident!(@attribute $($tokens)*) }; (uniforms: $($tokens:tt)+) => { parse_ident!(@uniform $($tokens)*) }; (count: $expr:expr) => { $expr } } #[doc(hidden)] #[macro_export] macro_rules! 
parse_ident { (@attribute $($id:ident: [$($tokens:tt)*]),+ $(,)* ) => { vec![$( Attribute::from((stringify!($id).to_owned(), determine_bracket_replace!($($tokens)*)) )),*] }; (@uniform $($id:ident: [$($tokens:tt)*]),+ $(,)* ) => { vec![$( Uniform::from((stringify!($id).to_owned(), UniformInner::from(determine_bracket_replace!($($tokens)*))) )),*] }; } #[doc(hidden)] #[macro_export] macro_rules! determine_bracket_replace { ($([$($tokens:tt)*]),*) => { [ $( ($($tokens)*) ),* ] }; ($($tokens:tt)*) => { [ $($tokens)* ] } }
/*! An ergonomic macro for creating themetic stateless WebGL applications! # Syntax ```ignore rugl_main! { vertex: " precision mediump float; attribute vec2 position; void main() { gl_Position = vec4(position, 0, 1); } "; fragment: " precision mediump float; uniform vec3 color; void main() { gl_FragColor = color; } "; attributes: { position: [ [-1, 0], [0, -1], [1, 1] ] } uniforms: { color: [1, 0, 0, 1] }, count: 3 } */ use std::borrow::Cow; use crate::webgl::{Attribute, Uniform, WebGlContext}; #[derive(Debug)] pub struct Rugl<'a> { pub inner: RuglInner<'a>, pub context: WebGlContext, } impl Rugl<'_> { pub fn step(&mut self) -> Result<(), String> { self.context.clear_with_color([1.0, 1.0, 1.0, 1.0]); for attribute in self.inner.get_attributes() { self.context.enable_attribute(attribute.get_name())?; } self.context.draw_triangles(*self.inn
rm(uniform.get_name())?; } Ok((inner, context)) } match build_inner() { Ok((inner, context)) => Ok(Rugl { inner, context }), Err(err) => { log!("There was an error! {}", err.as_string().unwrap()); Err("There was a problem!!!".to_owned()) } } }} } #[doc(hidden)] #[macro_export] macro_rules! rugl_type { (vertex: $($tokens:tt)+) => { Cow::Borrowed($($tokens)*) }; (fragment: $($tokens:tt)+) => { Cow::Borrowed($($tokens)*) }; (attributes: $($tokens:tt)+) => { parse_ident!(@attribute $($tokens)*) }; (uniforms: $($tokens:tt)+) => { parse_ident!(@uniform $($tokens)*) }; (count: $expr:expr) => { $expr } } #[doc(hidden)] #[macro_export] macro_rules! parse_ident { (@attribute $($id:ident: [$($tokens:tt)*]),+ $(,)* ) => { vec![$( Attribute::from((stringify!($id).to_owned(), determine_bracket_replace!($($tokens)*)) )),*] }; (@uniform $($id:ident: [$($tokens:tt)*]),+ $(,)* ) => { vec![$( Uniform::from((stringify!($id).to_owned(), UniformInner::from(determine_bracket_replace!($($tokens)*))) )),*] }; } #[doc(hidden)] #[macro_export] macro_rules! determine_bracket_replace { ($([$($tokens:tt)*]),*) => { [ $( ($($tokens)*) ),* ] }; ($($tokens:tt)*) => { [ $($tokens)* ] } }
er.get_count()); Ok(()) } } #[derive(Debug)] pub struct RuglInner<'a> { pub vertex: Cow<'a, str>, pub fragment: Cow<'a, str>, pub attributes: Vec<Attribute>, pub uniforms: Vec<Uniform>, pub count: i32, } impl<'a> RuglInner<'a> { pub fn get_vertex_shader(&self) -> &str { &self.vertex } pub fn get_fragment_shader(&self) -> &str { &self.fragment } pub fn get_attributes(&mut self) -> &Vec<Attribute> { &self.attributes } pub fn get_mut_attributes(&mut self) -> &mut Vec<Attribute> { &mut self.attributes } pub fn get_uniforms(&self) -> &Vec<Uniform> { &self.uniforms } pub fn get_mut_uniforms(&mut self) -> &mut Vec<Uniform> { &mut self.uniforms } pub fn get_count(&self) -> &i32 { &self.count } } #[macro_export] macro_rules! rugl_inner { ( $( $i:ident: { $($tokens:tt)* } ),* ) => {{ #[inline] fn build_inner<'a>() -> Result<(RuglInner<'a>, WebGlContext), JsValue> { use std::borrow::Cow; let mut context = WebGlContext::new("canvas")?; let mut inner = RuglInner { $($i: rugl_type!($i: $($tokens)*),)* }; let vertex = context.compile_shader( ShaderType::Vertex( inner.get_vertex_shader(), std::marker::PhantomData ) )?; let fragment = context.compile_shader( ShaderType::Fragment( inner.get_fragment_shader(), std::marker::PhantomData ) )?; context.link_and_add_program(&[vertex, fragment])?; context.use_program()?; let count = inner.get_count().clone(); for attribute in inner.get_mut_attributes() { let mut attr_data = Vec::new(); for layer in attribute.get_qualifiers() { attr_data.extend_from_slice(&layer.to_vec()); } context.create_buffer_with_data(attribute.get_name(), &attr_data[..], count)?; context.bind_buffer_with_name(attribute.get_name())?; context.enable_attribute(attribute.get_name())?; } for uniform in inner.get_mut_uniforms() { context.create_uniform(uniform.get_name(), uniform.inner())?; context.bind_unifo
random
[ { "content": "fn main() {\n\n let dest_path = Path::new(\"pkg\");\n\n\n\n if !dest_path.exists() {\n\n fs::create_dir(dest_path).expect(\"Unable to create directory\");\n\n }\n\n\n\n let test = Command::new(\"wasm-bindgen\")\n\n .args(&[\n\n \"target/wasm32-unknown-unknown/d...
Rust
yamux/examples/throughput_test.rs
kingwel-xie/tentacle
9efe228ee6de3577a4ac2967f1055e00ebb32a11
use bytesize::ByteSize; use futures::prelude::*; use log::{info, warn}; use tokio::{ io::{AsyncReadExt, AsyncWriteExt}, net::{TcpListener, TcpStream}, time::delay_for, }; use tokio_yamux::stream::StreamHandle; use tokio_yamux::{config::Config, session::Session}; fn main() { env_logger::init(); if std::env::args().nth(1) == Some("server".to_string()) { info!("Starting server ......"); run_server(); } else { info!("Starting client ......"); run_client(); } } const STR: &str = "fakeu1234567890cmxcmmmmmmmmmsssmssmsmsmxcmcmcnxzlllslsllcccccsannmxmxmxmxmxmxmxmxmmsssjjkzoso."; const LEN: usize = STR.len(); static REQC: AtomicUsize = AtomicUsize::new(0); static RESPC: AtomicUsize = AtomicUsize::new(0); use std::{ str, sync::atomic::{AtomicUsize, Ordering}, time::Duration, }; fn reqc_incr() -> usize { REQC.fetch_add(1, Ordering::Relaxed) } fn reqc() -> usize { REQC.swap(0, Ordering::SeqCst) } fn respc_incr() -> usize { RESPC.fetch_add(1, Ordering::Relaxed) } fn respc() -> usize { RESPC.swap(0, Ordering::SeqCst) } async fn show_metric() { let secs = 10; loop { delay_for(Duration::from_millis(1000 * secs)).await; let reqc = reqc(); let respc = respc(); info!( "{} secs req {}, resp {}; {} req/s, {}/s; {} resp/s {}/s", secs, reqc, respc, reqc as f64 / secs as f64, ByteSize::b(((reqc * LEN) as f64 / secs as f64) as u64).to_string_as(true), respc as f64 / secs as f64, ByteSize::b(((respc * LEN) as f64 / secs as f64) as u64).to_string_as(true), ); } } fn run_server() { let mut rt = tokio::runtime::Runtime::new().unwrap(); rt.spawn(show_metric()); rt.block_on(async move { let mut listener = TcpListener::bind("127.0.0.1:12345").await.unwrap(); while let Ok((socket, _)) = listener.accept().await { info!("accepted a socket: {:?}", socket.peer_addr()); let mut session = Session::new_server(socket, Config::default()); tokio::spawn(async move { while let Some(Ok(mut stream)) = session.next().await { info!("Server accept a stream from client: id={}", stream.id()); tokio::spawn(async move 
{ let mut data = [0u8; LEN]; stream.read_exact(&mut data).await.unwrap(); assert_eq!(data.as_ref(), STR.as_bytes()); loop { stream.write_all(STR.as_bytes()).await.unwrap(); respc_incr(); stream.read_exact(&mut data).await.unwrap(); reqc_incr(); assert_eq!(data.as_ref(), STR.as_bytes()); } }); } }); } }); } fn run_client() { let num = std::env::args() .nth(1) .and_then(|s| s.parse::<usize>().ok()) .unwrap_or(2); let mut rt = tokio::runtime::Runtime::new().unwrap(); rt.block_on(async move { let socket = TcpStream::connect("127.0.0.1:12345").await.unwrap(); let sa = socket.peer_addr().unwrap(); info!("[client] connected to server: {:?}", sa); let mut session = Session::new_client(socket, Config::default()); let streams = (0..num) .into_iter() .map(|_| session.open_stream().unwrap()) .collect::<Vec<_>>(); tokio::spawn(async move { loop { match session.next().await { Some(res) => warn!("res: {:?}", res), None => break, } } warn!("{:?} broken", sa); }); let f = |mut s: StreamHandle| { tokio::spawn(async move { s.write_all(STR.as_bytes()).await.unwrap(); let mut data = [0u8; LEN]; loop { s.read_exact(&mut data).await.unwrap(); assert_eq!(data.as_ref(), STR.as_bytes()); respc_incr(); s.write_all(STR.as_bytes()).await.unwrap(); reqc_incr(); } }) }; for stream in streams { f(stream); } show_metric().await; }); }
use bytesize::ByteSize; use futures::prelude::*; use log::{info, warn}; use tokio::{ io::{AsyncReadExt, AsyncWriteExt}, net::{TcpListener, TcpStream}, time::delay_for, }; use tokio_yamux::stream::StreamHandle; use tokio_yamux::{config::Config, session::Session}; fn main() { env_logger::init(); if std::env::args().nth(1) == Some("server".to_string()) { info!("Starting server ......"); run_server(); } else { info!("Starting
reqc_incr(); assert_eq!(data.as_ref(), STR.as_bytes()); } }); } }); } }); } fn run_client() { let num = std::env::args() .nth(1) .and_then(|s| s.parse::<usize>().ok()) .unwrap_or(2); let mut rt = tokio::runtime::Runtime::new().unwrap(); rt.block_on(async move { let socket = TcpStream::connect("127.0.0.1:12345").await.unwrap(); let sa = socket.peer_addr().unwrap(); info!("[client] connected to server: {:?}", sa); let mut session = Session::new_client(socket, Config::default()); let streams = (0..num) .into_iter() .map(|_| session.open_stream().unwrap()) .collect::<Vec<_>>(); tokio::spawn(async move { loop { match session.next().await { Some(res) => warn!("res: {:?}", res), None => break, } } warn!("{:?} broken", sa); }); let f = |mut s: StreamHandle| { tokio::spawn(async move { s.write_all(STR.as_bytes()).await.unwrap(); let mut data = [0u8; LEN]; loop { s.read_exact(&mut data).await.unwrap(); assert_eq!(data.as_ref(), STR.as_bytes()); respc_incr(); s.write_all(STR.as_bytes()).await.unwrap(); reqc_incr(); } }) }; for stream in streams { f(stream); } show_metric().await; }); }
client ......"); run_client(); } } const STR: &str = "fakeu1234567890cmxcmmmmmmmmmsssmssmsmsmxcmcmcnxzlllslsllcccccsannmxmxmxmxmxmxmxmxmmsssjjkzoso."; const LEN: usize = STR.len(); static REQC: AtomicUsize = AtomicUsize::new(0); static RESPC: AtomicUsize = AtomicUsize::new(0); use std::{ str, sync::atomic::{AtomicUsize, Ordering}, time::Duration, }; fn reqc_incr() -> usize { REQC.fetch_add(1, Ordering::Relaxed) } fn reqc() -> usize { REQC.swap(0, Ordering::SeqCst) } fn respc_incr() -> usize { RESPC.fetch_add(1, Ordering::Relaxed) } fn respc() -> usize { RESPC.swap(0, Ordering::SeqCst) } async fn show_metric() { let secs = 10; loop { delay_for(Duration::from_millis(1000 * secs)).await; let reqc = reqc(); let respc = respc(); info!( "{} secs req {}, resp {}; {} req/s, {}/s; {} resp/s {}/s", secs, reqc, respc, reqc as f64 / secs as f64, ByteSize::b(((reqc * LEN) as f64 / secs as f64) as u64).to_string_as(true), respc as f64 / secs as f64, ByteSize::b(((respc * LEN) as f64 / secs as f64) as u64).to_string_as(true), ); } } fn run_server() { let mut rt = tokio::runtime::Runtime::new().unwrap(); rt.spawn(show_metric()); rt.block_on(async move { let mut listener = TcpListener::bind("127.0.0.1:12345").await.unwrap(); while let Ok((socket, _)) = listener.accept().await { info!("accepted a socket: {:?}", socket.peer_addr()); let mut session = Session::new_server(socket, Config::default()); tokio::spawn(async move { while let Some(Ok(mut stream)) = session.next().await { info!("Server accept a stream from client: id={}", stream.id()); tokio::spawn(async move { let mut data = [0u8; LEN]; stream.read_exact(&mut data).await.unwrap(); assert_eq!(data.as_ref(), STR.as_bytes()); loop { stream.write_all(STR.as_bytes()).await.unwrap(); respc_incr(); stream.read_exact(&mut data).await.unwrap();
random
[ { "content": "fn main() {\n\n init();\n\n\n\n let cycles = std::env::args()\n\n .nth(1)\n\n .and_then(|number| number.parse().ok())\n\n .unwrap_or(100);\n\n\n\n let check_point = std::env::args()\n\n .nth(2)\n\n .and_then(|number| number.parse().ok())\n\n .unwr...
Rust
app/src/core.rs
IgnusG/garlic
dd8100f035c59952ae2db43cb26c9111907f3711
use mio::tcp::{TcpStream, TcpListener}; use mio::{Poll, Token, Ready, PollOpt, Events}; use std::net; use std::net::SocketAddr; use std::sync::mpsc; use std::thread; use std::thread::{JoinHandle}; use std::io::Write; use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT}; use errors::*; use brunch::{send_message, create_connection, create_udp_connection, send_udp_message, receive_udp_message, receive_message}; use messages::Message; use messages::Message::*; use messages::onion::*; use messages::onion::Onion::*; use messages::auth::*; use messages::auth::Auth::*; use messages::rps::*; use messages::rps::Rps::*; use messages::p2p; use messages::p2p::P2PMessage; use config; static NEXT_TUNNEL_ID: AtomicUsize = ATOMIC_USIZE_INIT; static NEXT_REQUEST_ID: AtomicUsize = ATOMIC_USIZE_INIT; struct Communication { receiver: mpsc::Receiver<Message>, sender: mpsc::Sender<StreamType>, } impl Communication { fn send(&self, message: Message) { self.sender.send(StreamType::API(message)); } fn receive(&self) -> Result<Message> { Ok(self.receiver.recv().chain_err(|| "sender diconnected")?) } } struct AuthSession { session_id: u16, rps_peer: RpsPeer } pub enum StreamType { API(Message), P2P(Message) } fn request_peer(comm: &Communication) -> Result<RpsPeer> { comm.send(Rps(Query(RpsQuery {}))); if let Rps(Peer(rps_peer)) = comm.receive()? { Ok(rps_peer) } else { bail!("protocol breach - expected RpsPeer") } } fn encrypt_for_all_peers(peers: &Vec<AuthSession>, data: Vec<u8>, comm: &Communication) -> Result<Vec<u8>> { let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Auth(CipherEncrypt(AuthCipherCrypt { session_id: peers.first().unwrap().session_id, request_id: request_id, cleartext: true, payload: data }))); let data = if let Auth(CipherEncryptResp(message)) = comm.receive()? { message.payload } else { bail!("protocol breach - expected CipherEncryptResp") }; if peers.len() < 2 { return Ok(data) }; for peer in &peers[1..] 
{ let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Auth(CipherEncrypt(AuthCipherCrypt { session_id: peers[0].session_id, request_id: request_id, cleartext: false, payload: data.clone() }))); let data = if let Auth(CipherEncryptResp(message)) = comm.receive()? { message.payload } else { bail!("protocol breach - expected CipherEncryptResp") }; }; Ok(data) } struct Connection { udp: Option<net::UdpSocket>, tcp: Option<net::TcpStream> } impl Connection { fn send(&mut self, message: Message) -> Result<()> { if let Some(ref mut conn) = self.tcp { send_message(conn, message); } else if let Some(ref conn) = self.udp { send_udp_message(conn, message); } else { bail!("at least one connection needs to be specified"); } Ok(()) } fn receive(&mut self) -> Result<Message> { if let Some(ref mut conn) = self.tcp { Ok(receive_message(conn)?) } else if let Some(ref conn) = self.udp { Ok(receive_udp_message(conn)?) } else { bail!("at least one connection needs to be specified"); } } } fn connect_to_peer(peer: RpsPeer, peers: &Vec<AuthSession>, conf: &config::Config, comm: &Communication) -> Result<AuthSession> { let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Auth(SessionStart(AuthSessionStart { request_id: request_id, hostkey: peer.hostkey.clone() }))); let conn = if peers.len() == 0 { let socket = SocketAddr::new(peer.ip_addr, peer.port); Connection { tcp: Some(create_connection(socket)?), udp: None } } else { let peer = &peers.first().unwrap().rps_peer; let socket = SocketAddr::new(peer.ip_addr, peer.port); Connection { udp: Some(create_udp_connection(socket)?), tcp: None } }; if let Auth(SessionHS1(message)) = comm.receive()? 
{ } else { bail!("protocol breach - expected AuthSessionHS1") }; let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; Ok(AuthSession { session_id: 0, rps_peer: peer }) } fn send_over_data(data: OnionTunnelPayload) -> Result<()> { unimplemented!(); } fn start_dialogue(message: &OnionTunnelBuild, conf: &config::Config, comm: &Communication) { trace_labeled_error!( "dialogue encountered a problem", { let mut peers = vec![]; for _ in 0..conf.min_hop_count { let peer = request_peer(comm)?; let auth_session = connect_to_peer(peer, &peers, conf, comm)?; peers.push(auth_session); } let tunnel_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Onion(TunnelReady(OnionTunnelPayload { tunnel_id: tunnel_id, payload: message.hostkey.clone() }))); loop { match comm.receive()? { Onion(TunnelData(message)) => { send_over_data(message); }, Onion(TunnelDestroy(message)) => { break; }, _ => bail!("protocol breach - expected OnionTunnelData or OnionTunnelDestroy") } } }); } fn answer_dialogue(message: &P2PMessage, conf: &config::Config, comm: &Communication) { unimplemented!(); } fn spinup_state_machine(message: Message, conf: config::Config, ty: mpsc::Sender<StreamType>) -> (mpsc::Sender<Message>, JoinHandle<()>) { let (tx, rx) = mpsc::channel(); let handle = thread::spawn(move || { let message = &message; let comm = &Communication { receiver: rx, sender: ty, }; trace_labeled_error!("failed to create state machine", { match *message { Onion(TunnelBuild(ref message)) => start_dialogue(message, &conf, &comm), P2P(ref message) => { match message.message_type { p2p::P2P::Knock => answer_dialogue(message, &conf, &comm), _ => note!("message {} not part of protocol - discarding") } } _ => note!("message {} not part of protocol - discarding") }; }); }); (tx, handle) } pub fn start(rx: &mpsc::Receiver<StreamType>, ty: mpsc::Sender<StreamType>, conf: config::Config) -> Result<()> { loop { status!("Waiting for stream"); }; Ok(()) }
use mio::tcp::{TcpStream, TcpListener}; use mio::{Poll, Token, Ready, PollOpt, Events}; use std::net; use std::net::SocketAddr; use std::sync::mpsc; use std::thread; use std::thread::{JoinHandle}; use std::io::Write; use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT}; use errors::*; use brunch::{send_message, create_connection, create_udp_connection, send_udp_message, receive_udp_message, receive_message}; use messages::Message; use messages::Message::*; use messages::onion::*; use messages::onion::Onion::*; use messages::auth::*; use messages::auth::Auth::*; use messages::rps::*; use messages::rps::Rps::*; use messages::p2p; use messages::p2p::P2PMessage; use config; static NEXT_TUNNEL_ID: AtomicUsize = ATOMIC_USIZE_INIT; static NEXT_REQUEST_ID: AtomicUsize = ATOMIC_USIZE_INIT; struct Communication { receiver: mpsc::Receiver<Message>, sender: mpsc::Sender<StreamType>, } impl Communication { fn send(&self, message: Message) { self.sender.send(StreamType::API(message)); } fn receive(&self) -> Result<Message> { Ok(self.receiver.recv().chain_err(|| "sender diconnected")?) } } struct AuthSession { session_id: u16, rps_peer: RpsPeer } pub enum StreamType { API(Message), P2P(Message) } fn request_peer(comm: &Communication) -> Result<RpsPeer> { comm.send(Rps(Query(RpsQuery {}))); if let Rps(Peer(rps_peer)) = comm.receive()? { Ok(rps_peer) } else { bail!("protocol breach - expected RpsPeer") } } fn encrypt_for_all_peers(peers: &Vec<AuthSession>, data: Vec<u8>, comm: &Communication) -> Result<Vec<u8>> { let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Auth(CipherEncrypt(AuthCipherCrypt { session_id: peers.first().unwrap().session_id, request_id: request_id, cleartext: true, payload: data }))); let data = if let Auth(CipherEncryptResp(message)) = comm.receive()? { message.payload } else { bail!("protocol breach - expected CipherEncryptResp") }; if p
_over_data(data: OnionTunnelPayload) -> Result<()> { unimplemented!(); } fn start_dialogue(message: &OnionTunnelBuild, conf: &config::Config, comm: &Communication) { trace_labeled_error!( "dialogue encountered a problem", { let mut peers = vec![]; for _ in 0..conf.min_hop_count { let peer = request_peer(comm)?; let auth_session = connect_to_peer(peer, &peers, conf, comm)?; peers.push(auth_session); } let tunnel_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Onion(TunnelReady(OnionTunnelPayload { tunnel_id: tunnel_id, payload: message.hostkey.clone() }))); loop { match comm.receive()? { Onion(TunnelData(message)) => { send_over_data(message); }, Onion(TunnelDestroy(message)) => { break; }, _ => bail!("protocol breach - expected OnionTunnelData or OnionTunnelDestroy") } } }); } fn answer_dialogue(message: &P2PMessage, conf: &config::Config, comm: &Communication) { unimplemented!(); } fn spinup_state_machine(message: Message, conf: config::Config, ty: mpsc::Sender<StreamType>) -> (mpsc::Sender<Message>, JoinHandle<()>) { let (tx, rx) = mpsc::channel(); let handle = thread::spawn(move || { let message = &message; let comm = &Communication { receiver: rx, sender: ty, }; trace_labeled_error!("failed to create state machine", { match *message { Onion(TunnelBuild(ref message)) => start_dialogue(message, &conf, &comm), P2P(ref message) => { match message.message_type { p2p::P2P::Knock => answer_dialogue(message, &conf, &comm), _ => note!("message {} not part of protocol - discarding") } } _ => note!("message {} not part of protocol - discarding") }; }); }); (tx, handle) } pub fn start(rx: &mpsc::Receiver<StreamType>, ty: mpsc::Sender<StreamType>, conf: config::Config) -> Result<()> { loop { status!("Waiting for stream"); }; Ok(()) }
eers.len() < 2 { return Ok(data) }; for peer in &peers[1..] { let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Auth(CipherEncrypt(AuthCipherCrypt { session_id: peers[0].session_id, request_id: request_id, cleartext: false, payload: data.clone() }))); let data = if let Auth(CipherEncryptResp(message)) = comm.receive()? { message.payload } else { bail!("protocol breach - expected CipherEncryptResp") }; }; Ok(data) } struct Connection { udp: Option<net::UdpSocket>, tcp: Option<net::TcpStream> } impl Connection { fn send(&mut self, message: Message) -> Result<()> { if let Some(ref mut conn) = self.tcp { send_message(conn, message); } else if let Some(ref conn) = self.udp { send_udp_message(conn, message); } else { bail!("at least one connection needs to be specified"); } Ok(()) } fn receive(&mut self) -> Result<Message> { if let Some(ref mut conn) = self.tcp { Ok(receive_message(conn)?) } else if let Some(ref conn) = self.udp { Ok(receive_udp_message(conn)?) } else { bail!("at least one connection needs to be specified"); } } } fn connect_to_peer(peer: RpsPeer, peers: &Vec<AuthSession>, conf: &config::Config, comm: &Communication) -> Result<AuthSession> { let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Auth(SessionStart(AuthSessionStart { request_id: request_id, hostkey: peer.hostkey.clone() }))); let conn = if peers.len() == 0 { let socket = SocketAddr::new(peer.ip_addr, peer.port); Connection { tcp: Some(create_connection(socket)?), udp: None } } else { let peer = &peers.first().unwrap().rps_peer; let socket = SocketAddr::new(peer.ip_addr, peer.port); Connection { udp: Some(create_udp_connection(socket)?), tcp: None } }; if let Auth(SessionHS1(message)) = comm.receive()? { } else { bail!("protocol breach - expected AuthSessionHS1") }; let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; Ok(AuthSession { session_id: 0, rps_peer: peer }) } fn send
random
[ { "content": "pub fn start (conf: config::Config) -> Result<()> {\n\n status!(\"Brunch is served!\");\n\n\n\n let (tx, rx) = mpsc::channel();\n\n let (ty, ry) = mpsc::channel();\n\n\n\n let api_thread_handle = {\n\n let conf = conf.clone();\n\n let tx = tx.clone();\n\n\n\n creat...
Rust
weechat/src/hooks/commands.rs
troethe/rust-weechat
8533abf0e000659f567e404d3c8aa0d773eff685
use libc::{c_char, c_int}; use std::{borrow::Cow, ffi::CStr, os::raw::c_void, ptr}; use weechat_sys::{t_gui_buffer, t_weechat_plugin, WEECHAT_RC_OK}; use crate::{buffer::Buffer, Args, LossyCString, ReturnCode, Weechat}; use super::Hook; pub struct Command { _hook: Hook, _hook_data: Box<CommandHookData>, } pub trait CommandCallback { fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, arguments: Args); } impl<T: FnMut(&Weechat, &Buffer, Args) + 'static> CommandCallback for T { fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, arguments: Args) { self(weechat, buffer, arguments) } } #[derive(Default)] pub struct CommandSettings { name: String, description: String, arguments: Vec<String>, argument_descriptoin: String, completion: Vec<String>, } impl CommandSettings { pub fn new<P: Into<String>>(name: P) -> Self { CommandSettings { name: name.into(), ..Default::default() } } pub fn description<D: Into<String>>(mut self, descritpion: D) -> Self { self.description = descritpion.into(); self } pub fn add_argument<T: Into<String>>(mut self, argument: T) -> Self { self.arguments.push(argument.into()); self } pub fn arguments_description<T: Into<String>>(mut self, descritpion: T) -> Self { self.argument_descriptoin = descritpion.into(); self } pub fn add_completion<T: Into<String>>(mut self, completion: T) -> Self { self.completion.push(completion.into()); self } } struct CommandHookData { callback: Box<dyn CommandCallback>, weechat_ptr: *mut t_weechat_plugin, } pub struct CommandRun { _hook: Hook, _hook_data: Box<CommandRunHookData>, } pub trait CommandRunCallback { fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, command: Cow<str>) -> ReturnCode; } impl<T: FnMut(&Weechat, &Buffer, Cow<str>) -> ReturnCode + 'static> CommandRunCallback for T { fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, command: Cow<str>) -> ReturnCode { self(weechat, buffer, command) } } struct CommandRunHookData { callback: Box<dyn CommandRunCallback>, weechat_ptr: 
*mut t_weechat_plugin, } impl CommandRun { pub fn new(command: &str, callback: impl CommandRunCallback + 'static) -> Result<Self, ()> { unsafe extern "C" fn c_hook_cb( pointer: *const c_void, _data: *mut c_void, buffer: *mut t_gui_buffer, command: *const std::os::raw::c_char, ) -> c_int { let hook_data: &mut CommandRunHookData = { &mut *(pointer as *mut CommandRunHookData) }; let cb = &mut hook_data.callback; let weechat = Weechat::from_ptr(hook_data.weechat_ptr); let buffer = weechat.buffer_from_ptr(buffer); let command = CStr::from_ptr(command).to_string_lossy(); cb.callback(&weechat, &buffer, command) as isize as i32 } Weechat::check_thread(); let weechat = unsafe { Weechat::weechat() }; let data = Box::new(CommandRunHookData { callback: Box::new(callback), weechat_ptr: weechat.ptr, }); let data_ref = Box::leak(data); let hook_command_run = weechat.get().hook_command_run.unwrap(); let command = LossyCString::new(command); let hook_ptr = unsafe { hook_command_run( weechat.ptr, command.as_ptr(), Some(c_hook_cb), data_ref as *const _ as *const c_void, ptr::null_mut(), ) }; let hook_data = unsafe { Box::from_raw(data_ref) }; if hook_ptr.is_null() { Err(()) } else { let hook = Hook { ptr: hook_ptr, weechat_ptr: weechat.ptr, }; Ok(CommandRun { _hook: hook, _hook_data: hook_data, }) } } } impl Command { pub fn new( command_settings: CommandSettings, callback: impl CommandCallback + 'static, ) -> Result<Command, ()> { unsafe extern "C" fn c_hook_cb( pointer: *const c_void, _data: *mut c_void, buffer: *mut t_gui_buffer, argc: i32, argv: *mut *mut c_char, _argv_eol: *mut *mut c_char, ) -> c_int { let hook_data: &mut CommandHookData = { &mut *(pointer as *mut CommandHookData) }; let weechat = Weechat::from_ptr(hook_data.weechat_ptr); let buffer = weechat.buffer_from_ptr(buffer); let cb = &mut hook_data.callback; let args = Args::new(argc, argv); cb.callback(&weechat, &buffer, args); WEECHAT_RC_OK } Weechat::check_thread(); let weechat = unsafe { Weechat::weechat() }; let 
name = LossyCString::new(command_settings.name); let description = LossyCString::new(command_settings.description); let args = LossyCString::new(command_settings.arguments.join("||")); let args_description = LossyCString::new(command_settings.argument_descriptoin); let completion = LossyCString::new(command_settings.completion.join("||")); let data = Box::new(CommandHookData { callback: Box::new(callback), weechat_ptr: weechat.ptr, }); let data_ref = Box::leak(data); let hook_command = weechat.get().hook_command.unwrap(); let hook_ptr = unsafe { hook_command( weechat.ptr, name.as_ptr(), description.as_ptr(), args.as_ptr(), args_description.as_ptr(), completion.as_ptr(), Some(c_hook_cb), data_ref as *const _ as *const c_void, ptr::null_mut(), ) }; let hook_data = unsafe { Box::from_raw(data_ref) }; let hook = Hook { ptr: hook_ptr, weechat_ptr: weechat.ptr, }; if hook_ptr.is_null() { Err(()) } else { Ok(Command { _hook: hook, _hook_data: hook_data, }) } } }
use libc::{c_char, c_int}; use std::{borrow::Cow, ffi::CStr, os::raw::c_void, ptr}; use weechat_sys::{t_gui_buffer, t_weechat_plugin, WEECHAT_RC_OK}; use crate::{buffer::Buffer, Args, LossyCString, ReturnCode, Weechat}; use super::Hook; pub struct Command { _hook: Hook, _hook_data: Box<CommandHookData>, } pub trait CommandCallback { fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, arguments: Args); } impl<T: FnMut(&Weechat, &Buffer, Args) + 'static> CommandCallback for T { fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, arguments: Args) { self(weechat, buffer, arguments) } } #[derive(Default)] pub struct CommandSettings { name: String, description: String, arguments: Vec<String>, argument_descriptoin: String, completion: Vec<String>, } impl CommandSettings {
pub fn description<D: Into<String>>(mut self, descritpion: D) -> Self { self.description = descritpion.into(); self } pub fn add_argument<T: Into<String>>(mut self, argument: T) -> Self { self.arguments.push(argument.into()); self } pub fn arguments_description<T: Into<String>>(mut self, descritpion: T) -> Self { self.argument_descriptoin = descritpion.into(); self } pub fn add_completion<T: Into<String>>(mut self, completion: T) -> Self { self.completion.push(completion.into()); self } } struct CommandHookData { callback: Box<dyn CommandCallback>, weechat_ptr: *mut t_weechat_plugin, } pub struct CommandRun { _hook: Hook, _hook_data: Box<CommandRunHookData>, } pub trait CommandRunCallback { fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, command: Cow<str>) -> ReturnCode; } impl<T: FnMut(&Weechat, &Buffer, Cow<str>) -> ReturnCode + 'static> CommandRunCallback for T { fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, command: Cow<str>) -> ReturnCode { self(weechat, buffer, command) } } struct CommandRunHookData { callback: Box<dyn CommandRunCallback>, weechat_ptr: *mut t_weechat_plugin, } impl CommandRun { pub fn new(command: &str, callback: impl CommandRunCallback + 'static) -> Result<Self, ()> { unsafe extern "C" fn c_hook_cb( pointer: *const c_void, _data: *mut c_void, buffer: *mut t_gui_buffer, command: *const std::os::raw::c_char, ) -> c_int { let hook_data: &mut CommandRunHookData = { &mut *(pointer as *mut CommandRunHookData) }; let cb = &mut hook_data.callback; let weechat = Weechat::from_ptr(hook_data.weechat_ptr); let buffer = weechat.buffer_from_ptr(buffer); let command = CStr::from_ptr(command).to_string_lossy(); cb.callback(&weechat, &buffer, command) as isize as i32 } Weechat::check_thread(); let weechat = unsafe { Weechat::weechat() }; let data = Box::new(CommandRunHookData { callback: Box::new(callback), weechat_ptr: weechat.ptr, }); let data_ref = Box::leak(data); let hook_command_run = weechat.get().hook_command_run.unwrap(); let 
command = LossyCString::new(command); let hook_ptr = unsafe { hook_command_run( weechat.ptr, command.as_ptr(), Some(c_hook_cb), data_ref as *const _ as *const c_void, ptr::null_mut(), ) }; let hook_data = unsafe { Box::from_raw(data_ref) }; if hook_ptr.is_null() { Err(()) } else { let hook = Hook { ptr: hook_ptr, weechat_ptr: weechat.ptr, }; Ok(CommandRun { _hook: hook, _hook_data: hook_data, }) } } } impl Command { pub fn new( command_settings: CommandSettings, callback: impl CommandCallback + 'static, ) -> Result<Command, ()> { unsafe extern "C" fn c_hook_cb( pointer: *const c_void, _data: *mut c_void, buffer: *mut t_gui_buffer, argc: i32, argv: *mut *mut c_char, _argv_eol: *mut *mut c_char, ) -> c_int { let hook_data: &mut CommandHookData = { &mut *(pointer as *mut CommandHookData) }; let weechat = Weechat::from_ptr(hook_data.weechat_ptr); let buffer = weechat.buffer_from_ptr(buffer); let cb = &mut hook_data.callback; let args = Args::new(argc, argv); cb.callback(&weechat, &buffer, args); WEECHAT_RC_OK } Weechat::check_thread(); let weechat = unsafe { Weechat::weechat() }; let name = LossyCString::new(command_settings.name); let description = LossyCString::new(command_settings.description); let args = LossyCString::new(command_settings.arguments.join("||")); let args_description = LossyCString::new(command_settings.argument_descriptoin); let completion = LossyCString::new(command_settings.completion.join("||")); let data = Box::new(CommandHookData { callback: Box::new(callback), weechat_ptr: weechat.ptr, }); let data_ref = Box::leak(data); let hook_command = weechat.get().hook_command.unwrap(); let hook_ptr = unsafe { hook_command( weechat.ptr, name.as_ptr(), description.as_ptr(), args.as_ptr(), args_description.as_ptr(), completion.as_ptr(), Some(c_hook_cb), data_ref as *const _ as *const c_void, ptr::null_mut(), ) }; let hook_data = unsafe { Box::from_raw(data_ref) }; let hook = Hook { ptr: hook_ptr, weechat_ptr: weechat.ptr, }; if hook_ptr.is_null() { Err(()) 
} else { Ok(Command { _hook: hook, _hook_data: hook_data, }) } } }
pub fn new<P: Into<String>>(name: P) -> Self { CommandSettings { name: name.into(), ..Default::default() } }
function_block-full_function
[ { "content": "/// Trait for the completion callback.\n\n///\n\n/// A blanket implementation for pure `FnMut` functions exists, if data needs to\n\npub trait CompletionCallback {\n\n /// Callback that will be called if when a completion is requested.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `...
Rust
src/cli.rs
benmaddison/rfz
d4c24a28c2178db24858943a4a29a3cbf218b0c8
use std::convert::TryInto; use std::ffi::{OsStr, OsString}; use std::io::stdout; use std::path::PathBuf; use std::result; use std::str::FromStr; use clap::{crate_authors, crate_description, crate_name, crate_version}; use directories::ProjectDirs; use crate::cmd::{ArgProvider, CmdExec}; use crate::errors::{Error, Result}; pub trait DefaultsProvider { fn dir(&self) -> &OsStr; fn jobs(&self) -> &str; } pub struct Defaults { dir: OsString, jobs: String, } impl Defaults { pub fn get() -> Result<Self> { let dir = match ProjectDirs::from("", "", "rfz") { Some(dirs) => dirs.data_dir().as_os_str().to_owned(), None => { return Err(Error::UserDirectories( "Failed to infer user directory locations".to_string(), )) } }; let jobs = num_cpus::get().to_string(); Ok(Defaults { dir, jobs }) } } impl DefaultsProvider for Defaults { fn dir(&self) -> &OsStr { &self.dir } fn jobs(&self) -> &str { &self.jobs } } pub struct Cli<'a> { defaults: &'a dyn DefaultsProvider, args: clap::ArgMatches<'a>, } impl<'a> Cli<'a> { pub fn init(defaults: &'a dyn DefaultsProvider) -> Self { match Self::init_from(defaults, None) { Ok(cli) => cli, Err(e) => e.exit(), } } fn init_from( defaults: &'a dyn DefaultsProvider, argv: Option<Vec<&str>>, ) -> result::Result<Self, clap::Error> { let app = Cli::build_cli(defaults); let args = match argv { Some(argv) => app.get_matches_from_safe(argv), None => app.get_matches_safe(), }; Ok(Cli { defaults, args: args?, }) } fn build_cli(defaults: &'a dyn DefaultsProvider) -> clap::App { clap::app_from_crate!() .setting(clap::AppSettings::SubcommandRequired) .arg( clap::Arg::with_name("jobs") .short("j") .long("jobs") .takes_value(true) .global(true) .default_value(defaults.jobs()) .help("Number of concurrent jobs to run"), ) .arg( clap::Arg::with_name("dir") .short("d") .long("dir") .takes_value(true) .global(true) .default_value_os(defaults.dir()) .help("Directory containing IETF html docs"), ) .arg( clap::Arg::with_name("verbosity") .short("v") .multiple(true) 
.global(true) .help("Increase output verbosity"), ) .subcommand( clap::SubCommand::with_name("completions") .about("Print shell completion script") .arg( clap::Arg::with_name("shell") .required(true) .possible_values(&clap::Shell::variants()) .help("Shell for which to generate completion script"), ), ) .subcommand( clap::SubCommand::with_name("index") .about( "List the latest version of each document \ with associated metadata", ) .arg( clap::Arg::with_name("type") .short("t") .long("type") .takes_value(true) .multiple(true) .possible_values(&["draft", "rfc", "bcp", "std"]) .help("Limit output by document type"), ), ) .subcommand( clap::SubCommand::with_name("summary") .about("Print a summary of the metadata in <doc>") .arg( clap::Arg::with_name("doc") .required(true) .help("Path to the document"), ), ) .subcommand( clap::SubCommand::with_name("sync") .about("Syncronize the local document mirror") .arg( clap::Arg::with_name("remote") .short("r") .long("remote") .default_value("rsync.tools.ietf.org::tools.html") .help("Remote 'rsync' target to sync from"), ) .arg( clap::Arg::with_name("command") .long("command") .default_value("rsync") .help("Rsync command"), ), ) } pub fn run(&self) -> Result<()> { match self.args.subcommand() { ("completions", Some(sub_matches)) => { self.print_completions(sub_matches); Ok(()) } (subcommand, Some(sub_matches)) => { let args = CliArgs::from(sub_matches); let exec = CmdExec::init(subcommand, &args)?; exec.run() } _ => Err(Error::CliError("No sub-command was found".to_string())), } } fn print_completions(&self, sub_matches: &clap::ArgMatches) { let shell = clap::Shell::from_str(sub_matches.value_of("shell").unwrap()).unwrap(); let mut app = Cli::build_cli(self.defaults); let _stdout = stdout(); #[cfg(not(test))] let mut writer = _stdout.lock(); #[cfg(test)] let mut writer = std::io::sink(); app.gen_completions_to(crate_name!(), shell, &mut writer); } } struct CliArgs<'a>(&'a clap::ArgMatches<'a>); impl<'a> CliArgs<'a> { fn 
from(sub_matches: &'a clap::ArgMatches<'a>) -> Self { CliArgs(sub_matches) } } impl ArgProvider for CliArgs<'_> { fn jobs(&self) -> usize { usize::from_str(self.0.value_of("jobs").unwrap()).unwrap() } fn dir(&self) -> PathBuf { PathBuf::from(self.0.value_of("dir").unwrap()) } fn verbosity(&self) -> usize { match self.0.occurrences_of("verbosity").try_into() { Ok(n) => n, Err(_) => usize::MAX, } } fn path(&self) -> PathBuf { PathBuf::from(self.0.value_of("doc").unwrap()) } fn rsync_cmd(&self) -> &str { self.0.value_of("command").unwrap() } fn rsync_remote(&self) -> &str { self.0.value_of("remote").unwrap() } fn types(&self) -> Option<Vec<&str>> { match self.0.values_of("type") { Some(values) => Some(values.collect()), None => None, } } } #[cfg(test)] mod test { use super::*; use crate::test::resource_path; use std::str::FromStr; struct DummyDefaults; impl DefaultsProvider for DummyDefaults { fn jobs(&self) -> &str { "1" } fn dir(&self) -> &OsStr { OsStr::new("/home/foo/rfz") } } #[test] fn test_cli_defaults() -> Result<()> { let defaults = Defaults::get()?; assert!(usize::from_str(defaults.jobs()).unwrap() > 0); Ok(()) } #[test] fn test_empty_args() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz"]); match Cli::init_from(&defaults, argv) { Err(e) => assert_eq!(e.kind, clap::ErrorKind::MissingSubcommand), Ok(_) => panic!("Expected MissingSubcommand Error"), } } #[test] fn test_dummy_index() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "index"]); let cli = Cli::init_from(&defaults, argv).unwrap(); match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "index"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.jobs(), 1); assert_eq!(cli_args.dir(), PathBuf::from("/home/foo/rfz")); assert_eq!(cli_args.types(), None); } _ => panic!("Cli parsing failed"), } } #[test] fn test_dummy_index_filtered() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "index", "--type", "rfc"]); let cli = 
Cli::init_from(&defaults, argv).unwrap(); match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "index"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.jobs(), 1); assert_eq!(cli_args.dir(), PathBuf::from("/home/foo/rfz")); assert_eq!(cli_args.types(), Some(vec!["rfc"])); } _ => panic!("Cli parsing failed"), } } #[test] fn test_dummy_summary() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "summary", "/home/foo/rfz/bar.html"]); let cli = Cli::init_from(&defaults, argv).unwrap(); match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "summary"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.path(), PathBuf::from("/home/foo/rfz/bar.html")); } _ => panic!("Cli parsing failed"), } } #[test] fn test_dummy_sync() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "sync", "-v"]); let cli = Cli::init_from(&defaults, argv).unwrap(); match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "sync"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.rsync_cmd(), "rsync"); assert_eq!(cli_args.rsync_remote(), "rsync.tools.ietf.org::tools.html"); assert_eq!(cli_args.verbosity(), 1) } _ => panic!("Cli parsing failed"), } } #[test] fn test_exec_index() -> Result<()> { let defaults = Defaults::get()?; let dir = resource_path(""); let argv = Some(vec!["rfz", "index", "-d", dir.to_str().unwrap()]); let cli = Cli::init_from(&defaults, argv).unwrap(); cli.run() } #[test] fn test_exec_completions() -> Result<()> { let defaults = Defaults::get()?; let argv = Some(vec!["rfz", "completions", "bash"]); let cli = Cli::init_from(&defaults, argv).unwrap(); cli.run() } #[test] fn test_exec_unknown_shell() -> Result<()> { let defaults = Defaults::get()?; let argv = Some(vec!["rfz", "completions", "crash"]); match Cli::init_from(&defaults, argv) { Err(e) => assert_eq!(e.kind, clap::ErrorKind::InvalidValue), Ok(_) => panic!("Expected InvalidValue Error"), }; 
Ok(()) } }
use std::convert::TryInto; use std::ffi::{OsStr, OsString}; use std::io::stdout; use std::path::PathBuf; use std::result; use std::str::FromStr; use clap::{crate_authors, crate_description, crate_name, crate_version}; use directories::ProjectDirs; use crate::cmd::{ArgProvider, CmdExec}; use crate::errors::{Error, Result}; pub trait DefaultsProvider { fn dir(&self) -> &OsStr; fn jobs(&self) -> &str; } pub struct Defaults { dir: OsString, jobs: String, } impl Defaults { pub fn get() -> Result<Self> { let dir = match ProjectDirs::from("", "", "rfz") { Some(dirs) => dirs.data_dir().as_os_str().to_owned(), None => { return Err(Error::UserDirectories( "Failed to infer user directory locations".to_string(), )) } }; let jobs = num_cpus::get().to_string(); Ok(Defaults { dir, jobs }) } } impl DefaultsProvider for Defaults { fn dir(&self) -> &OsStr { &self.dir } fn jobs(&self) -> &str { &self.jobs } } pub struct Cli<'a> { defaults: &'a dyn DefaultsProvider, args: clap::ArgMatches<'a>, } impl<'a> Cli<'a> { pub fn init(defaults: &'a dyn DefaultsProvider) -> Self { match Self::init_from(defaults, None) { Ok(cli) => cli, Err(e) => e.exit(), } } fn init_from( defaults: &'a dyn DefaultsProvider, argv: Option<Vec<&str>>, ) -> result::Result<Self, clap::Error> { let app = Cli::build_cli(defaults); let args = match argv { Some(argv) => app.get_matches_from_safe(argv), None => app.get_matches_safe(), }; Ok(Cli { defaults, args: args?, }) } fn build_cli(defaults: &'a dyn DefaultsProvider) -> clap::App { clap::app_from_crate!() .setting(clap::AppSettings::SubcommandRequired) .arg( clap::Arg::with_name("jobs") .short("j") .long("jobs") .takes_value(true) .global(true) .default_value(defaults.jobs()) .help("Number of concurrent jobs to run"), ) .arg( clap::Arg::with_name("dir") .short("d") .long("dir") .takes_value(true) .global(true) .default_value_os(defaults.dir()) .help("Directory containing IETF html docs"), ) .arg( clap::Arg::with_name("verbosity") .short("v") .multiple(true) 
.global(true) .help("Increase output verbosity"), ) .subcommand( clap::SubCommand::with_name("completions") .about("Print shell completion script") .arg( clap::Arg::with_name("shell") .required(true) .possible_values(&clap::Shell::variants()) .help("Shell for which to generate completion script"), ), ) .subcommand( clap::SubCommand::with_name("index") .about( "List the latest version of each document \ with associated metadata", ) .arg( clap::Arg::with_name("type") .short("t") .long("type") .takes_value(true) .multiple(true) .possible_values(&["draft", "rfc", "bcp", "std"]) .help("Limit output by document type"), ), ) .subcommand( clap::SubCommand::with_name("summary") .about("Print a summary of the metadata in <doc>") .arg( clap::Arg::with_name("doc") .required(true) .help("Path to the document"), ), ) .subcommand( clap::SubCommand::with_name("sync") .about("Syncronize the local document mirror") .arg( clap::Arg::with_name("remote") .short("r") .long("remote") .default_value("rsync.tools.ietf.org::tools.html") .help("Remote 'rsync' target to sync from"), ) .arg( clap::Arg::with_name("command") .long("command") .default_value("rsync") .help("Rsync command"), ), ) } pub fn run(&self) -> Result<()> { match self.args.subcommand() { ("completions", Some(sub_matches)) => { self.print_completions(sub_matches); Ok(()) } (subcommand, Some(sub_matches)) => { let args = CliArgs::from(sub_matches); let exec = CmdExec::init(subcommand, &args)?; exec.run() } _ => Err(Error::CliError("No sub-command was found".to_string())), } } fn print_completions(&self, sub_matches: &clap::ArgMatches) { let shell = clap::Shell::from_str(sub_matches.value_of("shell").unwrap()).unwrap(); let mut app = Cli::build_cli(self.defaults); let _stdout = stdout(); #[cfg(not(test))] let mut writer = _stdout.lock(); #[cfg(test)] let mut writer = std::io::sink(); app.gen_completions_to(crate_name!(), shell, &mut writer); } } struct CliArgs<'a>(&'a clap::ArgMatches<'a>); impl<'a> CliArgs<'a> { fn 
from(sub_matches: &'a clap::ArgMatches<'a>) -> Self { CliArgs(sub_matches) } } impl ArgProvider for CliArgs<'_> { fn jobs(&self) -> usize { usize::from_str(self.0.value_of("jobs").unwrap()).unwrap() } fn dir(&self) -> PathBuf { PathBuf::from(self.0.value_of("dir").unwrap()) } fn verbosity(&self) -> usize { match self.0.occurrences_of("verbosity").try_into() { Ok(n) => n, Err(_) => usize::MAX, } } fn path(&self) -> PathBuf { PathBuf::from(self.0.value_of("doc").unwrap()) } fn rsync_cmd(&self) -> &str { self.0.value_of("command").unwrap() } fn rsync_remote(&self) -> &str { self.0.value_of("remote").unwrap() } fn types(&self) -> Option<Vec<&str>> { match self.0.values_of("type") { Some(values) => Some(values.collect()), None => None, } } } #[cfg(test)] mod test { use super::*; use crate::test::resource_path; use std::str::FromStr; struct DummyDefaults; impl DefaultsProvider for DummyDefaults { fn jobs(&self) -> &str { "1" } fn dir(&self) -> &OsStr { OsStr::new("/home/foo/rfz") } } #[test] fn test_cli_defaults() -> Result<()> { let defaults = Defaults::get()?; assert!(usize::from_str(defaults.jobs()).unwrap() > 0); Ok(()) } #[test] fn test_empty_args() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz"]); match Cli::init_from(&defaults, argv) { Err(e) => assert_eq!(e.kind, clap::ErrorKind::MissingSubcommand), Ok(_) => panic!("Expected MissingSubcommand Error"), } } #[test] fn test_dummy_index() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "index"]); let cli = Cli::init_from(&defaults, argv).unwrap(); match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "index"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.jobs(), 1); assert_eq!(cli_args.dir(), PathBuf::from("/home/foo/rfz")); assert_eq!(cli_args.types(), None); } _ => panic!("Cli parsing failed"), } } #[test] fn test_dummy_index_filtered() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "index", "--type", "rfc"]); let cli = 
Cli::init_from(&defaults, argv).unwrap(); match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "index"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.jobs(), 1); assert_eq!(cli_args.dir(), PathBuf::from("/home/foo/rfz")); assert_eq!(cli_args.types(), Some(vec!["rfc"])); } _ => panic!("Cli parsing failed"), } } #[test] fn test_dummy_summary() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "summary", "/home/foo/rfz/bar.html"]); let cli = Cli::init_from(&defaults, argv).unwrap();
} #[test] fn test_dummy_sync() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "sync", "-v"]); let cli = Cli::init_from(&defaults, argv).unwrap(); match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "sync"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.rsync_cmd(), "rsync"); assert_eq!(cli_args.rsync_remote(), "rsync.tools.ietf.org::tools.html"); assert_eq!(cli_args.verbosity(), 1) } _ => panic!("Cli parsing failed"), } } #[test] fn test_exec_index() -> Result<()> { let defaults = Defaults::get()?; let dir = resource_path(""); let argv = Some(vec!["rfz", "index", "-d", dir.to_str().unwrap()]); let cli = Cli::init_from(&defaults, argv).unwrap(); cli.run() } #[test] fn test_exec_completions() -> Result<()> { let defaults = Defaults::get()?; let argv = Some(vec!["rfz", "completions", "bash"]); let cli = Cli::init_from(&defaults, argv).unwrap(); cli.run() } #[test] fn test_exec_unknown_shell() -> Result<()> { let defaults = Defaults::get()?; let argv = Some(vec!["rfz", "completions", "crash"]); match Cli::init_from(&defaults, argv) { Err(e) => assert_eq!(e.kind, clap::ErrorKind::InvalidValue), Ok(_) => panic!("Expected InvalidValue Error"), }; Ok(()) } }
match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "summary"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.path(), PathBuf::from("/home/foo/rfz/bar.html")); } _ => panic!("Cli parsing failed"), }
if_condition
[ { "content": "fn index(args: &dyn ArgProvider) -> Result<()> {\n\n let collection = match Collection::from_dir(args.dir()) {\n\n Ok(set) => set,\n\n Err(e) => return Err(e),\n\n };\n\n let _stdout = stdout();\n\n #[cfg(not(test))]\n\n let mut writer = _stdout.lock();\n\n #[cfg(te...
Rust
src/protocol/argument.rs
dylanmckay/flep
c020400f4ead85c6261dbe29bded876aad83af97
use {Error, ErrorKind}; use std::io::prelude::*; use std::ascii::AsciiExt; use std::io; pub trait Argument : Sized { fn read_with_space(read: &mut BufRead) -> Result<Self, Error> { let mut buf: [u8; 1] = [0]; assert_eq!(read.read(&mut buf)?, 1, "unexpected EOF while checking for space"); assert_eq!(buf[0] as char, ' ', "expected space preceding argument"); Self::read(read) } fn read(read: &mut BufRead) -> Result<Self, Error>; fn write(&self, write: &mut Write) -> Result<(), Error>; fn parse_text(text: &str) -> Self { let mut buffer = io::Cursor::new(text); Self::read_with_space(&mut buffer).unwrap() } fn bytes(&self) -> Vec<u8> { let mut buffer = io::Cursor::new(Vec::new()); self.write(&mut buffer).unwrap(); buffer.into_inner() } fn to_string(&self) -> String { String::from_utf8(self.bytes()).unwrap() } } impl Argument for String { fn read(read: &mut BufRead) -> Result<Self, Error> { let bytes: Result<Vec<u8>, _> = read.bytes().collect(); let bytes = bytes?; match String::from_utf8(bytes) { Ok(s) => Ok(s), Err(..) => Err(ErrorKind::InvalidArgument("argument is not valid UTF-8".to_owned()).into()), } } fn write(&self, write: &mut Write) -> Result<(), Error> { for c in self.chars() { assert!(c.is_ascii(), "only ASCII is supported in FTP"); } write!(write, "{}", self)?; Ok(()) } } macro_rules! impl_argument_integer { ($ty:ty) => { impl Argument for $ty { fn read(read: &mut BufRead) -> Result<Self, Error> { let s = String::read(read)?; match s.parse() { Ok(i) => Ok(i), Err(..) 
=> Err(ErrorKind::InvalidArgument("argument is not an integer".to_owned()).into()), } } fn write(&self, write: &mut Write) -> Result<(), Error> { write!(write, "{}", self)?; Ok(()) } } } } impl_argument_integer!(u8); impl_argument_integer!(i8); impl_argument_integer!(u16); impl_argument_integer!(i16); impl_argument_integer!(u32); impl_argument_integer!(i32); impl_argument_integer!(u64); impl_argument_integer!(i64); impl<T: Argument> Argument for Option<T> { fn read_with_space(read: &mut BufRead) -> Result<Self, Error> { let mut buf: [u8; 1] = [0]; if read.read(&mut buf)? == 1 { let inner = T::read(read)?; Ok(Some(inner)) } else { Ok(None) } } fn read(_read: &mut BufRead) -> Result<Self, Error> { unreachable!(); } fn write(&self, write: &mut Write) -> Result<(), Error> { if let Some(ref thing) = *self { write!(write, " ")?; thing.write(write)?; } Ok(()) } } #[cfg(test)] mod test { pub use super::*; mod optional { use std::io; pub use super::*; fn parse<T: Argument>(text: &str) -> Option<T> { let mut buf = io::Cursor::new(text); Argument::read_with_space(&mut buf).unwrap() } #[test] fn correctly_reads_a_present_value() { let value: Option<String> = parse(" foo"); assert_eq!(value, Some("foo".to_owned())); } #[test] fn correctly_reads_a_missing_value() { let value: Option<String> = parse(""); assert_eq!(value, None); } #[test] fn correctly_writes_a_present_value() { assert_eq!(Some("foo".to_owned()).to_string(), " foo"); } #[test] fn correctly_writes_an_empty_value() { let value: Option<String> = None; assert_eq!(value.to_string(), ""); } } }
use {Error, ErrorKind}; use std::io::prelude::*; use std::ascii::AsciiExt; use std::io; pub trait Argument : Sized { fn read_with_space(read: &mut BufRead) -> Result<Self, Error> { let mut buf: [u8; 1] = [0]; assert_eq!(read.read(&mut buf)?, 1, "unexpected EOF while checking for space"); assert_eq!(buf[0] as char, ' ', "expected space preceding argument"); Self::read(read) } fn read(read: &mut BufRead) -> Result<Self, Error>; fn write(&self, write: &mut Write) -> Result<(), Error>; fn parse_text(text: &str) -> Self { let mut buffer = io::Cursor::new(text); Self::read_with_space(&mut buffer).unwrap() } fn bytes(&self) -> Vec<u8> { let mut buffer = io::Cursor::new(Vec::new()); self.write(&mut buffer).unwrap(); buffer.into_inner() } fn to_string(&self) -> String { String::from_utf8(self.bytes()).unwrap() } } impl Argument for String { fn read(read: &mut BufRead) -> Result<Self, Error> { let bytes: Result<Vec<u8>, _> = read.bytes().collect(); let bytes = bytes?;
} fn write(&self, write: &mut Write) -> Result<(), Error> { for c in self.chars() { assert!(c.is_ascii(), "only ASCII is supported in FTP"); } write!(write, "{}", self)?; Ok(()) } } macro_rules! impl_argument_integer { ($ty:ty) => { impl Argument for $ty { fn read(read: &mut BufRead) -> Result<Self, Error> { let s = String::read(read)?; match s.parse() { Ok(i) => Ok(i), Err(..) => Err(ErrorKind::InvalidArgument("argument is not an integer".to_owned()).into()), } } fn write(&self, write: &mut Write) -> Result<(), Error> { write!(write, "{}", self)?; Ok(()) } } } } impl_argument_integer!(u8); impl_argument_integer!(i8); impl_argument_integer!(u16); impl_argument_integer!(i16); impl_argument_integer!(u32); impl_argument_integer!(i32); impl_argument_integer!(u64); impl_argument_integer!(i64); impl<T: Argument> Argument for Option<T> { fn read_with_space(read: &mut BufRead) -> Result<Self, Error> { let mut buf: [u8; 1] = [0]; if read.read(&mut buf)? == 1 { let inner = T::read(read)?; Ok(Some(inner)) } else { Ok(None) } } fn read(_read: &mut BufRead) -> Result<Self, Error> { unreachable!(); } fn write(&self, write: &mut Write) -> Result<(), Error> { if let Some(ref thing) = *self { write!(write, " ")?; thing.write(write)?; } Ok(()) } } #[cfg(test)] mod test { pub use super::*; mod optional { use std::io; pub use super::*; fn parse<T: Argument>(text: &str) -> Option<T> { let mut buf = io::Cursor::new(text); Argument::read_with_space(&mut buf).unwrap() } #[test] fn correctly_reads_a_present_value() { let value: Option<String> = parse(" foo"); assert_eq!(value, Some("foo".to_owned())); } #[test] fn correctly_reads_a_missing_value() { let value: Option<String> = parse(""); assert_eq!(value, None); } #[test] fn correctly_writes_a_present_value() { assert_eq!(Some("foo".to_owned()).to_string(), " foo"); } #[test] fn correctly_writes_an_empty_value() { let value: Option<String> = None; assert_eq!(value.to_string(), ""); } } }
match String::from_utf8(bytes) { Ok(s) => Ok(s), Err(..) => Err(ErrorKind::InvalidArgument("argument is not valid UTF-8".to_owned()).into()), }
if_condition
[ { "content": "/// Runs a FTP server on a given address.\n\n///\n\n/// Sets up an FTP server locally and begins to wait for clients\n\n/// to connect.\n\npub fn run<F,A>(server: &mut F, address: A) -> Result<(), Error>\n\n where F: Server,\n\n A: ToSocketAddrs {\n\n let mut addresses = address.to_...
Rust
wincolor/src/winapi_inline.rs
crlf0710/termcolor
1059a1e540ac8ab7a2c99508e1b304fcce192705
#![allow(bad_style, overflowing_literals, unused_macros, unused_imports, dead_code)] #[macro_use] pub mod macros { macro_rules! STRUCT { (#[debug] $($rest:tt)*) => ( STRUCT!{#[cfg_attr(feature = "impl-debug", derive(Debug))] $($rest)*} ); ($(#[$attrs:meta])* struct $name:ident { $($field:ident: $ftype:ty,)+ }) => ( #[repr(C)] #[derive(Copy)] $(#[$attrs])* pub struct $name { $(pub $field: $ftype,)+ } impl Clone for $name { #[inline] fn clone(&self) -> $name { *self } } #[cfg(feature = "impl-default")] impl Default for $name { #[inline] fn default() -> $name { unsafe { $crate::_core::mem::zeroed() } } } ); } } pub mod shared { pub mod minwindef { use self::winapi::ctypes::{ c_char, c_float, c_int, c_long, c_uchar, c_uint, c_ulong, c_ushort, c_void, }; use crate::winapi_inline as winapi; pub type DWORD = c_ulong; pub type BOOL = c_int; pub type WORD = c_ushort; pub type LPDWORD = *mut DWORD; STRUCT! {#[debug] struct FILETIME { dwLowDateTime: DWORD, dwHighDateTime: DWORD, }} } pub mod winerror { use self::winapi::shared::minwindef::DWORD; use crate::winapi_inline as winapi; pub const NO_ERROR: DWORD = 0; } } pub mod um { pub mod wincon { use self::winapi::shared::minwindef::{BOOL, DWORD, WORD}; pub use self::winapi::um::wincontypes::{COORD, PCOORD, PSMALL_RECT, SMALL_RECT}; use self::winapi::um::winnt::HANDLE; use crate::winapi_inline as winapi; pub const FOREGROUND_BLUE: WORD = 0x0001; pub const FOREGROUND_GREEN: WORD = 0x0002; pub const FOREGROUND_RED: WORD = 0x0004; pub const FOREGROUND_INTENSITY: WORD = 0x0008; pub const BACKGROUND_BLUE: WORD = 0x0010; pub const BACKGROUND_GREEN: WORD = 0x0020; pub const BACKGROUND_RED: WORD = 0x0040; pub const BACKGROUND_INTENSITY: WORD = 0x0080; STRUCT! 
{struct CONSOLE_SCREEN_BUFFER_INFO { dwSize: COORD, dwCursorPosition: COORD, wAttributes: WORD, srWindow: SMALL_RECT, dwMaximumWindowSize: COORD, }} pub type PCONSOLE_SCREEN_BUFFER_INFO = *mut CONSOLE_SCREEN_BUFFER_INFO; pub const ENABLE_PROCESSED_INPUT: DWORD = 0x0001; pub const ENABLE_LINE_INPUT: DWORD = 0x0002; pub const ENABLE_ECHO_INPUT: DWORD = 0x0004; pub const ENABLE_WINDOW_INPUT: DWORD = 0x0008; pub const ENABLE_MOUSE_INPUT: DWORD = 0x0010; pub const ENABLE_INSERT_MODE: DWORD = 0x0020; pub const ENABLE_QUICK_EDIT_MODE: DWORD = 0x0040; pub const ENABLE_EXTENDED_FLAGS: DWORD = 0x0080; pub const ENABLE_AUTO_POSITION: DWORD = 0x0100; pub const ENABLE_VIRTUAL_TERMINAL_INPUT: DWORD = 0x0200; pub const ENABLE_PROCESSED_OUTPUT: DWORD = 0x0001; pub const ENABLE_WRAP_AT_EOL_OUTPUT: DWORD = 0x0002; pub const ENABLE_VIRTUAL_TERMINAL_PROCESSING: DWORD = 0x0004; pub const DISABLE_NEWLINE_AUTO_RETURN: DWORD = 0x0008; pub const ENABLE_LVB_GRID_WORLDWIDE: DWORD = 0x0010; extern "system" { pub fn GetConsoleScreenBufferInfo( hConsoleOutput: HANDLE, lpConsoleScreenBufferInfo: PCONSOLE_SCREEN_BUFFER_INFO, ) -> BOOL; pub fn SetConsoleTextAttribute(hConsoleOutput: HANDLE, wAttributes: WORD) -> BOOL; } } pub mod wincontypes { use self::winapi::um::winnt::SHORT; use crate::winapi_inline as winapi; STRUCT! {struct COORD { X: SHORT, Y: SHORT, }} pub type PCOORD = *mut COORD; STRUCT! 
{struct SMALL_RECT { Left: SHORT, Top: SHORT, Right: SHORT, Bottom: SHORT, }} pub type PSMALL_RECT = *mut SMALL_RECT; } pub mod consoleapi { use self::winapi::shared::minwindef::{BOOL, DWORD, LPDWORD}; use self::winapi::um::winnt::HANDLE; use crate::winapi_inline as winapi; extern "system" { pub fn GetConsoleMode(hConsoleHandle: HANDLE, lpMode: LPDWORD) -> BOOL; pub fn SetConsoleMode(hConsoleHandle: HANDLE, dwMode: DWORD) -> BOOL; } } pub mod errhandlingapi { use self::winapi::shared::minwindef::DWORD; use crate::winapi_inline as winapi; extern "system" { pub fn GetLastError() -> DWORD; } } pub mod fileapi { use self::winapi::shared::minwindef::{BOOL, DWORD, FILETIME}; use self::winapi::um::winnt::HANDLE; use crate::winapi_inline as winapi; STRUCT! {struct BY_HANDLE_FILE_INFORMATION { dwFileAttributes: DWORD, ftCreationTime: FILETIME, ftLastAccessTime: FILETIME, ftLastWriteTime: FILETIME, dwVolumeSerialNumber: DWORD, nFileSizeHigh: DWORD, nFileSizeLow: DWORD, nNumberOfLinks: DWORD, nFileIndexHigh: DWORD, nFileIndexLow: DWORD, }} pub type PBY_HANDLE_FILE_INFORMATION = *mut BY_HANDLE_FILE_INFORMATION; pub type LPBY_HANDLE_FILE_INFORMATION = *mut BY_HANDLE_FILE_INFORMATION; extern "system" { pub fn GetFileInformationByHandle( hFile: HANDLE, lpFileInformation: LPBY_HANDLE_FILE_INFORMATION, ) -> BOOL; pub fn GetFileType(hFile: HANDLE) -> DWORD; } } pub mod winnt { use self::winapi::ctypes::{ __int64, __uint64, c_char, c_int, c_long, c_short, c_uint, c_ulong, c_void, wchar_t, }; use self::winapi::shared::minwindef::DWORD; use crate::winapi_inline as winapi; pub type SHORT = c_short; pub type HANDLE = *mut c_void; pub const FILE_ATTRIBUTE_READONLY: DWORD = 0x00000001; pub const FILE_ATTRIBUTE_HIDDEN: DWORD = 0x00000002; pub const FILE_ATTRIBUTE_SYSTEM: DWORD = 0x00000004; pub const FILE_ATTRIBUTE_DIRECTORY: DWORD = 0x00000010; pub const FILE_ATTRIBUTE_ARCHIVE: DWORD = 0x00000020; pub const FILE_ATTRIBUTE_DEVICE: DWORD = 0x00000040; pub const FILE_ATTRIBUTE_NORMAL: DWORD = 
0x00000080; pub const FILE_ATTRIBUTE_TEMPORARY: DWORD = 0x00000100; pub const FILE_ATTRIBUTE_SPARSE_FILE: DWORD = 0x00000200; pub const FILE_ATTRIBUTE_REPARSE_POINT: DWORD = 0x00000400; pub const FILE_ATTRIBUTE_COMPRESSED: DWORD = 0x00000800; pub const FILE_ATTRIBUTE_OFFLINE: DWORD = 0x00001000; pub const FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: DWORD = 0x00002000; pub const FILE_ATTRIBUTE_ENCRYPTED: DWORD = 0x00004000; pub const FILE_ATTRIBUTE_INTEGRITY_STREAM: DWORD = 0x00008000; pub const FILE_ATTRIBUTE_VIRTUAL: DWORD = 0x00010000; pub const FILE_ATTRIBUTE_NO_SCRUB_DATA: DWORD = 0x00020000; pub const FILE_ATTRIBUTE_EA: DWORD = 0x00040000; pub const FILE_ATTRIBUTE_PINNED: DWORD = 0x00080000; pub const FILE_ATTRIBUTE_UNPINNED: DWORD = 0x00100000; pub const FILE_ATTRIBUTE_RECALL_ON_OPEN: DWORD = 0x00040000; pub const FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS: DWORD = 0x00400000; } pub mod winbase { use self::winapi::shared::minwindef::DWORD; use crate::winapi_inline as winapi; pub const FILE_FLAG_WRITE_THROUGH: DWORD = 0x80000000; pub const FILE_FLAG_OVERLAPPED: DWORD = 0x40000000; pub const FILE_FLAG_NO_BUFFERING: DWORD = 0x20000000; pub const FILE_FLAG_RANDOM_ACCESS: DWORD = 0x10000000; pub const FILE_FLAG_SEQUENTIAL_SCAN: DWORD = 0x08000000; pub const FILE_FLAG_DELETE_ON_CLOSE: DWORD = 0x04000000; pub const FILE_FLAG_BACKUP_SEMANTICS: DWORD = 0x02000000; pub const FILE_FLAG_POSIX_SEMANTICS: DWORD = 0x01000000; pub const FILE_FLAG_SESSION_AWARE: DWORD = 0x00800000; pub const FILE_FLAG_OPEN_REPARSE_POINT: DWORD = 0x00200000; pub const FILE_FLAG_OPEN_NO_RECALL: DWORD = 0x00100000; pub const FILE_FLAG_FIRST_PIPE_INSTANCE: DWORD = 0x00080000; pub const FILE_FLAG_OPEN_REQUIRING_OPLOCK: DWORD = 0x00040000; pub const FILE_TYPE_UNKNOWN: DWORD = 0x0000; pub const FILE_TYPE_DISK: DWORD = 0x0001; pub const FILE_TYPE_CHAR: DWORD = 0x0002; pub const FILE_TYPE_PIPE: DWORD = 0x0003; pub const FILE_TYPE_REMOTE: DWORD = 0x8000; } } pub mod ctypes { pub use std::os::raw::c_void; pub type 
c_char = i8; pub type c_schar = i8; pub type c_uchar = u8; pub type c_short = i16; pub type c_ushort = u16; pub type c_int = i32; pub type c_uint = u32; pub type c_long = i32; pub type c_ulong = u32; pub type c_longlong = i64; pub type c_ulonglong = u64; pub type c_float = f32; pub type c_double = f64; pub type __int8 = i8; pub type __uint8 = u8; pub type __int16 = i16; pub type __uint16 = u16; pub type __int32 = i32; pub type __uint32 = u32; pub type __int64 = i64; pub type __uint64 = u64; pub type wchar_t = u16; }
#![allow(bad_style, overflowing_literals, unused_macros, unused_imports, dead_code)] #[macro_use] pub mod macros { macro_rules! STRUCT { (#[debug] $($rest:tt)*) => ( STRUCT!{#[cfg_attr(feature = "impl-debug", derive(Debug))] $($rest)*} ); ($(#[$attrs:meta])* struct $name:ident { $($field:ident: $ftype:ty,)+ }) => ( #[repr(C)] #[derive(Copy)] $(#[$attrs])* pub struct $name { $(pub $field: $ftype,)+ } impl Clone for $name { #[inline] fn clone(&self) -> $name { *self } } #[cfg(feature = "impl-default")] impl Default for $name { #[inline] fn default() -> $name { unsafe { $crate::_core::mem::zeroed() } } } ); } } pub mod shared { pub mod minwindef { use self::winapi::ctypes::{ c_char, c_float, c_int, c_long, c_uchar, c_uint, c_ulong, c_ushort, c_void, }; use crate::winapi_inline as winapi; pub type DWORD = c_ulong; pub type BOOL = c_int; pub type WORD = c_ushort; pub type LPDWORD = *mut DWORD; STRUCT! {#[debug] struct FILETIME { dwLowDateTime: DWORD, dwHighDateTime: DWORD, }} } pub mod winerror { use self::winapi::shared::minwindef::DWORD; use crate::winapi_inline as winapi; pub const NO_ERROR: DWORD = 0; } } pub mod um { pub mod wincon { use self::winapi::shared::minwindef::{BOOL, DWORD, WORD}; pub use self::winapi::um::wincontypes::{COORD, PCOORD, PSMALL_RECT, SMALL_RECT}; use self::winapi::um::winnt::HANDLE; use crate::winapi_inline as winapi; pub const FOREGROUND_BLUE: WORD = 0x0001; pub const FOREGROUND_GREEN: WORD = 0x0002; pub const FOREGROUND_RED: WORD = 0x0004; pub const FOREGROUND_INTENSITY: WORD = 0x0008; pub const BACKGROUND_BLUE: WORD = 0x0010; pub const BACKGROUND_GREEN: WORD = 0x0020; pub const BACKGROUND_RED: WORD = 0x0040; pub const BACKGROUND_INTENSITY: WORD = 0x0080; STRUCT! 
{struct CONSOLE_SCREEN_BUFFER_INFO { dwSize: COORD, dwCursorPosition: COORD, wAttributes: WORD, srWindow: SMALL_RECT, dwMaximumWindowSize: COORD, }} pub type PCONSOLE_SCREEN_BUFFER_INFO = *mut CONSOLE_SCREEN_BUFFER_INFO; pub const ENABLE_PROCESSED_INPUT: DWORD = 0x0001; pub const ENABLE_LINE_INPUT: DWORD = 0x0002; pub const ENABLE_ECHO_INPUT: DWORD = 0x0004; pub const ENABLE_WINDOW_INPUT: DWORD = 0x0008; pub const ENABLE_MOUSE_INPUT: DWORD = 0x0010; pub const ENABLE_INSERT_MODE: DWORD = 0x0020; pub const ENABLE_QUICK_EDIT_MODE: DWORD = 0x0040; pub const ENABLE_EXTENDED_FLAGS: DWORD = 0x0080; pub const ENABLE_AUTO_POSITION: DWORD = 0x0100; pub const ENABLE_VIRTUAL_TERMINAL_INPUT: DWORD = 0x0200; pub const ENABLE_PROCESSED_OUTPUT: DWORD = 0x0001; pub const ENABLE_WRAP_AT_EOL_OUTPUT: DWORD = 0x0002; pub const ENABLE_VIRTUAL_TERMINAL_PROCESSING: DWORD = 0x0004; pub const DISABLE_NEWLINE_AUTO_RETURN: DWORD = 0x0008; pub const ENABLE_LVB_GRID_WORLDWIDE: DWORD = 0x0010; extern "system" { pub fn GetConsoleScreenBufferInfo( hConsoleOutput: HANDLE, lpConsoleScreenBufferInfo: PCONSOLE_SCREEN_BUFFER_INFO, ) -> BOOL; pub fn SetConsoleTextAttribute(hConsoleOutput: HANDLE, wAttributes: WORD) -> BOOL; } } pub mod wincontypes { use self::winapi::um::winnt::SHORT; use crate::winapi_inline as winapi; STRUCT! {struct COORD { X: SHORT, Y: SHORT, }} pub type PCOORD = *mut COORD; STRUCT! 
{struct SMALL_RECT { Left: SHORT, Top: SHORT, Right: SHORT, Bottom: SHORT, }} pub type PSMALL_RECT = *mut SMALL_RECT; } pub mod consoleapi { use self::winapi::shared::minwindef::{BOOL, DWORD, LPDWORD}; use self::winapi::um::winnt::HANDLE; use crate::winapi_inline as winapi; extern "system" { pub fn GetConsoleMode(hConsoleHandle: HANDLE, lpMode: LPDWORD) -> BOOL; pub fn SetConsoleMode(hConsoleHandle: HANDLE, dwMode: DWORD) -> BOOL; } } pub mod errhandlingapi { use self::winapi::shared::minwindef::DWORD; use crate::winapi_inline as winapi; extern "system" { pub fn GetLastError() -> DWORD; } } pub mod fileapi { use self::winapi::shared::minwindef::{BOOL, DWORD, FILETIME}; use self::winapi::um::winnt::HANDLE; use crate::winapi_inline as winapi; STRUCT! {struct BY_HANDLE_FILE_INFORMATION { dwFileAttributes: DWORD, ftCreationTime: FILETIME, ftLastAccessTime: FILETIME, ftLastWriteTime: FILETIME, dwVolumeSerialNumber: DWORD, nFileSizeHigh: DWORD, nFileSizeLow: DWORD, nNumberOfLinks: DWORD, nFileIndexHigh: DWORD, nFileIndexLow: DWORD, }} pub type PBY_HANDLE_FILE_INFORMATION = *mut BY_HANDLE_FILE_INFORMATION; pub type LPBY_HANDLE_FILE_INFORMATION = *mut BY_HANDLE_FILE_INFORMA
0x00020000; pub const FILE_ATTRIBUTE_EA: DWORD = 0x00040000; pub const FILE_ATTRIBUTE_PINNED: DWORD = 0x00080000; pub const FILE_ATTRIBUTE_UNPINNED: DWORD = 0x00100000; pub const FILE_ATTRIBUTE_RECALL_ON_OPEN: DWORD = 0x00040000; pub const FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS: DWORD = 0x00400000; } pub mod winbase { use self::winapi::shared::minwindef::DWORD; use crate::winapi_inline as winapi; pub const FILE_FLAG_WRITE_THROUGH: DWORD = 0x80000000; pub const FILE_FLAG_OVERLAPPED: DWORD = 0x40000000; pub const FILE_FLAG_NO_BUFFERING: DWORD = 0x20000000; pub const FILE_FLAG_RANDOM_ACCESS: DWORD = 0x10000000; pub const FILE_FLAG_SEQUENTIAL_SCAN: DWORD = 0x08000000; pub const FILE_FLAG_DELETE_ON_CLOSE: DWORD = 0x04000000; pub const FILE_FLAG_BACKUP_SEMANTICS: DWORD = 0x02000000; pub const FILE_FLAG_POSIX_SEMANTICS: DWORD = 0x01000000; pub const FILE_FLAG_SESSION_AWARE: DWORD = 0x00800000; pub const FILE_FLAG_OPEN_REPARSE_POINT: DWORD = 0x00200000; pub const FILE_FLAG_OPEN_NO_RECALL: DWORD = 0x00100000; pub const FILE_FLAG_FIRST_PIPE_INSTANCE: DWORD = 0x00080000; pub const FILE_FLAG_OPEN_REQUIRING_OPLOCK: DWORD = 0x00040000; pub const FILE_TYPE_UNKNOWN: DWORD = 0x0000; pub const FILE_TYPE_DISK: DWORD = 0x0001; pub const FILE_TYPE_CHAR: DWORD = 0x0002; pub const FILE_TYPE_PIPE: DWORD = 0x0003; pub const FILE_TYPE_REMOTE: DWORD = 0x8000; } } pub mod ctypes { pub use std::os::raw::c_void; pub type c_char = i8; pub type c_schar = i8; pub type c_uchar = u8; pub type c_short = i16; pub type c_ushort = u16; pub type c_int = i32; pub type c_uint = u32; pub type c_long = i32; pub type c_ulong = u32; pub type c_longlong = i64; pub type c_ulonglong = u64; pub type c_float = f32; pub type c_double = f64; pub type __int8 = i8; pub type __uint8 = u8; pub type __int16 = i16; pub type __uint16 = u16; pub type __int32 = i32; pub type __uint32 = u32; pub type __int64 = i64; pub type __uint64 = u64; pub type wchar_t = u16; }
TION; extern "system" { pub fn GetFileInformationByHandle( hFile: HANDLE, lpFileInformation: LPBY_HANDLE_FILE_INFORMATION, ) -> BOOL; pub fn GetFileType(hFile: HANDLE) -> DWORD; } } pub mod winnt { use self::winapi::ctypes::{ __int64, __uint64, c_char, c_int, c_long, c_short, c_uint, c_ulong, c_void, wchar_t, }; use self::winapi::shared::minwindef::DWORD; use crate::winapi_inline as winapi; pub type SHORT = c_short; pub type HANDLE = *mut c_void; pub const FILE_ATTRIBUTE_READONLY: DWORD = 0x00000001; pub const FILE_ATTRIBUTE_HIDDEN: DWORD = 0x00000002; pub const FILE_ATTRIBUTE_SYSTEM: DWORD = 0x00000004; pub const FILE_ATTRIBUTE_DIRECTORY: DWORD = 0x00000010; pub const FILE_ATTRIBUTE_ARCHIVE: DWORD = 0x00000020; pub const FILE_ATTRIBUTE_DEVICE: DWORD = 0x00000040; pub const FILE_ATTRIBUTE_NORMAL: DWORD = 0x00000080; pub const FILE_ATTRIBUTE_TEMPORARY: DWORD = 0x00000100; pub const FILE_ATTRIBUTE_SPARSE_FILE: DWORD = 0x00000200; pub const FILE_ATTRIBUTE_REPARSE_POINT: DWORD = 0x00000400; pub const FILE_ATTRIBUTE_COMPRESSED: DWORD = 0x00000800; pub const FILE_ATTRIBUTE_OFFLINE: DWORD = 0x00001000; pub const FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: DWORD = 0x00002000; pub const FILE_ATTRIBUTE_ENCRYPTED: DWORD = 0x00004000; pub const FILE_ATTRIBUTE_INTEGRITY_STREAM: DWORD = 0x00008000; pub const FILE_ATTRIBUTE_VIRTUAL: DWORD = 0x00010000; pub const FILE_ATTRIBUTE_NO_SCRUB_DATA: DWORD =
random
[ { "content": "/// Returns the file type of the given handle.\n\n///\n\n/// If there was a problem querying the file type, then an error is returned.\n\n///\n\n/// This corresponds to calling [`GetFileType`].\n\n///\n\n/// [`GetFileType`]: https://docs.microsoft.com/en-us/windows/desktop/api/fileapi/nf-fileapi-g...
Rust
src/mimc_hash.rs
oskarth/semaphore-rs
d462a4372f1fd9c27610f2acfe4841fab1d396aa
use crate::util::keccak256; use once_cell::sync::Lazy; use zkp_u256::U256; const NUM_ROUNDS: usize = 220; static MODULUS: Lazy<U256> = Lazy::new(|| { U256::from_decimal_str( "21888242871839275222246405745257275088548364400416034343698204186575808495617", ) .unwrap() }); static ROUND_CONSTANTS: Lazy<[U256; NUM_ROUNDS]> = Lazy::new(|| { const SEED: &str = "mimcsponge"; let mut result = [U256::ZERO; NUM_ROUNDS]; let mut bytes = keccak256(SEED.as_bytes()); for constant in result[1..NUM_ROUNDS - 1].iter_mut() { bytes = keccak256(&bytes); *constant = U256::from_bytes_be(&bytes); *constant %= &*MODULUS; } result }); fn mix(left: &mut U256, right: &mut U256) { debug_assert!(*left < *MODULUS); debug_assert!(*right < *MODULUS); for round_constant in &*ROUND_CONSTANTS { let t = (&*left + round_constant) % &*MODULUS; let t2 = t.mulmod(&t, &*MODULUS); let t4 = t2.mulmod(&t2, &*MODULUS); let t5 = t.mulmod(&t4, &*MODULUS); *right += t5; *right %= &*MODULUS; std::mem::swap(left, right); } std::mem::swap(left, right); } #[must_use] pub fn hash(values: &[U256]) -> U256 { let mut left = U256::ZERO; let mut right = U256::ZERO; for value in values { let value = value % &*MODULUS; left += value; left %= &*MODULUS; mix(&mut left, &mut right); } left } #[cfg(test)] pub mod test { use super::*; use hex_literal::hex; #[test] fn test_round_constants() { assert_eq!(ROUND_CONSTANTS[0], U256::ZERO); assert_eq!( ROUND_CONSTANTS[1], U256::from_decimal_str( "7120861356467848435263064379192047478074060781135320967663101236819528304084" ) .unwrap() ); assert_eq!( ROUND_CONSTANTS[2], U256::from_decimal_str( "5024705281721889198577876690145313457398658950011302225525409148828000436681" ) .unwrap() ); assert_eq!( ROUND_CONSTANTS[218], U256::from_decimal_str( "2119542016932434047340813757208803962484943912710204325088879681995922344971" ) .unwrap() ); assert_eq!(ROUND_CONSTANTS[219], U256::ZERO); } #[test] fn test_mix() { let mut left = U256::ONE; let mut right = U256::ZERO; mix(&mut left, &mut right); 
assert_eq!( left, U256::from_decimal_str( "8792246410719720074073794355580855662772292438409936688983564419486782556587" ) .unwrap() ); assert_eq!( right, U256::from_decimal_str( "7326554092124867281481480523863654579712861994895051796475958890524736238844" ) .unwrap() ); left += U256::from(2); mix(&mut left, &mut right); assert_eq!( left, U256::from_decimal_str( "19814528709687996974327303300007262407299502847885145507292406548098437687919" ) .unwrap() ); assert_eq!( right, U256::from_decimal_str( "3888906192024793285683241274210746486868893421288515595586335488978789653213" ) .unwrap() ); } #[test] fn test_hash() { assert_eq!( hash(&[U256::from(1_u64), U256::from(2_u64)]), U256::from_bytes_be(&hex!( "2bcea035a1251603f1ceaf73cd4ae89427c47075bb8e3a944039ff1e3d6d2a6f" )) ); assert_eq!( hash(&[ U256::from(1_u64), U256::from(2_u64), U256::from(3_u64), U256::from(4_u64) ]), U256::from_bytes_be(&hex!( "03e86bdc4eac70bd601473c53d8233b145fe8fd8bf6ef25f0b217a1da305665c" )) ); } } #[cfg(feature = "bench")] pub mod bench { #[allow(clippy::wildcard_imports)] use super::*; use criterion::Criterion; pub fn group(criterion: &mut Criterion) { bench_mix(criterion); } fn bench_mix(criterion: &mut Criterion) { let mut left = U256::ONE; let mut right = U256::ZERO; criterion.bench_function("mimc_mix", move |bencher| { bencher.iter(|| mix(&mut left, &mut right)); }); } }
use crate::util::keccak256; use once_cell::sync::Lazy; use zkp_u256::U256; const NUM_ROUNDS: usize = 220; static MODULUS: Lazy<U256> = Lazy::new(|| { U256::from_decimal_str( "21888242871839275222246405745257275088548364400416034343698204186575808495617", ) .unwrap() }); static ROUND_CONSTANTS: Lazy<[U256; NUM_ROUNDS]> = Lazy::new(|| { const SEED: &str = "mimcsponge"; let mut result = [U256::ZERO; NUM_ROUNDS]; let mut bytes = keccak256(SEED.as_bytes()); for constant in result[1..NUM_ROUNDS - 1].iter_mut() { bytes = keccak256(&bytes); *constant = U256::from_bytes_be(&bytes); *constant %= &*MODULUS; } result }); fn mix(left: &mut U256, right: &mut U256) { debug_assert!(*left < *MODULUS); debug_assert!(*right < *MODULUS); for round_constant in &*ROUND_CONSTANTS { let t = (&*left + round_constant) % &*MODULUS; let t2 = t.mulmod(&t, &*MODULUS); let t4 = t2.mulmod(&t2, &*MODULUS); let t5 = t.mulmod(&t4, &*MODULUS); *right += t5; *right %= &*MODULUS; std::mem::swap(left, right); } std::mem::swap(left, right); } #[must_use] pub fn hash(values: &[U256]) -> U256 { let mut left = U256::ZERO; let mut right = U256::ZERO; for value in values { let value = value % &*MODULUS; left += value; left %= &*MODULUS; mix(&mut left, &mut right); } left } #[cfg(test)] pub mod test { use super::*; use hex_literal::hex; #[test] fn test_roun
#[test] fn test_mix() { let mut left = U256::ONE; let mut right = U256::ZERO; mix(&mut left, &mut right); assert_eq!( left, U256::from_decimal_str( "8792246410719720074073794355580855662772292438409936688983564419486782556587" ) .unwrap() ); assert_eq!( right, U256::from_decimal_str( "7326554092124867281481480523863654579712861994895051796475958890524736238844" ) .unwrap() ); left += U256::from(2); mix(&mut left, &mut right); assert_eq!( left, U256::from_decimal_str( "19814528709687996974327303300007262407299502847885145507292406548098437687919" ) .unwrap() ); assert_eq!( right, U256::from_decimal_str( "3888906192024793285683241274210746486868893421288515595586335488978789653213" ) .unwrap() ); } #[test] fn test_hash() { assert_eq!( hash(&[U256::from(1_u64), U256::from(2_u64)]), U256::from_bytes_be(&hex!( "2bcea035a1251603f1ceaf73cd4ae89427c47075bb8e3a944039ff1e3d6d2a6f" )) ); assert_eq!( hash(&[ U256::from(1_u64), U256::from(2_u64), U256::from(3_u64), U256::from(4_u64) ]), U256::from_bytes_be(&hex!( "03e86bdc4eac70bd601473c53d8233b145fe8fd8bf6ef25f0b217a1da305665c" )) ); } } #[cfg(feature = "bench")] pub mod bench { #[allow(clippy::wildcard_imports)] use super::*; use criterion::Criterion; pub fn group(criterion: &mut Criterion) { bench_mix(criterion); } fn bench_mix(criterion: &mut Criterion) { let mut left = U256::ONE; let mut right = U256::ZERO; criterion.bench_function("mimc_mix", move |bencher| { bencher.iter(|| mix(&mut left, &mut right)); }); } }
d_constants() { assert_eq!(ROUND_CONSTANTS[0], U256::ZERO); assert_eq!( ROUND_CONSTANTS[1], U256::from_decimal_str( "7120861356467848435263064379192047478074060781135320967663101236819528304084" ) .unwrap() ); assert_eq!( ROUND_CONSTANTS[2], U256::from_decimal_str( "5024705281721889198577876690145313457398658950011302225525409148828000436681" ) .unwrap() ); assert_eq!( ROUND_CONSTANTS[218], U256::from_decimal_str( "2119542016932434047340813757208803962484943912710204325088879681995922344971" ) .unwrap() ); assert_eq!(ROUND_CONSTANTS[219], U256::ZERO); }
function_block-function_prefixed
[ { "content": "// See <https://internals.rust-lang.org/t/path-to-lexical-absolute/14940>\n\nfn absolute(path: &str) -> Result<PathBuf> {\n\n let path = Path::new(path);\n\n let mut absolute = if path.is_absolute() {\n\n PathBuf::new()\n\n } else {\n\n std::env::current_dir()?\n\n };\n\n...
Rust
src/player.rs
ttempleton/rust-battleship
48d45f8c5d8c73ec399e1b6781418cdb5774fdc0
use crate::{direction::Direction, ship::Ship, space::Space}; use std::cmp; pub struct Player { is_cpu: bool, spaces: Vec<Space>, ships: Vec<Ship>, grid_size: [u8; 2], grid_cursor: [u8; 2], } impl Player { pub fn new(grid_size: [u8; 2], ship_count: usize, is_cpu: bool) -> Player { Player { is_cpu: is_cpu, spaces: Space::all_grid_spaces(&grid_size), ships: Vec::with_capacity(ship_count), grid_size: grid_size, grid_cursor: [0, 0], } } pub fn select_space(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> { let space_index = self.space_index(pos); let ship_hit = self.ships.iter().position(|s| s.pos().contains(pos)); self.spaces[space_index].set_checked(ship_hit.is_some())?; Ok(()) } pub fn sink_ship_if_all_hit(&mut self, pos: &[u8; 2]) -> Result<bool, &'static str> { if let Some(index) = self.ships.iter().position(|s| s.pos().contains(pos)) { let sunk = self.ships[index] .pos() .iter() .all(|p| self.space(p).is_hit()); if sunk { self.ships[index].set_sunk()?; } Ok(sunk) } else { Err("no ship at the given position") } } pub fn all_ships_sunk(&self) -> bool { self.ships.iter().all(|ship| ship.is_sunk()) } pub fn suggested_checks(&self) -> Vec<[u8; 2]> { let mut select = vec![]; let directions = Direction::all(); let hit_spaces = self .spaces .iter() .filter(|s| s.is_hit() && self.ship(s.pos()).unwrap().is_active()) .collect::<Vec<&Space>>(); for space in &hit_spaces { for direction in &directions { let unchecked = self.find_unchecked_space(space.pos(), *direction, true); if let Some(pos) = unchecked { if !select.contains(&pos) { select.push(pos); } } } } if hit_spaces.len() > 0 && select.is_empty() { for direction in &directions { let unchecked = self.find_unchecked_space(hit_spaces[0].pos(), *direction, false); if let Some(pos) = unchecked { select.push(pos); } } } if select.is_empty() { select = self .spaces .iter() .filter(|space| space.is_unchecked()) .map(|space| *space.pos()) .collect::<Vec<[u8; 2]>>(); } select } pub fn add_ship( &mut self, head: [u8; 2], 
direction: Direction, length: u8, placement: bool, ) -> Result<(), &'static str> { if self.ships.len() == self.ships.capacity() { Err("tried to add ship to a player with all ships already added") } else { let pos = self .get_ship_position(head, direction, length) .ok_or("tried to place a ship partially out of bounds")?; if !placement && !self.valid_ship_position(&pos) { Err("tried to place a ship in an invalid position") } else { let mut ship = Ship::new(pos)?; if !placement { ship.set_active()?; } self.ships.push(ship); Ok(()) } } } pub fn move_placement_ship(&mut self, direction: Direction) -> Result<(), &'static str> { let index = self.ships.len() - 1; let old_head = self.ships[index].pos()[0]; let new_head = self .movement(&old_head, direction) .ok_or("movement not possible without going out of bounds")?; let ship_pos = self .get_ship_position( new_head, self.ships[index].dir(), self.ships[index].len() as u8, ) .ok_or("movement not possible without going out of bounds")?; self.ships[index].set_pos(ship_pos)?; Ok(()) } pub fn place_placement_ship(&mut self) -> Result<(), &'static str> { let index = self.ships.len() - 1; if !self.valid_ship_position(&self.ships[index].pos()) { Err("placement ship overlaps with another ship") } else { self.ships[index].set_active()?; Ok(()) } } pub fn rotate_placement_ship(&mut self) -> Result<(), &'static str> { let index = self.ships.len() - 1; let ship_len = self.ships[index].len() as u8; let dir = self.ships[index].dir().rotated(); let old_head = self.ships[index].pos()[0]; let new_head = match dir { Direction::North => [ old_head[0], cmp::min(old_head[1], self.grid_size[1] - ship_len), ], Direction::East => [cmp::max(old_head[0], ship_len - 1), old_head[1]], Direction::South => [old_head[0], cmp::max(old_head[1], ship_len - 1)], Direction::West => [ cmp::min(old_head[0], self.grid_size[0] - ship_len), old_head[1], ], }; let ship_pos = self.get_ship_position(new_head, dir, ship_len).unwrap(); 
self.ships[index].set_pos(ship_pos)?; Ok(()) } pub fn get_ship_position( &self, head: [u8; 2], direction: Direction, length: u8, ) -> Option<Vec<[u8; 2]>> { let valid = match direction { Direction::North => head[1] + length <= self.grid_size[1], Direction::East => head[0] >= length - 1, Direction::South => head[1] >= length - 1, Direction::West => head[0] + length <= self.grid_size[0], }; if valid { let mut ship = Vec::with_capacity(length as usize); for pos in 0..length { let pos_u8 = pos as u8; ship.push(match direction { Direction::North => [head[0], head[1] + pos_u8], Direction::East => [head[0] - pos_u8, head[1]], Direction::South => [head[0], head[1] - pos_u8], Direction::West => [head[0] + pos_u8, head[1]], }); } Some(ship) } else { None } } fn valid_ship_position(&self, new_ship: &[[u8; 2]]) -> bool { new_ship.iter().all(|s| { self.valid_space(s) && !self.ship_is_in_space(s) && !(self.ship_is_next_to(s) && self.is_cpu) }) } pub fn ships(&self) -> &[Ship] { &self.ships } fn ship(&self, pos: &[u8; 2]) -> Option<&Ship> { self.ships.iter().find(|s| s.pos().contains(pos)) } pub fn ship_is_in_space(&self, pos: &[u8; 2]) -> bool { self.ships .iter() .any(|s| s.pos().contains(pos) && !s.is_placement()) } fn ship_is_next_to(&self, pos: &[u8; 2]) -> bool { let &[x, y] = pos; x > 0 && self.ship_is_in_space(&[x - 1, y]) || x < self.grid_size[0] - 1 && self.ship_is_in_space(&[x + 1, y]) || y > 0 && self.ship_is_in_space(&[x, y - 1]) || y < self.grid_size[1] - 1 && self.ship_is_in_space(&[x, y + 1]) } pub fn spaces(&self) -> &[Space] { &self.spaces } fn valid_space(&self, pos: &[u8; 2]) -> bool { pos[0] < self.grid_size[0] && pos[1] < self.grid_size[1] } pub fn space(&self, pos: &[u8; 2]) -> &Space { self.spaces.get(self.space_index(pos)).unwrap() } fn space_index(&self, pos: &[u8; 2]) -> usize { self.grid_size[0] as usize * pos[0] as usize + pos[1] as usize } fn movement(&self, pos: &[u8; 2], direction: Direction) -> Option<[u8; 2]> { let valid = match direction { 
Direction::North => pos[1] > 0, Direction::East => pos[0] < self.grid_size[0] - 1, Direction::South => pos[1] < self.grid_size[1] - 1, Direction::West => pos[0] > 0, }; match valid { true => Some(match direction { Direction::North => [pos[0], pos[1] - 1], Direction::East => [pos[0] + 1, pos[1]], Direction::South => [pos[0], pos[1] + 1], Direction::West => [pos[0] - 1, pos[1]], }), false => None, } } fn find_unchecked_space( &self, pos: &[u8; 2], direction: Direction, check_for_line: bool, ) -> Option<[u8; 2]> { let mut check_pos = self.movement(pos, direction); while let Some(next_pos) = check_pos { let next_space = self.space(&next_pos); match next_space.is_hit() { true => check_pos = self.movement(&next_pos, direction), false => { if !next_space.is_unchecked() { check_pos = None; } break; } }; } if check_for_line && check_pos.is_some() { let unchecked = check_pos.unwrap(); let opposite_dir = direction.opposite(); let prev_pos = self.movement(&unchecked, opposite_dir).unwrap(); if &prev_pos == pos { check_pos = None; } } check_pos } pub fn grid_cursor(&self) -> &[u8; 2] { &self.grid_cursor } pub fn move_grid_cursor(&mut self, direction: Direction) -> Result<(), &'static str> { if let Some(new_cursor) = self.movement(&self.grid_cursor, direction) { self.set_grid_cursor(&new_cursor)?; Ok(()) } else { Err("tried to move grid cursor out of bounds") } } pub fn set_grid_cursor(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> { if self.space_index(pos) < self.spaces.len() { self.grid_cursor = *pos; Ok(()) } else { Err("tried to set the grid cursor to a nonexistent space") } } pub fn is_cpu(&self) -> bool { self.is_cpu } pub fn placement_ship(&self) -> Result<&Ship, &'static str> { let ships_len = self.ships.len(); if ships_len == 0 { Err("player has no ships") } else if !self.ships[ships_len - 1].is_placement() { Err("player has no placement ship") } else { Ok(&self.ships[self.ships.len() - 1]) } } pub fn placement_ship_mut(&mut self) -> Result<&mut Ship, &'static 
str> { let ships_len = self.ships.len(); if ships_len == 0 { Err("player has no ships") } else if !self.ships[ships_len - 1].is_placement() { Err("player has no placement ship") } else { Ok(&mut self.ships[ships_len - 1]) } } }
use crate::{direction::Direction, ship::Ship, space::Space}; use std::cmp; pub struct Player { is_cpu: bool, spaces: Vec<Space>, ships: Vec<Ship>, grid_size: [u8; 2], grid_cursor: [u8; 2], } impl Player { pub fn new(grid_size: [u8; 2], ship_count: usize, is_cpu: bool) -> Player { Player { is_cpu: is_cpu, spaces: Space::all_grid_spaces(&grid_size), ships: Vec::with_capacity(ship_count), grid_size: grid_size, grid_cursor: [0, 0], } } pub fn select_space(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> { let space_index = self.space_index(pos); let ship_hit = self.ships.iter().position(|s| s.pos().contains(pos)); self.spaces[space_index].set_checked(ship_hit.is_some())?; Ok(()) } pub fn sink_ship_if_all_hit(&mut self, pos: &[u8; 2]) -> Result<bool, &'static str> { if let Some(index) = self.ships.iter().position(|s| s.pos().contains(pos)) { let sunk = self.ships[index] .pos() .iter() .all(|p| self.space(p).is_hit()); if sunk { self.ships[index].set_sunk()?; } Ok(sunk) } else { Err("no ship at the given position") } } pub fn all_ships_sunk(&self) -> bool { self.ships.iter().all(|ship| ship.is_sunk()) } pub fn suggested_checks(&self) -> Vec<[u8; 2]> { let mut select = vec![]; let directions = Direction::all(); let hit_spaces = self .spaces .iter() .filter(|s| s.is_hit() && self.ship(s.pos()).unwrap().is_active()) .collect::<Vec<&Space>>(); for space in &hit_spaces { for direction in &directions { let unchecked = self.find_unchecked_space(space.pos(), *direction, true); if let Some(pos) = unchecked { if !select.contains(&pos) { select.push(pos); } } } } if hit_spaces.len() > 0 && select.is_empty() { for direction in &directions { let unchecked = self.find_unchecked_space(hit_spaces[0].pos(), *direction, false); if let Some(pos) = unchecked { select.push(pos); } } } if select.is_empty() { select = self .spaces .iter() .filter(|space| space.is_unchecked()) .map(|space| *space.pos()) .collect::<Vec<[u8; 2]>>(); } select }
pub fn move_placement_ship(&mut self, direction: Direction) -> Result<(), &'static str> { let index = self.ships.len() - 1; let old_head = self.ships[index].pos()[0]; let new_head = self .movement(&old_head, direction) .ok_or("movement not possible without going out of bounds")?; let ship_pos = self .get_ship_position( new_head, self.ships[index].dir(), self.ships[index].len() as u8, ) .ok_or("movement not possible without going out of bounds")?; self.ships[index].set_pos(ship_pos)?; Ok(()) } pub fn place_placement_ship(&mut self) -> Result<(), &'static str> { let index = self.ships.len() - 1; if !self.valid_ship_position(&self.ships[index].pos()) { Err("placement ship overlaps with another ship") } else { self.ships[index].set_active()?; Ok(()) } } pub fn rotate_placement_ship(&mut self) -> Result<(), &'static str> { let index = self.ships.len() - 1; let ship_len = self.ships[index].len() as u8; let dir = self.ships[index].dir().rotated(); let old_head = self.ships[index].pos()[0]; let new_head = match dir { Direction::North => [ old_head[0], cmp::min(old_head[1], self.grid_size[1] - ship_len), ], Direction::East => [cmp::max(old_head[0], ship_len - 1), old_head[1]], Direction::South => [old_head[0], cmp::max(old_head[1], ship_len - 1)], Direction::West => [ cmp::min(old_head[0], self.grid_size[0] - ship_len), old_head[1], ], }; let ship_pos = self.get_ship_position(new_head, dir, ship_len).unwrap(); self.ships[index].set_pos(ship_pos)?; Ok(()) } pub fn get_ship_position( &self, head: [u8; 2], direction: Direction, length: u8, ) -> Option<Vec<[u8; 2]>> { let valid = match direction { Direction::North => head[1] + length <= self.grid_size[1], Direction::East => head[0] >= length - 1, Direction::South => head[1] >= length - 1, Direction::West => head[0] + length <= self.grid_size[0], }; if valid { let mut ship = Vec::with_capacity(length as usize); for pos in 0..length { let pos_u8 = pos as u8; ship.push(match direction { Direction::North => [head[0], head[1] + 
pos_u8], Direction::East => [head[0] - pos_u8, head[1]], Direction::South => [head[0], head[1] - pos_u8], Direction::West => [head[0] + pos_u8, head[1]], }); } Some(ship) } else { None } } fn valid_ship_position(&self, new_ship: &[[u8; 2]]) -> bool { new_ship.iter().all(|s| { self.valid_space(s) && !self.ship_is_in_space(s) && !(self.ship_is_next_to(s) && self.is_cpu) }) } pub fn ships(&self) -> &[Ship] { &self.ships } fn ship(&self, pos: &[u8; 2]) -> Option<&Ship> { self.ships.iter().find(|s| s.pos().contains(pos)) } pub fn ship_is_in_space(&self, pos: &[u8; 2]) -> bool { self.ships .iter() .any(|s| s.pos().contains(pos) && !s.is_placement()) } fn ship_is_next_to(&self, pos: &[u8; 2]) -> bool { let &[x, y] = pos; x > 0 && self.ship_is_in_space(&[x - 1, y]) || x < self.grid_size[0] - 1 && self.ship_is_in_space(&[x + 1, y]) || y > 0 && self.ship_is_in_space(&[x, y - 1]) || y < self.grid_size[1] - 1 && self.ship_is_in_space(&[x, y + 1]) } pub fn spaces(&self) -> &[Space] { &self.spaces } fn valid_space(&self, pos: &[u8; 2]) -> bool { pos[0] < self.grid_size[0] && pos[1] < self.grid_size[1] } pub fn space(&self, pos: &[u8; 2]) -> &Space { self.spaces.get(self.space_index(pos)).unwrap() } fn space_index(&self, pos: &[u8; 2]) -> usize { self.grid_size[0] as usize * pos[0] as usize + pos[1] as usize } fn movement(&self, pos: &[u8; 2], direction: Direction) -> Option<[u8; 2]> { let valid = match direction { Direction::North => pos[1] > 0, Direction::East => pos[0] < self.grid_size[0] - 1, Direction::South => pos[1] < self.grid_size[1] - 1, Direction::West => pos[0] > 0, }; match valid { true => Some(match direction { Direction::North => [pos[0], pos[1] - 1], Direction::East => [pos[0] + 1, pos[1]], Direction::South => [pos[0], pos[1] + 1], Direction::West => [pos[0] - 1, pos[1]], }), false => None, } } fn find_unchecked_space( &self, pos: &[u8; 2], direction: Direction, check_for_line: bool, ) -> Option<[u8; 2]> { let mut check_pos = self.movement(pos, direction); while 
let Some(next_pos) = check_pos { let next_space = self.space(&next_pos); match next_space.is_hit() { true => check_pos = self.movement(&next_pos, direction), false => { if !next_space.is_unchecked() { check_pos = None; } break; } }; } if check_for_line && check_pos.is_some() { let unchecked = check_pos.unwrap(); let opposite_dir = direction.opposite(); let prev_pos = self.movement(&unchecked, opposite_dir).unwrap(); if &prev_pos == pos { check_pos = None; } } check_pos } pub fn grid_cursor(&self) -> &[u8; 2] { &self.grid_cursor } pub fn move_grid_cursor(&mut self, direction: Direction) -> Result<(), &'static str> { if let Some(new_cursor) = self.movement(&self.grid_cursor, direction) { self.set_grid_cursor(&new_cursor)?; Ok(()) } else { Err("tried to move grid cursor out of bounds") } } pub fn set_grid_cursor(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> { if self.space_index(pos) < self.spaces.len() { self.grid_cursor = *pos; Ok(()) } else { Err("tried to set the grid cursor to a nonexistent space") } } pub fn is_cpu(&self) -> bool { self.is_cpu } pub fn placement_ship(&self) -> Result<&Ship, &'static str> { let ships_len = self.ships.len(); if ships_len == 0 { Err("player has no ships") } else if !self.ships[ships_len - 1].is_placement() { Err("player has no placement ship") } else { Ok(&self.ships[self.ships.len() - 1]) } } pub fn placement_ship_mut(&mut self) -> Result<&mut Ship, &'static str> { let ships_len = self.ships.len(); if ships_len == 0 { Err("player has no ships") } else if !self.ships[ships_len - 1].is_placement() { Err("player has no placement ship") } else { Ok(&mut self.ships[ships_len - 1]) } } }
pub fn add_ship( &mut self, head: [u8; 2], direction: Direction, length: u8, placement: bool, ) -> Result<(), &'static str> { if self.ships.len() == self.ships.capacity() { Err("tried to add ship to a player with all ships already added") } else { let pos = self .get_ship_position(head, direction, length) .ok_or("tried to place a ship partially out of bounds")?; if !placement && !self.valid_ship_position(&pos) { Err("tried to place a ship in an invalid position") } else { let mut ship = Ship::new(pos)?; if !placement { ship.set_active()?; } self.ships.push(ship); Ok(()) } } }
function_block-full_function
[ { "content": " 0 => Direction::North,\n\n 1 => Direction::East,\n\n 2 => Direction::South,\n\n 3 => Direction::West,\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n /// Returns the direction travelled from `pos1` to `pos2` if the positions\n\n ...
Rust
plugins/afl/afl_mutate/src/afl_mutate.rs
elast0ny/CROWDFUZZ
340fd0e9e03e147ebe977d456e8f6052bcf183eb
use std::mem::MaybeUninit; pub use ::afl_lib::*; pub use ::cflib::*; mod mutators; pub use mutators::*; mod bit_flip; pub use bit_flip::*; mod arithmetic; pub use arithmetic::*; mod interesting; pub use interesting::*; mod havoc; pub use havoc::*; cflib::register!(name, env!("CARGO_PKG_NAME")); cflib::register!(load, init); cflib::register!(pre_fuzz, validate); cflib::register!(fuzz, mutate_input); cflib::register!(unload, destroy); struct State { force_update: bool, prev_input_idx: usize, stage_name: String, cur_stage: MutatorStage, stat_cur_stage: StatStr, stat_total_iterations: StatNum, stat_stage_progress: StatNum, restore_input: &'static mut bool, no_select: &'static mut bool, no_mutate: &'static bool, inputs: &'static Vec<CfInputInfo>, cur_input_idx: &'static usize, cur_input: &'static mut CfInput, afl: &'static mut AflGlobals, afl_queue: &'static mut AflQueue, } fn init(core: &mut dyn PluginInterface, store: &mut CfStore) -> Result<*mut u8> { #[allow(invalid_value)] let state = Box::new(unsafe { State { force_update: true, prev_input_idx: 0, stage_name: String::new(), cur_stage: MutatorStage::default(), stat_cur_stage: core.new_stat_str("stage", 128, "[init]")?, stat_stage_progress: core.new_stat_num("progress", 0)?, stat_total_iterations: core.new_stat_num("iterations", 0)?, restore_input: store.as_mutref(STORE_RESTORE_INPUT, Some(core))?, no_select: store.as_mutref(STORE_NO_SELECT, Some(core))?, no_mutate: store.as_mutref(STORE_NO_MUTATE, Some(core))?, inputs: MaybeUninit::zeroed().assume_init(), cur_input_idx: MaybeUninit::zeroed().assume_init(), cur_input: MaybeUninit::zeroed().assume_init(), afl: MaybeUninit::zeroed().assume_init(), afl_queue: MaybeUninit::zeroed().assume_init(), } }); Ok(Box::into_raw(state) as _) } fn validate( core: &mut dyn PluginInterface, store: &mut CfStore, plugin_ctx: *mut u8, ) -> Result<()> { let state = box_ref!(plugin_ctx, State); unsafe { state.inputs = store.as_mutref(STORE_INPUT_LIST, Some(core))?; state.cur_input_idx = 
store.as_ref(STORE_INPUT_IDX, Some(core))?; state.cur_input = store.as_mutref(STORE_INPUT_BYTES, Some(core))?; match store.as_mutref(STORE_AFL_GLOBALS, None) { Ok(v) => state.afl = v, Err(e) => { core.warn("Missing AFL globals ! Is the `afl_state` plugin running ?"); return Err(e); } }; state.afl_queue = store.as_mutref(STORE_AFL_QUEUE, Some(core))?; } Ok(()) } fn mutate_input( _core: &mut dyn PluginInterface, _store: &mut CfStore, plugin_ctx: *mut u8, ) -> Result<()> { let s = box_ref!(plugin_ctx, State); if *s.no_mutate { if !s.force_update { s.stat_cur_stage.set("None"); *s.stat_stage_progress.val = 0; *s.stat_total_iterations.val = 0; s.force_update = true; } return Ok(()); } let stage = &mut s.cur_stage; let input = &mut s.cur_input; let afl = &mut s.afl; let q = unsafe { s.afl_queue.get_unchecked_mut(*s.cur_input_idx) }; if s.force_update || s.prev_input_idx != *s.cur_input_idx { stage.sync_to_input(q, afl, input); s.stage_name.clear(); stage.update_state(input, Some(&mut s.stage_name), Some(s.stat_total_iterations.val)); s.stat_cur_stage.set(&s.stage_name); *s.stat_stage_progress.val = 0; s.prev_input_idx = *s.cur_input_idx; s.force_update = false; } loop { match stage.mutate(input) { StageResult::WillRestoreInput => { *s.no_select = true; *s.restore_input = false; } StageResult::CantRestoreInput => { *s.restore_input = true; *s.no_select = false; } StageResult::Update => { s.stage_name.clear(); stage.update_state(input, Some(&mut s.stage_name), None); s.stat_cur_stage.set(&s.stage_name); continue; } StageResult::Done => { if stage.next(q, afl, input) { s.stage_name.clear(); stage.update_state(input, Some(&mut s.stage_name), Some(s.stat_total_iterations.val)); s.stat_cur_stage.set(&s.stage_name); *s.stat_stage_progress.val = 0; continue; } q.passed_det = true; *s.restore_input = false; *s.no_select = false; } }; break; } *s.stat_stage_progress.val += 1; Ok(()) } fn destroy( _core: &mut dyn PluginInterface, _store: &mut CfStore, plugin_ctx: *mut u8, ) -> 
Result<()> { let _state = box_take!(plugin_ctx, State); Ok(()) }
use std::mem::MaybeUninit; pub use ::afl_lib::*; pub use ::cflib::*; mod mutators; pub use mutators::*; mod bit_flip; pub use bit_flip::*; mod arithmetic; pub use arithmetic::*; mod interesting; pub use interesting::*; mod havoc; pub use havoc::*; cflib::register!(name, env!("CARGO_PKG_NAME")); cflib::register!(load, init); cflib::register!(pre_fuzz, validate); cflib::register!(fuzz, mutate_input); cflib::register!(unload, destroy); struct State { force_update: bool, prev_input_idx: usize, stage_name: String, cur_stage: MutatorStage, stat_cur_stage: StatStr, stat_total_iterations: StatNum, stat_stage_progress: StatNum, restore_input: &'static mut bool, no_select: &'static mut bool, no_mutate: &'static bool, inputs: &'static Vec<CfInputInfo>, cur_input_idx: &'static usize, cur_input: &'static mut CfInput, afl: &'static mut AflGlobals, afl_queue: &'static mut AflQueue, } fn init(core: &mut dyn PluginInterface, store: &mut CfStore) -> Result<*mut u8> { #[allow(invalid_value)] let state = Box::new(unsafe { State { force_update: true, prev_input_idx: 0, stage_name: String::new(), cur_stage: MutatorStage::default(), stat_cur_stage: core.new_stat_str("stage", 128, "[init]")?, stat_stage_progress: core.new_stat_num("progress", 0)?, stat_total_iterations: core.new_stat_num("iterations", 0)?, restore_input: store.as_mutref(STORE_RESTORE_INPUT, Some(core))?, no_select: store.as_mutref(STORE_NO_SELECT, Some(core))?, no_mutate: store.as_mutref(STORE_NO_MUTATE, Some(core))?, inputs: MaybeUninit::zeroed().assume_init(), cur_input_idx: MaybeUninit::zeroed().assume_init(), cur_input: MaybeUninit::zeroed().assume_init(),
fn validate( core: &mut dyn PluginInterface, store: &mut CfStore, plugin_ctx: *mut u8, ) -> Result<()> { let state = box_ref!(plugin_ctx, State); unsafe { state.inputs = store.as_mutref(STORE_INPUT_LIST, Some(core))?; state.cur_input_idx = store.as_ref(STORE_INPUT_IDX, Some(core))?; state.cur_input = store.as_mutref(STORE_INPUT_BYTES, Some(core))?; match store.as_mutref(STORE_AFL_GLOBALS, None) { Ok(v) => state.afl = v, Err(e) => { core.warn("Missing AFL globals ! Is the `afl_state` plugin running ?"); return Err(e); } }; state.afl_queue = store.as_mutref(STORE_AFL_QUEUE, Some(core))?; } Ok(()) } fn mutate_input( _core: &mut dyn PluginInterface, _store: &mut CfStore, plugin_ctx: *mut u8, ) -> Result<()> { let s = box_ref!(plugin_ctx, State); if *s.no_mutate { if !s.force_update { s.stat_cur_stage.set("None"); *s.stat_stage_progress.val = 0; *s.stat_total_iterations.val = 0; s.force_update = true; } return Ok(()); } let stage = &mut s.cur_stage; let input = &mut s.cur_input; let afl = &mut s.afl; let q = unsafe { s.afl_queue.get_unchecked_mut(*s.cur_input_idx) }; if s.force_update || s.prev_input_idx != *s.cur_input_idx { stage.sync_to_input(q, afl, input); s.stage_name.clear(); stage.update_state(input, Some(&mut s.stage_name), Some(s.stat_total_iterations.val)); s.stat_cur_stage.set(&s.stage_name); *s.stat_stage_progress.val = 0; s.prev_input_idx = *s.cur_input_idx; s.force_update = false; } loop { match stage.mutate(input) { StageResult::WillRestoreInput => { *s.no_select = true; *s.restore_input = false; } StageResult::CantRestoreInput => { *s.restore_input = true; *s.no_select = false; } StageResult::Update => { s.stage_name.clear(); stage.update_state(input, Some(&mut s.stage_name), None); s.stat_cur_stage.set(&s.stage_name); continue; } StageResult::Done => { if stage.next(q, afl, input) { s.stage_name.clear(); stage.update_state(input, Some(&mut s.stage_name), Some(s.stat_total_iterations.val)); s.stat_cur_stage.set(&s.stage_name); *s.stat_stage_progress.val 
= 0; continue; } q.passed_det = true; *s.restore_input = false; *s.no_select = false; } }; break; } *s.stat_stage_progress.val += 1; Ok(()) } fn destroy( _core: &mut dyn PluginInterface, _store: &mut CfStore, plugin_ctx: *mut u8, ) -> Result<()> { let _state = box_take!(plugin_ctx, State); Ok(()) }
afl: MaybeUninit::zeroed().assume_init(), afl_queue: MaybeUninit::zeroed().assume_init(), } }); Ok(Box::into_raw(state) as _) }
function_block-function_prefix_line
[ { "content": "// Initialize our plugin\n\nfn init(core: &mut dyn PluginInterface, store: &mut CfStore) -> Result<*mut u8> {\n\n #[allow(invalid_value)]\n\n let mut s = Box::new(unsafe {\n\n State {\n\n afl: AflGlobals::default(),\n\n queue: Vec::new(),\n\n is_calibr...
Rust
kapp_platforms/src/windows/application_windows.rs
kettle11/kettlewin
36109e9ab506b9bce55da6e1cdee0d10e64e6dc4
use super::external_windows::*; use super::utils_windows::*; use std::convert::TryInto; use std::ptr::{null, null_mut}; use kapp_platform_common::*; pub static mut CURRENT_CURSOR: HCURSOR = null_mut(); pub static mut WINDOWS_TO_REDRAW: Vec<WindowId> = Vec::new(); pub struct PlatformApplication { window_class_name: Vec<u16>, h_instance: HINSTANCE, } pub(crate) struct WindowData { pub minimum_width: u32, pub minimum_height: u32, pub maximum_width: u32, pub maximum_height: u32, } impl PlatformApplicationTrait for PlatformApplication { type EventLoop = PlatformEventLoop; fn new() -> Self { unsafe { SetProcessDpiAwareness(PROCESS_PER_MONITOR_DPI_AWARE); let window_class_name = win32_string("windowing_rust"); let h_instance = GetModuleHandleW(null_mut()); let window_class = WNDCLASSW { style: CS_DBLCLKS, lpfnWndProc: Some(super::event_loop_windows::window_callback), cbClsExtra: 0, cbWndExtra: 0, hInstance: h_instance, hIcon: null_mut(), hCursor: null_mut(), hbrBackground: null_mut(), lpszMenuName: null_mut(), lpszClassName: window_class_name.as_ptr(), }; CURRENT_CURSOR = LoadCursorW(null_mut(), IDC_ARROW); RegisterClassW(&window_class); Self { window_class_name, h_instance, } } } fn event_loop(&mut self) -> Self::EventLoop { PlatformEventLoop {} } fn set_window_position(&mut self, window_id: WindowId, x: u32, y: u32) { unsafe { let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; GetWindowRect(window_id.raw() as HWND, &mut rect); let width = rect.right - rect.left; let height = rect.bottom - rect.top; MoveWindow( window_id.raw() as HWND, x as i32, y as i32, width, height, FALSE, ); } } fn set_window_size(&mut self, window_id: WindowId, width: u32, height: u32) { unsafe { let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; GetWindowRect(window_id.raw() as HWND, &mut rect); MoveWindow( window_id.raw() as HWND, rect.left, rect.top, width as i32, height as i32, FALSE, ); } } fn set_window_title(&mut self, window_id: WindowId, title: &str) { let title = 
win32_string(title); unsafe { SetWindowTextW(window_id.raw() as HWND, title.as_ptr()); } } fn minimize_window(&mut self, window_id: WindowId) { unsafe { ShowWindow(window_id.raw() as HWND, SW_MINIMIZE); } } fn maximize_window(&mut self, window_id: WindowId) { unsafe { ShowWindow(window_id.raw() as HWND, SW_MAXIMIZE); } } fn fullscreen_window(&mut self, window_id: WindowId) { unsafe { let hwnd = window_id.raw() as HWND; let screen_width = GetSystemMetrics(SM_CXSCREEN); let screen_height = GetSystemMetrics(SM_CYSCREEN); SetWindowLongPtrW(hwnd, GWL_STYLE, (WS_VISIBLE | WS_POPUP).try_into().unwrap()); let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; GetWindowRect(window_id.raw() as HWND, &mut rect); MoveWindow( window_id.raw() as HWND, 0, 0, screen_width as i32, screen_height as i32, FALSE, ); } } fn restore_window(&mut self, window_id: WindowId) { unsafe { let hwnd = window_id.raw() as HWND; let window_style = WS_OVERLAPPEDWINDOW | WS_VISIBLE | CS_OWNDC; SetWindowLongPtrW(hwnd, GWL_STYLE, window_style.try_into().unwrap()); ShowWindow(window_id.raw() as HWND, SW_RESTORE); } } fn close_window(&mut self, window_id: WindowId) { unsafe { CloseWindow(window_id.raw() as HWND); } } fn redraw_window(&mut self, window_id: WindowId) { redraw_manager::add_draw_request(window_id); } fn get_window_size(&mut self, window_id: WindowId) -> (u32, u32) { let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; unsafe { GetClientRect(window_id.raw() as HWND, &mut rect); } ( (rect.right - rect.left) as u32, (rect.bottom - rect.top) as u32, ) } fn get_window_scale(&mut self, window_id: WindowId) -> f64 { let dpi = unsafe { GetDpiForWindow(window_id.raw() as HWND) }; dpi as f64 / USER_DEFAULT_SCREEN_DPI as f64 } fn lock_mouse_position(&mut self) { unsafe { let mut position = POINT { x: 0, y: 0 }; GetCursorPos(&mut position); let rect = RECT { left: position.x, top: position.y, right: position.x, bottom: position.y, }; ClipCursor(&rect); } } fn unlock_mouse_position(&mut 
self) { unsafe { ClipCursor(null()); } } fn new_window(&mut self, window_parameters: &WindowParameters) -> WindowId { unsafe { let extended_style = WS_EX_APPWINDOW; let window_style = WS_OVERLAPPEDWINDOW | WS_VISIBLE | CS_OWNDC; let title = win32_string(&window_parameters.title); let (x, y) = if let Some(position) = window_parameters.position { (position.0 as i32, position.1 as i32) } else { (CW_USEDEFAULT, CW_USEDEFAULT) }; let (width, height) = window_parameters .size .map_or((CW_USEDEFAULT, CW_USEDEFAULT), |d| { let mut rect = RECT { left: 0, top: 0, right: d.0 as i32, bottom: d.1 as i32, }; AdjustWindowRectEx(&mut rect, window_style, FALSE, extended_style); (rect.right - rect.left, rect.bottom - rect.top) }); let (minimum_width, minimum_height) = window_parameters.minimum_size.unwrap_or(( GetSystemMetrics(SM_CXMINTRACK) as u32, GetSystemMetrics(SM_CYMINTRACK) as u32, )); let (maximum_width, maximum_height) = window_parameters.maximum_size.unwrap_or(( GetSystemMetrics(SM_CXMAXTRACK) as u32, GetSystemMetrics(SM_CYMAXTRACK) as u32, )); let window_data = Box::new(WindowData { minimum_width, minimum_height, maximum_width, maximum_height, }); let data = Box::leak(window_data) as *mut WindowData as *mut std::ffi::c_void; let window_handle = CreateWindowExW( extended_style, self.window_class_name.as_ptr(), title.as_ptr(), window_style, x as i32, y as i32, width, height, null_mut(), null_mut(), self.h_instance, data, ); let window_id = WindowId::new(window_handle as *mut std::ffi::c_void); redraw_manager::add_draw_request(window_id); WINDOWS_TO_REDRAW.push(window_id); window_id } } fn quit(&self) { unsafe { PostQuitMessage(0); } } fn set_cursor(&mut self, cursor: Cursor) { unsafe { let cursor = match cursor { Cursor::Arrow => LoadCursorW(null_mut(), IDC_ARROW), Cursor::IBeam => LoadCursorW(null_mut(), IDC_IBEAM), Cursor::PointingHand => LoadCursorW(null_mut(), IDC_ARROW), Cursor::OpenHand => LoadCursorW(null_mut(), IDC_HAND), Cursor::ClosedHand => 
LoadCursorW(null_mut(), IDC_HAND), }; SetCursor(super::application_windows::CURRENT_CURSOR); let mut position = POINT { x: 0, y: 0 }; GetCursorPos(&mut position); SetCursorPos(position.x, position.y); CURRENT_CURSOR = cursor; } } fn hide_cursor(&mut self) { unsafe { ShowCursor(FALSE); } } fn show_cursor(&mut self) { unsafe { ShowCursor(TRUE); } } fn raw_window_handle(&self, window_id: WindowId) -> RawWindowHandle { raw_window_handle::RawWindowHandle::Windows(raw_window_handle::windows::WindowsHandle { hwnd: unsafe { window_id.raw() }, hinstance: self.h_instance as *mut std::ffi::c_void, ..raw_window_handle::windows::WindowsHandle::empty() }) } fn start_text_input(&mut self) { todo!() } fn end_text_input(&mut self) { todo!() } fn set_text_input_rectangle( &mut self, _window_id: WindowId, _x: f64, _y: f64, _width: f64, _height: f64, ) { todo!() } } impl Drop for PlatformApplication { fn drop(&mut self) { self.quit(); } } pub struct PlatformEventLoop {} impl PlatformEventLoopTrait for PlatformEventLoop { fn run(&self, callback: Box<dyn FnMut(kapp_platform_common::Event)>) { super::event_loop_windows::run(callback); } }
use super::external_windows::*; use super::utils_windows::*; use std::convert::TryInto; use std::ptr::{null, null_mut}; use kapp_platform_common::*; pub static mut CURRENT_CURSOR: HCURSOR = null_mut(); pub static mut WINDOWS_TO_REDRAW: Vec<WindowId> = Vec::new(); pub struct PlatformApplication { window_class_name: Vec<u16>, h_instance: HINSTANCE, } pub(crate) struct WindowData { pub minimum_width: u32, pub minimum_height: u32, pub maximum_width: u32, pub maximum_height: u32, } impl PlatformApplicationTrait for PlatformApplication { type EventLoop = PlatformEventLoop; fn new() -> Self { unsafe { SetProcessDpiAwareness(PROCESS_PER_MONITOR_DPI_AWARE); let window_class_name = win32_string("windowing_rust"); let h_instance = GetModuleHandleW(null_mut()); let window_class = WNDCLASSW { style: CS_DBLCLKS, lpfnWndProc: Some(super::event_loop_windows::window_callback), cbClsExtra: 0, cbWndExtra: 0, hInstance: h_instance, hIcon: null_mut(), hCursor: null_mut(), hbrBackground: null_mut(), lpszMenuName: null_mut(), lpszClassName: window_class_name.as_ptr(), }; CURRENT_CURSOR = LoadCursorW(null_mut(), IDC_ARROW); RegisterClassW(&window_class); Self { window_class_name, h_instance, } } } fn event_loop(&mut self) -> Self::EventLoop { PlatformEventLoop {} } fn set_window_position(&mut self, window_id: WindowId, x: u32, y: u32) { unsafe { let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; GetWindowRect(window_id.raw() as HWND, &mut rect); let width = rect.right - rect.left; let height = rect.bottom - rect.top; MoveWindow( window_id.raw() as HWND, x as i32, y as i32, width, height, FALSE, ); } } fn set_window_size(&mut self, window_id: WindowId, width: u32, height: u32) { unsafe { let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; GetWindowRect(window_id.raw() as HWND, &mut rect); MoveWindow( window_id.raw() as HWND, rect.left, rect.top, width as i32, height as i32, FALSE, ); } } fn set_window_title(&mut self, window_id: WindowId, title: &str) { let title = 
win32_string(title); unsafe { SetWindowTextW(window_id.raw() as HWND, title.as_ptr()); } } fn minimize_window(&mut self, window_id: WindowId) { unsafe { ShowWindow(window_id.raw() as HWND, SW_MINIMIZE); } } fn maximize_window(&mut self, window_id: WindowId) { unsafe { ShowWindow(window_id.raw() as HWND, SW_MAXIMIZE); } } fn fullscreen_window(&mut self, window_id: WindowId) { unsafe { let hwnd = window_id.raw() as HWND; let screen_width = GetSystemMetrics(SM_CXSCREEN); let screen_height = GetSystemMetrics(SM_CYSCREEN); SetWindowLongPtrW(hwnd, GWL_STYLE, (WS_VISIBLE | WS_POPUP).try_into().unwrap()); let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; GetWindowRect(window_id.raw() as HWND, &mut rect); MoveWindow( window_id.raw() as HWND, 0, 0, screen_width as i32, screen_height as i32, FALSE, ); } } fn restore_window(&mut self, window_id: WindowId) { unsafe { let hwnd = window_id.raw() as HWND; let window_style = WS_OVERLAPPEDWINDOW | WS_VISIBLE | CS_OWNDC; SetWindowLongPtrW(hwnd, GWL_STYLE, window_style.try_into().unwrap()); ShowWindow(window_id.raw() as HWND, SW_RESTORE); } } fn close_window(&mut self, window_id: WindowId) { unsafe { CloseWindow(window_id.raw() as HWND); } } fn redraw_window(&mut self, window_id: WindowId) { redraw_manager::add_draw_request(window_id); } fn get_window_size(&mut self, window_id: WindowId) -> (u32, u32) { let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; unsafe { GetClientRect(window_id.raw() as HWND, &mut rect); } ( (rect.right - rect.left) as u32, (rect.bottom - rect.top) as u32, ) } fn get_window_scale(&mut self, window_id: WindowId) -> f64 { let dpi = unsafe { GetDpiForWindow(window_id.raw() as HWND) }; dpi as f64 / USER_DEFAULT_SCREEN_DPI as f64 } fn lock_mouse_position(&mut self) { unsafe { let mut position = POINT { x: 0, y: 0 }; GetCursorPos(&mut position); let rect = RECT { left: position.x, top: position.y, right: position.x, bottom: position.y, }; ClipCursor(&rect); } } fn unlock_mouse_position(&mut 
self) { unsafe { ClipCursor(null()); } } fn new_window(&mut self, window_parameters: &WindowParameters) -> WindowId { unsafe { let extended_style = WS_EX_APPWINDOW; let window_style = WS_OVERLAPPEDWINDOW | WS_VISIBLE | CS_OWNDC; let title = win32_string(&window_parameters.title); let (x, y) = if let Some(position) = window_parameters.position { (position.0 as i32, position.1 as i32) } else { (CW_USEDEFAULT, CW_USEDEFAULT) }; let (width, height) = window_parameters .size .map_or((CW_USEDEFAULT, CW_USEDEFAULT), |d| { let mut rect = RECT { left: 0, top: 0, right: d.0 as i32, bottom: d.1 as i32, }; AdjustWindowRectEx(&mut rect, window_style, FALSE, extended_style); (rect.right - rect.left, rect.bottom - rect.top) }); let (minimum_width, minimum_height) = window_parameters.minimum_size.unwrap_or(( GetSystemMetrics(SM_CXMINTRACK) as u32, GetSystemMetrics(SM_CYMINTRACK) as u32, )); let (maximum_width, maximum_height) = window_parameters.maximum_size.unwrap_or(( GetSystemMetrics(SM_CXMAXTRACK) as u32, GetSystemMetrics(SM_CYMAXTRACK) as u32, )); let window_data = Box::new(WindowData { minimum_width, minimum_height, maximum_width, maximum_height, }); let data = Box::leak(window_data) as *mut WindowData as *mut std::ffi::c_void; let window_handle = CreateWindowExW( extended_style, self.window_class_name.as_ptr(), title.as_ptr(), window_style, x as i32, y as i32, width, height, null_mut(), null_mut(), self.h_instance, data, ); let window_id = WindowId::new(window_handle as *mut std::ffi::c_void); redraw_manager::add_draw_request(window_id); WINDOWS_TO_REDRAW.push(window_id); window_id } } fn quit(&self) { unsafe { PostQuitMessage(0); } } fn set_cursor(&mut self, cursor: Cursor) { unsafe { let cursor =
; SetCursor(super::application_windows::CURRENT_CURSOR); let mut position = POINT { x: 0, y: 0 }; GetCursorPos(&mut position); SetCursorPos(position.x, position.y); CURRENT_CURSOR = cursor; } } fn hide_cursor(&mut self) { unsafe { ShowCursor(FALSE); } } fn show_cursor(&mut self) { unsafe { ShowCursor(TRUE); } } fn raw_window_handle(&self, window_id: WindowId) -> RawWindowHandle { raw_window_handle::RawWindowHandle::Windows(raw_window_handle::windows::WindowsHandle { hwnd: unsafe { window_id.raw() }, hinstance: self.h_instance as *mut std::ffi::c_void, ..raw_window_handle::windows::WindowsHandle::empty() }) } fn start_text_input(&mut self) { todo!() } fn end_text_input(&mut self) { todo!() } fn set_text_input_rectangle( &mut self, _window_id: WindowId, _x: f64, _y: f64, _width: f64, _height: f64, ) { todo!() } } impl Drop for PlatformApplication { fn drop(&mut self) { self.quit(); } } pub struct PlatformEventLoop {} impl PlatformEventLoopTrait for PlatformEventLoop { fn run(&self, callback: Box<dyn FnMut(kapp_platform_common::Event)>) { super::event_loop_windows::run(callback); } }
match cursor { Cursor::Arrow => LoadCursorW(null_mut(), IDC_ARROW), Cursor::IBeam => LoadCursorW(null_mut(), IDC_IBEAM), Cursor::PointingHand => LoadCursorW(null_mut(), IDC_ARROW), Cursor::OpenHand => LoadCursorW(null_mut(), IDC_HAND), Cursor::ClosedHand => LoadCursorW(null_mut(), IDC_HAND), }
if_condition
[ { "content": "fn get_window_data(hwnd: HWND) -> Option<*mut WindowData> {\n\n let data = unsafe { GetWindowLongPtrW(hwnd, GWLP_USERDATA) as *mut WindowData };\n\n if data == std::ptr::null_mut() {\n\n None\n\n } else {\n\n Some(data)\n\n }\n\n}\n", "file_path": "kapp_platforms/src/...
Rust
fix41/src/standard_message_header.rs
nappa85/serde_fix
1f11fc5484e6f7fd516c430a61241fb7070e7d4c
use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)] pub struct StandardMessageHeader<const T: char> { #[serde(rename = "8")] #[serde(default)] pub begin_string: fix_common::FixVersion<1>, #[serde(deserialize_with = "fix_common::workarounds::from_str")] #[serde(rename = "9")] pub body_length: u32, #[serde(rename = "35")] #[serde(default)] pub msg_type: MsgType<T>, #[serde(rename = "49")] pub sender_comp_id: String, #[serde(rename = "56")] pub target_comp_id: String, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "115")] pub on_behalf_of_comp_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "128")] pub deliver_to_comp_id: Option<String>, #[serde(rename = "90")] #[serde(skip_serializing_if = "Option::is_none")] #[serde(alias = "91")] pub secure_data: Option<fix_common::EncodedText<91>>, #[serde(deserialize_with = "fix_common::workarounds::from_str")] #[serde(rename = "34")] pub msg_seq_num: u32, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "50")] pub sender_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "142")] pub sender_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "57")] pub target_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "143")] pub target_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "116")] pub on_behalf_of_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "144")] pub on_behalf_of_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "129")] pub deliver_to_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "145")] pub deliver_to_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename 
= "43")] pub poss_dup_flag: Option<PossDupFlag>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "97")] pub poss_resend: Option<PossResend>, #[serde(rename = "52")] pub sending_time: fix_common::UTCTimeOnly, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "122")] pub orig_sending_time: Option<fix_common::UTCTimeOnly>, } #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)] pub enum MsgType<const T: char> { #[serde(rename = "0")] Heartbeat, #[serde(rename = "1")] TestRequest, #[serde(rename = "2")] ResendRequest, #[serde(rename = "3")] Reject, #[serde(rename = "4")] SequenceReset, #[serde(rename = "5")] Logout, #[serde(rename = "6")] IndicationOfInterest, #[serde(rename = "7")] Advertisement, #[serde(rename = "8")] ExecutionReport, #[serde(rename = "9")] OrderCancelReject, #[serde(rename = "A")] Logon, #[serde(rename = "B")] News, #[serde(rename = "C")] Email, #[serde(rename = "D")] NewOrderSingle, #[serde(rename = "E")] NewOrderList, #[serde(rename = "F")] OrderCancelRequest, #[serde(rename = "G")] OrderCancelReplaceRequest, #[serde(rename = "H")] OrderStatusRequest, #[serde(rename = "J")] Allocation, #[serde(rename = "K")] ListCancelRequest, #[serde(rename = "L")] ListExecute, #[serde(rename = "M")] ListStatusRequest, #[serde(rename = "N")] ListStatus, #[serde(rename = "P")] AllocationAck, #[serde(rename = "Q")] DonTKnowTrade, #[serde(rename = "R")] QuoteRequest, #[serde(rename = "S")] Quote, #[serde(rename = "T")] SettlementInstructions, } impl<const T: char> Default for MsgType<T> { fn default() -> Self { match T { '0' => MsgType::Heartbeat, '1' => MsgType::TestRequest, '2' => MsgType::ResendRequest, '3' => MsgType::Reject, '4' => MsgType::SequenceReset, '5' => MsgType::Logout, '6' => MsgType::IndicationOfInterest, '7' => MsgType::Advertisement, '8' => MsgType::ExecutionReport, '9' => MsgType::OrderCancelReject, 'A' => MsgType::Logon, 'B' => MsgType::News, 'C' => MsgType::Email, 'D' => MsgType::NewOrderSingle, 'E' 
=> MsgType::NewOrderList, 'F' => MsgType::OrderCancelRequest, 'G' => MsgType::OrderCancelReplaceRequest, 'H' => MsgType::OrderStatusRequest, 'J' => MsgType::Allocation, 'K' => MsgType::ListCancelRequest, 'L' => MsgType::ListExecute, 'M' => MsgType::ListStatusRequest, 'N' => MsgType::ListStatus, 'P' => MsgType::AllocationAck, 'Q' => MsgType::DonTKnowTrade, 'R' => MsgType::QuoteRequest, 'S' => MsgType::Quote, 'T' => MsgType::SettlementInstructions, _ => unimplemented!(), } } } #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)] pub enum PossDupFlag { #[serde(rename = "Y")] PossibleDuplicate, #[serde(rename = "N")] OriginalTransmission, } impl Default for PossDupFlag { fn default() -> Self { PossDupFlag::PossibleDuplicate } } #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)] pub enum PossResend { #[serde(rename = "Y")] PossibleResend, #[serde(rename = "N")] OriginalTransmission, } impl Default for PossResend { fn default() -> Self { PossResend::PossibleResend } }
use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)] pub struct StandardMessageHeader<const T: char> { #[serde(rename = "8")] #[serde(default)] pub begin_string: fix_common::FixVersion<1>, #[serde(deserialize_with = "fix_common::workarounds::from_str")] #[serde(rename = "9")] pub body_length: u32, #[serde(rename = "35")] #[serde(default)] pub msg_type: MsgType<T>, #[serde(rename = "49")] pub sender_comp_id: String, #[serde(rename = "56")] pub target_comp_id: String, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "115")] pub on_behalf_of_comp_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "128")] pub deliver_to_comp_id: Option<String>, #[serde(rename = "90")] #[serde(skip_serializing_if = "Option::is_none")] #[serde(alias = "91")] pub secure_data: Option<fix_common::EncodedText<91>>, #[serde(deserialize_with = "fix_common::workarounds::from_str")] #[serde(rename = "34")] pub msg_seq_num: u32, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "50")] pub sender_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "142")] pub sender_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "57")] pub target_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "143")] pub target_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "116")] pub on_behalf_of_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "144")] pub on_behalf_of_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "129")] pub deliver_to_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "145")] pub deliver_to_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename 
= "43")] pub poss_dup_flag: Option<PossDupFlag>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "97")] pub poss_resend: Option<PossResend>, #[serde(rename = "52")] pub sending_time: fix_common::UTCTimeOnly, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "122")] pub orig_sending_time: Option<fix_common::UTCTimeOnly>, } #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)] pub enum MsgType<const T: char> { #[serde(rename = "0")] Heartbeat, #[serde(rename = "1")] TestRequest, #[serde(rename = "2")] ResendRequest, #[serde(rename = "3")] Reject, #[serde(rename = "4")] SequenceReset, #[serde(rename = "5")] Logout, #[serde(rename = "6")] IndicationOfInterest, #[serde(rename = "7")] Advertisement, #[serde(rename = "8")] ExecutionReport, #[serde(rename = "9")] OrderCancelReject, #[serde(rename = "A")] Logon, #[serde(rename = "B")] News, #[serde(rename = "C")] Email, #[serde(rename = "D")] NewOrderSingle, #[serde(rename = "E")] NewOrderList, #[serde(rename = "F")] OrderCancelRequest, #[serde(rename = "G")] OrderCancelReplaceRequest, #[serde(rename = "H")] OrderStatusRequest, #[serde(rename = "J")] Allocation, #[serde(rename = "K")] ListCancelRequest, #[serde(rename = "L")] ListExecute, #[serde(rename = "M")] ListStatusRequest, #[serde(rename = "N")] ListStatus, #[serde(rename = "P")] AllocationAck, #[serde(rename = "Q")] DonTKnowTrade, #[serde(rename = "R")] QuoteRequest, #[serde(rename = "S")] Quote, #[serde(rename = "T")] SettlementInstructions, } impl<const T: char> Default for MsgType<T> { fn default() -> Self { match T { '0' => MsgType::Heartbeat, '1' => MsgType::TestRequest, '2' => MsgType::ResendRequest, '3' => MsgType::Reject, '4' => MsgType::SequenceReset, '5' => MsgType::Logout, '6' => MsgType::IndicationOfInterest, '7' => MsgType::Advertisement, '
} #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)] pub enum PossDupFlag { #[serde(rename = "Y")] PossibleDuplicate, #[serde(rename = "N")] OriginalTransmission, } impl Default for PossDupFlag { fn default() -> Self { PossDupFlag::PossibleDuplicate } } #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)] pub enum PossResend { #[serde(rename = "Y")] PossibleResend, #[serde(rename = "N")] OriginalTransmission, } impl Default for PossResend { fn default() -> Self { PossResend::PossibleResend } }
8' => MsgType::ExecutionReport, '9' => MsgType::OrderCancelReject, 'A' => MsgType::Logon, 'B' => MsgType::News, 'C' => MsgType::Email, 'D' => MsgType::NewOrderSingle, 'E' => MsgType::NewOrderList, 'F' => MsgType::OrderCancelRequest, 'G' => MsgType::OrderCancelReplaceRequest, 'H' => MsgType::OrderStatusRequest, 'J' => MsgType::Allocation, 'K' => MsgType::ListCancelRequest, 'L' => MsgType::ListExecute, 'M' => MsgType::ListStatusRequest, 'N' => MsgType::ListStatus, 'P' => MsgType::AllocationAck, 'Q' => MsgType::DonTKnowTrade, 'R' => MsgType::QuoteRequest, 'S' => MsgType::Quote, 'T' => MsgType::SettlementInstructions, _ => unimplemented!(), } }
function_block-function_prefix_line
[ { "content": "/// Serializes a value into a FiX `String` buffer.\n\n///\n\n/// ```\n\n/// let meal = &[\n\n/// (\"bread\", \"baguette\"),\n\n/// (\"cheese\", \"comté\"),\n\n/// (\"meat\", \"ham\"),\n\n/// (\"fat\", \"butter\"),\n\n/// ];\n\n///\n\n/// assert_eq!(\n\n/// serde_fix::to_string(...
Rust
alap_gen/src/attributes.rs
pwil3058/rs_lalr1_parsers
1cd7a8a75450f2848cbcf8048c0e92b167c3e4bb
use lexan; #[cfg(not(feature = "bootstrap"))] use crate::alap_gen::AATerminal; #[cfg(feature = "bootstrap")] use crate::bootstrap::AATerminal; use crate::production::ProductionTail; use crate::symbol::non_terminal::NonTerminal; use crate::symbol::tag::TagOrToken; use crate::symbol::{Associativity, Symbol}; use std::collections::BTreeSet; #[derive(Debug, Clone)] pub enum AttributeData { Token(lexan::Token<AATerminal>), SyntaxError(lexan::Token<AATerminal>, BTreeSet<AATerminal>), LexicalError(lexan::Error<AATerminal>, BTreeSet<AATerminal>), Number(u32), Symbol(Symbol), SymbolList(Vec<Symbol>), LeftHandSide(NonTerminal), TagOrToken(TagOrToken), TagOrTokenList(Vec<TagOrToken>), ProductionTail(ProductionTail), ProductionTailList(Vec<ProductionTail>), Action(String), Predicate(String), AssociativityAndPrecedence(Associativity, u16), Default, } impl Default for AttributeData { fn default() -> Self { AttributeData::Default } } impl AttributeData { pub fn matched_text(&self) -> &String { match self { AttributeData::Token(token) => token.lexeme(), AttributeData::SyntaxError(token, _) => token.lexeme(), AttributeData::LexicalError(error, _) => match error { lexan::Error::UnexpectedText(text, _) => text, lexan::Error::AmbiguousMatches(_, text, _) => text, lexan::Error::AdvancedWhenEmpty(_) => panic!("Wrong attribute variant."), }, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn location(&self) -> &lexan::Location { match self { AttributeData::Token(token) => token.location(), AttributeData::SyntaxError(token, _) => token.location(), AttributeData::LexicalError(error, _) => match error { lexan::Error::UnexpectedText(_, location) => location, lexan::Error::AmbiguousMatches(_, _, location) => location, lexan::Error::AdvancedWhenEmpty(location) => location, }, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn text_and_location(&self) -> (&String, &lexan::Location) { match self { AttributeData::Token(token) => (token.lexeme(), token.location()), 
AttributeData::SyntaxError(token, _) => (token.lexeme(), token.location()), AttributeData::LexicalError(error, _) => match error { lexan::Error::UnexpectedText(text, location) => (text, location), lexan::Error::AmbiguousMatches(_, text, location) => (text, location), lexan::Error::AdvancedWhenEmpty(_) => panic!("Wrong attribute variant."), }, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn number(&self) -> u32 { match self { AttributeData::Number(number) => *number, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn symbol(&self) -> &Symbol { match self { AttributeData::Symbol(symbol) => symbol, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn symbol_list(&self) -> &Vec<Symbol> { match self { AttributeData::SymbolList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn symbol_list_mut(&mut self) -> &mut Vec<Symbol> { match self { AttributeData::SymbolList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn left_hand_side(&self) -> &NonTerminal { match self { AttributeData::LeftHandSide(lhs) => lhs, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn tag_or_token(&self) -> &TagOrToken { match self { AttributeData::TagOrToken(tag_or_token) => tag_or_token, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn tag_or_token_list(&self) -> &Vec<TagOrToken> { match self { AttributeData::TagOrTokenList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn tag_or_token_list_mut(&mut self) -> &mut Vec<TagOrToken> { match self { AttributeData::TagOrTokenList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn production_tail(&self) -> &ProductionTail { match self { AttributeData::ProductionTail(production_tail) => production_tail, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn production_tail_list(&self) -> &Vec<ProductionTail> { match self { AttributeData::ProductionTailList(list) => 
list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn production_tail_list_mut(&mut self) -> &mut Vec<ProductionTail> { match self { AttributeData::ProductionTailList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn action(&self) -> &str { match self { AttributeData::Action(action) => action, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn predicate(&self) -> &str { match self { AttributeData::Predicate(predicate) => predicate, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn associativity_and_precedence(&self) -> (Associativity, u16) { match self { AttributeData::AssociativityAndPrecedence(associativity, precedence) => { (*associativity, *precedence) } _ => panic!("{:?}: Wrong attribute variant.", self), } } } impl From<lexan::Token<AATerminal>> for AttributeData { fn from(token: lexan::Token<AATerminal>) -> Self { AttributeData::Token(token) } } impl From<lalr1_plus::Error<AATerminal>> for AttributeData { fn from(error: lalr1_plus::Error<AATerminal>) -> Self { match error { lalr1_plus::Error::LexicalError(error, expected) => { AttributeData::LexicalError(error, expected) } lalr1_plus::Error::SyntaxError(token, expected) => { AttributeData::SyntaxError(token, expected) } } } }
use lexan; #[cfg(not(feature = "bootstrap"))] use crate::alap_gen::AATerminal; #[cfg(feature = "bootstrap")] use crate::bootstrap::AATerminal; use crate::production::ProductionTail; use crate::symbol::non_terminal::NonTerminal; use crate::symbol::tag::TagOrToken; use crate::symbol::{Associativity, Symbol}; use std::collections::BTreeSet; #[derive(Debug, Clon
h self { AttributeData::SymbolList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn symbol_list_mut(&mut self) -> &mut Vec<Symbol> { match self { AttributeData::SymbolList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn left_hand_side(&self) -> &NonTerminal { match self { AttributeData::LeftHandSide(lhs) => lhs, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn tag_or_token(&self) -> &TagOrToken { match self { AttributeData::TagOrToken(tag_or_token) => tag_or_token, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn tag_or_token_list(&self) -> &Vec<TagOrToken> { match self { AttributeData::TagOrTokenList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn tag_or_token_list_mut(&mut self) -> &mut Vec<TagOrToken> { match self { AttributeData::TagOrTokenList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn production_tail(&self) -> &ProductionTail { match self { AttributeData::ProductionTail(production_tail) => production_tail, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn production_tail_list(&self) -> &Vec<ProductionTail> { match self { AttributeData::ProductionTailList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn production_tail_list_mut(&mut self) -> &mut Vec<ProductionTail> { match self { AttributeData::ProductionTailList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn action(&self) -> &str { match self { AttributeData::Action(action) => action, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn predicate(&self) -> &str { match self { AttributeData::Predicate(predicate) => predicate, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn associativity_and_precedence(&self) -> (Associativity, u16) { match self { AttributeData::AssociativityAndPrecedence(associativity, precedence) => { (*associativity, *precedence) } _ 
=> panic!("{:?}: Wrong attribute variant.", self), } } } impl From<lexan::Token<AATerminal>> for AttributeData { fn from(token: lexan::Token<AATerminal>) -> Self { AttributeData::Token(token) } } impl From<lalr1_plus::Error<AATerminal>> for AttributeData { fn from(error: lalr1_plus::Error<AATerminal>) -> Self { match error { lalr1_plus::Error::LexicalError(error, expected) => { AttributeData::LexicalError(error, expected) } lalr1_plus::Error::SyntaxError(token, expected) => { AttributeData::SyntaxError(token, expected) } } } }
e)] pub enum AttributeData { Token(lexan::Token<AATerminal>), SyntaxError(lexan::Token<AATerminal>, BTreeSet<AATerminal>), LexicalError(lexan::Error<AATerminal>, BTreeSet<AATerminal>), Number(u32), Symbol(Symbol), SymbolList(Vec<Symbol>), LeftHandSide(NonTerminal), TagOrToken(TagOrToken), TagOrTokenList(Vec<TagOrToken>), ProductionTail(ProductionTail), ProductionTailList(Vec<ProductionTail>), Action(String), Predicate(String), AssociativityAndPrecedence(Associativity, u16), Default, } impl Default for AttributeData { fn default() -> Self { AttributeData::Default } } impl AttributeData { pub fn matched_text(&self) -> &String { match self { AttributeData::Token(token) => token.lexeme(), AttributeData::SyntaxError(token, _) => token.lexeme(), AttributeData::LexicalError(error, _) => match error { lexan::Error::UnexpectedText(text, _) => text, lexan::Error::AmbiguousMatches(_, text, _) => text, lexan::Error::AdvancedWhenEmpty(_) => panic!("Wrong attribute variant."), }, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn location(&self) -> &lexan::Location { match self { AttributeData::Token(token) => token.location(), AttributeData::SyntaxError(token, _) => token.location(), AttributeData::LexicalError(error, _) => match error { lexan::Error::UnexpectedText(_, location) => location, lexan::Error::AmbiguousMatches(_, _, location) => location, lexan::Error::AdvancedWhenEmpty(location) => location, }, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn text_and_location(&self) -> (&String, &lexan::Location) { match self { AttributeData::Token(token) => (token.lexeme(), token.location()), AttributeData::SyntaxError(token, _) => (token.lexeme(), token.location()), AttributeData::LexicalError(error, _) => match error { lexan::Error::UnexpectedText(text, location) => (text, location), lexan::Error::AmbiguousMatches(_, text, location) => (text, location), lexan::Error::AdvancedWhenEmpty(_) => panic!("Wrong attribute variant."), }, _ => panic!("{:?}: 
Wrong attribute variant.", self), } } pub fn number(&self) -> u32 { match self { AttributeData::Number(number) => *number, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn symbol(&self) -> &Symbol { match self { AttributeData::Symbol(symbol) => symbol, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn symbol_list(&self) -> &Vec<Symbol> { matc
random
[ { "content": "fn rhs_associated_precedence(symbols: &[Symbol]) -> Option<(Associativity, u16)> {\n\n for symbol in symbols.iter() {\n\n match symbol {\n\n Symbol::Terminal(token) => {\n\n return Some(token.associativity_and_precedence());\n\n }\n\n _ => ...
Rust
src/main.rs
romanz/trezor-sq
77b426a0ac54b348556fb0ed6baa2d493e8e693c
use std::ffi::OsString; use std::io; use std::path::Path; extern crate clap; extern crate fern; extern crate sequoia_openpgp as openpgp; extern crate subprocess; extern crate trezor; #[macro_use] extern crate log; use openpgp::armor; use openpgp::constants::{HashAlgorithm, PublicKeyAlgorithm}; use openpgp::crypto::{self, mpis}; use openpgp::packet::Key; use openpgp::parse::Parse; use openpgp::serialize::stream; use openpgp::TPK; fn handle_interaction<T, R: trezor::TrezorMessage>( resp: trezor::TrezorResponse<T, R>, ) -> Result<T, trezor::Error> { match resp { trezor::TrezorResponse::Ok(res) => Ok(res), trezor::TrezorResponse::Failure(_) => resp.ok(), trezor::TrezorResponse::ButtonRequest(req) => handle_interaction(req.ack()?), trezor::TrezorResponse::PinMatrixRequest(_req) => panic!("TREZOR is locked"), trezor::TrezorResponse::PassphraseRequest(_req) => panic!("TREZOR has passphrase"), trezor::TrezorResponse::PassphraseStateRequest(_req) => panic!("TREZOR has passphrase"), } } struct ExternalSigner { sigkey: Key, userid: String, } impl ExternalSigner { pub fn from_file(path: &Path, user_id: &str) -> openpgp::Result<Self> { let tpk = TPK::from_file(path)?; if tpk .userids() .find(|u| u.userid().value() == user_id.as_bytes()) .is_none() { let msg = format!("{:?} has no user ID {}", path, user_id); return Err(openpgp::Error::UnsupportedTPK(msg).into()); } let (_sig, _rev, key) = tpk .keys_valid() .signing_capable() .next() .expect("no valid signing key"); let userid_str = String::from_utf8( tpk.userids() .next() .expect("no user IDs") .userid() .value() .to_vec(), )?; Ok(ExternalSigner { sigkey: key.clone(), userid: userid_str, }) } } impl crypto::Signer for ExternalSigner { fn public(&self) -> &Key { &self.sigkey } fn sign( &mut self, hash_algo: HashAlgorithm, digest: &[u8], ) -> openpgp::Result<mpis::Signature> { match hash_algo { HashAlgorithm::SHA256 | HashAlgorithm::SHA512 => (), _ => return Err(openpgp::Error::UnsupportedHashAlgorithm(hash_algo).into()), } let 
mut digest = digest.to_vec(); assert!(digest.len() >= 32); let curve = match self.sigkey.pk_algo() { PublicKeyAlgorithm::EdDSA => "ed25519", PublicKeyAlgorithm::ECDSA => { digest.split_off(32); "nist256p1" } _ => { return Err( openpgp::Error::UnsupportedPublicKeyAlgorithm(self.sigkey.pk_algo()).into(), ) } }; let mut identity = trezor::protos::IdentityType::new(); identity.set_host(self.userid.to_owned()); identity.set_proto("gpg".to_owned()); let mut trezor = trezor::unique(false)?; trezor.init_device()?; let sig = handle_interaction(trezor.sign_identity(identity, digest, curve.to_owned())?)?; if sig.len() != 65 { return Err(openpgp::Error::BadSignature(format!( "invalid signature size: {}", sig.len() )) .into()); } Ok(mpis::Signature::ECDSA { r: mpis::MPI::new(&sig[1..33]), s: mpis::MPI::new(&sig[33..]), }) } } fn main() { let matches = clap::App::new("OpenPGP git wrapper for TREZOR") .arg( clap::Arg::with_name("userid") .short("u") .value_name("USERID") .help("User ID for signature") .takes_value(true), ) .arg( clap::Arg::with_name("detached") .short("b") .help("Make a detached signature"), ) .arg( clap::Arg::with_name("sign") .short("s") .help("Sign message from stdin"), ) .arg( clap::Arg::with_name("verify") .long("verify") .takes_value(true) .help("Verify signature"), ) .arg( clap::Arg::with_name("armor") .short("a") .help("Output armored signature"), ) .arg( clap::Arg::with_name("status_fd") .long("status-fd") .takes_value(true) .help("File descriptor for status messages"), ) .arg( clap::Arg::with_name("keyid_format") .long("keyid-format") .default_value("long") .takes_value(true) .help("TODO"), ) .arg(clap::Arg::with_name("file").index(1).required(false)) .get_matches(); let home_dir: OsString = std::env::var_os("GNUPGHOME").expect("GNUPGHOME is not set"); let pubkey_path = std::path::Path::new(&home_dir).join("trezor.asc"); trace!("pubkey_path = {:?}", pubkey_path); if matches.is_present("sign") { let userid = matches.value_of("userid").expect("missing 
USERID"); trace!("userid = {:?}", userid); assert!(matches.is_present("detached")); assert!(matches.is_present("armor")); assert_eq!(matches.value_of("status_fd").unwrap_or("2"), "2"); let mut signer = ExternalSigner::from_file(&pubkey_path, userid).expect("no ExternalSigner signer"); let signers: Vec<&mut dyn crypto::Signer> = vec![&mut signer]; let sink = armor::Writer::new(io::stdout(), armor::Kind::Signature, &[]) .expect("Failed to create an armored writer."); let mut signer = stream::Signer::detached(stream::Message::new(sink), signers, None) .expect("Failed to create detached signer"); io::copy(&mut io::stdin(), &mut signer).expect("Failed to sign data"); signer.finalize().expect("Failed to write data"); eprintln!("\n[GNUPG:] SIG_CREATED "); return; } if matches.is_present("verify") { assert_eq!(matches.value_of("status_fd").unwrap_or("1"), "1"); assert_eq!(matches.value_of("file").expect("missing input file"), "-"); let sigfile = matches.value_of("verify").expect("missing signature"); let result = subprocess::Exec::cmd("/home/roman/Code/sequoia/target/debug/sqv") .arg("--keyring") .arg(&pubkey_path) .arg(sigfile) .arg("/dev/stdin") .capture() .expect("Popen failed"); if result.success() { println!("\n[GNUPG:] GOODSIG "); eprint!("✓ "); std::process::exit(0); } else { println!("\n[GNUPG:] BADSIG "); eprint!("✗ "); std::process::exit(1); } } panic!("unsupported command: {:?}", matches); }
use std::ffi::OsString; use std::io; use std::path::Path; extern crate clap; extern crate fern; extern crate sequoia_openpgp as openpgp; extern crate subprocess; extern crate trezor; #[macro_use] extern crate log; use openpgp::armor; use openpgp::constants::{HashAlgorithm, PublicKeyAlgorithm}; use openpgp::crypto::{self, mpis}; use openpgp::packet::Key; use openpgp::parse::Parse; use openpgp::serialize::stream; use openpgp::TPK; fn handle_interaction<T, R: trezor::TrezorMessage>( resp: trezor::TrezorResponse<T, R>, ) -> Result<T, trezor::Error> { match resp { trezor::TrezorResponse::Ok(res) => Ok(res), trezor::TrezorResponse::Failure(_) => resp.ok(), trezor::TrezorResponse::ButtonRequest(req) => handle_interaction(req.ack()?), trezor::TrezorResponse::PinMatrixRequest(_req) => panic!("TREZOR is locked"), trezor::TrezorResponse::PassphraseRequest(_req) => panic!("TREZOR has passphrase"), trezor::TrezorResponse::PassphraseStateRequest(_req) => panic!("TREZOR has passphrase"), } } struct ExternalSigner { sigkey: Key, userid: String, } impl ExternalSigner { pub fn from_file(path: &Path, user_id: &str) -> openpgp::Result<Self> { let tpk = TPK::from_file(path)?; if tpk .userids() .find(|u| u.userid().value() == user_id.as_bytes()) .is_none() { let msg = format!("{:?} has no user ID {}", path, user_id); return Err(openpgp::Error::UnsupportedTPK(msg).into()); } let (_sig, _rev, key) = tpk .keys_valid() .signing_capable() .next() .expect("no valid signing key"); let userid_str = String::from_utf8( tpk.userids() .next() .expect("no user IDs") .userid() .value() .to_vec(), )?; Ok(ExternalSigner { sigkey: key.clone(), userid: userid_str, }) } } impl crypto::Signer for ExternalSigner { fn public(&self) -> &Key { &self.sigkey }
} fn main() { let matches = clap::App::new("OpenPGP git wrapper for TREZOR") .arg( clap::Arg::with_name("userid") .short("u") .value_name("USERID") .help("User ID for signature") .takes_value(true), ) .arg( clap::Arg::with_name("detached") .short("b") .help("Make a detached signature"), ) .arg( clap::Arg::with_name("sign") .short("s") .help("Sign message from stdin"), ) .arg( clap::Arg::with_name("verify") .long("verify") .takes_value(true) .help("Verify signature"), ) .arg( clap::Arg::with_name("armor") .short("a") .help("Output armored signature"), ) .arg( clap::Arg::with_name("status_fd") .long("status-fd") .takes_value(true) .help("File descriptor for status messages"), ) .arg( clap::Arg::with_name("keyid_format") .long("keyid-format") .default_value("long") .takes_value(true) .help("TODO"), ) .arg(clap::Arg::with_name("file").index(1).required(false)) .get_matches(); let home_dir: OsString = std::env::var_os("GNUPGHOME").expect("GNUPGHOME is not set"); let pubkey_path = std::path::Path::new(&home_dir).join("trezor.asc"); trace!("pubkey_path = {:?}", pubkey_path); if matches.is_present("sign") { let userid = matches.value_of("userid").expect("missing USERID"); trace!("userid = {:?}", userid); assert!(matches.is_present("detached")); assert!(matches.is_present("armor")); assert_eq!(matches.value_of("status_fd").unwrap_or("2"), "2"); let mut signer = ExternalSigner::from_file(&pubkey_path, userid).expect("no ExternalSigner signer"); let signers: Vec<&mut dyn crypto::Signer> = vec![&mut signer]; let sink = armor::Writer::new(io::stdout(), armor::Kind::Signature, &[]) .expect("Failed to create an armored writer."); let mut signer = stream::Signer::detached(stream::Message::new(sink), signers, None) .expect("Failed to create detached signer"); io::copy(&mut io::stdin(), &mut signer).expect("Failed to sign data"); signer.finalize().expect("Failed to write data"); eprintln!("\n[GNUPG:] SIG_CREATED "); return; } if matches.is_present("verify") { 
assert_eq!(matches.value_of("status_fd").unwrap_or("1"), "1"); assert_eq!(matches.value_of("file").expect("missing input file"), "-"); let sigfile = matches.value_of("verify").expect("missing signature"); let result = subprocess::Exec::cmd("/home/roman/Code/sequoia/target/debug/sqv") .arg("--keyring") .arg(&pubkey_path) .arg(sigfile) .arg("/dev/stdin") .capture() .expect("Popen failed"); if result.success() { println!("\n[GNUPG:] GOODSIG "); eprint!("✓ "); std::process::exit(0); } else { println!("\n[GNUPG:] BADSIG "); eprint!("✗ "); std::process::exit(1); } } panic!("unsupported command: {:?}", matches); }
fn sign( &mut self, hash_algo: HashAlgorithm, digest: &[u8], ) -> openpgp::Result<mpis::Signature> { match hash_algo { HashAlgorithm::SHA256 | HashAlgorithm::SHA512 => (), _ => return Err(openpgp::Error::UnsupportedHashAlgorithm(hash_algo).into()), } let mut digest = digest.to_vec(); assert!(digest.len() >= 32); let curve = match self.sigkey.pk_algo() { PublicKeyAlgorithm::EdDSA => "ed25519", PublicKeyAlgorithm::ECDSA => { digest.split_off(32); "nist256p1" } _ => { return Err( openpgp::Error::UnsupportedPublicKeyAlgorithm(self.sigkey.pk_algo()).into(), ) } }; let mut identity = trezor::protos::IdentityType::new(); identity.set_host(self.userid.to_owned()); identity.set_proto("gpg".to_owned()); let mut trezor = trezor::unique(false)?; trezor.init_device()?; let sig = handle_interaction(trezor.sign_identity(identity, digest, curve.to_owned())?)?; if sig.len() != 65 { return Err(openpgp::Error::BadSignature(format!( "invalid signature size: {}", sig.len() )) .into()); } Ok(mpis::Signature::ECDSA { r: mpis::MPI::new(&sig[1..33]), s: mpis::MPI::new(&sig[33..]), }) }
function_block-full_function
[]
Rust
src/crypto.rs
SerhoLiu/eakio
aa7366878294a2525f8c0d32dc0079c1e8c605ee
use std::fmt; use std::io; use std::result; use ring::{aead, digest, hkdf, hmac}; use ring::rand::{SecureRandom, SystemRandom}; static CIPHER: &'static aead::Algorithm = &aead::AES_256_GCM; static DIGEST: &'static digest::Algorithm = &digest::SHA256; pub type Result<T> = result::Result<T, Error>; #[derive(Clone, Copy, Debug, PartialEq)] pub enum Error { GenSalt, SaltLenNotMatch(usize), OpenKey, SealKey, SealBufferTooSmall(usize), Open, Seal, } pub struct Salt { len: usize, bytes: [u8; digest::MAX_OUTPUT_LEN], } impl Salt { pub fn new() -> Result<Salt> { let len = Salt::len(); let mut bytes = [0u8; digest::MAX_OUTPUT_LEN]; let rng = SystemRandom::new(); rng.fill(&mut bytes[..len]).map_err(|_| Error::GenSalt)?; Ok(Salt { len, bytes }) } pub fn from_bytes(bytes: &[u8]) -> Result<Salt> { let len = Salt::len(); if bytes.len() != len { return Err(Error::SaltLenNotMatch(len)); } let mut buf = [0u8; digest::MAX_OUTPUT_LEN]; buf[..len].copy_from_slice(bytes); Ok(Salt { len, bytes: buf }) } #[inline] pub fn len() -> usize { hmac::recommended_key_len(DIGEST) } #[inline] pub fn get_bytes(&self) -> &[u8] { &self.bytes[..self.len] } #[inline] fn get_signing_key(&self) -> hmac::SigningKey { hmac::SigningKey::new(DIGEST, &self.bytes[..self.len]) } } const INFO_KEY: &str = "hello kelsi"; #[allow(dead_code)] pub struct Crypto { tag_len: usize, key_len: usize, nonce_len: usize, open_key: aead::OpeningKey, open_nonce: Vec<u8>, seal_key: aead::SealingKey, seal_nonce: Vec<u8>, } impl Crypto { pub fn new(secret: &[u8], salt: &Salt) -> Result<Crypto> { let key_len = CIPHER.key_len(); let mut key = Vec::with_capacity(key_len); unsafe { key.set_len(key_len); } hkdf::extract_and_expand( &salt.get_signing_key(), secret, INFO_KEY.as_bytes(), &mut key, ); let open_key = aead::OpeningKey::new(CIPHER, &key).map_err(|_| Error::OpenKey)?; let seal_key = aead::SealingKey::new(CIPHER, &key).map_err(|_| Error::SealKey)?; let nonce_len = CIPHER.nonce_len(); Ok(Crypto { tag_len: CIPHER.tag_len(), 
key_len: CIPHER.key_len(), nonce_len: CIPHER.nonce_len(), open_key, open_nonce: vec![0u8; nonce_len], seal_key, seal_nonce: vec![0u8; nonce_len], }) } #[inline] pub fn tag_len() -> usize { CIPHER.tag_len() } pub fn encrypt(&mut self, inout: &mut [u8], in_len: usize) -> Result<usize> { let out_len = in_len + self.tag_len; if inout.len() < out_len { return Err(Error::SealBufferTooSmall(out_len)); } match aead::seal_in_place( &self.seal_key, &self.seal_nonce, &[], &mut inout[..out_len], self.tag_len, ) { Ok(outlen) => debug_assert_eq!(out_len, outlen), Err(_) => return Err(Error::Seal), }; incr_nonce(&mut self.seal_nonce); Ok(out_len) } #[inline] pub fn decrypt(&mut self, inout: &mut [u8]) -> Result<usize> { match aead::open_in_place(&self.open_key, &self.open_nonce, &[], 0, inout) { Ok(buf) => { incr_nonce(&mut self.open_nonce); Ok(buf.len()) } Err(_) => Err(Error::Open), } } } fn incr_nonce(nonce: &mut [u8]) { for byte in nonce.iter_mut() { let (sum, overflow) = (*byte).overflowing_add(1); *byte = sum; if !overflow { break; } } } impl fmt::Display for Error { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match *self { Error::GenSalt => write!(fmt, "generate salt error"), Error::SaltLenNotMatch(need) => write!(fmt, "salt length not match, need {}", need), Error::OpenKey => write!(fmt, "crypto ring open key error"), Error::SealKey => write!(fmt, "crypto ring seal key error"), Error::SealBufferTooSmall(need) => { write!(fmt, "crypto seal inout buffer too small, need {}", need) } Error::Open => write!(fmt, "crypto decrypt error"), Error::Seal => write!(fmt, "crypto encrypt error"), } } } impl From<Error> for io::Error { fn from(err: Error) -> io::Error { io::Error::new(io::ErrorKind::Other, format!("{}", err)) } } #[cfg(test)] mod test { use super::{Crypto, Error, Salt}; #[test] fn test_incr_nonce() { let mut nonce = [0u8; 4]; for i in 1..1024 { super::incr_nonce(&mut nonce); let x = (nonce[0] as usize) + ((nonce[1] as usize) << 8) + ((nonce[2] as usize) << 
16) + ((nonce[3] as usize) << 24); assert_eq!(x, i); } } #[test] fn test_crypto_normal() { let salt = Salt::new().unwrap(); let mut crypto = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf = [0u8; 128]; let plain_len: usize = 24; let out_len = crypto.encrypt(&mut buf[..], plain_len).unwrap(); assert_eq!(out_len, plain_len + Crypto::tag_len()); assert!(buf[out_len..].iter().all(|&x| x == 0)); let len = crypto.decrypt(&mut buf[..out_len]).unwrap(); assert_eq!(plain_len, len); assert!(buf[..plain_len].iter().all(|&x| x == 0)); } #[test] fn test_crypto_zerosize() { let salt = Salt::new().unwrap(); let mut crypto = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf = [0u8; 128]; let out_len = crypto.encrypt(&mut buf[..], 0).unwrap(); assert_eq!(out_len, 0 + Crypto::tag_len()); let len = crypto.decrypt(&mut buf[..out_len]).unwrap(); assert_eq!(0, len); } #[test] fn test_crypto_multi_buf() { let salt = Salt::new().unwrap(); let mut crypto = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf1 = [0u8; 128]; let plain_len1: usize = 24; let mut buf2 = [1u8; 128]; let plain_len2: usize = 37; crypto.encrypt(&mut buf1[..], plain_len1).unwrap(); let out_len2 = crypto.encrypt(&mut buf2[..], plain_len2).unwrap(); let err = crypto.decrypt(&mut buf2[..out_len2]).unwrap_err(); assert_eq!(err, Error::Open); let mut crypto1 = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf3 = [0u8; 128]; let plain_len3: usize = 24; let mut buf4 = [2u8; 128]; let plain_len4: usize = 24; let out_len3 = crypto1.encrypt(&mut buf3[..], plain_len3).unwrap(); let out_len4 = crypto1.encrypt(&mut buf4[..], plain_len4).unwrap(); crypto1.decrypt(&mut buf3[..out_len3]).unwrap(); assert!(buf3[..plain_len3].iter().all(|&x| x == 0)); crypto1.decrypt(&mut buf4[..out_len4]).unwrap(); assert!(buf4[..plain_len4].iter().all(|&x| x == 2)); } }
use std::fmt; use std::io; use std::result; use ring::{aead, digest, hkdf, hmac}; use ring::rand::{SecureRandom, SystemRandom}; static CIPHER: &'static aead::Algorithm = &aead::AES_256_GCM; static DIGEST: &'static digest::Algorithm = &digest::SHA256; pub type Result<T> = result::Result<T, Error>; #[derive(Clone, Copy, Debug, PartialEq)] pub enum Error { GenSalt, SaltLenNotMatch(usize), OpenKey, SealKey, SealBufferTooSmall(usize), Open, Seal, } pub struct Salt { len: usize, bytes: [u8; digest::MAX_OUTPUT_LEN], } impl Salt { pub fn new() -> Result<Salt> { let len = Salt::len(); let mut bytes = [0u8; digest::MAX_OUTPUT_LEN]; let rng = SystemRandom::new(); rng.fill(&mut bytes[..len]).map_err(|_| Error::GenSalt)?; Ok(Salt { len, bytes }) } pub fn from_bytes(bytes: &[u8]) -> Result<Salt> { let len = Salt::len(); if bytes.len() != len { return Err(Error::SaltLenNotMatch(len)); } let mut buf = [0u8; digest::MAX_OUTPUT_LEN]; buf[..len].copy_from_slice(bytes); Ok(Salt { len, bytes: buf }) } #[inline] pub fn len() -> usize { hmac::recommended_key_len(DIGEST) } #[inline] pub fn get_bytes(&self) -> &[u8] { &self.bytes[..self.len] } #[inline] fn get_signing_key(&self) -> hmac::SigningKey { hmac::SigningKey::new(DIGEST, &self.bytes[..self.len]) } } const INFO_KEY: &str = "hello kelsi"; #[allow(dead_code)] pub struct Crypto { tag_len: usize, key_len: usize, nonce_len: usize, open_key: aead::OpeningKey, open_nonce: Vec<u8>, seal_key: aead::SealingKey, seal_nonce: Vec<u8>, } impl Crypto { pub fn new(secret: &[u8], salt: &Salt) -> Result<Crypto> { let key_len = CIPHER.key_len(); let mut key = Vec::with_capacity(key_len); unsafe { key.set_len(key_len); } hkdf::extract_and_expand( &salt.get_signing_key(), secret, INFO_KEY.as_bytes(), &mut key, ); let open_key = aead::OpeningKey::new(CIPHER, &key).map_err(|_| Error::OpenKey)?; let seal_key = aead::SealingKey::new(CIPHER, &key).map_err(|_| Error::SealKey)?; let nonce_len = CIPHER.nonce_len(); Ok(Crypto { tag_len: CIPHER.tag_len(), 
key_len: CIPHER.key_len(), nonce_len: CIPHER.nonce_len(), open_key, open_nonce: vec![0u8; nonce_len], seal_key, seal_nonce: vec![0u8; nonce_len], }) } #[inline] pub fn tag_len() -> usize { CIPHER.tag_len() } pub fn encrypt(&mut self, inout: &mut [u8], in_len: usize) -> Result<usize> { let out_len = in_len + self.tag_len; if inout.len() < out_len { return Err(Error::SealBufferTooSmall(out_len)); } match aead::seal_in_place( &self.seal_key, &self.seal_nonce, &[], &mut inout[..out_len], self.tag_len, ) { Ok(outlen) => debug_assert_eq!(out_len, outlen), Err(_) => return Err(Error::Seal), }; incr_nonce(&mut self.seal_nonce); Ok(out_len) } #[inline] pub fn decrypt(&mut self, inout: &mut [u8]) -> Result<usize> { match aead::open_in_place(&self.open_key, &self.open_nonce, &[], 0, inout) { Ok(buf) => { incr_nonce(&mut self.open_nonce); Ok(buf.len()) } Err(_) => Err(Error::Open), } } } fn incr_nonce(nonce: &mut [u8]) { for byte in nonce.iter_mut() { let (sum, overflow) = (*byte).overflowing_add(1); *byte = sum; if !overflow { break; } } } impl fmt::Display for Error { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match *self { Error::GenSalt => write!(fmt, "generate salt error"), Error::SaltLenNotMatch(need) => write!(fmt, "salt length not match, need {}", need), Error::OpenKey => write!(fmt, "crypto ring open key error"), Error::SealKey => write!(fmt, "crypto ring seal key error"), Error::SealBufferTooSmall(need) => { write!(fmt, "crypto seal inout buffer too small, need {}", need) } Error::Open => write!(fmt, "crypto decrypt error"), Error::Seal => write!(fmt, "crypto encrypt error"), } } } impl From<Error> for io::Error { fn from(err: Error) -> io::Error { io::Error::new(io::ErrorKind::Other, format!("{}", err)) } } #[cfg(test)] mod test { use super::{Crypto, Error, Salt}; #[test] fn test_incr_nonce() { let mut nonce = [0u8; 4]; for i in 1..1024 { super::incr_nonce(&mut nonce); let x = (nonce[0] as usize) + ((nonce[1] as usize) << 8) + ((nonce[2] as usize) << 
16) + ((nonce[3] as usize) << 24); assert_eq!(x, i); } } #[test] fn test_crypto_normal() { let salt = Salt::new().unwrap(); let mut crypto = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf = [0u8; 128]; let plain_len: usize = 24; let out_len = crypto.encrypt(&mut buf[..], plain_len).unwrap(); assert_eq!(out_len, plain_len + Crypto::tag_len()); assert!(buf[out_len..].iter().all(|&x| x == 0)); let len = crypto.decrypt(&mut buf[..out_len]).unwrap(); assert_eq!(plain_len, len); assert!(buf[..plain_len].iter().all(|&x| x == 0)); } #[test] fn test_crypto_zerosize() { let salt = Salt::new().unwrap(); let mut crypto = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf = [0u8; 128]; let out_len = crypto.encrypt(&mut buf[..], 0).unwrap(); assert_eq!(out_len, 0 + Crypto::tag_len()); let len = crypto.decrypt(&mut buf[..out_len]).unwrap(); assert_eq!(0, len); } #[test] fn test_crypto_multi_buf() { let salt = Salt::new().unwrap(); let mut crypto = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf1 = [0u8; 128]; let plain_len1: usize = 24; let mut buf2 = [1u8; 12
}
8]; let plain_len2: usize = 37; crypto.encrypt(&mut buf1[..], plain_len1).unwrap(); let out_len2 = crypto.encrypt(&mut buf2[..], plain_len2).unwrap(); let err = crypto.decrypt(&mut buf2[..out_len2]).unwrap_err(); assert_eq!(err, Error::Open); let mut crypto1 = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf3 = [0u8; 128]; let plain_len3: usize = 24; let mut buf4 = [2u8; 128]; let plain_len4: usize = 24; let out_len3 = crypto1.encrypt(&mut buf3[..], plain_len3).unwrap(); let out_len4 = crypto1.encrypt(&mut buf4[..], plain_len4).unwrap(); crypto1.decrypt(&mut buf3[..out_len3]).unwrap(); assert!(buf3[..plain_len3].iter().all(|&x| x == 0)); crypto1.decrypt(&mut buf4[..out_len4]).unwrap(); assert!(buf4[..plain_len4].iter().all(|&x| x == 2)); }
function_block-function_prefixed
[ { "content": "#[inline]\n\npub fn io_error(desc: &str) -> io::Error {\n\n io::Error::new(io::ErrorKind::Other, desc)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::env;\n\n\n\n #[test]\n\n fn test_expand_tilde_path() {\n\n let old_home = env::var(\"HOME\").ok();\n\n env::set_var(\...
Rust
artichoke-backend/src/extn/core/math/mruby.rs
Talljoe/artichoke
36ed5eba078a9fbf3cb4d5c8f7407d0a773d2d6e
use crate::extn::core::math; use crate::extn::prelude::*; pub fn init(interp: &mut Artichoke) -> InitializeResult<()> { if interp.is_module_defined::<math::Math>() { return Ok(()); } let spec = module::Spec::new(interp, "Math", None)?; module::Builder::for_spec(interp, &spec) .add_module_method("acos", artichoke_math_acos, sys::mrb_args_req(1))? .add_module_method("acosh", artichoke_math_acosh, sys::mrb_args_req(1))? .add_module_method("asin", artichoke_math_asin, sys::mrb_args_req(1))? .add_module_method("asinh", artichoke_math_asinh, sys::mrb_args_req(1))? .add_module_method("atan", artichoke_math_atan, sys::mrb_args_req(1))? .add_module_method("atan2", artichoke_math_atan2, sys::mrb_args_req(2))? .add_module_method("atanh", artichoke_math_atanh, sys::mrb_args_req(1))? .add_module_method("cbrt", artichoke_math_cbrt, sys::mrb_args_req(1))? .add_module_method("cos", artichoke_math_cos, sys::mrb_args_req(1))? .add_module_method("cosh", artichoke_math_cosh, sys::mrb_args_req(1))? .add_module_method("erf", artichoke_math_erf, sys::mrb_args_req(1))? .add_module_method("erfc", artichoke_math_erfc, sys::mrb_args_req(1))? .add_module_method("exp", artichoke_math_exp, sys::mrb_args_req(1))? .add_module_method("frexp", artichoke_math_frexp, sys::mrb_args_req(1))? .add_module_method("gamma", artichoke_math_gamma, sys::mrb_args_req(1))? .add_module_method("hypot", artichoke_math_hypot, sys::mrb_args_req(2))? .add_module_method("ldexp", artichoke_math_ldexp, sys::mrb_args_req(2))? .add_module_method("lgamma", artichoke_math_lgamma, sys::mrb_args_req(1))? .add_module_method("log", artichoke_math_log, sys::mrb_args_req_and_opt(1, 1))? .add_module_method("log10", artichoke_math_log10, sys::mrb_args_req(1))? .add_module_method("log2", artichoke_math_log2, sys::mrb_args_req(1))? .add_module_method("sin", artichoke_math_sin, sys::mrb_args_req(1))? .add_module_method("sinh", artichoke_math_sinh, sys::mrb_args_req(1))? 
.add_module_method("sqrt", artichoke_math_sqrt, sys::mrb_args_req(1))? .add_module_method("tan", artichoke_math_tan, sys::mrb_args_req(1))? .add_module_method("tanh", artichoke_math_tanh, sys::mrb_args_req(1))? .define()?; let domainerror = class::Spec::new("DomainError", Some(EnclosingRubyScope::module(&spec)), None)?; class::Builder::for_spec(interp, &domainerror) .with_super_class::<StandardError, _>("StandardError")? .define()?; interp.def_class::<math::DomainError>(domainerror)?; interp.def_module::<math::Math>(spec)?; let e = interp.convert_mut(math::E); interp.define_module_constant::<math::Math>("E", e)?; let pi = interp.convert_mut(math::PI); interp.define_module_constant::<math::Math>("PI", pi)?; Ok(()) } unsafe extern "C" fn artichoke_math_acos( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::acos(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_acosh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::acosh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_asin( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::asin(&mut guard, value).map(|result| 
guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_asinh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::asinh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_atan( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::atan(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_atan2( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (value, other) = mrb_get_args!(mrb, required = 2); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let other = Value::from(other); let result = math::atan2(&mut guard, value, other).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_atanh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::atanh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => 
exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_cbrt( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::cbrt(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_cos( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::cos(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_cosh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::cosh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_erf( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::erf(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_erfc( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = 
mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::erfc(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_exp( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::exp(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_frexp( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::frexp(&mut guard, value).and_then(|(fraction, exponent)| { let fraction = guard.convert_mut(fraction); let exponent = guard.convert(exponent); guard.try_convert_mut(&[fraction, exponent][..]) }); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_gamma( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::gamma(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_hypot( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (value, other) = mrb_get_args!(mrb, 
required = 2); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let other = Value::from(other); let result = math::hypot(&mut guard, value, other).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_ldexp( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (fraction, exponent) = mrb_get_args!(mrb, required = 2); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let fraction = Value::from(fraction); let exponent = Value::from(exponent); let result = math::ldexp(&mut guard, fraction, exponent).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_lgamma( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::lgamma(&mut guard, value).and_then(|(result, sign)| { let result = guard.convert_mut(result); let sign = guard.convert(sign); guard.try_convert_mut(&[result, sign][..]) }); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_log( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (value, base) = mrb_get_args!(mrb, required = 1, optional = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let base = base.map(Value::from); let result = math::log(&mut guard, value, base).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn 
artichoke_math_log10( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::log10(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_log2( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::log2(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_sin( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::sin(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_sinh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::sinh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_sqrt( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = 
unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::sqrt(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_tan( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::tan(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_tanh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::tanh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } }
use crate::extn::core::math; use crate::extn::prelude::*; pub fn init(interp: &mut Artichoke) -> InitializeResult<()> { if interp.is_module_defined::<math::Math>() { return Ok(()); } let spec = module::Spec::new(interp, "Math", None)?; module::Builder::for_spec(interp, &spec) .add_module_method("acos", artichoke_math_acos, sys::mrb_args_req(1))? .add_module_method("acosh", artichoke_math_acosh, sys::mrb_args_req(1))? .add_module_method("asin", artichoke_math_asin, sys::mrb_args_req(1))? .add_module_method("asinh", artichoke_math_asinh, sys::mrb_args_req(1))? .add_module_method("atan", artichoke_math_atan, sys::mrb_args_req(1))? .add_module_method("atan2", artichoke_math_atan2, sys::mrb_args_req(2))? .add_module_method("atanh", artichoke_math_atanh, sys::mrb_args_req(1))? .add_module_method("cbrt", artichoke_math_cbrt, sys::mrb_args_req(1))? .add_module_method("cos", artichoke_math_cos, sys::mrb_args_req(1))? .add_module_method("cosh", artichoke_math_cosh, sys::mrb_args_req(1))? .add_module_method("erf", artichoke_math_erf, sys::mrb_args_req(1))? .add_module_method("erfc", artichoke_math_erfc, sys::mrb_args_req(1))? .add_module_method("exp", artichoke_math_exp, sys::mrb_args_req(1))? .add_module_method("frexp", artichoke_math_frexp, sys::mrb_args_req(1))? .add_module_method("gamma", artichoke_math_gamma, sys::mrb_args_req(1))? .add_module_method("hypot", artichoke_math_hypot, sys::mrb_args_req(2))? .add_module_method("ldexp", artichoke_math_ldexp, sys::mrb_args_req(2))? .add_module_method("lgamma", artichoke_math_lgamma, sys::mrb_args_req(1))? .add_module_method("log", artichoke_math_log, sys::mrb_args_req_and_opt(1, 1))? .add_module_method("log10", artichoke_math_log10, sys::mrb_args_req(1))? .add_module_method("log2", artichoke_math_log2, sys::mrb_args_req(1))? .add_module_method("sin", artichoke_math_sin, sys::mrb_args_req(1))? .add_module_method("sinh", artichoke_math_sinh, sys::mrb_args_req(1))? 
.add_module_method("sqrt", artichoke_math_sqrt, sys::mrb_args_req(1))? .add_module_method("tan", artichoke_math_tan, sys::mrb_args_req(1))? .add_module_method("tanh", artichoke_math_tanh, sys::mrb_args_req(1))? .define()?; let domainerror = class::Spec::new("DomainError", Some(EnclosingRubyScope::module(&spec)), None)?; class::Builder::for_spec(interp, &domainerror) .with_super_class::<StandardError, _>("StandardError")? .define()?; interp.def_class::<math::DomainError>(domainerror)?; interp.def_module::<math::Math>(spec)?; let e = interp.convert_mut(math::E); interp.define_module_constant::<math::Math>("E", e)?; let pi = interp.convert_mut(math::PI); interp.define_module_constant::<math::Math>("PI", pi)?; Ok(()) } unsafe extern "C" fn artichoke_math_acos( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::acos(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_acosh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::acosh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_asin( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::asin(&mut guard, value).map(|result| 
guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_asinh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::asinh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_atan( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::atan(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_atan2( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (value, other) = mrb_get_args!(mrb, required = 2); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let other = Value::from(other); let result = math::atan2(&mut guard, value, other).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_atanh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::atanh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => 
exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_cbrt( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::cbrt(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } }
unsafe extern "C" fn artichoke_math_cosh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::cosh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_erf( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::erf(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_erfc( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::erfc(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_exp( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::exp(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_frexp( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut 
interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::frexp(&mut guard, value).and_then(|(fraction, exponent)| { let fraction = guard.convert_mut(fraction); let exponent = guard.convert(exponent); guard.try_convert_mut(&[fraction, exponent][..]) }); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_gamma( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::gamma(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_hypot( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (value, other) = mrb_get_args!(mrb, required = 2); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let other = Value::from(other); let result = math::hypot(&mut guard, value, other).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_ldexp( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (fraction, exponent) = mrb_get_args!(mrb, required = 2); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let fraction = Value::from(fraction); let exponent = Value::from(exponent); let result = math::ldexp(&mut guard, fraction, exponent).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_lgamma( mrb: *mut sys::mrb_state, 
_slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::lgamma(&mut guard, value).and_then(|(result, sign)| { let result = guard.convert_mut(result); let sign = guard.convert(sign); guard.try_convert_mut(&[result, sign][..]) }); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_log( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (value, base) = mrb_get_args!(mrb, required = 1, optional = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let base = base.map(Value::from); let result = math::log(&mut guard, value, base).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_log10( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::log10(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_log2( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::log2(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_sin( mrb: *mut 
sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::sin(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_sinh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::sinh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_sqrt( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::sqrt(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_tan( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::tan(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_tanh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = 
Guard::new(&mut interp); let value = Value::from(value); let result = math::tanh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } }
unsafe extern "C" fn artichoke_math_cos( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::cos(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } }
function_block-full_function
[ { "content": "pub fn post_match(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let post = data.post();\n\n Ok(interp.convert_mut(post))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core...
Rust
lumol-sim/src/output/custom.rs
Luthaf/lumol
3ef0809b421a574c3604e611372ef6644c251184
use std::error; use std::fmt; use std::fs::File; use std::io::{self, BufWriter}; use std::io::prelude::*; use std::path::{Path, PathBuf}; use caldyn::{Context, Expr}; use caldyn::Error as CaldynError; use log::error; use log_once::{warn_once, error_once}; use super::Output; use lumol_core::{units, System}; #[derive(Debug)] pub enum CustomOutputError { Io(io::Error), Expr(CaldynError), Custom(String), } impl From<io::Error> for CustomOutputError { fn from(error: io::Error) -> CustomOutputError { CustomOutputError::Io(error) } } impl From<CaldynError> for CustomOutputError { fn from(error: CaldynError) -> CustomOutputError { CustomOutputError::Expr(error) } } impl From<String> for CustomOutputError { fn from(error: String) -> CustomOutputError { CustomOutputError::Custom(error) } } impl fmt::Display for CustomOutputError { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { match *self { CustomOutputError::Io(ref err) => write!(fmt, "{}", err)?, CustomOutputError::Expr(ref err) => write!(fmt, "{}", err)?, CustomOutputError::Custom(ref err) => write!(fmt, "{}", err)?, } Ok(()) } } impl error::Error for CustomOutputError { fn description(&self) -> &str { match *self { CustomOutputError::Io(ref err) => err.description(), CustomOutputError::Expr(ref err) => err.description(), CustomOutputError::Custom(ref err) => err, } } fn cause(&self) -> Option<&dyn error::Error> { match *self { CustomOutputError::Io(ref err) => Some(err), CustomOutputError::Expr(ref err) => Some(err), CustomOutputError::Custom(_) => None, } } } struct FormatArgs { args: Vec<(String, Expr)>, tail: String, } impl FormatArgs { fn new(format: &str) -> Result<FormatArgs, CustomOutputError> { let mut args = Vec::new(); let mut expr = String::new(); let mut tail = String::new(); let mut in_expr = false; for c in format.chars() { match c { '{' if !in_expr => { in_expr = true; } '}' if in_expr => { in_expr = false; let sub_expr = Expr::parse(&expr)?; args.push((tail.clone(), sub_expr)); 
tail.clear(); expr.clear(); } '{' if in_expr => { return Err(CustomOutputError::Custom("found { in an expression".into())); } '}' if !in_expr => { return Err( CustomOutputError::Custom("found } outside of an expression".into()), ); } c => { if in_expr { expr.push(c); } else { tail.push(c); } } } } if in_expr { return Err(CustomOutputError::Custom("mismatched braces".into())); } Ok(FormatArgs { args: args, tail: tail, }) } fn get_context<'a>(&self, system: &'a System) -> Context<'a> { let mut context = Context::new(); context.set_query(move |name| { units::CONVERSION_FACTORS.get(name).cloned().or_else(|| { macro_rules! get_particle_data { ($index: ident, $data: ident) => ( system.particles() .$data .get($index) .cloned() .unwrap_or_else(|| { warn_once!( "index out of bound in custom output: \ index is {}, but we only have {} atoms", $index, system.size() ); return num_traits::Zero::zero(); }) ); } if name.contains('[') { let (name, index) = parse_index(name); match name { "x" => Some(get_particle_data!(index, position)[0]), "y" => Some(get_particle_data!(index, position)[1]), "z" => Some(get_particle_data!(index, position)[2]), "vx" => Some(get_particle_data!(index, velocity)[0]), "vy" => Some(get_particle_data!(index, velocity)[1]), "vz" => Some(get_particle_data!(index, velocity)[2]), "mass" => Some(get_particle_data!(index, mass)), "charge" => Some(get_particle_data!(index, charge)), _ => None, } } else { match name { "step" => Some(system.step as f64), "pressure" => Some(system.pressure()), "volume" => Some(system.volume()), "temperature" => Some(system.temperature()), "natoms" => Some(system.size() as f64), "cell.a" => Some(system.cell.a()), "cell.b" => Some(system.cell.b()), "cell.c" => Some(system.cell.c()), "cell.alpha" => Some(system.cell.alpha()), "cell.beta" => Some(system.cell.beta()), "cell.gamma" => Some(system.cell.gamma()), "stress.xx" => Some(system.stress()[0][0]), "stress.yy" => Some(system.stress()[1][1]), "stress.zz" => 
Some(system.stress()[2][2]), "stress.xy" => Some(system.stress()[0][1]), "stress.xz" => Some(system.stress()[0][2]), "stress.yz" => Some(system.stress()[1][2]), _ => None, } } }) }); return context; } fn format(&self, system: &System) -> Result<String, CustomOutputError> { let context = self.get_context(system); let mut output = String::new(); for &(ref string, ref expr) in &self.args { output.push_str(string); let value = expr.eval(&context)?; output.push_str(&value.to_string()); } output.push_str(&self.tail); return Ok(output); } } fn parse_index(input: &str) -> (&str, usize) { let l_brackets = input.match_indices('[').collect::<Vec<_>>(); let r_brackets = input.match_indices(']').collect::<Vec<_>>(); if l_brackets.len() != 1 || r_brackets.len() != 1 { return (input, 0); } let start = l_brackets[0].0; let end = r_brackets[0].0; if start > end { return (input, 0); } if let Ok(index) = input[(start + 1)..end].parse() { return (&input[..start], index); } else { return (input, 0); } } pub struct CustomOutput { file: BufWriter<File>, path: PathBuf, template: String, args: FormatArgs, } impl CustomOutput { pub fn new<P: AsRef<Path>>( filename: P, template: &str, ) -> Result<CustomOutput, CustomOutputError> { Ok(CustomOutput { file: BufWriter::new(File::create(filename.as_ref())?), path: filename.as_ref().to_owned(), template: template.into(), args: FormatArgs::new(template)?, }) } } impl Output for CustomOutput { fn setup(&mut self, _: &System) { writeln_or_log!(self, "# Custom output"); writeln_or_log!(self, "# {}", self.template); } fn write(&mut self, system: &System) { if let Ok(formatted) = self.args.format(system) { writeln_or_log!(self, "{}", formatted); } else { error_once!("Could not evaluate custom output {}", self.template); } } } #[cfg(test)] mod tests { use super::*; use super::super::tests::{test_output, testing_system}; fn format(input: &str) -> String { FormatArgs::new(input).unwrap().format(&testing_system()).unwrap() } #[test] fn parsing_index() { 
assert_eq!(parse_index("a[6]"), ("a", 6)); assert_eq!(parse_index("a"), ("a", 0)); assert_eq!(parse_index("a][6"), ("a][6", 0)); assert_eq!(parse_index("a[6][2]"), ("a[6][2]", 0)); assert_eq!(parse_index("a[6]2]"), ("a[6]2]", 0)); assert_eq!(parse_index("a[6][2"), ("a[6][2", 0)); assert_eq!(parse_index("a[b]"), ("a[b]", 0)); } #[test] fn format_args_parsing() { assert!(FormatArgs::new("one {test} two {5 } three!").is_ok()); assert!(FormatArgs::new("{3 + 4} {").is_err()); assert!(FormatArgs::new("{3 + 4} }").is_err()); assert!(FormatArgs::new("{3 + { 4}").is_err()); assert!(FormatArgs::new("{3 + {} }").is_err()); } #[test] fn formating() { assert_eq!(format("{3 + 4}"), "7"); assert_eq!(format("{pressure / bar}"), "10299.991728079816"); assert_eq!(format("{temperature / K}"), "38083.04389172312"); assert_eq!(format("{volume / A^3}"), "1000"); assert_eq!(format("{cell.a / A}"), "10"); assert_eq!(format("{cell.b / A}"), "10"); assert_eq!(format("{cell.c / A}"), "10"); assert_eq!(format("{cell.alpha}"), "90"); assert_eq!(format("{cell.beta}"), "90"); assert_eq!(format("{cell.gamma}"), "90"); assert_eq!(format("{stress.xx / bar}"), "30899.975184239443"); assert_eq!(format("{stress.yy / bar}"), "0"); assert_eq!(format("{stress.zz / bar}"), "0"); assert_eq!(format("{stress.xy / bar}"), "0"); assert_eq!(format("{stress.xz / bar}"), "0"); assert_eq!(format("{stress.yz / bar}"), "0"); assert_eq!(format("{x[1]}"), "1.3"); assert_eq!(format("{vy[1]}"), "0"); assert_eq!(format("{vx[0]}"), "0.1"); assert_eq!(format("{cell.a / bohr}"), "18.897261328856434"); assert_eq!(format("{cell.a / nm}"), "1"); assert_eq!(format("{cell.a / m}"), "0.000000001"); assert_eq!(format("{step}"), "42"); } #[test] fn custom() { let template = "p {pressure/bar} t {3 * 5} \tff"; test_output( |path| Box::new(CustomOutput::new(path, template).unwrap()), "# Custom output # p {pressure/bar} t {3 * 5} \tff p 10299.991728079816 t 15 \tff ", ); } }
use std::error; use std::fmt; use std::fs::File; use std::io::{self, BufWriter}; use std::io::prelude::*; use std::path::{Path, PathBuf}; use caldyn::{Context, Expr}; use caldyn::Error as CaldynError; use log::error; use log_once::{warn_once, error_once}; use super::Output; use lumol_core::{units, System}; #[derive(Debug)] pub enum CustomOutputError { Io(io::Error), Expr(CaldynError), Custom(String), } impl From<io::Error> for CustomOutputError { fn from(error: io::Error) -> CustomOutputError { CustomOutputError::Io(error) } } impl From<CaldynError> for CustomOutputError { fn from(error: CaldynError) -> CustomOutputError { CustomOutputError::Expr(error) } } impl From<String> for CustomOutputError { fn from(error: String) -> CustomOutputError { CustomOutputError::Custom(error) } } impl fmt::Display for CustomOutputError { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { match *self { CustomOutputError::Io(ref err) => write!(fmt, "{}", err)?, CustomOutputError::Expr(ref err) => write!(fmt, "{}", err)?, CustomOutputError::Custom(ref err) => write!(fmt, "{}", err)?, } Ok(()) } } impl error::Error for CustomOutputError { fn description(&self) -> &str { match *self { CustomOutputError::Io(ref err) => err.description(), CustomOutputError::Expr(ref err) => err.description(), CustomOutputError::Custom(ref err) => err, } } fn cause(&self) -> Option<&dyn error::Error> { match *self { CustomOutputError::Io(ref err) => Some(err), CustomOutputError::Expr(ref err) => Some(err), CustomOutputError::Custom(_) => None, } } } struct FormatArgs { args: Vec<(String, Expr)>, tail: String, } impl FormatArgs { fn new(format: &str) -> Result<FormatArgs, CustomOutputError> { let mut args = Vec::new(); let mut expr = String::new(); let mut tail = String::new(); let mut in_expr = false; for c in format.chars() { match c { '{' if !in_expr => { in_expr = true; } '}' if in_expr => { in_expr = false; let sub_expr = Expr::parse(&expr)?; args.push((tail.clone(), sub_expr)); 
tail.clear(); expr.clear(); } '{' if in_expr => { return Err(CustomOutputError::Custom("found { in an expression".into())); } '}' if !in_expr => { return Err( CustomOutputError::Custom("found } outside of an expression".into()), ); } c => { if in_expr { expr.push(c); } else { tail.push(c); } } } } if in_expr { return Err(CustomOutputError::Custom("mismatched braces".into())); } Ok(FormatArgs { args: args, tail: tail, }) } fn get_context<'a>(&self, system: &'a System) -> Context<'a> { let mut context = Context::new(); context.set_query(move |name| { units::CONVERSION_FACTORS.get(name).cloned().or_else(|| { macro_rules! get_particle_data { ($index: ident, $data: ident) => ( system.particles() .$data .get($index) .cloned() .unwrap_or_else(|| { warn_once!( "index out of bound in custom output: \ index is {}, but we only have {} atoms", $index, system.size() ); return num_traits::Zero::zero(); }) ); } if name.contains('[') { let (name, index) = parse_index(name); match name { "x" => Some(get_particle_data!(index, position)[0]), "y" => Some(get_particle_data!(index, position)[1]), "z" => Some(get_particle_data!(index, position)[2]), "vx" => Some(get_particle_data!(index, velocity)[0]), "vy" => Some(get_particle_data!(index, velocity)[1]), "vz" => Some(get_particle_data!(index, velocity)[2]), "mass" => Some(get_particle_data!(index, mass)), "charge" => Some(get_particle_data!(index, charge)), _ => None, } } else { match name { "step" => Some(system.step as f64), "pressure" => Some(system.pressure()), "volume" => Some(system.volume()), "temperature" => Some(system.temperature()), "natoms" => Some(system.size() as f64), "cell.a" => Some(system.cell.a()), "cell.b" => Some(system.cell.b()), "cell.c" => Some(system.cell.c()), "cell.alpha" => Some(system.cell.alpha()), "cell.beta" => Some(system.cell.beta()), "cell.gamma" => Some(system.cell.gamma()), "stress.xx" => Some(system.stress()[0][0]), "stress.yy" => Some(system.stress()[1][1]), "stress.zz" => 
Some(system.stress()[2][2]), "stress.xy" => Some(system.stress()[0][1]), "stress.xz" => Some(system.stress()[0][2]), "stress.yz" => Some(system.stress()[1][2]), _ => None, } } }) }); return context; } fn format(&self, system: &System) -> Result<String, CustomOutputError> { let context = self.get_context(system); let mut output = String::new(); for &(ref string, ref expr) in &self.args { output.push_str(string); let value = expr.eval(&context)?; output.push_str(&value.to_string()); } output.push_str(&self.tail); return Ok(output); } } fn parse_index(input: &str) -> (&str, usize) { let l_brackets = input.match_indices('[').collect::<Vec<_>>(); let r_brackets = input.match_indices(']').collect::<Vec<_>>(); if l_brackets.len() != 1 || r_brackets.len() != 1 { return (input, 0); } let start = l_brackets[0].0; let end = r_brackets[0].0; if start > end { return (input, 0); } if let Ok(index) = input[(start + 1)..end].parse() { return (&input[..start], index); } else { return (input, 0); } } pub struct CustomOutput { file: BufWriter<File>, path: PathBuf, template: String, args: FormatArgs, } impl CustomOutput { pub fn new<P: AsRef<Path>>( filename: P, template: &str, ) -> Result<CustomOutput, CustomOutputError> {
} } impl Output for CustomOutput { fn setup(&mut self, _: &System) { writeln_or_log!(self, "# Custom output"); writeln_or_log!(self, "# {}", self.template); } fn write(&mut self, system: &System) { if let Ok(formatted) = self.args.format(system) { writeln_or_log!(self, "{}", formatted); } else { error_once!("Could not evaluate custom output {}", self.template); } } } #[cfg(test)] mod tests { use super::*; use super::super::tests::{test_output, testing_system}; fn format(input: &str) -> String { FormatArgs::new(input).unwrap().format(&testing_system()).unwrap() } #[test] fn parsing_index() { assert_eq!(parse_index("a[6]"), ("a", 6)); assert_eq!(parse_index("a"), ("a", 0)); assert_eq!(parse_index("a][6"), ("a][6", 0)); assert_eq!(parse_index("a[6][2]"), ("a[6][2]", 0)); assert_eq!(parse_index("a[6]2]"), ("a[6]2]", 0)); assert_eq!(parse_index("a[6][2"), ("a[6][2", 0)); assert_eq!(parse_index("a[b]"), ("a[b]", 0)); } #[test] fn format_args_parsing() { assert!(FormatArgs::new("one {test} two {5 } three!").is_ok()); assert!(FormatArgs::new("{3 + 4} {").is_err()); assert!(FormatArgs::new("{3 + 4} }").is_err()); assert!(FormatArgs::new("{3 + { 4}").is_err()); assert!(FormatArgs::new("{3 + {} }").is_err()); } #[test] fn formating() { assert_eq!(format("{3 + 4}"), "7"); assert_eq!(format("{pressure / bar}"), "10299.991728079816"); assert_eq!(format("{temperature / K}"), "38083.04389172312"); assert_eq!(format("{volume / A^3}"), "1000"); assert_eq!(format("{cell.a / A}"), "10"); assert_eq!(format("{cell.b / A}"), "10"); assert_eq!(format("{cell.c / A}"), "10"); assert_eq!(format("{cell.alpha}"), "90"); assert_eq!(format("{cell.beta}"), "90"); assert_eq!(format("{cell.gamma}"), "90"); assert_eq!(format("{stress.xx / bar}"), "30899.975184239443"); assert_eq!(format("{stress.yy / bar}"), "0"); assert_eq!(format("{stress.zz / bar}"), "0"); assert_eq!(format("{stress.xy / bar}"), "0"); assert_eq!(format("{stress.xz / bar}"), "0"); assert_eq!(format("{stress.yz / bar}"), "0"); 
assert_eq!(format("{x[1]}"), "1.3"); assert_eq!(format("{vy[1]}"), "0"); assert_eq!(format("{vx[0]}"), "0.1"); assert_eq!(format("{cell.a / bohr}"), "18.897261328856434"); assert_eq!(format("{cell.a / nm}"), "1"); assert_eq!(format("{cell.a / m}"), "0.000000001"); assert_eq!(format("{step}"), "42"); } #[test] fn custom() { let template = "p {pressure/bar} t {3 * 5} \tff"; test_output( |path| Box::new(CustomOutput::new(path, template).unwrap()), "# Custom output # p {pressure/bar} t {3 * 5} \tff p 10299.991728079816 t 15 \tff ", ); } }
Ok(CustomOutput { file: BufWriter::new(File::create(filename.as_ref())?), path: filename.as_ref().to_owned(), template: template.into(), args: FormatArgs::new(template)?, })
call_expression
[ { "content": "/// Scale all velocities in the `System` such that the `system` temperature\n\n/// is `temperature`.\n\npub fn scale(system: &mut System, temperature: f64) {\n\n let instant_temperature = system.temperature();\n\n let factor = f64::sqrt(temperature / instant_temperature);\n\n for velocity...
Rust
import/src/location/record.rs
pixunil/tiny-transport
7aee05ba0303e005768e44cabd995b6413c4ce85
use std::collections::HashMap; use std::rc::Rc; use serde_derive::Deserialize; use super::{Location, LocationId, LocationImportError, LocationKind}; use crate::coord::project; #[derive(Debug, PartialEq, Deserialize)] pub(super) struct LocationRecord { stop_id: LocationId, #[serde(rename = "location_type")] location_kind: LocationKind, parent_station: Option<LocationId>, stop_name: String, stop_lat: f64, stop_lon: f64, } impl LocationRecord { pub(super) fn stop_id(&self) -> &LocationId { &self.stop_id } pub(super) fn parent_station(&self) -> Option<&LocationId> { self.parent_station.as_ref() } pub(super) fn try_import( self, locations: &mut HashMap<LocationId, Rc<Location>>, ) -> Result<(), Self> { match self.parent_station { Some(ref parent_id) => match locations.get(parent_id).cloned() { Some(parent) => { locations.insert(self.stop_id, parent); Ok(()) } None => Err(self), }, None => { let id = self.stop_id.clone(); locations.insert(id, Rc::new(self.into())); Ok(()) } } } pub(super) fn import_or_enqueue( self, locations: &mut HashMap<LocationId, Rc<Location>>, queues: &mut (Vec<Self>, Vec<Self>), ) -> Result<(), LocationImportError> { if let Err(record) = self.try_import(locations) { match record.location_kind { LocationKind::Station => { return Err(LocationImportError::StationHasParent(record)); } LocationKind::Stop | LocationKind::Entrance | LocationKind::GenericNode => { queues.0.push(record); } LocationKind::BoardingArea => { queues.1.push(record); } } } Ok(()) } } impl Into<Location> for LocationRecord { fn into(self) -> Location { let position = project(self.stop_lat, self.stop_lon); Location::new(self.stop_id, self.stop_name, position) } } #[cfg(test)] mod tests { use super::*; use crate::fixtures::locations; use test_utils::map; fn main_station_record() -> LocationRecord { LocationRecord { stop_id: "hauptbahnhof".into(), location_kind: LocationKind::Station, parent_station: None, stop_name: "Hauptbahnhof".to_string(), stop_lat: 52.526, stop_lon: 13.369, } } 
fn main_station_platform_record() -> LocationRecord { LocationRecord { stop_id: "hauptbahnhof_1".into(), location_kind: LocationKind::Stop, parent_station: Some("hauptbahnhof".into()), stop_name: "Hauptbahnhof Gleis 1".to_string(), stop_lat: 52.526, stop_lon: 13.369, } } #[test] fn test_into_location() { let location: Location = main_station_record().into(); assert_eq!(location, locations::hauptbahnhof()); } #[test] fn test_import_parent() { let mut locations = HashMap::new(); main_station_record().try_import(&mut locations).unwrap(); assert_eq!( locations, map! { "hauptbahnhof" => Rc::new(locations::hauptbahnhof()), } ); } #[test] fn test_import_child_without_parent() { let mut locations = HashMap::new(); let record = main_station_platform_record() .try_import(&mut locations) .unwrap_err(); assert_eq!(record, main_station_platform_record()); assert!(locations.is_empty()); } #[test] fn test_import_child_with_parent() { let mut locations = map! { "hauptbahnhof" => Rc::new(locations::hauptbahnhof()), }; main_station_platform_record() .try_import(&mut locations) .unwrap(); assert_eq!( locations, map! { "hauptbahnhof" => Rc::new(locations::hauptbahnhof()), "hauptbahnhof_1" => Rc::new(locations::hauptbahnhof()), } ); } }
use std::collections::HashMap; use std::rc::Rc; use serde_derive::Deserialize; use super::{Location, LocationId, LocationImportError, LocationKind}; use crate::coord::project; #[derive(Debug, PartialEq, Deserialize)] pub(super) struct LocationRecord { stop_id: LocationId, #[serde(rename = "location_type")] location_kind: LocationKind, parent_station: Option<LocationId>, stop_name: String, stop_lat: f64, stop_lon: f64, } impl LocationRecord { pub(super) fn stop_id(&self) -> &LocationId { &self.stop_id } pub(super) fn parent_station(&self) -> Option<&LocationId> { self.parent_station.as_ref() } pub(super) fn try_import( self, locations: &mut HashMap<LocationId, Rc<Location>>, ) -> Result<(), Self> { match self.parent_station { Some(ref parent_id) => match locations.get(parent_id).cloned() { Some(parent) => { locations.insert(self.stop_id, parent); Ok(()) } None => Err(self), }, None => { let id = self.stop_id.clone(); locations.insert(id, Rc::new(self.into())); Ok(()) } } } pub(super) fn import_or_enqueue( self, locations: &mut HashMap<LocationId, Rc<Location>>, queues: &mut (Vec<Self>, Vec<Self>), ) -> Result<(), LocationImportError> {
Ok(()) } } impl Into<Location> for LocationRecord { fn into(self) -> Location { let position = project(self.stop_lat, self.stop_lon); Location::new(self.stop_id, self.stop_name, position) } } #[cfg(test)] mod tests { use super::*; use crate::fixtures::locations; use test_utils::map; fn main_station_record() -> LocationRecord { LocationRecord { stop_id: "hauptbahnhof".into(), location_kind: LocationKind::Station, parent_station: None, stop_name: "Hauptbahnhof".to_string(), stop_lat: 52.526, stop_lon: 13.369, } } fn main_station_platform_record() -> LocationRecord { LocationRecord { stop_id: "hauptbahnhof_1".into(), location_kind: LocationKind::Stop, parent_station: Some("hauptbahnhof".into()), stop_name: "Hauptbahnhof Gleis 1".to_string(), stop_lat: 52.526, stop_lon: 13.369, } } #[test] fn test_into_location() { let location: Location = main_station_record().into(); assert_eq!(location, locations::hauptbahnhof()); } #[test] fn test_import_parent() { let mut locations = HashMap::new(); main_station_record().try_import(&mut locations).unwrap(); assert_eq!( locations, map! { "hauptbahnhof" => Rc::new(locations::hauptbahnhof()), } ); } #[test] fn test_import_child_without_parent() { let mut locations = HashMap::new(); let record = main_station_platform_record() .try_import(&mut locations) .unwrap_err(); assert_eq!(record, main_station_platform_record()); assert!(locations.is_empty()); } #[test] fn test_import_child_with_parent() { let mut locations = map! { "hauptbahnhof" => Rc::new(locations::hauptbahnhof()), }; main_station_platform_record() .try_import(&mut locations) .unwrap(); assert_eq!( locations, map! { "hauptbahnhof" => Rc::new(locations::hauptbahnhof()), "hauptbahnhof_1" => Rc::new(locations::hauptbahnhof()), } ); } }
if let Err(record) = self.try_import(locations) { match record.location_kind { LocationKind::Station => { return Err(LocationImportError::StationHasParent(record)); } LocationKind::Stop | LocationKind::Entrance | LocationKind::GenericNode => { queues.0.push(record); } LocationKind::BoardingArea => { queues.1.push(record); } } }
if_condition
[ { "content": "pub fn project_back(position: Point) -> (f64, f64) {\n\n let utm = Utm::new(position.x, position.y, true, 33, 'U', false);\n\n let coord = Coord::from(utm);\n\n (coord.lat, coord.lon)\n\n}\n\n\n", "file_path": "import/src/coord.rs", "rank": 0, "score": 91804.71011538121 }, ...
Rust
idp2p-client/did/microledger.rs
idp2p/idp2p
c5dec982dd03d4c7c0ea6af605042df21f62906f
use super::{ eventlog::{EventLog, EventLogChange, EventLogPayload} }; use crate::IdentityError; use idp2p_common::{ anyhow::Result, chrono::prelude::*, encode, encode_vec, generate_json_cid, hash, IdKeyDigest, IDP2P_ED25519, Idp2pCodec, }; use serde::{Deserialize, Serialize}; use std::collections::HashMap; #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct AssertionMethod { pub valid_at: i64, pub expired_at: Option<i64>, pub ver_method: VerificationMethod, } #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct MicroLedgerState { pub event_id: String, #[serde(with = "encode_vec")] pub next_key_digest: IdKeyDigest, #[serde(with = "encode_vec")] pub recovery_key_digest: IdKeyDigest, pub assertion_keys: Vec<AssertionMethod>, pub authentication_key: Option<VerificationMethod>, pub agreement_key: Option<VerificationMethod>, pub proofs: HashMap<String, String>, } #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct MicroLedgerInception { #[serde(rename = "keyType")] pub key_type: String, #[serde(with = "encode_vec", rename = "recoveryKeyDigest")] pub recovery_key_digest: Vec<u8>, #[serde(with = "encode_vec", rename = "nextKeyDigest")] pub next_key_digest: IdKeyDigest, } impl MicroLedgerInception { pub fn get_id(&self) -> String { generate_json_cid(self).unwrap() } } #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct MicroLedger { pub inception: MicroLedgerInception, #[serde(skip_serializing_if = "Vec::is_empty", default)] pub events: Vec<EventLog>, } impl MicroLedger { pub fn new(recovery_key_digest: &[u8], next_key_digest: &[u8]) -> Self { let inception = MicroLedgerInception { key_type: IDP2P_ED25519.to_owned(), recovery_key_digest: recovery_key_digest.to_owned(), next_key_digest: next_key_digest.to_owned(), }; MicroLedger { inception, events: vec![], } } pub fn create_event( &self, signer_key: &[u8], next_digest: &[u8], change: Vec<EventLogChange>, ) -> EventLogPayload { let previous = 
self.get_previous_id(); EventLogPayload { previous: previous, signer_key: signer_key.to_owned(), next_key_digest: next_digest.to_owned(), change: change, timestamp: Utc::now().timestamp(), } } pub fn save_event(&mut self, payload: EventLogPayload, proof: &[u8]) { let event_log = EventLog::new(payload, proof); self.events.push(event_log); } pub fn verify(&self, cid: &str) -> Result<MicroLedgerState, IdentityError> { let mut state = MicroLedgerState { event_id: self.inception.get_id(), recovery_key_digest: self.inception.recovery_key_digest.clone(), next_key_digest: self.inception.next_key_digest.clone(), assertion_keys: vec![], authentication_key: None, agreement_key: None, proofs: HashMap::new(), }; check!(cid == self.inception.get_id(), IdentityError::InvalidId); for event in &self.events { let previous_valid = event.payload.previous == state.event_id; check!(previous_valid, IdentityError::InvalidPrevious); let event_valid = event.verify(&event.payload.signer_key); check!(event_valid, IdentityError::InvalidEventSignature); let signer_digest = hash(&event.payload.signer_key); check!( signer_digest == state.next_key_digest, IdentityError::InvalidSigner ); for change in &event.payload.change { match &change { EventLogChange::SetAssertionKey { verification_method, } => { let previous_key = state.assertion_keys.last_mut(); if let Some(previous_key) = previous_key { previous_key.expired_at = Some(event.payload.timestamp); } let assertion_method = AssertionMethod { valid_at: event.payload.timestamp, expired_at: None, ver_method: verification_method.clone(), }; state.assertion_keys.push(assertion_method); } EventLogChange::SetAuthenticationKey { verification_method, } => { state.authentication_key = Some(verification_method.clone()); } EventLogChange::SetAgreementKey { verification_method, } => { state.agreement_key = Some(verification_method.clone()); } EventLogChange::SetProof(stmt) => { let key = encode(&stmt.key); let value = encode(&stmt.value); 
state.proofs.insert(key, value); } } } state.next_key_digest = event.payload.next_key_digest.clone(); state.event_id = event.get_id(); } Ok(state) } pub fn get_previous_id(&self) -> String { let previous_id = if self.events.len() == 0 { self.inception.get_id() } else { let e = self.events.last().unwrap(); e.get_id() }; previous_id } } #[cfg(test)] mod tests { use super::*; use crate::json::did::eventlog::*; use idp2p_common::secret::EdSecret; use idp2p_common::ED25519; #[test] fn id_test() { let expected_id = "bagaaieraqun2pn4ycd3b4nq4ptyzfnxea4hohwlgd7vdu3cifiy2fowvvpuq"; let ledger = create_microledger().0; assert_eq!(ledger.inception.get_id(), expected_id); } #[test] fn verify_test() { let ledger = create_microledger().0; let result = ledger.verify(&ledger.inception.get_id()); assert!(result.is_ok(), "{:?}", result); } #[test] fn verify_invalid_id_test() { let ledger = create_microledger().0; let result = ledger.verify("1"); let is_err = matches!(result, Err(crate::IdentityError::InvalidId)); assert!(is_err, "{:?}", result); } #[test] fn verify_valid_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_proof = EventLogChange::SetProof(ProofStatement { key: vec![1], value: vec![1], }); let ver_method = VerificationMethod { id: id.clone(), controller: id.clone(), typ: ED25519.to_string(), bytes: secret.to_publickey().to_vec(), }; let set_assertion = EventLogChange::SetAssertionKey { verification_method: ver_method.clone(), }; let set_authentication = EventLogChange::SetAuthenticationKey { verification_method: ver_method.clone(), }; let set_agreement = EventLogChange::SetAgreementKey { verification_method: ver_method.clone(), }; let change = vec![ set_proof, set_assertion.clone(), set_authentication, set_agreement, ]; let signer = secret.to_publickey(); let payload = ledger.create_event(&signer, &secret.to_publickey_digest().unwrap(), change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); let 
change = vec![set_assertion]; let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); let result = ledger.verify(&id); assert!(result.is_ok()); } #[test] fn verify_invalid_previous_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_change = EventLogChange::SetProof(ProofStatement { key: vec![], value: vec![], }); let change = vec![set_change]; let signer = secret.to_publickey(); let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); ledger.events[0].payload.previous = "1".to_owned(); let result = ledger.verify(&id); let is_err = matches!(result, Err(crate::IdentityError::InvalidPrevious)); assert!(is_err, "{:?}", result); } #[test] fn verify_invalid_signature_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_change = EventLogChange::SetProof(ProofStatement { key: vec![], value: vec![], }); let change = vec![set_change]; let signer = secret.to_publickey(); let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); ledger.events[0].proof = vec![0; 64]; let result = ledger.verify(&id); let is_err = matches!(result, Err(crate::IdentityError::InvalidEventSignature)); assert!(is_err, "{:?}", result); } #[test] fn verify_invalid_signer_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_change = EventLogChange::SetProof(ProofStatement { key: vec![], value: vec![], }); let change = vec![set_change]; let signer = secret.to_publickey(); let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); let new_secret = EdSecret::new(); let new_ed_key = new_secret.to_publickey(); ledger.events[0].payload.signer_key = new_ed_key.to_vec(); 
ledger.events[0].proof = new_secret.sign(&ledger.events[0].payload).to_vec(); let result = ledger.verify(&id); let is_err = matches!(result, Err(crate::IdentityError::InvalidSigner)); assert!(is_err, "{:?}", result); } fn create_microledger() -> (MicroLedger, idp2p_common::secret::EdSecret) { let secret_str = "bd6yg2qeifnixj4x3z2fclp5wd3i6ysjlfkxewqqt2thie6lfnkma"; let secret = idp2p_common::secret::EdSecret::from_str(secret_str).unwrap(); let d = secret.to_publickey_digest().unwrap(); let ledger = MicroLedger::new(&d, &d); (ledger, secret) } }
use super::{ eventlog::{EventLog, EventLogChange, EventLogPayload} }; use crate::IdentityError; use idp2p_common::{ anyhow::Result, chrono::prelude::*, encode, encode_vec, generate_json_cid, hash, IdKeyDigest, IDP2P_ED25519, Idp2pCodec, }; use serde::{Deserialize, Serialize}; use std::collections::HashMap; #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct AssertionMethod { pub valid_at: i64, pub expired_at: Option<i64>, pub ver_method: VerificationMethod, } #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct MicroLedgerState { pub event_id: String, #[serde(with = "encode_vec")] pub next_key_digest: IdKeyDigest, #[serde(with = "encode_vec")] pub recovery_key_digest: IdKeyDigest, pub assertion_keys: Vec<AssertionMethod>, pub authentication_key: Option<VerificationMethod>, pub agreement_key: Option<VerificationMethod>, pub proofs: HashMap<String, String>, } #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct MicroLedgerInception { #[serde(rename = "keyType")] pub key_type: String, #[serde(with = "encode_vec", rename = "recoveryKeyDigest")] pub recovery_key_digest: Vec<u8>, #[serde(with = "encode_vec", rename = "nextKeyDigest")] pub next_key_digest: IdKeyDigest, } impl MicroLedgerInception { pub fn get_id(&self) -> String { generate_json_cid(self).unwrap() } } #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct MicroLedger { pub inception: MicroLedgerInception, #[serde(skip_serializing_if = "Vec::is_empty", default)] pub events: Vec<EventLog>, } impl MicroLedger { pub fn new(recovery_key_digest: &[u8], next_key_digest: &[u8]) -> Self { let inception = MicroLedgerInception { key_type: IDP2P_ED25519.to_owned(), recovery_key_digest: recovery_key_digest.to_owned(), next_key_digest: next_key_digest.to_owned(), }; MicroLedger { inception, events: vec![], } } pub fn create_event( &self, signer_key: &[u8], next_digest: &[u8], change: Vec<EventLogChange>, ) -> EventLogPayload { let previous = 
self.get_previous_id(); EventLogPayload { previous: previous, signer_key: signer_key.to_owned(), next_key_digest: next_digest.to_owned(), change: change, timestamp: Utc::now().timestamp(), } } pub fn save_event(&mut self, payload: EventLogPayload, proof: &[u8]) { let event_log = EventLog::new(payload, proof); self.events.push(event_log); } pub fn verify(&self, cid: &str) -> Result<MicroLedgerState, IdentityError> { let mut state = MicroLedgerState { event_id: self.inception.get_id(), recovery_key_digest: self.inception.recovery_key_digest.clone(), next_key_digest: self.inception.next_key_digest.clone(), assertion_keys: vec![], authentication_key: None, agreement_key: None, proofs: HashMap::new(), }; check!(cid == self.inception.get_id(), IdentityError::InvalidId); for event in &self.events { let previous_valid = event.payload.previous == state.event_id; check!(previous_valid, IdentityError::InvalidPrevious); let event_valid = event.verify(&event.payload.signer_key); check!(event_valid, IdentityError::InvalidEventSignature); let signer_digest = hash(&event.payload.signer_key); check!( signer_digest == state.next_key_digest, IdentityError::InvalidSigner ); for change in &event.payload.change { match &change { EventLogChange::SetAssertionKey { verification_method, } => { let previous_key = state.assertion_keys.last_mut(); if let Some(previous_key) = previous_key { previous_key.expired_at = Some(event.payload.timestamp); } let assertion_method = AssertionMethod { valid_at: event.payload.timestamp, expired_at: None, ver_method: verification_method.clone(), }; state.assertion_keys.push(assertion_method); } EventLogChange::SetAuthenticationKey { verification_method, } => { state.authentication_key = Some(verification_method.clone()); } EventLogChange::SetAgreementKey { verification_method, } => { state.agreement_key = Some(verification_method.clone()); } EventLogChange::SetProof(stmt) => { let key = encode(&stmt.key); let value = encode(&stmt.value); 
state.proofs.insert(key, value); } } } state.next_key_digest = event.payload.next_key_digest.clone(); state.event_id = event.get_id(); } Ok(state) } pub fn get_previous_id(&self) -> String { let previous_id = if self.events.len() == 0 { self.inception.get_id() } else { let e = self.events.last().unwrap(); e.get_id() }; previous_id } } #[cfg(test)] mod tests { use super::*; use crate::json::did::eventlog::*; use idp2p_common::secret::EdSecret; use idp2p_common::ED25519; #[test] fn id_test() { let expected_id = "bagaaieraqun2pn4ycd3b4nq4ptyzfnxea4hohwlgd7vdu3cifiy2fowvvpuq"; let ledger = create_microledger().0; assert_eq!(ledger.inception.get_id(), expected_id); } #[test] fn verify_test() { let ledger = create_microledger().0; let result = ledger.verify(&ledger.inception.get_id()); assert!(result.is_ok(), "{:?}", result); } #[test] fn verify_invalid_id_test() { let ledger = create_microledger().0; let result = ledger.verify("1"); let is_err = matches!(result, Err(crate::IdentityError::InvalidId)); assert!(is_err, "{:?}", result); } #[test] fn verify_valid_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_proof = EventLogChange::SetProof(ProofStatement { key: vec![1], value: vec![1], }); let ver_method = VerificationMethod { id: id.clone(), controller: id.clone(), typ: ED25519.to_string(), bytes: secret.to_publickey().to_vec(), }; let set_assertion = EventLogChange::SetAssertionKey { verification_method: ver_method.clone(), }; let set_authentication = EventLogChange::SetAuthenticationKey { verification_method: ver_method.clone(), }; let set_agreement = EventLogChange::SetAgreementKey { verification_method: ver_method.clone(), }; let change = vec![ set_proof, set_assertion.clone(), set_authentication, set_agreement, ]; let signer = secret.to_publickey(); let payload = ledger.create_event(&signer, &secret.to_publickey_digest().unwrap(
) -> (MicroLedger, idp2p_common::secret::EdSecret) { let secret_str = "bd6yg2qeifnixj4x3z2fclp5wd3i6ysjlfkxewqqt2thie6lfnkma"; let secret = idp2p_common::secret::EdSecret::from_str(secret_str).unwrap(); let d = secret.to_publickey_digest().unwrap(); let ledger = MicroLedger::new(&d, &d); (ledger, secret) } }
), change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); let change = vec![set_assertion]; let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); let result = ledger.verify(&id); assert!(result.is_ok()); } #[test] fn verify_invalid_previous_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_change = EventLogChange::SetProof(ProofStatement { key: vec![], value: vec![], }); let change = vec![set_change]; let signer = secret.to_publickey(); let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); ledger.events[0].payload.previous = "1".to_owned(); let result = ledger.verify(&id); let is_err = matches!(result, Err(crate::IdentityError::InvalidPrevious)); assert!(is_err, "{:?}", result); } #[test] fn verify_invalid_signature_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_change = EventLogChange::SetProof(ProofStatement { key: vec![], value: vec![], }); let change = vec![set_change]; let signer = secret.to_publickey(); let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); ledger.events[0].proof = vec![0; 64]; let result = ledger.verify(&id); let is_err = matches!(result, Err(crate::IdentityError::InvalidEventSignature)); assert!(is_err, "{:?}", result); } #[test] fn verify_invalid_signer_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_change = EventLogChange::SetProof(ProofStatement { key: vec![], value: vec![], }); let change = vec![set_change]; let signer = secret.to_publickey(); let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); let new_secret = EdSecret::new(); let new_ed_key = 
new_secret.to_publickey(); ledger.events[0].payload.signer_key = new_ed_key.to_vec(); ledger.events[0].proof = new_secret.sign(&ledger.events[0].payload).to_vec(); let result = ledger.verify(&id); let is_err = matches!(result, Err(crate::IdentityError::InvalidSigner)); assert!(is_err, "{:?}", result); } fn create_microledger(
random
[ { "content": "pub fn encode_bytes(value: &[u8]) -> Result<String> {\n\n let mb64 = multibase::encode(Base::Base64Url, value);\n\n Ok(mb64[1..].to_owned())\n\n}\n\n\n", "file_path": "idp2p-common/src/base64url.rs", "rank": 0, "score": 310203.4537153712 }, { "content": "pub fn decode_str...
Rust
src/demo.rs
Tri-stone/cosmwasm
4c3f22abdc4ec6e957abe15ac58e2eefc2751fe8
#![allow(dead_code)] use crate::traits::{ReadonlyStorage, Storage}; fn len(prefix: &[u8]) -> [u8; 2] { if prefix.len() > 0xFFFF { panic!("only supports namespaces up to length 0xFFFF") } let length_bytes = (prefix.len() as u64).to_be_bytes(); [length_bytes[6], length_bytes[7]] } fn key_prefix(namespace: &[u8]) -> Vec<u8> { let mut out = Vec::with_capacity(namespace.len() + 2); out.extend_from_slice(&len(namespace)); out.extend_from_slice(namespace); out } fn key_prefix_nested(namespaces: &[&[u8]]) -> Vec<u8> { let mut size = namespaces.len(); for &namespace in namespaces { size += namespace.len() + 2; } let mut out = Vec::with_capacity(size); for &namespace in namespaces { let prefix = key_prefix(namespace); out.extend_from_slice(&prefix); } out } pub struct ReadonlyPrefixedStorage<'a, T: ReadonlyStorage> { prefix: Vec<u8>, storage: &'a T, } impl<'a, T: ReadonlyStorage> ReadonlyPrefixedStorage<'a, T> { fn new(namespace: &[u8], storage: &'a T) -> Self { ReadonlyPrefixedStorage { prefix: key_prefix(namespace), storage, } } fn multilevel(namespaces: &[&[u8]], storage: &'a T) -> Self { ReadonlyPrefixedStorage { prefix: key_prefix_nested(namespaces), storage, } } } impl<'a, T: ReadonlyStorage> ReadonlyStorage for ReadonlyPrefixedStorage<'a, T> { fn get(&self, key: &[u8]) -> Option<Vec<u8>> { let mut k = self.prefix.clone(); k.extend_from_slice(key); self.storage.get(&k) } } pub struct PrefixedStorage<'a, T: Storage> { prefix: Vec<u8>, storage: &'a mut T, } impl<'a, T: Storage> PrefixedStorage<'a, T> { fn new(namespace: &[u8], storage: &'a mut T) -> Self { PrefixedStorage { prefix: key_prefix(namespace), storage, } } fn multilevel(namespaces: &[&[u8]], storage: &'a mut T) -> Self { PrefixedStorage { prefix: key_prefix_nested(namespaces), storage, } } } impl<'a, T: Storage> ReadonlyStorage for PrefixedStorage<'a, T> { fn get(&self, key: &[u8]) -> Option<Vec<u8>> { let mut k = self.prefix.clone(); k.extend_from_slice(key); self.storage.get(&k) } } impl<'a, T: Storage> 
Storage for PrefixedStorage<'a, T> { fn set(&mut self, key: &[u8], value: &[u8]) { let mut k = self.prefix.clone(); k.extend_from_slice(key); self.storage.set(&k, value) } } #[cfg(test)] mod test { use super::*; use crate::mock::MockStorage; #[test] fn key_prefix_works() { assert_eq!(key_prefix(b""), b"\x00\x00"); assert_eq!(key_prefix(b"a"), b"\x00\x01a"); assert_eq!(key_prefix(b"ab"), b"\x00\x02ab"); assert_eq!(key_prefix(b"abc"), b"\x00\x03abc"); } #[test] fn key_prefix_works_for_long_prefix() { let long_namespace1 = vec![0; 256]; let prefix1 = key_prefix(&long_namespace1); assert_eq!(prefix1.len(), 256 + 2); assert_eq!(&prefix1[0..2], b"\x01\x00"); let long_namespace2 = vec![0; 30000]; let prefix2 = key_prefix(&long_namespace2); assert_eq!(prefix2.len(), 30000 + 2); assert_eq!(&prefix2[0..2], b"\x75\x30"); let long_namespace3 = vec![0; 0xFFFF]; let prefix3 = key_prefix(&long_namespace3); assert_eq!(prefix3.len(), 0xFFFF + 2); assert_eq!(&prefix3[0..2], b"\xFF\xFF"); } #[test] #[should_panic(expected = "only supports namespaces up to length 0xFFFF")] fn key_prefix_panics_for_too_long_prefix() { let limit = 0xFFFF; let long_namespace = vec![0; limit + 1]; key_prefix(&long_namespace); } #[test] fn key_prefix_nested_works() { assert_eq!(key_prefix_nested(&[]), b""); assert_eq!(key_prefix_nested(&[b""]), b"\x00\x00"); assert_eq!(key_prefix_nested(&[b"", b""]), b"\x00\x00\x00\x00"); assert_eq!(key_prefix_nested(&[b"a"]), b"\x00\x01a"); assert_eq!(key_prefix_nested(&[b"a", b"ab"]), b"\x00\x01a\x00\x02ab"); assert_eq!( key_prefix_nested(&[b"a", b"ab", b"abc"]), b"\x00\x01a\x00\x02ab\x00\x03abc" ); } #[test] fn prefix_safe() { let mut storage = MockStorage::new(); let mut foo = PrefixedStorage::new(b"foo", &mut storage); foo.set(b"bar", b"gotcha"); assert_eq!(Some(b"gotcha".to_vec()), foo.get(b"bar")); let rfoo = ReadonlyPrefixedStorage::new(b"foo", &storage); assert_eq!(Some(b"gotcha".to_vec()), rfoo.get(b"bar")); let fo = ReadonlyPrefixedStorage::new(b"fo", &storage); 
assert_eq!(None, fo.get(b"obar")); } #[test] fn multi_level() { let mut storage = MockStorage::new(); let mut foo = PrefixedStorage::new(b"foo", &mut storage); let mut bar = PrefixedStorage::new(b"bar", &mut foo); bar.set(b"baz", b"winner"); let loader = ReadonlyPrefixedStorage::multilevel(&[b"foo", b"bar"], &storage); assert_eq!(Some(b"winner".to_vec()), loader.get(b"baz")); let mut foobar = PrefixedStorage::multilevel(&[b"foo", b"bar"], &mut storage); foobar.set(b"second", b"time"); let a = ReadonlyPrefixedStorage::new(b"foo", &storage); let b = ReadonlyPrefixedStorage::new(b"bar", &a); assert_eq!(Some(b"time".to_vec()), b.get(b"second")); } }
#![allow(dead_code)] use crate::traits::{ReadonlyStorage, Storage}; fn len(prefix: &[u8]) -> [u8; 2] { if prefix.len() > 0xFFFF { panic!("only supports namespaces up to length 0xFFFF") } let length_bytes = (prefix.len() as u64).to_be_bytes(); [length_bytes[6], length_bytes[7]] } fn key_prefix(namespace: &[u8]) -> Vec<u8> { let mut out = Vec::with_capacity(namespace.len() + 2); out.extend_from_slice(&len(namespace)); out.extend_from_slice(namespace); out } fn key_prefix_nested(namespaces: &[&[u8]]) -> Vec<u8> { let mut size = namespaces.len(); for &namespace in namespaces { size += namespace.len() + 2; } let mut out = Vec::with_capacity(size); for &namespace in namespaces { let prefix = key_prefix(namespace); out.extend_from_slice(&prefix); } out } pub struct ReadonlyPrefixedStorage<'a, T: ReadonlyStorage> { prefix: Vec<u8>, storage: &'a T, } impl<'a, T: ReadonlyStorage> ReadonlyPrefixedStorage<'a, T> { fn new(namespace: &[u8], storage: &'a T) -> Self { ReadonlyPrefixedStorage { prefix: key_prefix(namespace), storage, } } fn multilevel(namespaces: &[&[u8]], storage: &'a T) -> Self { ReadonlyPrefixedStorage { prefix: key_prefix_nested(namespaces), storage, } } } impl<'a, T: ReadonlyStorage> ReadonlyStorage for ReadonlyPrefixedStorage<'a, T> { fn get(&self, key: &[u8]) -> Option<Vec<u8>> { let mut k = self.prefix.clone(); k.extend_from_slice(key); self.storage.get(&k) } } pub struct PrefixedStorage<'a, T: Storage> { prefix: Vec<u8>, storage: &'a mut T, } impl<'a, T: Storage> PrefixedStorage<'a, T> { fn new(namespace: &[u8], storage: &'a mut T) -> Self { PrefixedStorage { prefix: key_prefix(namespace), storage, } } fn multilevel(namespaces: &[&[u8]], storage: &'a mut T) -> Self { PrefixedStorage { prefix: key_prefix_nested(namespaces), storage, } } } impl<'a, T: Storage> ReadonlyStorage for PrefixedStorage<'a, T> { fn get(&self, key: &[u8]) -> Option<Vec<u8>> { let mut k = self.prefix.clone(); k.extend_from_slice(key); self.storage.get(&k) } } impl<'a, T: Storage> 
Storage for PrefixedStorage<'a, T> { fn set(&mut self, key: &[u8], value: &[u8]) { let mut k = self.prefix.clone(); k.extend_from_slice(key); self.storage.set(&k, value) } } #[cfg(test)] mod test { use super::*; use crate::mock::MockStorage; #[test] fn key_prefix_works() { assert_eq!(key_prefix(b""), b"\x00\x00"); assert_eq!(key_prefix(b"a"), b"\x00\x01a"); assert_eq!(key_prefix(b"ab"), b"\x00\x02ab"); assert_eq!(key_prefix(b"abc"), b"\x00\x03abc"); } #[test] fn key_prefix_works_for_long_prefix() { let long_namespace1 = vec![0; 256]; let prefix1 = key_prefix(&long_namespace1); assert_eq!(prefix1.len(), 256 + 2); assert_eq!(&prefix1[0..2], b"\x01\x00"); let long_namespace2 = vec![0; 30000]; let prefix2 = key_prefix(&long_namespace2); assert_eq!(prefix2.len(), 30000 + 2); assert_eq!(&prefix2[0..2], b"\x75\x30"); let long_namespace3 = vec![0; 0xFFFF]; let prefix3 = key_prefix(&long_namespace3); assert_eq!(prefix3.len(), 0xFFFF + 2); assert_eq!(&prefix3[0..2], b"\xFF\xFF"); } #[test] #[should_panic(expected = "only supports namespaces up to length 0xFFFF")] fn key_prefix_panics_for_too_long_prefix() { let limit = 0xFFFF; let long_namespace = vec![0; limit + 1]; key_prefix(&long_namespace); } #[test] fn key_prefix_nested_works() { assert_eq!(key_prefix_nested(&[]), b""); assert_eq!(key_prefix_nested(&[b""]), b"\x00\x00"); assert_eq!(key_prefix_nested(&[b"", b""]), b"\x00\x00\x00\x00"); assert_eq!(key_prefix_nested(&[b"a"]), b"\x00\x01a"); assert_eq!(key_prefix_nested(&[b"a", b"ab"]), b"\x00\x01a\x00\x02ab"); assert_eq!( key_prefix_nested(&[b"a", b"ab", b"abc"]), b"\x00\x01a\x00\x02ab\x00\x03abc" ); } #[test]
#[test] fn multi_level() { let mut storage = MockStorage::new(); let mut foo = PrefixedStorage::new(b"foo", &mut storage); let mut bar = PrefixedStorage::new(b"bar", &mut foo); bar.set(b"baz", b"winner"); let loader = ReadonlyPrefixedStorage::multilevel(&[b"foo", b"bar"], &storage); assert_eq!(Some(b"winner".to_vec()), loader.get(b"baz")); let mut foobar = PrefixedStorage::multilevel(&[b"foo", b"bar"], &mut storage); foobar.set(b"second", b"time"); let a = ReadonlyPrefixedStorage::new(b"foo", &storage); let b = ReadonlyPrefixedStorage::new(b"bar", &a); assert_eq!(Some(b"time".to_vec()), b.get(b"second")); } }
fn prefix_safe() { let mut storage = MockStorage::new(); let mut foo = PrefixedStorage::new(b"foo", &mut storage); foo.set(b"bar", b"gotcha"); assert_eq!(Some(b"gotcha".to_vec()), foo.get(b"bar")); let rfoo = ReadonlyPrefixedStorage::new(b"foo", &storage); assert_eq!(Some(b"gotcha".to_vec()), rfoo.get(b"bar")); let fo = ReadonlyPrefixedStorage::new(b"fo", &storage); assert_eq!(None, fo.get(b"obar")); }
function_block-full_function
[ { "content": "pub fn do_write<T: Storage>(ctx: &mut Ctx, key: u32, value: u32) {\n\n let key = read_memory(ctx, key);\n\n let value = read_memory(ctx, value);\n\n with_storage_from_context(ctx, |store: &mut T| store.set(&key, &value));\n\n}\n\n\n", "file_path": "lib/vm/src/context.rs", "rank": ...
Rust
bee-network/bee-autopeering/src/peer/mod.rs
TeeVeeEss/bee
b98bd114e763a0cebe47ac4b8055873e8009e8e6
pub(crate) mod lists; pub mod peer_id; pub mod stores; use std::{ fmt, net::{IpAddr, SocketAddr}, }; use bytes::BytesMut; use crypto::signatures::ed25519::PublicKey; use libp2p_core::{multiaddr::Protocol, Multiaddr}; use prost::{DecodeError, EncodeError, Message}; use serde::{ de::{SeqAccess, Visitor}, ser::SerializeStruct, Deserialize, Serialize, }; use self::lists::{ActivePeersList, ReplacementPeersList}; pub use self::{peer_id::PeerId, stores::PeerStore}; use crate::{ local::{ services::{ServiceMap, ServiceProtocol}, Local, }, proto, }; #[derive(Clone)] pub struct Peer { peer_id: PeerId, ip_address: IpAddr, services: ServiceMap, } impl Peer { pub fn new(address: IpAddr, public_key: PublicKey) -> Self { let peer_id = PeerId::from_public_key(public_key); Self { peer_id, ip_address: address, services: ServiceMap::default(), } } pub fn peer_id(&self) -> &PeerId { &self.peer_id } pub fn public_key(&self) -> &PublicKey { self.peer_id.public_key() } pub fn ip_address(&self) -> IpAddr { self.ip_address } pub fn port(&self, service_name: impl AsRef<str>) -> Option<u16> { self.services().get(service_name).map(|s| s.port()) } pub fn services(&self) -> &ServiceMap { &self.services } pub(crate) fn set_services(&mut self, services: ServiceMap) { self.services = services; } pub fn has_service(&self, service_name: impl AsRef<str>) -> bool { self.services.get(service_name).is_some() } pub fn add_service(&mut self, service_name: impl ToString, protocol: ServiceProtocol, port: u16) { self.services.insert(service_name.to_string(), protocol, port); } pub fn service_socketaddr(&self, service_name: impl AsRef<str>) -> Option<SocketAddr> { self.services .get(service_name) .map(|endpoint| SocketAddr::new(self.ip_address, endpoint.port())) } pub fn service_multiaddr(&self, service_name: impl AsRef<str>) -> Option<Multiaddr> { self.services.get(service_name).map(|endpoint| { let mut multiaddr = Multiaddr::empty(); match self.ip_address { IpAddr::V4(ipv4_addr) => 
multiaddr.push(Protocol::Ip4(ipv4_addr)), IpAddr::V6(ipv6_addr) => multiaddr.push(Protocol::Ip6(ipv6_addr)), }; multiaddr.push(endpoint.to_libp2p_protocol()); multiaddr }) } pub fn from_protobuf(bytes: &[u8]) -> Result<Self, Error> { proto::Peer::decode(bytes)?.try_into() } pub fn to_protobuf(&self) -> Result<BytesMut, EncodeError> { let services: proto::ServiceMap = self.services().into(); let peer = proto::Peer { ip: self.ip_address.to_string(), public_key: self.public_key().as_ref().to_vec(), services: Some(services), }; let mut buf = BytesMut::with_capacity(peer.encoded_len()); peer.encode(&mut buf)?; Ok(buf) } pub(crate) fn into_id(self) -> PeerId { self.peer_id } } #[cfg(any(feature = "rocksdb1", feature = "sled1"))] impl Peer { pub(crate) fn to_bytes(&self) -> Vec<u8> { bincode::serialize(self).expect("serialization error") } pub(crate) fn from_bytes<B: AsRef<[u8]>>(bytes: B) -> Self { bincode::deserialize(bytes.as_ref()).expect("deserialization error") } } impl fmt::Debug for Peer { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Peer") .field("peer_id", &self.peer_id.to_string()) .field("public_key", &bs58::encode(self.public_key().as_ref()).into_string()) .field("ip_address", &self.ip_address) .field("services", &self.services.to_string()) .finish() } } impl TryFrom<proto::Peer> for Peer { type Error = Error; fn try_from(peer: proto::Peer) -> Result<Self, Self::Error> { let proto::Peer { public_key, ip, services, } = peer; let ip_address: IpAddr = ip.parse().map_err(|_| Error::ParseIpAddr)?; let public_key = PublicKey::try_from_bytes(public_key.try_into().map_err(|_| Error::PublicKeyBytes)?) 
.map_err(|_| Error::PublicKeyBytes)?; let peer_id = PeerId::from_public_key(public_key); let services: ServiceMap = services.ok_or(Error::MissingServices)?.try_into()?; Ok(Self { peer_id, ip_address, services, }) } } impl From<&Peer> for proto::Peer { fn from(peer: &Peer) -> Self { Self { ip: peer.ip_address().to_string(), public_key: peer.public_key().as_ref().to_vec(), services: Some(peer.services().into()), } } } impl AsRef<Peer> for Peer { fn as_ref(&self) -> &Self { self } } impl AsRef<PeerId> for Peer { fn as_ref(&self) -> &PeerId { self.peer_id() } } #[cfg(feature = "sled")] impl From<Peer> for sled::IVec { fn from(peer: Peer) -> Self { peer.to_bytes().into() } } #[cfg(feature = "sled")] impl From<sled::IVec> for Peer { fn from(bytes: sled::IVec) -> Self { Peer::from_bytes(bytes) } } impl<'de> Deserialize<'de> for Peer { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, { deserializer.deserialize_struct("Peer", &["peer_id", "ip_address", "services"], PeerVisitor {}) } } impl Serialize for Peer { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { let mut this = serializer.serialize_struct("Peer", 3)?; this.serialize_field("peer_id", &self.peer_id)?; this.serialize_field("ip_address", &self.ip_address)?; this.serialize_field("services", &self.services)?; this.end() } } struct PeerVisitor {} impl<'de> Visitor<'de> for PeerVisitor { type Value = Peer; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("'Peer'") } fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error> where A: SeqAccess<'de>, { let peer_id = seq .next_element::<PeerId>()? .ok_or_else(|| serde::de::Error::invalid_length(0, &self))?; let ip_address = seq .next_element::<IpAddr>()? .ok_or_else(|| serde::de::Error::invalid_length(1, &self))?; let services = seq .next_element::<ServiceMap>()? 
.ok_or_else(|| serde::de::Error::invalid_length(2, &self))?; Ok(Peer { peer_id, ip_address, services, }) } } pub(crate) fn is_known( peer_id: &PeerId, local: &Local, active_peers: &ActivePeersList, replacements: &ReplacementPeersList, ) -> bool { peer_id == &local.peer_id() || active_peers.read().contains(peer_id) || replacements.read().contains(peer_id) } pub(crate) fn is_verified(peer_id: &PeerId, active_peers: &ActivePeersList) -> bool { active_peers .read() .find(peer_id) .map_or(false, |e| e.metrics().is_verified()) } pub(crate) fn set_front_and_update(peer_id: &PeerId, active_peers: &ActivePeersList) -> Option<usize> { if let Some(p) = active_peers.write().set_newest_and_get_mut(peer_id) { let metrics = p.metrics_mut(); metrics.set_last_verif_response_timestamp(); let new_count = metrics.increment_verified_count(); Some(new_count) } else { None } } #[derive(Debug, thiserror::Error)] pub enum Error { #[error("parsing peer ip address failed")] ParseIpAddr, #[error("peer services missing")] MissingServices, #[error("invalid service description")] Service(#[from] crate::local::services::Error), #[error("invalid public key bytes")] PublicKeyBytes, #[error("{0}")] ProtobufDecode(#[from] DecodeError), #[error("{0}")] ProtobufEncode(#[from] EncodeError), } #[cfg(test)] mod tests { use crypto::signatures::ed25519::SecretKey as PrivateKey; use super::*; use crate::local::services::AUTOPEERING_SERVICE_NAME; impl Peer { pub(crate) fn new_test_peer(index: u8) -> Self { let mut services = ServiceMap::default(); services.insert(AUTOPEERING_SERVICE_NAME, ServiceProtocol::Udp, 1337); let public_key = PrivateKey::generate().unwrap().public_key(); let peer_id = PeerId::from_public_key(public_key); Self { peer_id, ip_address: format!("127.0.0.{}", index).parse().unwrap(), services, } } pub(crate) fn num_services(&self) -> usize { self.services().len() } } }
pub(crate) mod lists; pub mod peer_id; pub mod stores; use std::{ fmt, net::{IpAddr, SocketAddr}, }; use bytes::BytesMut; use crypto::signatures::ed25519::PublicKey; use libp2p_core::{multiaddr::Protocol, Multiaddr}; use prost::{DecodeError, EncodeError, Message}; use serde::{ de::{SeqAccess, Visitor}, ser::SerializeStruct, Deserialize, Serialize, }; use self::lists::{ActivePeersList, ReplacementPeersList}; pub use self::{peer_id::PeerId, stores::PeerStore}; use crate::{ local::{ services::{ServiceMap, ServiceProtocol}, Local, }, proto, }; #[derive(Clone)] pub struct Peer { peer_id: PeerId, ip_address: IpAddr, services: ServiceMap, } impl Peer { pub fn new(address: IpAddr, public_key: PublicKey) -> Self { let peer_id = PeerId::from_public_key(public_key); Self { peer_id, ip_address: address, services: ServiceMap::default(), } } pub fn peer_id(&self) -> &PeerId { &self.peer_id } pub fn public_key(&self) -> &PublicKey { self.peer_id.public_key() } pub fn ip_address(&self) -> IpAddr { self.ip_address } pub fn port(&self, service_name: impl AsRef<str>) -> Option<u16> { self.services().get(service_name).map(|s| s.port()) } pub fn services(&self) -> &ServiceMap { &self.services } pub(crate) fn set_services(&mut self, services: ServiceMap) { self.services = services; } pub fn has_service(&self, service_name: impl AsRef<str>) -> bool { self.services.get(service_name).is_some() } pub fn add_service(&mut self, service_name: impl ToString, protocol: ServiceProtocol, port: u16) { self.services.insert(service_name.to_string(), protocol, port); }
ip, services, } = peer; let ip_address: IpAddr = ip.parse().map_err(|_| Error::ParseIpAddr)?; let public_key = PublicKey::try_from_bytes(public_key.try_into().map_err(|_| Error::PublicKeyBytes)?) .map_err(|_| Error::PublicKeyBytes)?; let peer_id = PeerId::from_public_key(public_key); let services: ServiceMap = services.ok_or(Error::MissingServices)?.try_into()?; Ok(Self { peer_id, ip_address, services, }) } } impl From<&Peer> for proto::Peer { fn from(peer: &Peer) -> Self { Self { ip: peer.ip_address().to_string(), public_key: peer.public_key().as_ref().to_vec(), services: Some(peer.services().into()), } } } impl AsRef<Peer> for Peer { fn as_ref(&self) -> &Self { self } } impl AsRef<PeerId> for Peer { fn as_ref(&self) -> &PeerId { self.peer_id() } } #[cfg(feature = "sled")] impl From<Peer> for sled::IVec { fn from(peer: Peer) -> Self { peer.to_bytes().into() } } #[cfg(feature = "sled")] impl From<sled::IVec> for Peer { fn from(bytes: sled::IVec) -> Self { Peer::from_bytes(bytes) } } impl<'de> Deserialize<'de> for Peer { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, { deserializer.deserialize_struct("Peer", &["peer_id", "ip_address", "services"], PeerVisitor {}) } } impl Serialize for Peer { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { let mut this = serializer.serialize_struct("Peer", 3)?; this.serialize_field("peer_id", &self.peer_id)?; this.serialize_field("ip_address", &self.ip_address)?; this.serialize_field("services", &self.services)?; this.end() } } struct PeerVisitor {} impl<'de> Visitor<'de> for PeerVisitor { type Value = Peer; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("'Peer'") } fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error> where A: SeqAccess<'de>, { let peer_id = seq .next_element::<PeerId>()? 
.ok_or_else(|| serde::de::Error::invalid_length(0, &self))?; let ip_address = seq .next_element::<IpAddr>()? .ok_or_else(|| serde::de::Error::invalid_length(1, &self))?; let services = seq .next_element::<ServiceMap>()? .ok_or_else(|| serde::de::Error::invalid_length(2, &self))?; Ok(Peer { peer_id, ip_address, services, }) } } pub(crate) fn is_known( peer_id: &PeerId, local: &Local, active_peers: &ActivePeersList, replacements: &ReplacementPeersList, ) -> bool { peer_id == &local.peer_id() || active_peers.read().contains(peer_id) || replacements.read().contains(peer_id) } pub(crate) fn is_verified(peer_id: &PeerId, active_peers: &ActivePeersList) -> bool { active_peers .read() .find(peer_id) .map_or(false, |e| e.metrics().is_verified()) } pub(crate) fn set_front_and_update(peer_id: &PeerId, active_peers: &ActivePeersList) -> Option<usize> { if let Some(p) = active_peers.write().set_newest_and_get_mut(peer_id) { let metrics = p.metrics_mut(); metrics.set_last_verif_response_timestamp(); let new_count = metrics.increment_verified_count(); Some(new_count) } else { None } } #[derive(Debug, thiserror::Error)] pub enum Error { #[error("parsing peer ip address failed")] ParseIpAddr, #[error("peer services missing")] MissingServices, #[error("invalid service description")] Service(#[from] crate::local::services::Error), #[error("invalid public key bytes")] PublicKeyBytes, #[error("{0}")] ProtobufDecode(#[from] DecodeError), #[error("{0}")] ProtobufEncode(#[from] EncodeError), } #[cfg(test)] mod tests { use crypto::signatures::ed25519::SecretKey as PrivateKey; use super::*; use crate::local::services::AUTOPEERING_SERVICE_NAME; impl Peer { pub(crate) fn new_test_peer(index: u8) -> Self { let mut services = ServiceMap::default(); services.insert(AUTOPEERING_SERVICE_NAME, ServiceProtocol::Udp, 1337); let public_key = PrivateKey::generate().unwrap().public_key(); let peer_id = PeerId::from_public_key(public_key); Self { peer_id, ip_address: format!("127.0.0.{}", 
index).parse().unwrap(), services, } } pub(crate) fn num_services(&self) -> usize { self.services().len() } } }
pub fn service_socketaddr(&self, service_name: impl AsRef<str>) -> Option<SocketAddr> { self.services .get(service_name) .map(|endpoint| SocketAddr::new(self.ip_address, endpoint.port())) } pub fn service_multiaddr(&self, service_name: impl AsRef<str>) -> Option<Multiaddr> { self.services.get(service_name).map(|endpoint| { let mut multiaddr = Multiaddr::empty(); match self.ip_address { IpAddr::V4(ipv4_addr) => multiaddr.push(Protocol::Ip4(ipv4_addr)), IpAddr::V6(ipv6_addr) => multiaddr.push(Protocol::Ip6(ipv6_addr)), }; multiaddr.push(endpoint.to_libp2p_protocol()); multiaddr }) } pub fn from_protobuf(bytes: &[u8]) -> Result<Self, Error> { proto::Peer::decode(bytes)?.try_into() } pub fn to_protobuf(&self) -> Result<BytesMut, EncodeError> { let services: proto::ServiceMap = self.services().into(); let peer = proto::Peer { ip: self.ip_address.to_string(), public_key: self.public_key().as_ref().to_vec(), services: Some(services), }; let mut buf = BytesMut::with_capacity(peer.encoded_len()); peer.encode(&mut buf)?; Ok(buf) } pub(crate) fn into_id(self) -> PeerId { self.peer_id } } #[cfg(any(feature = "rocksdb1", feature = "sled1"))] impl Peer { pub(crate) fn to_bytes(&self) -> Vec<u8> { bincode::serialize(self).expect("serialization error") } pub(crate) fn from_bytes<B: AsRef<[u8]>>(bytes: B) -> Self { bincode::deserialize(bytes.as_ref()).expect("deserialization error") } } impl fmt::Debug for Peer { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Peer") .field("peer_id", &self.peer_id.to_string()) .field("public_key", &bs58::encode(self.public_key().as_ref()).into_string()) .field("ip_address", &self.ip_address) .field("services", &self.services.to_string()) .finish() } } impl TryFrom<proto::Peer> for Peer { type Error = Error; fn try_from(peer: proto::Peer) -> Result<Self, Self::Error> { let proto::Peer { public_key,
random
[ { "content": "pub fn get_network_config_with_port(port: u16) -> NetworkConfig {\n\n let mut config = NetworkConfig::default();\n\n config.replace_port(Protocol::Tcp(port)).unwrap();\n\n config\n\n}\n\n\n", "file_path": "bee-network/bee-gossip/src/tests/common/network_config.rs", "rank": 0, ...
Rust
src/body.rs
ocornoc/gravity
1263ddc73c29a70a0fb254d41c2cb28dbc3eae63
use bevy::prelude::*; use ultraviolet::DVec3; pub struct TransformScale(pub f64); impl Default for TransformScale { fn default() -> Self { TransformScale(1e-8) } } #[derive(Clone, Copy, PartialEq, Debug, Default, PartialOrd)] pub struct Mass(pub f64); #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct Position { pub current: DVec3, pub previous: DVec3, } impl Position { pub const fn new(x: f64, y: f64, z: f64) -> Self { let pos = DVec3::new(x, y, z); Position { current: pos, previous: pos } } } #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct Velocity(pub DVec3); #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct LinAccel(pub DVec3); #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct NewLinAccel(pub DVec3); #[derive(Clone, Copy, PartialEq, Debug)] pub struct Timestep { pub current: f64, pub substeps: usize, current_frame_time: f64, pub paused: bool, } impl Default for Timestep { fn default() -> Self { Self::new(Timestep::DAY_PER_SECOND, true) } } impl Timestep { pub const REALTIME: f64 = 1.0; pub const MINUTE_PER_SECOND: f64 = 60.0 * Timestep::REALTIME; pub const HOUR_PER_SECOND: f64 = 60.0 * Timestep::MINUTE_PER_SECOND; pub const DAY_PER_SECOND: f64 = Timestep::HOUR_PER_SECOND * 24.0; pub fn new(rate: f64, paused: bool) -> Self { Self::new_with_substeps(rate, paused, 1) } pub fn new_with_substeps(rate: f64, paused: bool, substeps: usize) -> Self { assert_ne!(substeps, 0, "must be a positive amount of substeps"); Timestep { current: rate, substeps, current_frame_time: 0.00001, paused, } } #[allow(dead_code)] pub const fn real_time(paused: bool) -> Self { Timestep { current: Self::REALTIME, substeps: 5, current_frame_time: 0.00001, paused, } } } const SPEED_OF_LIGHT: f64 = 299792458.0; const SQUARE_SOL: f64 = SPEED_OF_LIGHT * SPEED_OF_LIGHT; const SQUARE_SOL_RECIP: f64 = 1.0 / SQUARE_SOL; const GRAV: f64 = 6.6743015e-11; pub struct GravityPlugin; impl Plugin for GravityPlugin { fn build(&self, app: &mut AppBuilder) { app 
.add_resource(Timestep::default()) .add_resource(TransformScale::default()) .add_system(eih_integrate_position.system()); } } type Q<'a, 'b> = Query<'b, ( Entity, &'a mut NewLinAccel, &'a Mass, &'a mut Position, &'a mut Velocity, &'a mut LinAccel, &'a mut Transform, )>; fn eih_integrate_position( mut timestep: ResMut<Timestep>, mut time: ResMut<Time>, tfs: Res<TransformScale>, mut q: Q, ) { if timestep.paused { return; } let substeps = timestep.substeps; let time = &mut *time; let timestep = &mut *timestep; let real_rate = timestep.current; timestep.current /= substeps as f64; center_and_set_positions(tfs.0, &mut q); for _ in 0..substeps { calculate_newaccel_eih(&q); integrate_accel(&mut q, time, timestep); center_and_set_positions(tfs.0, &mut q); } timestep.current = real_rate; } fn calculate_newaccel_eih(q: &Q) { for ( id0, mut newaccel, _, pos0, vel0, _, _, ) in unsafe { q.iter_unsafe() } { let [mut sum0, mut sum1, mut sum2, mut sum3] = [DVec3::new(0.0, 0.0, 0.0); 4]; let (pos0, vel0) = (pos0.current, vel0.0); for ( id1, _, &Mass(mass1), pos1, vel1, accel1, _, ) in unsafe { q.iter_unsafe() } { if id0 == id1 { continue; } let (pos1, vel1, accel1) = (pos1.current, vel1.0, accel1.0); let pos0spos1 = pos0 - pos1; let distsq01 = pos0spos1.mag_sq(); let distsq01rec = distsq01.recip(); let dist01 = distsq01.sqrt(); let dist01rec = dist01.recip(); let norm01 = pos0spos1 * dist01rec; let norm10: DVec3 = -norm01; let grav_mass1 = GRAV * mass1; let grm1divdistsq01 = grav_mass1 * distsq01rec; let grm1divdist01 = grav_mass1 * dist01rec; sum0 = norm10.mul_add(DVec3::broadcast(grm1divdistsq01), sum0); sum2 = (vel0 - vel1).mul_add(DVec3::broadcast( grm1divdistsq01 * norm01.dot(vel0.mul_add(DVec3::broadcast(4.0), -3.0 * vel1))), sum2, ); sum3 = accel1.mul_add(DVec3::broadcast(grm1divdist01), sum3); let mut temp_sum1_0: f64 = 0.0; let mut temp_sum1_1: f64 = 0.0; for (id2, _, &Mass(mass2), pos2, _, _, _) in unsafe { q.iter_unsafe() } { let pos2 = pos2.current; if id2 != id0 { 
temp_sum1_0 = mass2.mul_add((pos2 - pos0).mag().recip(), temp_sum1_0); } if id2 != id1 { temp_sum1_1 = mass2.mul_add((pos2 - pos1).mag().recip(), temp_sum1_1); } } sum1 = norm10.mul_add( DVec3::broadcast(grm1divdistsq01 * (-pos0spos1).dot(accel1).mul_add( 0.5, temp_sum1_1.mul_add(-GRAV, temp_sum1_0.mul_add( -4.0 * GRAV, norm01.dot(vel0).powi(2).mul_add(-1.5, vel0.dot(vel1).mul_add( -4.0, vel1.dot(vel1).mul_add(2.0, vel0.dot(vel0)), )) )) )), sum1, ); } newaccel.0 = sum0 + sum1 * SQUARE_SOL_RECIP + sum2 * SQUARE_SOL_RECIP + sum3 * (3.5 * SQUARE_SOL_RECIP); } } fn integrate_accel(q: &mut Q, time: &Time, timestep: &mut Timestep) { let old_time = timestep.current_frame_time; timestep.current_frame_time = time.delta_seconds_f64(); let dt = timestep.current * timestep.current_frame_time; let br0 = DVec3::broadcast(dt); let br1 = DVec3::broadcast(dt * dt); let br2 = (timestep.current * old_time).recip(); for (_, newaccel, _, mut pos, mut vel, mut accel, _) in q.iter_mut() { if pos.current == pos.previous && (vel.0 != DVec3::zero() || accel.0 != DVec3::zero()) { vel.0 = accel.0.mul_add(br0, vel.0); pos.previous = pos.current; pos.current = vel.0.mul_add(br0, pos.current); } else { let diff = pos.current - pos.previous; vel.0 = diff * br2; pos.previous = pos.current; pos.current = vel.0.mul_add(br0, accel.0.mul_add(br1, pos.current)); } accel.0 = newaccel.0; } } fn center_and_set_positions(tfs: f64, q: &mut Q) { #[cold] fn fail() { println!("Warning: failed to center and set positions within {} iters", MAX_INIT); } let mut delta; let mut total_mass = 0.0; let max = q .iter_mut() .max_by(|l, r| l.2.partial_cmp(&r.2).unwrap_or(std::cmp::Ordering::Equal)); if let Some((_, _, _, base_pos, ..)) = max { delta = base_pos.current; } else { return; } for (_, _, &Mass(mass), mut pos, .., mut tf) in q.iter_mut() { pos.previous -= delta; pos.current -= delta; tf.translation = Vec3::new( (pos.current.x * tfs) as f32, (pos.current.y * tfs) as f32, (pos.current.z * tfs) as f32, ); 
total_mass += mass; } const MAX_INIT: u32 = 500; let mut iters_left = MAX_INIT; while iters_left > 0 { for (_, _, &Mass(mass), pos, ..) in q.iter_mut() { delta = pos.current.mul_add(DVec3::broadcast(mass), delta); } delta /= total_mass; if delta.mag_sq() >= 1e-3 { for (_, _, _, mut pos, .., mut tf) in q.iter_mut() { pos.previous -= delta; pos.current -= delta; tf.translation = Vec3::new( (pos.current.x * tfs) as f32, (pos.current.y * tfs) as f32, (pos.current.z * tfs) as f32, ); } iters_left -= 1; } else { return; } } fail() }
use bevy::prelude::*; use ultraviolet::DVec3; pub struct TransformScale(pub f64); impl Default for TransformScale { fn default() -> Self { TransformScale(1e-8) } } #[derive(Clone, Copy, PartialEq, Debug, Default, PartialOrd)] pub struct Mass(pub f64); #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct Position { pub current: DVec3, pub previous: DVec3, } impl Position { pub const fn new(x: f64, y: f64, z: f64) -> Self { let pos = DVec3::new(x, y, z); Position { current: pos, previous: pos } } } #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct Velocity(pub DVec3); #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct LinAccel(pub DVec3); #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct NewLinAccel(pub DVec3); #[derive(Clone, Copy, PartialEq, Debug)] pub struct Timestep { pub current: f64, pub substeps: usize, current_frame_time: f64, pub paused: bool, } impl Default for Timestep { fn default() -> Self { Self::new(Timestep::DAY_PER_SECOND, true) } } impl Timestep { pub const REALTIME: f64 = 1.0; pub const MINUTE_PER_SECOND: f64 = 60.0 * Timestep::REALTIME; pub const HOUR_PER_SECOND: f64 = 60.0 * Timestep::MINUTE_PER_SECOND; pub const DAY_PER_SECOND: f64 = Timestep::HOUR_PER_SECOND * 24.0; pub fn new(rate: f64, paused: bool) -> Self { Self::new_with_substeps(rate, paused, 1) } pub fn new_with_substeps(rate: f64, paused: bool, substep
, paused, } } #[allow(dead_code)] pub const fn real_time(paused: bool) -> Self { Timestep { current: Self::REALTIME, substeps: 5, current_frame_time: 0.00001, paused, } } } const SPEED_OF_LIGHT: f64 = 299792458.0; const SQUARE_SOL: f64 = SPEED_OF_LIGHT * SPEED_OF_LIGHT; const SQUARE_SOL_RECIP: f64 = 1.0 / SQUARE_SOL; const GRAV: f64 = 6.6743015e-11; pub struct GravityPlugin; impl Plugin for GravityPlugin { fn build(&self, app: &mut AppBuilder) { app .add_resource(Timestep::default()) .add_resource(TransformScale::default()) .add_system(eih_integrate_position.system()); } } type Q<'a, 'b> = Query<'b, ( Entity, &'a mut NewLinAccel, &'a Mass, &'a mut Position, &'a mut Velocity, &'a mut LinAccel, &'a mut Transform, )>; fn eih_integrate_position( mut timestep: ResMut<Timestep>, mut time: ResMut<Time>, tfs: Res<TransformScale>, mut q: Q, ) { if timestep.paused { return; } let substeps = timestep.substeps; let time = &mut *time; let timestep = &mut *timestep; let real_rate = timestep.current; timestep.current /= substeps as f64; center_and_set_positions(tfs.0, &mut q); for _ in 0..substeps { calculate_newaccel_eih(&q); integrate_accel(&mut q, time, timestep); center_and_set_positions(tfs.0, &mut q); } timestep.current = real_rate; } fn calculate_newaccel_eih(q: &Q) { for ( id0, mut newaccel, _, pos0, vel0, _, _, ) in unsafe { q.iter_unsafe() } { let [mut sum0, mut sum1, mut sum2, mut sum3] = [DVec3::new(0.0, 0.0, 0.0); 4]; let (pos0, vel0) = (pos0.current, vel0.0); for ( id1, _, &Mass(mass1), pos1, vel1, accel1, _, ) in unsafe { q.iter_unsafe() } { if id0 == id1 { continue; } let (pos1, vel1, accel1) = (pos1.current, vel1.0, accel1.0); let pos0spos1 = pos0 - pos1; let distsq01 = pos0spos1.mag_sq(); let distsq01rec = distsq01.recip(); let dist01 = distsq01.sqrt(); let dist01rec = dist01.recip(); let norm01 = pos0spos1 * dist01rec; let norm10: DVec3 = -norm01; let grav_mass1 = GRAV * mass1; let grm1divdistsq01 = grav_mass1 * distsq01rec; let grm1divdist01 = grav_mass1 * 
dist01rec; sum0 = norm10.mul_add(DVec3::broadcast(grm1divdistsq01), sum0); sum2 = (vel0 - vel1).mul_add(DVec3::broadcast( grm1divdistsq01 * norm01.dot(vel0.mul_add(DVec3::broadcast(4.0), -3.0 * vel1))), sum2, ); sum3 = accel1.mul_add(DVec3::broadcast(grm1divdist01), sum3); let mut temp_sum1_0: f64 = 0.0; let mut temp_sum1_1: f64 = 0.0; for (id2, _, &Mass(mass2), pos2, _, _, _) in unsafe { q.iter_unsafe() } { let pos2 = pos2.current; if id2 != id0 { temp_sum1_0 = mass2.mul_add((pos2 - pos0).mag().recip(), temp_sum1_0); } if id2 != id1 { temp_sum1_1 = mass2.mul_add((pos2 - pos1).mag().recip(), temp_sum1_1); } } sum1 = norm10.mul_add( DVec3::broadcast(grm1divdistsq01 * (-pos0spos1).dot(accel1).mul_add( 0.5, temp_sum1_1.mul_add(-GRAV, temp_sum1_0.mul_add( -4.0 * GRAV, norm01.dot(vel0).powi(2).mul_add(-1.5, vel0.dot(vel1).mul_add( -4.0, vel1.dot(vel1).mul_add(2.0, vel0.dot(vel0)), )) )) )), sum1, ); } newaccel.0 = sum0 + sum1 * SQUARE_SOL_RECIP + sum2 * SQUARE_SOL_RECIP + sum3 * (3.5 * SQUARE_SOL_RECIP); } } fn integrate_accel(q: &mut Q, time: &Time, timestep: &mut Timestep) { let old_time = timestep.current_frame_time; timestep.current_frame_time = time.delta_seconds_f64(); let dt = timestep.current * timestep.current_frame_time; let br0 = DVec3::broadcast(dt); let br1 = DVec3::broadcast(dt * dt); let br2 = (timestep.current * old_time).recip(); for (_, newaccel, _, mut pos, mut vel, mut accel, _) in q.iter_mut() { if pos.current == pos.previous && (vel.0 != DVec3::zero() || accel.0 != DVec3::zero()) { vel.0 = accel.0.mul_add(br0, vel.0); pos.previous = pos.current; pos.current = vel.0.mul_add(br0, pos.current); } else { let diff = pos.current - pos.previous; vel.0 = diff * br2; pos.previous = pos.current; pos.current = vel.0.mul_add(br0, accel.0.mul_add(br1, pos.current)); } accel.0 = newaccel.0; } } fn center_and_set_positions(tfs: f64, q: &mut Q) { #[cold] fn fail() { println!("Warning: failed to center and set positions within {} iters", MAX_INIT); } let mut delta; 
let mut total_mass = 0.0; let max = q .iter_mut() .max_by(|l, r| l.2.partial_cmp(&r.2).unwrap_or(std::cmp::Ordering::Equal)); if let Some((_, _, _, base_pos, ..)) = max { delta = base_pos.current; } else { return; } for (_, _, &Mass(mass), mut pos, .., mut tf) in q.iter_mut() { pos.previous -= delta; pos.current -= delta; tf.translation = Vec3::new( (pos.current.x * tfs) as f32, (pos.current.y * tfs) as f32, (pos.current.z * tfs) as f32, ); total_mass += mass; } const MAX_INIT: u32 = 500; let mut iters_left = MAX_INIT; while iters_left > 0 { for (_, _, &Mass(mass), pos, ..) in q.iter_mut() { delta = pos.current.mul_add(DVec3::broadcast(mass), delta); } delta /= total_mass; if delta.mag_sq() >= 1e-3 { for (_, _, _, mut pos, .., mut tf) in q.iter_mut() { pos.previous -= delta; pos.current -= delta; tf.translation = Vec3::new( (pos.current.x * tfs) as f32, (pos.current.y * tfs) as f32, (pos.current.z * tfs) as f32, ); } iters_left -= 1; } else { return; } } fail() }
s: usize) -> Self { assert_ne!(substeps, 0, "must be a positive amount of substeps"); Timestep { current: rate, substeps, current_frame_time: 0.00001
function_block-random_span
[ { "content": "/// Get a new unique color, specified by RGB components.\n\npub fn new_color() -> [f32; 3] {\n\n COLORS[COUNTER.fetch_add(1, Ordering::Relaxed) % COLORS.len()]\n\n}\n", "file_path": "src/scene/newcolor.rs", "rank": 1, "score": 51750.93467058338 }, { "content": "fn unpause_af...
Rust
src/ir/operator.rs
dan-zheng/telamon
de463284fdcea70ce29cf43a9c62f3aa2da14276
use self::Operator::*; use crate::ir::{self, AccessPattern, LoweringMap, Operand, Type}; use fxhash::FxHashSet; use serde::{Deserialize, Serialize}; use std::borrow::Cow; use std::{self, fmt}; #[derive(Clone, Copy, PartialEq, Eq, Debug, Serialize, Deserialize)] #[repr(C)] pub enum Rounding { Exact, Nearest, Zero, Positive, Negative, } impl std::fmt::Display for Rounding { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let name = match self { Rounding::Exact => "exact", Rounding::Nearest => "toward nearest", Rounding::Zero => "toward zero", Rounding::Positive => "toward +inf", Rounding::Negative => "toward -inf", }; write!(f, "{}", name) } } impl Rounding { fn check(self, t: ir::Type) -> Result<(), ir::TypeError> { if t.is_float() ^ (self == Rounding::Exact) { Ok(()) } else { Err(ir::TypeError::InvalidRounding { rounding: self, t }) } } } #[derive(Clone, Copy, Debug, Serialize, Deserialize)] #[repr(C)] pub enum BinOp { Add, Sub, Div, And, Or, Lt, Leq, Equals, Max, } impl fmt::Display for BinOp { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.write_str(self.name()) } } impl BinOp { fn name(self) -> &'static str { match self { BinOp::Add => "add", BinOp::Sub => "sub", BinOp::Div => "div", BinOp::And => "and", BinOp::Or => "or", BinOp::Lt => "lt", BinOp::Leq => "leq", BinOp::Equals => "equals", BinOp::Max => "max", } } pub fn t(self, operand_type: ir::Type) -> ir::Type { match self { BinOp::Lt | BinOp::Leq | BinOp::Equals => ir::Type::I(1), _ => operand_type, } } fn requires_rounding(self) -> bool { match self { BinOp::Lt | BinOp::Leq | BinOp::Equals | BinOp::Max => false, _ => true, } } } #[derive(Clone, Copy, Debug, Serialize, Deserialize)] #[repr(C)] pub enum UnaryOp { Mov, Cast(ir::Type), Exp(ir::Type), } impl fmt::Display for UnaryOp { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match self { UnaryOp::Exp(..) 
=> fmt.write_str("exp"), UnaryOp::Mov => fmt.write_str("mov"), UnaryOp::Cast(t) => write!(fmt, "cast({})", t), } } } impl UnaryOp { fn t(self, op_type: ir::Type) -> ir::Type { match self { UnaryOp::Mov | UnaryOp::Exp(..) => op_type, UnaryOp::Cast(t) => t, } } } #[derive(Clone, Debug, Serialize, Deserialize)] pub enum Operator<L = LoweringMap> { BinOp(BinOp, Operand<L>, Operand<L>, Rounding), UnaryOp(UnaryOp, Operand<L>), Mul(Operand<L>, Operand<L>, Rounding, Type), Mad(Operand<L>, Operand<L>, Operand<L>, Rounding), Ld(Type, Operand<L>, AccessPattern), St(Operand<L>, Operand<L>, bool, AccessPattern), TmpLd(Type, ir::MemId), TmpSt(Operand<L>, ir::MemId), } impl<L> Operator<L> { pub fn check( &self, iter_dims: &FxHashSet<ir::DimId>, fun: &ir::Function<L>, ) -> Result<(), ir::Error> { self.t() .map(|t| fun.device().check_type(t)) .unwrap_or(Ok(()))?; for operand in self.operands() { fun.device().check_type(operand.t())?; if let Some(dim_map) = operand.mapped_dims() { for &(lhs, rhs) in dim_map { if fun.find_mapping(lhs, rhs).is_none() { Err(ir::Error::MissingDimMapping { lhs, rhs })?; } } } } match *self { BinOp(operator, ref lhs, ref rhs, rounding) => { if operator.requires_rounding() { rounding.check(lhs.t())?; } else if rounding != Rounding::Exact { Err(ir::TypeError::InvalidRounding { rounding, t: lhs.t(), })?; } ir::TypeError::check_equals(lhs.t(), rhs.t())?; } Mul(ref lhs, ref rhs, rounding, res_type) => { rounding.check(lhs.t())?; ir::TypeError::check_equals(lhs.t(), rhs.t())?; match (lhs.t(), res_type) { (x, z) if x == z => (), (Type::I(32), Type::I(64)) | (Type::I(32), Type::PtrTo(_)) => (), (_, t) => Err(ir::TypeError::UnexpectedType { t })?, } } Mad(ref mul_lhs, ref mul_rhs, ref add_rhs, rounding) => { rounding.check(mul_lhs.t())?; ir::TypeError::check_equals(mul_lhs.t(), mul_rhs.t())?; match (mul_lhs.t(), add_rhs.t()) { (ref x, ref z) if x == z => (), (Type::I(32), Type::I(64)) | (Type::I(32), Type::PtrTo(_)) => (), (_, t) => 
Err(ir::TypeError::UnexpectedType { t })?, } } Ld(_, ref addr, ref pattern) => { pattern.check(iter_dims)?; let pointer_type = pattern.pointer_type(fun.device()); ir::TypeError::check_equals(addr.t(), pointer_type)?; } St(ref addr, _, _, ref pattern) => { pattern.check(iter_dims)?; let pointer_type = pattern.pointer_type(fun.device()); ir::TypeError::check_equals(addr.t(), pointer_type)?; } TmpLd(..) | UnaryOp(..) | TmpSt(..) => (), } Ok(()) } pub fn t(&self) -> Option<Type> { match self { Mad(_, _, op, _) => Some(op.t()), Ld(t, ..) | TmpLd(t, _) | Mul(.., t) => Some(*t), BinOp(operator, lhs, ..) => Some(operator.t(lhs.t())), UnaryOp(operator, operand) => Some(operator.t(operand.t())), St(..) | TmpSt(..) => None, } } pub fn operands(&self) -> Vec<&Operand<L>> { match self { BinOp(_, lhs, rhs, _) | Mul(lhs, rhs, _, _) | St(lhs, rhs, _, _) => { vec![lhs, rhs] } Mad(mul_lhs, mul_rhs, add_rhs, _) => vec![mul_lhs, mul_rhs, add_rhs], UnaryOp(_, op) | Ld(_, op, _) | TmpSt(op, _) => vec![op], TmpLd(..) => vec![], } } pub fn operands_mut<'b>(&'b mut self) -> Vec<&'b mut Operand<L>> { match self { BinOp(_, lhs, rhs, _) | Mul(lhs, rhs, _, _) | St(lhs, rhs, _, _) => { vec![lhs, rhs] } Mad(mul_lhs, mul_rhs, add_rhs, _) => vec![mul_lhs, mul_rhs, add_rhs], UnaryOp(_, op, ..) | Ld(_, op, ..) | TmpSt(op, _) => vec![op], TmpLd(..) => vec![], } } pub fn has_side_effects(&self) -> bool { match self { St(_, _, b, _) => *b, BinOp(..) | UnaryOp(..) | Mul(..) | Mad(..) | Ld(..) | TmpLd(..) | TmpSt(..) => false, } } pub fn is_mem_access(&self) -> bool { match self { St(..) | Ld(..) | TmpSt(..) | TmpLd(..) 
=> true, _ => false, } } pub fn merge_dims(&mut self, lhs: ir::DimId, rhs: ir::DimId) { self.operands_mut() .iter_mut() .for_each(|x| x.merge_dims(lhs, rhs)); } pub fn mem_access_pattern(&self) -> Option<Cow<AccessPattern>> { match *self { Ld(_, _, ref pattern) | St(_, _, _, ref pattern) => { Some(Cow::Borrowed(pattern)) } TmpLd(_, mem_id) | TmpSt(_, mem_id) => { Some(Cow::Owned(AccessPattern::Unknown(Some(mem_id)))) } _ => None, } } pub fn mem_used(&self) -> Option<ir::MemId> { self.mem_access_pattern().and_then(|p| p.mem_block()) } pub fn map_operands<T, F>(self, mut f: F) -> Operator<T> where F: FnMut(Operand<L>) -> Operand<T>, { match self { BinOp(op, oper1, oper2, rounding) => { let oper1 = f(oper1); let oper2 = f(oper2); BinOp(op, oper1, oper2, rounding) } UnaryOp(operator, operand) => UnaryOp(operator, f(operand)), Mul(oper1, oper2, rounding, t) => { let oper1 = f(oper1); let oper2 = f(oper2); Mul(oper1, oper2, rounding, t) } Mad(oper1, oper2, oper3, rounding) => { let oper1 = f(oper1); let oper2 = f(oper2); let oper3 = f(oper3); Mad(oper1, oper2, oper3, rounding) } Ld(t, oper1, ap) => { let oper1 = f(oper1); Ld(t, oper1, ap) } St(oper1, oper2, side_effects, ap) => { let oper1 = f(oper1); let oper2 = f(oper2); St(oper1, oper2, side_effects, ap) } TmpLd(t, id) => TmpLd(t, id), TmpSt(oper1, id) => { let oper1 = f(oper1); TmpSt(oper1, id) } } } } impl<L> ir::IrDisplay<L> for Operator<L> { fn fmt(&self, fmt: &mut fmt::Formatter, function: &ir::Function<L>) -> fmt::Result { match self { BinOp(op, lhs, rhs, _rnd) => write!( fmt, "{}({}, {})", op, lhs.display(function), rhs.display(function) ), UnaryOp(op, arg) => write!(fmt, "{}({})", op, arg.display(function)), Mul(lhs, rhs, _rnd, _t) => write!( fmt, "mul({}, {})", lhs.display(function), rhs.display(function) ), Mad(arg0, arg1, arg2, _rnd) => write!( fmt, "mad({}, {}, {})", arg0.display(function), arg1.display(function), arg2.display(function) ), Ld(_t, arg, _ap) => write!(fmt, "load({})", arg.display(function)), 
St(dst, src, _side_effects, _ap) => write!( fmt, "store({}, {})", dst.display(function), src.display(function) ), TmpLd(_t, mem) => write!(fmt, "load({})", mem), TmpSt(src, mem) => write!(fmt, "store({}, {})", mem, src.display(function)), } } } impl Operator<()> { pub fn freeze(self, cnt: &mut ir::Counter) -> Operator { self.map_operands(|oper| oper.freeze(cnt)) } } impl<L> std::fmt::Display for Operator<L> { fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { match self { BinOp(op, lhs, rhs, rnd) => write!(fmt, "{}[{}]({}, {})", op, rnd, lhs, rhs), UnaryOp(op, arg) => write!(fmt, "{}({})", op, arg), Mul(lhs, rhs, rnd, t) => write!(fmt, "Mul<{}>[{}]({}, {})", t, rnd, lhs, rhs), Mad(arg0, arg1, arg2, rnd) => { write!(fmt, "Mad[{}]({}, {}, {})", rnd, arg0, arg1, arg2) } Ld(_t, arg, _ap) => write!(fmt, "Load({})", arg), St(dst, src, _side_effects, _ap) => write!(fmt, "Store({}, {})", dst, src), TmpLd(_t, mem) => write!(fmt, "TempLoad({})", mem), TmpSt(src, mem) => write!(fmt, "TempStore({}, {})", mem, src), } } }
use self::Operator::*; use crate::ir::{self, AccessPattern, LoweringMap, Operand, Type}; use fxhash::FxHashSet; use serde::{Deserialize, Serialize}; use std::borrow::Cow; use std::{self, fmt}; #[derive(Clone, Copy, PartialEq, Eq, Debug, Serialize, Deserialize)] #[repr(C)] pub enum Rounding { Exact, Nearest, Zero, Positive, Negative, } impl std::fmt::Display for Rounding { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let name = match self { Rounding::Exact => "exact", Rounding::Nearest => "toward nearest", Rounding::Zero => "toward zero", Rounding::Positive => "toward +inf", Rounding::Negative => "toward -inf", }; write!(f, "{}", name) } } impl Rounding { fn check(self, t: ir::Type) -> Result<(), ir::TypeError> { if t.is_float() ^ (self == Rounding::Exact) { Ok(()) } else { Err(ir::TypeError::InvalidRounding { rounding: self, t }) } } } #[derive(Clone, Copy, Debug, Serialize, Deserialize)] #[repr(C)] pub enum BinOp { Add, Sub, Div, And, Or, Lt, Leq, Equals, Max, } impl fmt::Display for BinOp { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.write_str(self.name()) } } impl BinOp { fn name(self) -> &'static str { match self { BinOp::Add => "add", Bi
pub fn t(self, operand_type: ir::Type) -> ir::Type { match self { BinOp::Lt | BinOp::Leq | BinOp::Equals => ir::Type::I(1), _ => operand_type, } } fn requires_rounding(self) -> bool { match self { BinOp::Lt | BinOp::Leq | BinOp::Equals | BinOp::Max => false, _ => true, } } } #[derive(Clone, Copy, Debug, Serialize, Deserialize)] #[repr(C)] pub enum UnaryOp { Mov, Cast(ir::Type), Exp(ir::Type), } impl fmt::Display for UnaryOp { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match self { UnaryOp::Exp(..) => fmt.write_str("exp"), UnaryOp::Mov => fmt.write_str("mov"), UnaryOp::Cast(t) => write!(fmt, "cast({})", t), } } } impl UnaryOp { fn t(self, op_type: ir::Type) -> ir::Type { match self { UnaryOp::Mov | UnaryOp::Exp(..) => op_type, UnaryOp::Cast(t) => t, } } } #[derive(Clone, Debug, Serialize, Deserialize)] pub enum Operator<L = LoweringMap> { BinOp(BinOp, Operand<L>, Operand<L>, Rounding), UnaryOp(UnaryOp, Operand<L>), Mul(Operand<L>, Operand<L>, Rounding, Type), Mad(Operand<L>, Operand<L>, Operand<L>, Rounding), Ld(Type, Operand<L>, AccessPattern), St(Operand<L>, Operand<L>, bool, AccessPattern), TmpLd(Type, ir::MemId), TmpSt(Operand<L>, ir::MemId), } impl<L> Operator<L> { pub fn check( &self, iter_dims: &FxHashSet<ir::DimId>, fun: &ir::Function<L>, ) -> Result<(), ir::Error> { self.t() .map(|t| fun.device().check_type(t)) .unwrap_or(Ok(()))?; for operand in self.operands() { fun.device().check_type(operand.t())?; if let Some(dim_map) = operand.mapped_dims() { for &(lhs, rhs) in dim_map { if fun.find_mapping(lhs, rhs).is_none() { Err(ir::Error::MissingDimMapping { lhs, rhs })?; } } } } match *self { BinOp(operator, ref lhs, ref rhs, rounding) => { if operator.requires_rounding() { rounding.check(lhs.t())?; } else if rounding != Rounding::Exact { Err(ir::TypeError::InvalidRounding { rounding, t: lhs.t(), })?; } ir::TypeError::check_equals(lhs.t(), rhs.t())?; } Mul(ref lhs, ref rhs, rounding, res_type) => { rounding.check(lhs.t())?; 
ir::TypeError::check_equals(lhs.t(), rhs.t())?; match (lhs.t(), res_type) { (x, z) if x == z => (), (Type::I(32), Type::I(64)) | (Type::I(32), Type::PtrTo(_)) => (), (_, t) => Err(ir::TypeError::UnexpectedType { t })?, } } Mad(ref mul_lhs, ref mul_rhs, ref add_rhs, rounding) => { rounding.check(mul_lhs.t())?; ir::TypeError::check_equals(mul_lhs.t(), mul_rhs.t())?; match (mul_lhs.t(), add_rhs.t()) { (ref x, ref z) if x == z => (), (Type::I(32), Type::I(64)) | (Type::I(32), Type::PtrTo(_)) => (), (_, t) => Err(ir::TypeError::UnexpectedType { t })?, } } Ld(_, ref addr, ref pattern) => { pattern.check(iter_dims)?; let pointer_type = pattern.pointer_type(fun.device()); ir::TypeError::check_equals(addr.t(), pointer_type)?; } St(ref addr, _, _, ref pattern) => { pattern.check(iter_dims)?; let pointer_type = pattern.pointer_type(fun.device()); ir::TypeError::check_equals(addr.t(), pointer_type)?; } TmpLd(..) | UnaryOp(..) | TmpSt(..) => (), } Ok(()) } pub fn t(&self) -> Option<Type> { match self { Mad(_, _, op, _) => Some(op.t()), Ld(t, ..) | TmpLd(t, _) | Mul(.., t) => Some(*t), BinOp(operator, lhs, ..) => Some(operator.t(lhs.t())), UnaryOp(operator, operand) => Some(operator.t(operand.t())), St(..) | TmpSt(..) => None, } } pub fn operands(&self) -> Vec<&Operand<L>> { match self { BinOp(_, lhs, rhs, _) | Mul(lhs, rhs, _, _) | St(lhs, rhs, _, _) => { vec![lhs, rhs] } Mad(mul_lhs, mul_rhs, add_rhs, _) => vec![mul_lhs, mul_rhs, add_rhs], UnaryOp(_, op) | Ld(_, op, _) | TmpSt(op, _) => vec![op], TmpLd(..) => vec![], } } pub fn operands_mut<'b>(&'b mut self) -> Vec<&'b mut Operand<L>> { match self { BinOp(_, lhs, rhs, _) | Mul(lhs, rhs, _, _) | St(lhs, rhs, _, _) => { vec![lhs, rhs] } Mad(mul_lhs, mul_rhs, add_rhs, _) => vec![mul_lhs, mul_rhs, add_rhs], UnaryOp(_, op, ..) | Ld(_, op, ..) | TmpSt(op, _) => vec![op], TmpLd(..) => vec![], } } pub fn has_side_effects(&self) -> bool { match self { St(_, _, b, _) => *b, BinOp(..) | UnaryOp(..) | Mul(..) | Mad(..) | Ld(..) 
| TmpLd(..) | TmpSt(..) => false, } } pub fn is_mem_access(&self) -> bool { match self { St(..) | Ld(..) | TmpSt(..) | TmpLd(..) => true, _ => false, } } pub fn merge_dims(&mut self, lhs: ir::DimId, rhs: ir::DimId) { self.operands_mut() .iter_mut() .for_each(|x| x.merge_dims(lhs, rhs)); } pub fn mem_access_pattern(&self) -> Option<Cow<AccessPattern>> { match *self { Ld(_, _, ref pattern) | St(_, _, _, ref pattern) => { Some(Cow::Borrowed(pattern)) } TmpLd(_, mem_id) | TmpSt(_, mem_id) => { Some(Cow::Owned(AccessPattern::Unknown(Some(mem_id)))) } _ => None, } } pub fn mem_used(&self) -> Option<ir::MemId> { self.mem_access_pattern().and_then(|p| p.mem_block()) } pub fn map_operands<T, F>(self, mut f: F) -> Operator<T> where F: FnMut(Operand<L>) -> Operand<T>, { match self { BinOp(op, oper1, oper2, rounding) => { let oper1 = f(oper1); let oper2 = f(oper2); BinOp(op, oper1, oper2, rounding) } UnaryOp(operator, operand) => UnaryOp(operator, f(operand)), Mul(oper1, oper2, rounding, t) => { let oper1 = f(oper1); let oper2 = f(oper2); Mul(oper1, oper2, rounding, t) } Mad(oper1, oper2, oper3, rounding) => { let oper1 = f(oper1); let oper2 = f(oper2); let oper3 = f(oper3); Mad(oper1, oper2, oper3, rounding) } Ld(t, oper1, ap) => { let oper1 = f(oper1); Ld(t, oper1, ap) } St(oper1, oper2, side_effects, ap) => { let oper1 = f(oper1); let oper2 = f(oper2); St(oper1, oper2, side_effects, ap) } TmpLd(t, id) => TmpLd(t, id), TmpSt(oper1, id) => { let oper1 = f(oper1); TmpSt(oper1, id) } } } } impl<L> ir::IrDisplay<L> for Operator<L> { fn fmt(&self, fmt: &mut fmt::Formatter, function: &ir::Function<L>) -> fmt::Result { match self { BinOp(op, lhs, rhs, _rnd) => write!( fmt, "{}({}, {})", op, lhs.display(function), rhs.display(function) ), UnaryOp(op, arg) => write!(fmt, "{}({})", op, arg.display(function)), Mul(lhs, rhs, _rnd, _t) => write!( fmt, "mul({}, {})", lhs.display(function), rhs.display(function) ), Mad(arg0, arg1, arg2, _rnd) => write!( fmt, "mad({}, {}, {})", 
arg0.display(function), arg1.display(function), arg2.display(function) ), Ld(_t, arg, _ap) => write!(fmt, "load({})", arg.display(function)), St(dst, src, _side_effects, _ap) => write!( fmt, "store({}, {})", dst.display(function), src.display(function) ), TmpLd(_t, mem) => write!(fmt, "load({})", mem), TmpSt(src, mem) => write!(fmt, "store({}, {})", mem, src.display(function)), } } } impl Operator<()> { pub fn freeze(self, cnt: &mut ir::Counter) -> Operator { self.map_operands(|oper| oper.freeze(cnt)) } } impl<L> std::fmt::Display for Operator<L> { fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { match self { BinOp(op, lhs, rhs, rnd) => write!(fmt, "{}[{}]({}, {})", op, rnd, lhs, rhs), UnaryOp(op, arg) => write!(fmt, "{}({})", op, arg), Mul(lhs, rhs, rnd, t) => write!(fmt, "Mul<{}>[{}]({}, {})", t, rnd, lhs, rhs), Mad(arg0, arg1, arg2, rnd) => { write!(fmt, "Mad[{}]({}, {}, {})", rnd, arg0, arg1, arg2) } Ld(_t, arg, _ap) => write!(fmt, "Load({})", arg), St(dst, src, _side_effects, _ap) => write!(fmt, "Store({}, {})", dst, src), TmpLd(_t, mem) => write!(fmt, "TempLoad({})", mem), TmpSt(src, mem) => write!(fmt, "TempStore({}, {})", mem, src), } } }
nOp::Sub => "sub", BinOp::Div => "div", BinOp::And => "and", BinOp::Or => "or", BinOp::Lt => "lt", BinOp::Leq => "leq", BinOp::Equals => "equals", BinOp::Max => "max", } }
function_block-function_prefixed
[ { "content": "#[allow(unused_mut)]\n\npub fn set_{{name}}(&mut self{{>args_decl}}, mut value: {{>value_type.name value_type}}) {\n\n {{#if is_symmetric~}}\n\n if {{arguments.[0].[0]}} > {{arguments.[1].[0]}} {\n\n std::mem::swap(&mut {{arguments.[0].[0]}}, &mut {{arguments.[1].[0]}});\n\n ...
Rust
src/cluster/session.rs
a1ph/cdrs-tokio
99a536e705ff1fd95be36d57d0832ddc21d7478f
use async_trait::async_trait; use bb8; use fnv::FnvHashMap; use std::iter::Iterator; use std::sync::Arc; use tokio::{io::AsyncWriteExt, sync::Mutex}; #[cfg(feature = "unstable-dynamic-cluster")] use crate::cluster::NodeTcpConfig; #[cfg(feature = "rust-tls")] use crate::cluster::{new_rustls_pool, ClusterRustlsConfig, RustlsConnectionPool}; use crate::cluster::{new_tcp_pool, startup, CDRSSession, ClusterTcpConfig, ConnectionPool, GetCompressor, GetConnection, TcpConnectionPool, ResponseCache}; use crate::error; use crate::load_balancing::LoadBalancingStrategy; use crate::transport::{CDRSTransport, TransportTcp}; use crate::authenticators::Authenticator; use crate::cluster::SessionPager; use crate::compression::Compression; use crate::events::{new_listener, EventStream, EventStreamNonBlocking, Listener}; use crate::frame::events::{ServerEvent, SimpleServerEvent, StatusChange, StatusChangeType}; use crate::frame::parser::parse_frame; use crate::frame::{Frame, IntoBytes, StreamId}; use crate::query::{BatchExecutor, ExecExecutor, PrepareExecutor, QueryExecutor}; #[derive(Debug)] pub struct Session<LB> { load_balancing: Mutex<LB>, event_stream: Option<Mutex<EventStreamNonBlocking>>, responses: Mutex<FnvHashMap<StreamId, Frame>>, #[allow(dead_code)] pub compression: Compression, } impl<'a, LB> GetCompressor<'a> for Session<LB> { fn get_compressor(&self) -> Compression { self.compression.clone() } } impl<'a, LB: Sized> Session<LB> { pub fn paged< T: CDRSTransport + Unpin + 'static, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error>, >( &'a mut self, page_size: i32, ) -> SessionPager<'a, M, Session<LB>, T> where Session<LB>: CDRSSession<'static, T, M>, { return SessionPager::new(self, page_size); } } #[async_trait] impl< T: CDRSTransport + Send + Sync + 'static, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, > GetConnection<T, M> for Session<LB> { async fn 
get_connection(&self) -> Option<Arc<ConnectionPool<M>>> { if cfg!(feature = "unstable-dynamic-cluster") { if let Some(ref event_stream_mx) = self.event_stream { if let Ok(ref mut event_stream) = event_stream_mx.try_lock() { loop { let next_event = event_stream.next(); match next_event { None => break, Some(ServerEvent::StatusChange(StatusChange { addr, change_type: StatusChangeType::Down, })) => { self.load_balancing .lock() .await .remove_node(|pool| pool.get_addr() == addr.addr); } Some(_) => continue, } } } } } self.load_balancing .lock() .await .next() } } #[async_trait] impl< 'a, T: CDRSTransport + Unpin + 'static, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, > QueryExecutor<T, M> for Session<LB> { } #[async_trait] impl< 'a, T: CDRSTransport + Unpin + 'static, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, > PrepareExecutor<T, M> for Session<LB> { } #[async_trait] impl< 'a, T: CDRSTransport + Unpin + 'static, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, > ExecExecutor<T, M> for Session<LB> { } #[async_trait] impl< 'a, T: CDRSTransport + Unpin + 'static, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, > BatchExecutor<T, M> for Session<LB> { } impl< 'a, T: CDRSTransport + Unpin + 'static, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, > CDRSSession<'a, T, M> for Session<LB> { } #[async_trait] impl <LB> ResponseCache for Session<LB> where LB: Send { async fn match_or_cache_response(&self, stream_id: i16, frame: Frame) -> Option<Frame> { if frame.stream == stream_id { 
return Some(frame); } let mut responses = self.responses.lock().await; responses.insert(frame.stream, frame); responses.remove(&stream_id) } } #[cfg(feature = "rust-tls")] async fn connect_tls_static<A, LB>( node_configs: &ClusterRustlsConfig<A>, mut load_balancing: LB, compression: Compression, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { let mut nodes: Vec<Arc<RustlsConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len()); for node_config in &node_configs.0 { let node_connection_pool = new_rustls_pool(node_config.clone()).await?; nodes.push(Arc::new(node_connection_pool)); } load_balancing.init(nodes); Ok(Session { load_balancing: Mutex::new(load_balancing), event_stream: None, responses: Mutex::new(FnvHashMap::default()), compression, }) } #[cfg(all(feature = "rust-tls", feature = "unstable-dynamic-cluster"))] async fn connect_tls_dynamic<A, LB>( node_configs: &ClusterRustlsConfig<A>, mut load_balancing: LB, compression: Compression, event_src: NodeTcpConfig<'_, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { let mut nodes: Vec<Arc<RustlsConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len()); for node_config in &node_configs.0 { let node_connection_pool = new_rustls_pool(node_config.clone()).await?; nodes.push(Arc::new(node_connection_pool)); } load_balancing.init(nodes); let mut session = Session { load_balancing: Mutex::new(load_balancing), event_stream: None, responses: Mutex::new(FnvHashMap::default()), compression, }; let (listener, event_stream) = session.listen_non_blocking( event_src.addr, event_src.authenticator, vec![SimpleServerEvent::StatusChange], ).await?; tokio::spawn(listener.start(&Compression::None)); session.event_stream = Some(Mutex::new(event_stream)); Ok(session) } async fn connect_static<A, LB>( node_configs: &ClusterTcpConfig<'_, A>, mut 
load_balancing: LB, compression: Compression, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { let mut nodes: Vec<Arc<TcpConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len()); for node_config in &node_configs.0 { let node_connection_pool = new_tcp_pool(node_config.clone()).await?; nodes.push(Arc::new(node_connection_pool)); } load_balancing.init(nodes); Ok(Session { load_balancing: Mutex::new(load_balancing), event_stream: None, responses: Mutex::new(FnvHashMap::default()), compression, }) } #[cfg(feature = "unstable-dynamic-cluster")] async fn connect_dynamic<'a, A, LB>( node_configs: &ClusterTcpConfig<'a, A>, mut load_balancing: LB, compression: Compression, event_src: NodeTcpConfig<'a, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { let mut nodes: Vec<Arc<TcpConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len()); for node_config in &node_configs.0 { let node_connection_pool = new_tcp_pool(node_config.clone()).await?; nodes.push(Arc::new(node_connection_pool)); } load_balancing.init(nodes); let mut session = Session { load_balancing: Mutex::new(load_balancing), event_stream: None, responses: Mutex::new(FnvHashMap::default()), compression, }; let (listener, event_stream) = session.listen_non_blocking( event_src.addr, event_src.authenticator, vec![SimpleServerEvent::StatusChange], ).await?; tokio::spawn(listener.start(&Compression::None)); session.event_stream = Some(Mutex::new(event_stream)); Ok(session) } pub async fn new<A, LB>( node_configs: &ClusterTcpConfig<'_, A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_static(node_configs, load_balancing, Compression::None).await } #[cfg(feature = "unstable-dynamic-cluster")] pub async fn new_dynamic<'a, A, LB>( 
node_configs: &ClusterTcpConfig<'a, A>, load_balancing: LB, event_src: NodeTcpConfig<'a, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_dynamic(node_configs, load_balancing, Compression::None, event_src).await } pub async fn new_snappy<A, LB>( node_configs: &ClusterTcpConfig<'_, A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_static(node_configs, load_balancing, Compression::Snappy).await } #[cfg(feature = "unstable-dynamic-cluster")] pub async fn new_snappy_dynamic<'a, A, LB>( node_configs: &ClusterTcpConfig<'a, A>, load_balancing: LB, event_src: NodeTcpConfig<'a, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_dynamic(node_configs, load_balancing, Compression::Snappy, event_src).await } pub async fn new_lz4<A, LB>( node_configs: &ClusterTcpConfig<'_, A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_static(node_configs, load_balancing, Compression::Lz4).await } #[cfg(feature = "unstable-dynamic-cluster")] pub async fn new_lz4_dynamic<'a, A, LB>( node_configs: &ClusterTcpConfig<'_, A>, load_balancing: LB, event_src: NodeTcpConfig<'a, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_dynamic(node_configs, load_balancing, Compression::Lz4, event_src).await } #[cfg(feature = "rust-tls")] pub async fn new_tls<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_static(node_configs, load_balancing, 
Compression::None).await } #[cfg(all(feature = "rust-tls", feature = "unstable-dynamic-cluster"))] pub async fn new_tls_dynamic<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, event_src: NodeTcpConfig<'_, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_dynamic(node_configs, load_balancing, Compression::None, event_src).await } #[cfg(feature = "rust-tls")] pub async fn new_snappy_tls<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_static(node_configs, load_balancing, Compression::Snappy).await } #[cfg(all(feature = "rust-tls", feature = "unstable-dynamic-cluster"))] pub async fn new_snappy_tls_dynamic<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, event_src: NodeTcpConfig<'_, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_dynamic(node_configs, load_balancing, Compression::Snappy, event_src).await } #[cfg(feature = "rust-tls")] pub async fn new_lz4_tls<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_static(node_configs, load_balancing, Compression::Lz4).await } #[cfg(all(feature = "rust-tls", feature = "unstable-dynamic-cluster"))] pub async fn new_lz4_tls_dynamic<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, event_src: NodeTcpConfig<'_, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_dynamic(node_configs, load_balancing, Compression::Lz4, event_src).await } impl<'a, L> 
Session<L> { pub async fn listen<A: Authenticator + 'static + Sized>( &self, node: &str, authenticator: A, events: Vec<SimpleServerEvent>, ) -> error::Result<(Listener<Mutex<TransportTcp>>, EventStream)> { let compression = self.get_compressor(); let transport = TransportTcp::new(&node).await.map(Mutex::new)?; startup(&transport, &authenticator).await?; let query_frame = Frame::new_req_register(events).into_cbytes(); transport.lock().await.write(query_frame.as_slice()).await?; parse_frame(&transport, &compression).await?; Ok(new_listener(transport)) } pub async fn listen_non_blocking<A: Authenticator + 'static + Sized>( &self, node: &str, authenticator: A, events: Vec<SimpleServerEvent>, ) -> error::Result<(Listener<Mutex<TransportTcp>>, EventStreamNonBlocking)> { self.listen(node, authenticator, events).await.map(|l| { let (listener, stream) = l; (listener, stream.into()) }) } }
use async_trait::async_trait; use bb8; use fnv::FnvHashMap; use std::iter::Iterator; use std::sync::Arc; use tokio::{io::AsyncWriteExt, sync::Mutex}; #[cfg(feature = "unstable-dynamic-cluster")] use crate::cluster::NodeTcpConfig; #[cfg(feature = "rust-tls")] use crate::cluster::{new_rustls_pool, ClusterRustlsConfig, RustlsConnectionPool}; use crate::cluster::{new_tcp_pool, startup, CDRSSession, ClusterTcpConfig, ConnectionPool, GetCompressor, GetConnection, TcpConnectionPool, ResponseCache}; use crate::error; use crate::load_balancing::LoadBalancingStrategy; use crate::transport::{CDRSTransport, TransportTcp}; use crate::authenticators::Authenticator; use crate::cluster::SessionPager; use crate::compression::Compression; use crate::events::{new_listener, EventStream, EventStreamNonBlocking, Listener}; use crate::frame::events::{ServerEvent, SimpleServerEvent, StatusChange, StatusChangeType}; use crate::frame::parser::parse_frame; use crate::frame::{Frame, IntoBytes, StreamId}; use crate::query::{BatchExecutor, ExecExecutor, PrepareExecutor, QueryExecutor}; #[derive(Debug)] pub struct Session<LB> { load_balancing: Mutex<LB>, event_stream: Option<Mutex<EventStreamNonBlocking>>, responses: Mutex<FnvHashMap<StreamId, Frame>>, #[allow(dead_code)] pub compression: Compression, } impl<'a, LB> GetCompressor<'a> for Session<LB> { fn get_compressor(&self) -> Compression { self.compression.clone() } } impl<'a, LB: Sized> Session<LB> { pub fn paged< T: CDRSTransport + Unpin + 'static, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error>, >( &'a mut self, page_size: i32, ) -> SessionPager<'a, M, Session<LB>, T> where Session<LB>: CDRSSession<'static, T, M>, { return SessionPager::new(self, page_size); } } #[async_trait] impl< T: CDRSTransport + Send + Sync + 'static, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, > GetConnection<T, M> for Session<LB> { async fn 
get_connection(&self) -> Option<Arc<ConnectionPool<M>>> { if cfg!(feature = "unstable-dynamic-cluster") { if let Some(ref event_stream_mx) = self.event_stream { if let Ok(ref mut event_stream) = event_stream_mx.try_lock() { loop { let next_event = event_stream.next(); match next_event { None => break, Some(ServerEvent::StatusChange(StatusChange { addr, change_type: StatusChangeType::Down, })) => { self.load_balancing .lock() .await .remove_node(|pool| pool.get_addr() == addr.addr); } Some(_) => continue, } } } } } self.load_balancing .lock() .await .next() } } #[async_trait] impl< 'a, T: CDRSTransport + Unpin + 'static, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, > QueryExecutor<T, M> for Session<LB> { } #[async_trait] impl< 'a, T: CDRSTransport + Unpin + 'static, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, > PrepareExecutor<T, M> for Session<LB> { } #[async_trait] impl< 'a, T: CDRSTransport + Unpin + 'static, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, > ExecExecutor<T, M> for Session<LB> { } #[async_trait] impl< 'a, T: CDRSTransport + Unpin + 'static, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, > BatchExecutor<T, M> for Session<LB> { } impl< 'a, T: CDRSTransport + Unpin + 'static, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, > CDRSSession<'a, T, M> for Session<LB> { } #[async_trait] impl <LB> ResponseCache for Session<LB> where LB: Send { async fn match_or_cache_response(&self, stream_id: i16, frame: Frame) -> Option<Frame> { if frame.stream == stream_id { 
return Some(frame); } let mut responses = self.responses.lock().await; responses.insert(frame.stream, frame); responses.remove(&stream_id) } } #[cfg(feature = "rust-tls")] async fn connect_tls_static<A, LB>( node_configs: &ClusterRustlsConfig<A>, mut load_balancing: LB, compression: Compression, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { let mut nodes: Vec<Arc<RustlsConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len()); for node_config in &node_configs.0 { let node_connection_pool = new_rustls_pool(node_config.clone()).await?; nodes.push(Arc::new(node_connection_pool)); } load_balancing.init(nodes);
} #[cfg(all(feature = "rust-tls", feature = "unstable-dynamic-cluster"))] async fn connect_tls_dynamic<A, LB>( node_configs: &ClusterRustlsConfig<A>, mut load_balancing: LB, compression: Compression, event_src: NodeTcpConfig<'_, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { let mut nodes: Vec<Arc<RustlsConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len()); for node_config in &node_configs.0 { let node_connection_pool = new_rustls_pool(node_config.clone()).await?; nodes.push(Arc::new(node_connection_pool)); } load_balancing.init(nodes); let mut session = Session { load_balancing: Mutex::new(load_balancing), event_stream: None, responses: Mutex::new(FnvHashMap::default()), compression, }; let (listener, event_stream) = session.listen_non_blocking( event_src.addr, event_src.authenticator, vec![SimpleServerEvent::StatusChange], ).await?; tokio::spawn(listener.start(&Compression::None)); session.event_stream = Some(Mutex::new(event_stream)); Ok(session) } async fn connect_static<A, LB>( node_configs: &ClusterTcpConfig<'_, A>, mut load_balancing: LB, compression: Compression, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { let mut nodes: Vec<Arc<TcpConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len()); for node_config in &node_configs.0 { let node_connection_pool = new_tcp_pool(node_config.clone()).await?; nodes.push(Arc::new(node_connection_pool)); } load_balancing.init(nodes); Ok(Session { load_balancing: Mutex::new(load_balancing), event_stream: None, responses: Mutex::new(FnvHashMap::default()), compression, }) } #[cfg(feature = "unstable-dynamic-cluster")] async fn connect_dynamic<'a, A, LB>( node_configs: &ClusterTcpConfig<'a, A>, mut load_balancing: LB, compression: Compression, event_src: NodeTcpConfig<'a, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 
'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { let mut nodes: Vec<Arc<TcpConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len()); for node_config in &node_configs.0 { let node_connection_pool = new_tcp_pool(node_config.clone()).await?; nodes.push(Arc::new(node_connection_pool)); } load_balancing.init(nodes); let mut session = Session { load_balancing: Mutex::new(load_balancing), event_stream: None, responses: Mutex::new(FnvHashMap::default()), compression, }; let (listener, event_stream) = session.listen_non_blocking( event_src.addr, event_src.authenticator, vec![SimpleServerEvent::StatusChange], ).await?; tokio::spawn(listener.start(&Compression::None)); session.event_stream = Some(Mutex::new(event_stream)); Ok(session) } pub async fn new<A, LB>( node_configs: &ClusterTcpConfig<'_, A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_static(node_configs, load_balancing, Compression::None).await } #[cfg(feature = "unstable-dynamic-cluster")] pub async fn new_dynamic<'a, A, LB>( node_configs: &ClusterTcpConfig<'a, A>, load_balancing: LB, event_src: NodeTcpConfig<'a, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_dynamic(node_configs, load_balancing, Compression::None, event_src).await } pub async fn new_snappy<A, LB>( node_configs: &ClusterTcpConfig<'_, A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_static(node_configs, load_balancing, Compression::Snappy).await } #[cfg(feature = "unstable-dynamic-cluster")] pub async fn new_snappy_dynamic<'a, A, LB>( node_configs: &ClusterTcpConfig<'a, A>, load_balancing: LB, event_src: NodeTcpConfig<'a, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + 
Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_dynamic(node_configs, load_balancing, Compression::Snappy, event_src).await } pub async fn new_lz4<A, LB>( node_configs: &ClusterTcpConfig<'_, A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_static(node_configs, load_balancing, Compression::Lz4).await } #[cfg(feature = "unstable-dynamic-cluster")] pub async fn new_lz4_dynamic<'a, A, LB>( node_configs: &ClusterTcpConfig<'_, A>, load_balancing: LB, event_src: NodeTcpConfig<'a, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_dynamic(node_configs, load_balancing, Compression::Lz4, event_src).await } #[cfg(feature = "rust-tls")] pub async fn new_tls<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_static(node_configs, load_balancing, Compression::None).await } #[cfg(all(feature = "rust-tls", feature = "unstable-dynamic-cluster"))] pub async fn new_tls_dynamic<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, event_src: NodeTcpConfig<'_, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_dynamic(node_configs, load_balancing, Compression::None, event_src).await } #[cfg(feature = "rust-tls")] pub async fn new_snappy_tls<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_static(node_configs, load_balancing, Compression::Snappy).await } #[cfg(all(feature = "rust-tls", feature = 
"unstable-dynamic-cluster"))] pub async fn new_snappy_tls_dynamic<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, event_src: NodeTcpConfig<'_, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_dynamic(node_configs, load_balancing, Compression::Snappy, event_src).await } #[cfg(feature = "rust-tls")] pub async fn new_lz4_tls<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_static(node_configs, load_balancing, Compression::Lz4).await } #[cfg(all(feature = "rust-tls", feature = "unstable-dynamic-cluster"))] pub async fn new_lz4_tls_dynamic<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, event_src: NodeTcpConfig<'_, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_dynamic(node_configs, load_balancing, Compression::Lz4, event_src).await } impl<'a, L> Session<L> { pub async fn listen<A: Authenticator + 'static + Sized>( &self, node: &str, authenticator: A, events: Vec<SimpleServerEvent>, ) -> error::Result<(Listener<Mutex<TransportTcp>>, EventStream)> { let compression = self.get_compressor(); let transport = TransportTcp::new(&node).await.map(Mutex::new)?; startup(&transport, &authenticator).await?; let query_frame = Frame::new_req_register(events).into_cbytes(); transport.lock().await.write(query_frame.as_slice()).await?; parse_frame(&transport, &compression).await?; Ok(new_listener(transport)) } pub async fn listen_non_blocking<A: Authenticator + 'static + Sized>( &self, node: &str, authenticator: A, events: Vec<SimpleServerEvent>, ) -> error::Result<(Listener<Mutex<TransportTcp>>, EventStreamNonBlocking)> { self.listen(node, authenticator, events).await.map(|l| { let 
(listener, stream) = l; (listener, stream.into()) }) } }
Ok(Session { load_balancing: Mutex::new(load_balancing), event_stream: None, responses: Mutex::new(FnvHashMap::default()), compression, })
call_expression
[ { "content": "#[async_trait]\n\npub trait CDRSTransport: Sized + AsyncRead + AsyncWriteExt + Send + Sync {\n\n /// Creates a new independently owned handle to the underlying socket.\n\n ///\n\n /// The returned TcpStream is a reference to the same stream that this object references.\n\n /// Both han...
Rust
src/utils/v6/ipv6_cidr_separator.rs
sanderv32/cidr-utils
c0f7607e086d7e3ea25b429a55041a86fe624ddd
extern crate num_traits; use std::cmp::Ordering; use crate::cidr::Ipv6Cidr; use crate::num_bigint::BigUint; use crate::utils::Ipv6CidrCombiner; use num_traits::{One, ToPrimitive}; #[derive(Debug)] pub struct Ipv6CidrSeparator; impl Ipv6CidrSeparator { pub fn divide_by(cidr: &Ipv6Cidr, n: usize) -> Option<Vec<Ipv6CidrCombiner>> { let size = cidr.size(); let n_big_int = BigUint::from(n); if n == 0 || n_big_int > size { return None; } else if n == 1 { let mut combiner = Ipv6CidrCombiner::with_capacity(1); combiner.push(*cidr); return Some(vec![combiner]); } let log2_n = (n as f64).log2(); let mut output = Vec::with_capacity(n); if (log2_n - log2_n.floor()).abs() < 2.0 * std::f64::EPSILON { let mut iter = cidr.iter(); let bits = cidr.get_bits() + log2_n as u8; let usize_max_big_int = BigUint::from(usize::max_value()); let d = size / n_big_int; if d <= usize_max_big_int { for ip in iter.step_by(d.to_usize().unwrap()) { let mut combiner = Ipv6CidrCombiner::with_capacity(1); combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); output.push(combiner); } } else { let nth = d - BigUint::one(); if let Some(ip) = iter.next() { let mut combiner = Ipv6CidrCombiner::with_capacity(1); combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); output.push(combiner); while let Some(ip) = iter.nth_big_uint(nth.clone()) { let mut combiner = Ipv6CidrCombiner::with_capacity(1); combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); output.push(combiner); } } } } else { let d = size / n_big_int; let iter = cidr.iter(); let mut current_combiner = Ipv6CidrCombiner::new(); let mut i = BigUint::one(); for ip in iter { current_combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, 128).unwrap()); if i == d { output.push(current_combiner); current_combiner = Ipv6CidrCombiner::new(); i = BigUint::one(); } else { i += BigUint::one(); } } let last_combiner = output.last_mut().unwrap(); for cidr in current_combiner.into_ipv6_cidr_vec().into_iter() { 
last_combiner.push(cidr); } } Some(output) } pub fn sub_networks(cidr: &Ipv6Cidr, bits: u8) -> Option<Vec<Ipv6Cidr>> { let cidr_bits = cidr.get_bits(); match cidr_bits.cmp(&bits) { Ordering::Greater => return None, Ordering::Equal => return Some(vec![*cidr]), Ordering::Less => (), } let n = 2usize.pow(u32::from(bits - cidr_bits)); let n_big_int = BigUint::from(n); let mut output = Vec::with_capacity(n); let size = cidr.size(); let d = size / n_big_int; let mut iter = cidr.iter(); let usize_max_big_int = BigUint::from(usize::max_value()); if d <= usize_max_big_int { for ip in iter.step_by(d.to_usize().unwrap()) { output.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); } } else { let nth = d - BigUint::one(); if let Some(ip) = iter.next() { output.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); while let Some(ip) = iter.nth_big_uint(nth.clone()) { output.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); } } } Some(output) } }
extern crate num_traits; use std::cmp::Ordering; use crate::cidr::Ipv6Cidr; use crate::num_bigint::BigUint; use crate::utils::Ipv6CidrCombiner; use num_traits::{One, ToPrimitive}; #[derive(Debug)] pub struct Ipv6CidrSeparator; impl Ipv6CidrSeparator { pub fn divide_by(cidr: &Ipv6Cidr, n: usize) -> Option<Vec<Ipv6CidrCombiner>> { let size = cidr.size(); let n_big_int = BigUint::from(n); if n == 0 || n_big_int > size { return None; } else if n == 1 { let mut combiner = Ipv6CidrCombiner::with_capacity(1); combiner.push(*cidr); return Some(vec![combiner]); } let log2_n = (n as f64).log2(); let mut output = Vec::with_capacity(n); if (log2_n - log2_n.floor()).abs() < 2.0 * std::f64::EPSILON { let mut iter = cidr.iter(); let bits = cidr.get_bits() + log2_n as u8; let usize_max_big_int = BigUint::from(usize::max_value()); let d = size / n_big_int; if d <= usize_max_big_int { for ip in iter.step_by(d.to_usize().unwrap()) { let mut combiner = Ipv6CidrCombiner::with_capacity(1); combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); output.push(combiner); } } else { let nth = d - BigUint::one(); if let Some(ip) = iter.next() { let mut combiner = I
usize::max_value()); if d <= usize_max_big_int { for ip in iter.step_by(d.to_usize().unwrap()) { output.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); } } else { let nth = d - BigUint::one(); if let Some(ip) = iter.next() { output.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); while let Some(ip) = iter.nth_big_uint(nth.clone()) { output.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); } } } Some(output) } }
pv6CidrCombiner::with_capacity(1); combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); output.push(combiner); while let Some(ip) = iter.nth_big_uint(nth.clone()) { let mut combiner = Ipv6CidrCombiner::with_capacity(1); combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); output.push(combiner); } } } } else { let d = size / n_big_int; let iter = cidr.iter(); let mut current_combiner = Ipv6CidrCombiner::new(); let mut i = BigUint::one(); for ip in iter { current_combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, 128).unwrap()); if i == d { output.push(current_combiner); current_combiner = Ipv6CidrCombiner::new(); i = BigUint::one(); } else { i += BigUint::one(); } } let last_combiner = output.last_mut().unwrap(); for cidr in current_combiner.into_ipv6_cidr_vec().into_iter() { last_combiner.push(cidr); } } Some(output) } pub fn sub_networks(cidr: &Ipv6Cidr, bits: u8) -> Option<Vec<Ipv6Cidr>> { let cidr_bits = cidr.get_bits(); match cidr_bits.cmp(&bits) { Ordering::Greater => return None, Ordering::Equal => return Some(vec![*cidr]), Ordering::Less => (), } let n = 2usize.pow(u32::from(bits - cidr_bits)); let n_big_int = BigUint::from(n); let mut output = Vec::with_capacity(n); let size = cidr.size(); let d = size / n_big_int; let mut iter = cidr.iter(); let usize_max_big_int = BigUint::from(
random
[ { "content": "#[test]\n\nfn simple_test() {\n\n let mut combiner = IpCidrCombiner::new();\n\n\n\n combiner.push(IpCidr::from_str(\"192.168.1.100\").unwrap());\n\n combiner.push(IpCidr::from_str(\"192.168.1.101\").unwrap());\n\n combiner.push(IpCidr::from_str(\"192.168.1.102\").unwrap());\n\n comb...
Rust
crates/bench-api/src/lib.rs
dheaton-arm/wasmtime
86611d3bbc92b781ed136dcda7cdba9ec2c1cbee
mod unsafe_send_sync; use crate::unsafe_send_sync::UnsafeSendSync; use anyhow::{anyhow, Context, Result}; use std::os::raw::{c_int, c_void}; use std::slice; use std::{env, path::PathBuf}; use wasmtime::{Config, Engine, Instance, Linker, Module, Store}; use wasmtime_wasi::{sync::WasiCtxBuilder, WasiCtx}; pub type ExitCode = c_int; pub const OK: ExitCode = 0; pub const ERR: ExitCode = -1; #[cfg(feature = "shuffling-allocator")] #[global_allocator] static ALLOC: shuffling_allocator::ShufflingAllocator<std::alloc::System> = shuffling_allocator::wrap!(&std::alloc::System); #[repr(C)] pub struct WasmBenchConfig { pub working_dir_ptr: *const u8, pub working_dir_len: usize, pub stdout_path_ptr: *const u8, pub stdout_path_len: usize, pub stderr_path_ptr: *const u8, pub stderr_path_len: usize, pub stdin_path_ptr: *const u8, pub stdin_path_len: usize, pub compilation_timer: *mut u8, pub compilation_start: extern "C" fn(*mut u8), pub compilation_end: extern "C" fn(*mut u8), pub instantiation_timer: *mut u8, pub instantiation_start: extern "C" fn(*mut u8), pub instantiation_end: extern "C" fn(*mut u8), pub execution_timer: *mut u8, pub execution_start: extern "C" fn(*mut u8), pub execution_end: extern "C" fn(*mut u8), } impl WasmBenchConfig { fn working_dir(&self) -> Result<PathBuf> { let working_dir = unsafe { std::slice::from_raw_parts(self.working_dir_ptr, self.working_dir_len) }; let working_dir = std::str::from_utf8(working_dir) .context("given working directory is not valid UTF-8")?; Ok(working_dir.into()) } fn stdout_path(&self) -> Result<PathBuf> { let stdout_path = unsafe { std::slice::from_raw_parts(self.stdout_path_ptr, self.stdout_path_len) }; let stdout_path = std::str::from_utf8(stdout_path).context("given stdout path is not valid UTF-8")?; Ok(stdout_path.into()) } fn stderr_path(&self) -> Result<PathBuf> { let stderr_path = unsafe { std::slice::from_raw_parts(self.stderr_path_ptr, self.stderr_path_len) }; let stderr_path = 
std::str::from_utf8(stderr_path).context("given stderr path is not valid UTF-8")?; Ok(stderr_path.into()) } fn stdin_path(&self) -> Result<Option<PathBuf>> { if self.stdin_path_ptr.is_null() { return Ok(None); } let stdin_path = unsafe { std::slice::from_raw_parts(self.stdin_path_ptr, self.stdin_path_len) }; let stdin_path = std::str::from_utf8(stdin_path).context("given stdin path is not valid UTF-8")?; Ok(Some(stdin_path.into())) } } #[no_mangle] pub extern "C" fn wasm_bench_create( config: WasmBenchConfig, out_bench_ptr: *mut *mut c_void, ) -> ExitCode { let result = (|| -> Result<_> { let working_dir = config.working_dir()?; let working_dir = cap_std::fs::Dir::open_ambient_dir(&working_dir, cap_std::ambient_authority()) .with_context(|| { format!( "failed to preopen the working directory: {}", working_dir.display(), ) })?; let stdout_path = config.stdout_path()?; let stderr_path = config.stderr_path()?; let stdin_path = config.stdin_path()?; let state = Box::new(BenchState::new( config.compilation_timer, config.compilation_start, config.compilation_end, config.instantiation_timer, config.instantiation_start, config.instantiation_end, config.execution_timer, config.execution_start, config.execution_end, move || { let mut cx = WasiCtxBuilder::new(); let stdout = std::fs::File::create(&stdout_path) .with_context(|| format!("failed to create {}", stdout_path.display()))?; let stdout = cap_std::fs::File::from_std(stdout, cap_std::ambient_authority()); let stdout = wasi_cap_std_sync::file::File::from_cap_std(stdout); cx = cx.stdout(Box::new(stdout)); let stderr = std::fs::File::create(&stderr_path) .with_context(|| format!("failed to create {}", stderr_path.display()))?; let stderr = cap_std::fs::File::from_std(stderr, cap_std::ambient_authority()); let stderr = wasi_cap_std_sync::file::File::from_cap_std(stderr); cx = cx.stderr(Box::new(stderr)); if let Some(stdin_path) = &stdin_path { let stdin = std::fs::File::open(stdin_path) .with_context(|| format!("failed to 
open {}", stdin_path.display()))?; let stdin = cap_std::fs::File::from_std(stdin, cap_std::ambient_authority()); let stdin = wasi_cap_std_sync::file::File::from_cap_std(stdin); cx = cx.stdin(Box::new(stdin)); } cx = cx.preopened_dir(working_dir.try_clone()?, ".")?; if let Ok(val) = env::var("WASM_BENCH_USE_SMALL_WORKLOAD") { cx = cx.env("WASM_BENCH_USE_SMALL_WORKLOAD", &val)?; } Ok(cx.build()) }, )?); Ok(Box::into_raw(state) as _) })(); if let Ok(bench_ptr) = result { unsafe { assert!(!out_bench_ptr.is_null()); *out_bench_ptr = bench_ptr; } } to_exit_code(result.map(|_| ())) } #[no_mangle] pub extern "C" fn wasm_bench_free(state: *mut c_void) { assert!(!state.is_null()); unsafe { Box::from_raw(state as *mut BenchState); } } #[no_mangle] pub extern "C" fn wasm_bench_compile( state: *mut c_void, wasm_bytes: *const u8, wasm_bytes_length: usize, ) -> ExitCode { let state = unsafe { (state as *mut BenchState).as_mut().unwrap() }; let wasm_bytes = unsafe { slice::from_raw_parts(wasm_bytes, wasm_bytes_length) }; let result = state.compile(wasm_bytes).context("failed to compile"); to_exit_code(result) } #[no_mangle] pub extern "C" fn wasm_bench_instantiate(state: *mut c_void) -> ExitCode { let state = unsafe { (state as *mut BenchState).as_mut().unwrap() }; let result = state.instantiate().context("failed to instantiate"); to_exit_code(result) } #[no_mangle] pub extern "C" fn wasm_bench_execute(state: *mut c_void) -> ExitCode { let state = unsafe { (state as *mut BenchState).as_mut().unwrap() }; let result = state.execute().context("failed to execute"); to_exit_code(result) } fn to_exit_code<T>(result: impl Into<Result<T>>) -> ExitCode { match result.into() { Ok(_) => OK, Err(error) => { eprintln!("{:?}", error); ERR } } } struct BenchState { linker: Linker<HostState>, compilation_timer: *mut u8, compilation_start: extern "C" fn(*mut u8), compilation_end: extern "C" fn(*mut u8), instantiation_timer: *mut u8, instantiation_start: extern "C" fn(*mut u8), instantiation_end: 
extern "C" fn(*mut u8), make_wasi_cx: Box<dyn FnMut() -> Result<WasiCtx>>, module: Option<Module>, store_and_instance: Option<(Store<HostState>, Instance)>, } struct HostState { wasi: WasiCtx, #[cfg(feature = "wasi-nn")] wasi_nn: wasmtime_wasi_nn::WasiNnCtx, #[cfg(feature = "wasi-crypto")] wasi_crypto: wasmtime_wasi_crypto::WasiCryptoCtx, } impl BenchState { fn new( compilation_timer: *mut u8, compilation_start: extern "C" fn(*mut u8), compilation_end: extern "C" fn(*mut u8), instantiation_timer: *mut u8, instantiation_start: extern "C" fn(*mut u8), instantiation_end: extern "C" fn(*mut u8), execution_timer: *mut u8, execution_start: extern "C" fn(*mut u8), execution_end: extern "C" fn(*mut u8), make_wasi_cx: impl FnMut() -> Result<WasiCtx> + 'static, ) -> Result<Self> { let mut config = Config::new(); config.wasm_simd(true); let engine = Engine::new(&config)?; let mut linker = Linker::<HostState>::new(&engine); let execution_timer = unsafe { UnsafeSendSync::new(execution_timer) }; linker.func_wrap("bench", "start", move || { execution_start(*execution_timer.get()); Ok(()) })?; linker.func_wrap("bench", "end", move || { execution_end(*execution_timer.get()); Ok(()) })?; wasmtime_wasi::add_to_linker(&mut linker, |cx| &mut cx.wasi)?; #[cfg(feature = "wasi-nn")] wasmtime_wasi_nn::add_to_linker(&mut linker, |cx| &mut cx.wasi_nn)?; #[cfg(feature = "wasi-crypto")] wasmtime_wasi_crypto::add_to_linker(&mut linker, |cx| &mut cx.wasi_crypto)?; Ok(Self { linker, compilation_timer, compilation_start, compilation_end, instantiation_timer, instantiation_start, instantiation_end, make_wasi_cx: Box::new(make_wasi_cx) as _, module: None, store_and_instance: None, }) } fn compile(&mut self, bytes: &[u8]) -> Result<()> { assert!( self.module.is_none(), "create a new engine to repeat compilation" ); (self.compilation_start)(self.compilation_timer); let module = Module::from_binary(self.linker.engine(), bytes)?; (self.compilation_end)(self.compilation_timer); self.module = 
Some(module); Ok(()) } fn instantiate(&mut self) -> Result<()> { let module = self .module .as_ref() .expect("compile the module before instantiating it"); let host = HostState { wasi: (self.make_wasi_cx)().context("failed to create a WASI context")?, #[cfg(feature = "wasi-nn")] wasi_nn: wasmtime_wasi_nn::WasiNnCtx::new()?, #[cfg(feature = "wasi-crypto")] wasi_crypto: wasmtime_wasi_nn::WasiCryptoCtx::new(), }; (self.instantiation_start)(self.instantiation_timer); let mut store = Store::new(self.linker.engine(), host); let instance = self.linker.instantiate(&mut store, &module)?; (self.instantiation_end)(self.instantiation_timer); self.store_and_instance = Some((store, instance)); Ok(()) } fn execute(&mut self) -> Result<()> { let (mut store, instance) = self .store_and_instance .take() .expect("instantiate the module before executing it"); let start_func = instance.get_typed_func::<(), (), _>(&mut store, "_start")?; match start_func.call(&mut store, ()) { Ok(_) => Ok(()), Err(trap) => { match trap.i32_exit_status() { Some(0) => Ok(()), Some(n) => Err(anyhow!("_start exited with a non-zero code: {}", n)), None => Err(anyhow!( "executing the benchmark resulted in a trap: {}", trap )), } } } } }
mod unsafe_send_sync; use crate::unsafe_send_sync::UnsafeSendSync; use anyhow::{anyhow, Context, Result}; use std::os::raw::{c_int, c_void}; use std::slice; use std::{env, path::PathBuf}; use wasmtime::{Config, Engine, Instance, Linker, Module, Store}; use wasmtime_wasi::{sync::WasiCtxBuilder, WasiCtx}; pub type ExitCode = c_int; pub const OK: ExitCode = 0; pub const ERR: ExitCode = -1; #[cfg(feature = "shuffling-allocator")] #[global_allocator] static ALLOC: shuffling_allocator::ShufflingAllocator<std::alloc::System> = shuffling_allocator::wrap!(&std::alloc::System); #[repr(C)] pub struct WasmBenchConfig { pub working_dir_ptr: *const u8, pub working_dir_len: usize, pub stdout_path_ptr: *const u8, pub stdout_path_len: usize, pub stderr_path_ptr: *const u8, pub stderr_path_len: usize, pub stdin_path_ptr: *const u8, pub stdin_path_len: usize, pub compilation_timer: *mut u8, pub compilation_start: extern "C" fn(*mut u8), pub compilation_end: extern "C" fn(*mut u8), pub instantiation_timer: *mut u8, pub instantiation_start: extern "C" fn(*mut u8), pub instantiation_end: extern "C" fn(*mut u8), pub execution_timer: *mut u8, pub execution_start: extern "C" fn(*mut u8), pub execution_end: extern "C" fn(*mut u8), } impl WasmBenchConfig { fn working_dir(&self) -> Result<PathBuf> { let working_dir = unsafe { std::slice::from_raw_parts(self.working_dir_ptr, self.working_dir_len) }; let working_dir = std::str::from_utf8(working_dir) .context("given working directory is not valid UTF-8")?; Ok(working_dir.into()) } fn stdout_path(&self) -> Result<PathBuf> { let stdout_path = unsafe { std::slice::from_raw_parts(self.stdout_path_ptr, self.stdout_path_len) }; let stdout_path = std::str::from_utf8(stdout_path).context("given stdout path is not valid UTF-8")?; Ok(stdout_path.into()) } fn stderr_path(&self) -> Result<PathBuf> { let stderr_path = unsafe { std::slice::from_raw_parts(self.stderr_path_ptr, self.stderr_path_len) }; let stderr_path = 
std::str::from_utf8(stderr_path).context("given stderr path is not valid UTF-8")?; Ok(stderr_path.into()) } fn stdin_path(&self) -> Result<Option<PathBuf>> { if self.stdin_path_ptr.is_null() { return Ok(None); } let stdin_path = unsafe { std::slice::from_raw_parts(self.stdin_path_ptr, self.stdin_path_len) }; let stdin_path = std::str::from_utf8(stdin_path).context("given stdin path is not valid UTF-8")?; Ok(Some(stdin_path.into())) } } #[no_mangle] pub extern "C" fn wasm_bench_create( config: WasmBenchConfig, out_bench_ptr: *mut *mut c_void, ) -> ExitCode { let result = (|| -> Result<_> { let working_dir = config.working_dir()?; let working_dir = cap_std::fs::Dir::open_ambient_dir(&working_dir, cap_std::ambient_authority()) .with_context(|| { format!( "failed to preopen the working directory: {}", working_dir.display(), ) })?; let stdout_path = config.stdout_path()?; let stderr_path = config.stderr_path()?; let stdin_path = config.stdin_path()?; let state = Box::new(BenchState::new( config.compilation_timer, config.compilation_start, config.compilation_end, config.instantiation_timer, config.instantiation_start, config.instantiation_end, config.execution_timer, config.execution_start, config.execution_end, move || { let mut cx = WasiCtxBuilder::new(); let stdout = std::fs::File::create(&stdout_path) .with_context(|| format!("failed to create {}", stdout_path.display()))?; let stdout = cap_std::fs::File::from_std(stdout, cap_std::ambient_authority()); let stdout = wasi_cap_std_sync::file::File::from_cap_std(stdout); cx = cx.stdout(Box::new(stdout)); let stderr = std::fs::File::create(&stderr_path) .with_context(|| format!("failed to create {}", stderr_path.display()))?; let stderr = cap_std::fs::File::from_std(stderr, cap_std::ambient_authority()); let stderr = wasi_cap_std_sync::file::File::from_cap_std(stderr); cx = cx.stderr(Box::new(stderr)); if let Some(stdin_path) = &stdin_path { let stdin = std::fs::File::open(stdin_path) .with_context(|| format!("failed to 
open {}", stdin_path.display()))?; let stdin = cap_std::fs::File::from_std(stdin, cap_std::ambient_authority()); let stdin = wasi_cap_std_sync::file::File::from_cap_std(stdin); cx = cx.stdin(Box::new(stdin)); } cx = cx.preopened_dir(working_dir.try_clone()?, ".")?; if let Ok(val) = env::var("WASM_BENCH_USE_SMALL_WORKLOAD") { cx = cx.env("WASM_BENCH_USE_SMALL_WORKLOAD", &val)?; } Ok(cx.build()) }, )?); Ok(Box::into_raw(state) as _) })(); if let Ok(bench_ptr) = result { unsafe { assert!(!out_bench_ptr.is_null()); *out_bench_ptr = bench_ptr; } } to_exit_code(result.map(|_| ())) } #[no_mangle] pub extern "C" fn wasm_bench_free(state: *mut c_void) { assert!(!state.is_null()); unsafe { Box::from_raw(state as *mut BenchState); } } #[no_mangle] pub extern "C" fn wasm_bench_compile( state: *mut c_void, wasm_bytes: *const u8, wasm_bytes_length: usize, ) -> ExitCode { let state = unsafe { (state as *mut BenchState).as_mut().unwrap() }; let wasm_bytes = unsafe { slice::from_raw_parts(wasm_bytes, wasm_bytes_length) }; let result = state.compile(wasm_bytes).context("failed to compile"); to_exit_code(result) } #[no_mangle] pub extern "C" fn wasm_bench_instantiate(state: *mut c_void) -> ExitCode { let state = unsafe { (state as *mut BenchState).as_mut().unwrap() }; let result = state.instantiate().context("failed to instantiate"); to_exit_code(result) } #[no_mangle] pub extern "C" fn wasm_bench_execute(state: *mut c_void) -> ExitCode { let state = unsafe { (state as *mut BenchState).as_mut().unwrap() }; let result = state.execute().context("failed to execute"); to_exit_code(result) } fn to_exit_code<T>(result: impl Into<Result<T>>) -> ExitCode { match result.into() { Ok(_) => OK, Err(error) => { eprintln!("{:?}", error); ERR } } } struct BenchState { linker: Linker<HostState>, compilation_timer: *mut u8, compilation_start: extern "C" fn(*mut u8), compilation_end: extern "C" fn(*mut u8), instantiation_timer: *mut u8, instantiation_start: extern "C" fn(*mut u8), instantiation_end: 
extern "C" fn(*mut u8), make_wasi_cx: Box<dyn FnMut() -> Result<WasiCtx>>, module: Option<Module>, store_and_instance: Option<(Store<HostState>, Instance)>, } struct HostState { wasi: WasiCtx, #[cfg(feature = "wasi-nn")] wasi_nn: wasmtime_wasi_nn::WasiNnCtx, #[cfg(feature = "wasi-crypto")] wasi_crypto: wasmtime_wasi_crypto::WasiCryptoCtx, } impl BenchState { fn new( compilation_timer: *mut u8, compilation_start: extern "C" fn(*mut u8), compilation_end: extern "C" fn(*mut u8), instantiation_timer: *mut u8, instantiation_start: extern "C" fn(*mut u8), instantiation_end: extern "C" fn(*mut u8), execution_timer: *mut u8, execution_start: extern "C" fn(*mut u8), execution_end: extern "C" fn(*mut u8), make_wasi_cx: impl FnMut() -> Result<WasiCtx> + 'static, ) -> Result<Self> { let mut config = Config::new(); config.wasm_simd(true); let engine = Engine::new(&config)?; let mut linker = Linker::<HostState>::new(&engine); let execution_timer = unsafe { UnsafeSendSync::new(execution_timer) }; linker.func_wrap("bench", "start", move || { execution_start(*execution_timer.get()); Ok(()) })?; linker.func_wrap("bench", "end", move || { execution_end(*execution_timer.get()); Ok(()) })?; wasmtime_wasi::add_to_linker(&mut linker, |cx| &mut cx.wasi)?; #[cfg(feature = "wasi-nn")] wasmtime_wasi_nn::add_to_linker(&mut linker, |cx| &mut cx.wasi_nn)?; #[cfg(feature = "wasi-crypto")] wasmtime_wasi_crypto::add_to_linker(&mut linker, |cx| &mut cx.wasi_crypto)?; Ok(Self { linker, compilation_timer, compilation_start, compilation_end, instantiation_timer, instantiation_start, instantiation_end, make_wasi_cx: Box::new(make_wasi_cx) as _, module: None, store_and_instance: None, }) } fn compile(&mut self, bytes: &[u8]) -> Result
antiate(&mut self) -> Result<()> { let module = self .module .as_ref() .expect("compile the module before instantiating it"); let host = HostState { wasi: (self.make_wasi_cx)().context("failed to create a WASI context")?, #[cfg(feature = "wasi-nn")] wasi_nn: wasmtime_wasi_nn::WasiNnCtx::new()?, #[cfg(feature = "wasi-crypto")] wasi_crypto: wasmtime_wasi_nn::WasiCryptoCtx::new(), }; (self.instantiation_start)(self.instantiation_timer); let mut store = Store::new(self.linker.engine(), host); let instance = self.linker.instantiate(&mut store, &module)?; (self.instantiation_end)(self.instantiation_timer); self.store_and_instance = Some((store, instance)); Ok(()) } fn execute(&mut self) -> Result<()> { let (mut store, instance) = self .store_and_instance .take() .expect("instantiate the module before executing it"); let start_func = instance.get_typed_func::<(), (), _>(&mut store, "_start")?; match start_func.call(&mut store, ()) { Ok(_) => Ok(()), Err(trap) => { match trap.i32_exit_status() { Some(0) => Ok(()), Some(n) => Err(anyhow!("_start exited with a non-zero code: {}", n)), None => Err(anyhow!( "executing the benchmark resulted in a trap: {}", trap )), } } } } }
<()> { assert!( self.module.is_none(), "create a new engine to repeat compilation" ); (self.compilation_start)(self.compilation_timer); let module = Module::from_binary(self.linker.engine(), bytes)?; (self.compilation_end)(self.compilation_timer); self.module = Some(module); Ok(()) } fn inst
random
[ { "content": "pub fn create_global(store: &mut StoreOpaque, gt: &GlobalType, val: Val) -> Result<InstanceId> {\n\n let mut module = Module::new();\n\n let mut func_imports = Vec::new();\n\n let mut externref_init = None;\n\n let mut shared_signature_id = None;\n\n\n\n let global = Global {\n\n ...
Rust
src/stream/server/ts.rs
burjee/mock-yo-stream
6161822cccb477c4f33217788078463d7cad9c7c
use std::fs::File;

use mpeg2ts;
use mpeg2ts::{
    ts::{TsPacket, TsHeader, TsPayload, Pid, ContinuityCounter},
    pes::PesHeader,
};

// Accumulates MPEG-TS packets for one A/V segment and writes them out as a
// .ts file. Continuity counters are tracked per elementary stream so packets
// stay sequential across multiple push_video/push_audio calls.
pub struct TransportStream {
    video_continuity_counter: ContinuityCounter,
    audio_continuity_counter: ContinuityCounter,
    packets: Vec<TsPacket>,
}

impl TransportStream {
    // PIDs for the program tables and the two elementary streams.
    const PAT_PID: u16 = 0;
    const PMT_PID: u16 = 256;
    const VIDEO_PID: u16 = 257;
    const AUDIO_PID: u16 = 258;
    // PES stream ids (0xE0 = video, 0xC0 = audio).
    const VIDEO_STREAM_ID: u8 = 224;
    const AUDIO_STREAM_ID: u8 = 192;

    pub fn new() -> TransportStream {
        TransportStream {
            video_continuity_counter: ContinuityCounter::new(),
            audio_continuity_counter: ContinuityCounter::new(),
            packets: Vec::new(),
        }
    }

    // Drains all buffered packets into ./video/<filename>, prefixed with a
    // fresh PAT and PMT so the segment is self-describing.
    // NOTE(review): file creation and every write are unwrap()ed — any I/O
    // error panics; acceptable for a mock tool, confirm for production use.
    pub fn write_file(&mut self, filename: &str) {
        use mpeg2ts::ts::{TsPacketWriter, WriteTsPacket};

        let filename = format!("./video/{}", filename);
        let file = File::create(filename).unwrap();

        let packets: Vec<_> = self.packets.drain(..).collect();
        let mut writer = TsPacketWriter::new(file);

        writer.write_ts_packet(&TransportStream::default_pat()).unwrap();
        writer.write_ts_packet(&TransportStream::default_pmt()).unwrap();

        for packet in &packets {
            writer.write_ts_packet(packet).unwrap();
        }
    }

    // Packetizes one video frame: a leading PES packet (with PTS/DTS, and a
    // PCR-carrying adaptation field on keyframes) followed by raw
    // continuation packets for the remaining payload.
    // NOTE(review): assumes `timestamp`/`composition_time` are milliseconds
    // (× 90 converts to the 90 kHz PES clock) — confirm with caller.
    // Always returns Ok(()); the Result shape is kept for the caller's API.
    pub fn push_video(&mut self, timestamp: u64, composition_time: u64, is_keyframe: bool, mut video: Vec<u8>) -> Result<(), ()> {
        use mpeg2ts::{
            ts::{AdaptationField, payload},
            es::StreamId,
        };

        let mut header = TransportStream::default_header(TransportStream::VIDEO_PID);
        // Resume the stream's continuity counter from the previous frame.
        header.continuity_counter = self.video_continuity_counter;

        let packet = {
            let data = {
                // 153 bytes: payload room presumed left in the first 188-byte
                // TS packet after TS/PES headers — TODO confirm against the
                // mpeg2ts packet layout.
                let bytes: Vec<u8> = if video.len() < 153 {
                    video.drain(..).collect()
                } else {
                    video.drain(..153).collect()
                };
                mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap()
            };

            let pcr = mpeg2ts::time::ClockReference::new(timestamp * 90).unwrap();

            // Keyframes get an adaptation field: random-access flag plus a
            // PCR sample so decoders can seek to them.
            let adaptation_field = if is_keyframe {
                Some(AdaptationField {
                    discontinuity_indicator: false,
                    random_access_indicator: true,
                    es_priority_indicator: false,
                    pcr: Some(pcr),
                    opcr: None,
                    splice_countdown: None,
                    transport_private_data: Vec::new(),
                    extension: None,
                })
            } else {
                None
            };

            // PTS includes the composition offset; DTS is the raw decode time.
            let pts = mpeg2ts::time::Timestamp::new((timestamp + composition_time) * 90).unwrap();
            let dts = mpeg2ts::time::Timestamp::new(timestamp * 90).unwrap();

            TsPacket {
                header: header.clone(),
                adaptation_field,
                payload: Some(TsPayload::Pes(payload::Pes {
                    header: PesHeader {
                        stream_id: StreamId::new(TransportStream::VIDEO_STREAM_ID),
                        priority: false,
                        data_alignment_indicator: false,
                        copyright: false,
                        original_or_copy: false,
                        pts: Some(pts),
                        dts: Some(dts),
                        escr: None,
                    },
                    // 0 = unbounded PES length (permitted for video).
                    pes_packet_len: 0,
                    data,
                })),
            }
        };

        self.packets.push(packet);
        header.continuity_counter.increment();

        // Remaining payload goes out as raw TS packets on the same PID.
        while video.len() > 0 {
            let raw = {
                let bytes: Vec<u8> = if video.len() < payload::Bytes::MAX_SIZE {
                    video.drain(..).collect()
                } else {
                    video.drain(..payload::Bytes::MAX_SIZE).collect()
                };
                mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap()
            };

            let packet = TsPacket {
                header: header.clone(),
                adaptation_field: None,
                payload: Some(TsPayload::Raw(raw)),
            };

            self.packets.push(packet);
            header.continuity_counter.increment();
        }

        // Persist the counter for the next frame on this PID.
        self.video_continuity_counter = header.continuity_counter;
        Ok(())
    }

    // Packetizes one audio frame: a leading PES packet with PTS only (no
    // DTS/adaptation field), then raw continuation packets.
    // NOTE(review): same ms → 90 kHz timestamp assumption as push_video.
    pub fn push_audio(&mut self, timestamp: u64, mut audio: Vec<u8>) {
        use mpeg2ts::{ts::payload, es::StreamId};

        let data = {
            // Same first-packet payload budget as push_video — TODO confirm.
            let bytes: Vec<u8> = if audio.len() < 153 {
                audio.drain(..).collect()
            } else {
                audio.drain(..153).collect()
            };
            mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap()
        };

        let mut header = TransportStream::default_header(TransportStream::AUDIO_PID);
        header.continuity_counter = self.audio_continuity_counter;

        let packet = TsPacket {
            header: header.clone(),
            adaptation_field: None,
            payload: Some(TsPayload::Pes(payload::Pes {
                header: PesHeader {
                    stream_id: StreamId::new(TransportStream::AUDIO_STREAM_ID),
                    priority: false,
                    data_alignment_indicator: false,
                    copyright: false,
                    original_or_copy: false,
                    pts: Some(mpeg2ts::time::Timestamp::new(timestamp * 90).unwrap()),
                    dts: None,
                    escr: None,
                },
                pes_packet_len: 0,
                data,
            })),
        };

        self.packets.push(packet);
        header.continuity_counter.increment();

        // Remaining payload as raw TS packets on the audio PID.
        while audio.len() > 0 {
            let raw = {
                let bytes: Vec<u8> = if audio.len() < payload::Bytes::MAX_SIZE {
                    audio.drain(..).collect()
                } else {
                    audio.drain(..payload::Bytes::MAX_SIZE).collect()
                };
                mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap()
            };

            let packet = TsPacket {
                header: header.clone(),
                adaptation_field: None,
                payload: Some(TsPayload::Raw(raw)),
            };

            self.packets.push(packet);
            header.continuity_counter.increment();
        }

        self.audio_continuity_counter = header.continuity_counter;
    }

    // Baseline TS header for the given PID: unscrambled, no error/priority
    // flags, counter starting at zero.
    pub fn default_header(pid: u16) -> TsHeader {
        use mpeg2ts::ts::TransportScramblingControl;

        TsHeader {
            transport_error_indicator: false,
            transport_priority: false,
            pid: Pid::new(pid).unwrap(),
            transport_scrambling_control: TransportScramblingControl::NotScrambled,
            continuity_counter: ContinuityCounter::new(),
        }
    }

    // Program Association Table: a single program (1) pointing at PMT_PID.
    pub fn default_pat() -> TsPacket {
        use mpeg2ts::ts::{VersionNumber, payload::Pat, ProgramAssociation};

        TsPacket {
            header: TransportStream::default_header(TransportStream::PAT_PID),
            adaptation_field: None,
            payload: Some(TsPayload::Pat(Pat {
                transport_stream_id: 1,
                version_number: VersionNumber::default(),
                table: vec![ProgramAssociation {
                    program_num: 1,
                    program_map_pid: Pid::new(TransportStream::PMT_PID).unwrap(),
                }],
            })),
        }
    }

    // Program Map Table: H.264 video on VIDEO_PID (also the PCR carrier) and
    // ADTS AAC audio on AUDIO_PID.
    pub fn default_pmt() -> TsPacket {
        use mpeg2ts::{
            ts::{VersionNumber, payload::Pmt, EsInfo},
            es::StreamType,
        };

        TsPacket {
            header: TransportStream::default_header(TransportStream::PMT_PID),
            adaptation_field: None,
            payload: Some(TsPayload::Pmt(Pmt {
                program_num: 1,
                pcr_pid: Some(Pid::new(TransportStream::VIDEO_PID).unwrap()),
                version_number: VersionNumber::default(),
                table: vec![
                    EsInfo {
                        stream_type: StreamType::H264,
                        elementary_pid: Pid::new(TransportStream::VIDEO_PID).unwrap(),
                        descriptors: vec![],
                    },
                    EsInfo {
                        stream_type: StreamType::AdtsAac,
                        elementary_pid: Pid::new(TransportStream::AUDIO_PID).unwrap(),
                        descriptors: vec![],
                    },
                ],
            })),
        }
    }
}
use std::fs::File; use mpeg2ts; use mpeg2ts::{ ts::{TsPacket, TsHeader, TsPayload, Pid, ContinuityCounter}, pes::PesHeader, }; pub struct TransportStream { video_continuity_counter: ContinuityCounter, audio_continuity_counter: ContinuityCounter, packets: Vec<TsPacket>, } impl TransportStream { const PAT_PID: u16 = 0; const PMT_PID: u16 = 256; const VIDEO_PID: u16 = 257; const AUDIO_PID: u16 = 258; const VIDEO_STREAM_ID: u8 = 224; const AUDIO_STREAM_ID: u8 = 192; pub fn new() -> TransportStream { TransportStream { video_continuity_counter: ContinuityCounter::new(), audio_continuity_counter: ContinuityCounter::new(), packets: Vec::new(), } } pub fn write_file(&mut self, filename: &str) { use mpeg2ts::ts::{TsPacketWriter, WriteTsPacket}; let filename = format!("./video/{}", filename); let file = File:
e::H264, elementary_pid: Pid::new(TransportStream::VIDEO_PID).unwrap(), descriptors: vec![], }, EsInfo { stream_type: StreamType::AdtsAac, elementary_pid: Pid::new(TransportStream::AUDIO_PID).unwrap(), descriptors: vec![], }, ], })), } } }
:create(filename).unwrap(); let packets: Vec<_> = self.packets.drain(..).collect(); let mut writer = TsPacketWriter::new(file); writer.write_ts_packet(&TransportStream::default_pat()).unwrap(); writer.write_ts_packet(&TransportStream::default_pmt()).unwrap(); for packet in &packets { writer.write_ts_packet(packet).unwrap(); } } pub fn push_video(&mut self, timestamp: u64, composition_time: u64, is_keyframe: bool, mut video: Vec<u8>) -> Result<(), ()> { use mpeg2ts::{ ts::{AdaptationField, payload}, es::StreamId, }; let mut header = TransportStream::default_header(TransportStream::VIDEO_PID); header.continuity_counter = self.video_continuity_counter; let packet = { let data = { let bytes: Vec<u8> = if video.len() < 153 { video.drain(..).collect() } else { video.drain(..153).collect() }; mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap() }; let pcr = mpeg2ts::time::ClockReference::new(timestamp * 90).unwrap(); let adaptation_field = if is_keyframe { Some(AdaptationField { discontinuity_indicator: false, random_access_indicator: true, es_priority_indicator: false, pcr: Some(pcr), opcr: None, splice_countdown: None, transport_private_data: Vec::new(), extension: None, }) } else { None }; let pts = mpeg2ts::time::Timestamp::new((timestamp + composition_time) * 90).unwrap(); let dts = mpeg2ts::time::Timestamp::new(timestamp * 90).unwrap(); TsPacket { header: header.clone(), adaptation_field, payload: Some(TsPayload::Pes(payload::Pes { header: PesHeader { stream_id: StreamId::new(TransportStream::VIDEO_STREAM_ID), priority: false, data_alignment_indicator: false, copyright: false, original_or_copy: false, pts: Some(pts), dts: Some(dts), escr: None, }, pes_packet_len: 0, data, })), } }; self.packets.push(packet); header.continuity_counter.increment(); while video.len() > 0 { let raw = { let bytes: Vec<u8> = if video.len() < payload::Bytes::MAX_SIZE { video.drain(..).collect() } else { video.drain(..payload::Bytes::MAX_SIZE).collect() }; 
mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap() }; let packet = TsPacket { header: header.clone(), adaptation_field: None, payload: Some(TsPayload::Raw(raw)), }; self.packets.push(packet); header.continuity_counter.increment(); } self.video_continuity_counter = header.continuity_counter; Ok(()) } pub fn push_audio(&mut self, timestamp: u64, mut audio: Vec<u8>) { use mpeg2ts::{ts::payload, es::StreamId}; let data = { let bytes: Vec<u8> = if audio.len() < 153 { audio.drain(..).collect() } else { audio.drain(..153).collect() }; mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap() }; let mut header = TransportStream::default_header(TransportStream::AUDIO_PID); header.continuity_counter = self.audio_continuity_counter; let packet = TsPacket { header: header.clone(), adaptation_field: None, payload: Some(TsPayload::Pes(payload::Pes { header: PesHeader { stream_id: StreamId::new(TransportStream::AUDIO_STREAM_ID), priority: false, data_alignment_indicator: false, copyright: false, original_or_copy: false, pts: Some(mpeg2ts::time::Timestamp::new(timestamp * 90).unwrap()), dts: None, escr: None, }, pes_packet_len: 0, data, })), }; self.packets.push(packet); header.continuity_counter.increment(); while audio.len() > 0 { let raw = { let bytes: Vec<u8> = if audio.len() < payload::Bytes::MAX_SIZE { audio.drain(..).collect() } else { audio.drain(..payload::Bytes::MAX_SIZE).collect() }; mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap() }; let packet = TsPacket { header: header.clone(), adaptation_field: None, payload: Some(TsPayload::Raw(raw)), }; self.packets.push(packet); header.continuity_counter.increment(); } self.audio_continuity_counter = header.continuity_counter; } pub fn default_header(pid: u16) -> TsHeader { use mpeg2ts::ts::TransportScramblingControl; TsHeader { transport_error_indicator: false, transport_priority: false, pid: Pid::new(pid).unwrap(), transport_scrambling_control: TransportScramblingControl::NotScrambled, continuity_counter: 
ContinuityCounter::new(), } } pub fn default_pat() -> TsPacket { use mpeg2ts::ts::{VersionNumber, payload::Pat, ProgramAssociation}; TsPacket { header: TransportStream::default_header(TransportStream::PAT_PID), adaptation_field: None, payload: Some(TsPayload::Pat(Pat { transport_stream_id: 1, version_number: VersionNumber::default(), table: vec![ProgramAssociation { program_num: 1, program_map_pid: Pid::new(TransportStream::PMT_PID).unwrap(), }], })), } } pub fn default_pmt() -> TsPacket { use mpeg2ts::{ ts::{VersionNumber, payload::Pmt, EsInfo}, es::StreamType, }; TsPacket { header: TransportStream::default_header(TransportStream::PMT_PID), adaptation_field: None, payload: Some(TsPayload::Pmt(Pmt { program_num: 1, pcr_pid: Some(Pid::new(TransportStream::VIDEO_PID).unwrap()), version_number: VersionNumber::default(), table: vec![ EsInfo { stream_type: StreamTyp
random
[ { "content": "fn get_data_type(data_type: DataType) -> u8 {\n\n match data_type {\n\n DataType::Video => 0x09,\n\n DataType::Audio => 0x08,\n\n }\n\n}\n\n\n\n// --------------------\n\n// Flv File:\n\n// --------------------\n\n// Flv Header\n\n// Previous Tag Size 0\n\n// Tag 1\n\n// Previo...
Rust
src/options/pane_options.rs
AntonGepting/tmux-interface
7a1dea0ad658e2cb8743311480d207ad1d196a48
use crate::{Error, Switch}; use crate::{SetOption, ShowOptions}; use std::fmt; use std::str::FromStr; pub const ALLOW_RENAME: usize = 1 << 0; pub const ALTERNATE_SCREEN: usize = 1 << 1; pub const REMAIN_ON_EXIT: usize = 1 << 2; pub const WINDOW_ACTIVE_STYLE: usize = 1 << 3; pub const WINDOW_STYLE: usize = 1 << 4; pub const PANE_OPTIONS_NONE: usize = 0; pub const PANE_OPTIONS_ALL: usize = ALLOW_RENAME | ALTERNATE_SCREEN | REMAIN_ON_EXIT | WINDOW_ACTIVE_STYLE | WINDOW_STYLE; pub const PANE_OPTIONS_NUM: usize = 5; pub const PANE_OPTIONS: [( &str, fn(p: &mut PaneOptions, i: Option<usize>, s: &str), fn(p: &PaneOptions) -> Option<String>, usize, ); PANE_OPTIONS_NUM] = [ #[cfg(feature = "tmux_3_0")] ( "allow-rename", |p, _, s| p.allow_rename = s.parse().ok(), |p| p.allow_rename.as_ref().map(|v| v.to_string()), ALLOW_RENAME, ), #[cfg(feature = "tmux_3_0")] ( "alternate-screen", |p, _, s| p.alternate_screen = s.parse().ok(), |p| p.alternate_screen.as_ref().map(|v| v.to_string()), ALTERNATE_SCREEN, ), #[cfg(feature = "tmux_3_0")] ( "remain-on-exit", |p, _, s| p.remain_on_exit = s.parse().ok(), |p| p.remain_on_exit.as_ref().map(|v| v.to_string()), REMAIN_ON_EXIT, ), #[cfg(feature = "tmux_3_0")] ( "window-active-style", |p, _, s| p.window_active_style = Some(s.to_string()), |p| { p.window_active_style .as_ref() .map(|v| format!("\"{}\"", v.to_string())) }, WINDOW_ACTIVE_STYLE, ), #[cfg(feature = "tmux_3_0")] ( "window-style", |p, _, s| p.window_style = Some(s.to_string()), |p| { p.window_style .as_ref() .map(|v| format!("\"{}\"", v.to_string())) }, WINDOW_STYLE, ), ]; #[derive(Default, PartialEq, Clone, Debug)] pub struct PaneOptions { #[cfg(feature = "tmux_3_0")] pub allow_rename: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub alternate_screen: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub remain_on_exit: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub window_active_style: Option<String>, #[cfg(feature = "tmux_3_0")] pub window_style: Option<String>, } impl PaneOptions 
{ pub fn get_all() -> Result<Self, Error> { let s = ShowOptions::new().global().output()?.to_string(); s.parse() } pub fn get(bitflags: usize) -> Result<Self, Error> { let selected_option = PANE_OPTIONS .iter() .filter(|t| bitflags == t.3) .map(|t| format!("{}", t.0)) .collect::<Vec<String>>() .join(" "); let s = ShowOptions::new() .pane() .option(&selected_option) .output()? .to_string(); s.parse() } pub fn set(&self, bitflags: usize) -> Result<(), Error> { for selected_option in PANE_OPTIONS.iter().filter(|t| bitflags & t.3 == t.3) { if let Some(selected_value) = selected_option.2(&self) { SetOption::new() .pane() .option(selected_option.0) .value(&selected_value) .output()?; } } Ok(()) } } impl FromStr for PaneOptions { type Err = Error; fn from_str(options: &str) -> Result<Self, Self::Err> { let mut pane_options: PaneOptions = Default::default(); let mut v: Vec<&str>; let mut arr: Vec<&str>; for option in options.lines() { v = option.trim().splitn(2, ' ').collect(); arr = v[0].split(|c| c == '[' || c == ']').collect(); for pane_var in PANE_OPTIONS.iter() { if pane_var.0 == arr[0] { pane_var.1( &mut pane_options, arr.get(1).and_then(|i| i.parse::<usize>().ok()), v.get(1).unwrap_or(&""), ) } } } Ok(pane_options) } } impl fmt::Display for PaneOptions { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for var in PANE_OPTIONS.iter() { if let Some(ref v) = var.2(self) { write!(f, "{} {}\n", var.0, v)?; } } Ok(()) } } #[derive(Default, Debug)] pub struct PaneOptionsBuilder<'a> { #[cfg(feature = "tmux_3_0")] pub allow_rename: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub alternate_screen: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub remain_on_exit: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub window_active_style: Option<&'a str>, #[cfg(feature = "tmux_3_0")] pub window_style: Option<&'a str>, } impl<'a> PaneOptionsBuilder<'a> { pub fn new() -> Self { Default::default() } #[cfg(feature = "tmux_3_0")] pub fn allow_rename(&mut self, allow_rename: Switch) 
-> &mut Self { self.allow_rename = Some(allow_rename); self } #[cfg(feature = "tmux_3_0")] pub fn alternate_screen(&mut self, alternate_screen: Switch) -> &mut Self { self.alternate_screen = Some(alternate_screen); self } #[cfg(feature = "tmux_3_0")] pub fn remain_on_exit(&mut self, remain_on_exit: Switch) -> &mut Self { self.remain_on_exit = Some(remain_on_exit); self } #[cfg(feature = "tmux_3_0")] pub fn window_active_style(&mut self, window_active_style: &'a str) -> &mut Self { self.window_active_style = Some(window_active_style); self } #[cfg(feature = "tmux_3_0")] pub fn window_style(&mut self, window_style: &'a str) -> &mut Self { self.window_style = Some(window_style); self } pub fn build(&self) -> PaneOptions { PaneOptions { #[cfg(feature = "tmux_3_0")] allow_rename: self.allow_rename.clone(), #[cfg(feature = "tmux_3_0")] alternate_screen: self.alternate_screen.clone(), #[cfg(feature = "tmux_3_0")] remain_on_exit: self.remain_on_exit.clone(), #[cfg(feature = "tmux_3_0")] window_active_style: self.window_active_style.map(|s| s.to_string()), #[cfg(feature = "tmux_3_0")] window_style: self.window_style.map(|s| s.to_string()), } } }
use crate::{Error, Switch}; use crate::{SetOption, ShowOptions}; use std::fmt; use std::str::FromStr; pub const ALLOW_RENAME: usize = 1 << 0; pub const ALTERNATE_SCREEN: usize = 1 << 1; pub const REMAIN_ON_EXIT: usize = 1 << 2; pub const WINDOW_ACTIVE_STYLE: usize = 1 << 3; pub const WINDOW_STYLE: usize = 1 << 4; pub const PANE_OPTIONS_NONE: usize = 0; pub const PANE_OPTIONS_ALL: usize = ALLOW_RENAME | ALTERNATE_SCREEN | REMAIN_ON_EXIT | WINDOW_ACTIVE_STYLE | WINDOW_STYLE; pub const PANE_OPTIONS_NUM: usize = 5; pub const PANE_OPTIONS: [( &str, fn(p: &mut PaneOptions, i: Option<usize>, s: &str), fn(p: &PaneOptions) -> Option<String>, usize, ); PANE_OPTIONS_NUM] = [ #[cfg(feature = "tmux_3_0")] ( "allow-rename", |p, _, s| p.allow_rename = s.parse().ok(), |p| p.allow_rename.as_ref().map(|v| v.to_string()), ALLOW_RENAME, ), #[cfg(feature = "tmux_3_0")] ( "alternate-screen", |p, _, s| p.alternate_screen = s.parse().ok(), |p| p.alternate_screen.as_ref().map(|v| v.to_string()), ALTERNATE_SCREEN, ), #[cfg(feature = "tmux_3_0")] ( "remain-on-exit", |p, _, s| p.remain_on_exit = s.parse().ok(), |p| p.remain_on_exit.as_ref().map(|v| v.to_string()), REMAIN_ON_EXIT, ), #[cfg(feature = "tmux_3_0")] ( "window-active-style", |p, _, s| p.window_active_style = Some(s.to_string()), |p| { p.window_active_style .as_ref() .map(|v| format!("\"{}\"", v.to_string())) }, WINDOW_ACTIVE_STYLE, ), #[cfg(feature = "tmux_3_0")] ( "window-style", |p, _, s| p.window_style = Some(s.to_string()), |p| { p.window_style .as_ref() .map(|v| format!("\"{}\"", v.to_string())) }, WINDOW_STYLE, ), ]; #[derive(Default, PartialEq, Clone, Debug)] pub struct PaneOptions { #[cfg(feature = "tmux_3_0")] pub allow_rename: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub alternate_screen: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub remain_on_exit: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub window_active_style: Option<String>, #[cfg(feature = "tmux_3_0")] pub window_style: Option<String>, } impl PaneOptions 
{ pub fn get_all() -> Result<Self, Error> { let s = ShowOptions::new().global().output()?.to_string(); s.parse() } pub fn get(bitflags: usize) -> Result<Self, Error> { let selected_option = PANE_OPTIONS .iter() .filter(|t| bitflags == t.3) .
pub fn set(&self, bitflags: usize) -> Result<(), Error> { for selected_option in PANE_OPTIONS.iter().filter(|t| bitflags & t.3 == t.3) { if let Some(selected_value) = selected_option.2(&self) { SetOption::new() .pane() .option(selected_option.0) .value(&selected_value) .output()?; } } Ok(()) } } impl FromStr for PaneOptions { type Err = Error; fn from_str(options: &str) -> Result<Self, Self::Err> { let mut pane_options: PaneOptions = Default::default(); let mut v: Vec<&str>; let mut arr: Vec<&str>; for option in options.lines() { v = option.trim().splitn(2, ' ').collect(); arr = v[0].split(|c| c == '[' || c == ']').collect(); for pane_var in PANE_OPTIONS.iter() { if pane_var.0 == arr[0] { pane_var.1( &mut pane_options, arr.get(1).and_then(|i| i.parse::<usize>().ok()), v.get(1).unwrap_or(&""), ) } } } Ok(pane_options) } } impl fmt::Display for PaneOptions { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for var in PANE_OPTIONS.iter() { if let Some(ref v) = var.2(self) { write!(f, "{} {}\n", var.0, v)?; } } Ok(()) } } #[derive(Default, Debug)] pub struct PaneOptionsBuilder<'a> { #[cfg(feature = "tmux_3_0")] pub allow_rename: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub alternate_screen: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub remain_on_exit: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub window_active_style: Option<&'a str>, #[cfg(feature = "tmux_3_0")] pub window_style: Option<&'a str>, } impl<'a> PaneOptionsBuilder<'a> { pub fn new() -> Self { Default::default() } #[cfg(feature = "tmux_3_0")] pub fn allow_rename(&mut self, allow_rename: Switch) -> &mut Self { self.allow_rename = Some(allow_rename); self } #[cfg(feature = "tmux_3_0")] pub fn alternate_screen(&mut self, alternate_screen: Switch) -> &mut Self { self.alternate_screen = Some(alternate_screen); self } #[cfg(feature = "tmux_3_0")] pub fn remain_on_exit(&mut self, remain_on_exit: Switch) -> &mut Self { self.remain_on_exit = Some(remain_on_exit); self } #[cfg(feature = "tmux_3_0")] 
pub fn window_active_style(&mut self, window_active_style: &'a str) -> &mut Self { self.window_active_style = Some(window_active_style); self } #[cfg(feature = "tmux_3_0")] pub fn window_style(&mut self, window_style: &'a str) -> &mut Self { self.window_style = Some(window_style); self } pub fn build(&self) -> PaneOptions { PaneOptions { #[cfg(feature = "tmux_3_0")] allow_rename: self.allow_rename.clone(), #[cfg(feature = "tmux_3_0")] alternate_screen: self.alternate_screen.clone(), #[cfg(feature = "tmux_3_0")] remain_on_exit: self.remain_on_exit.clone(), #[cfg(feature = "tmux_3_0")] window_active_style: self.window_active_style.map(|s| s.to_string()), #[cfg(feature = "tmux_3_0")] window_style: self.window_style.map(|s| s.to_string()), } } }
map(|t| format!("{}", t.0)) .collect::<Vec<String>>() .join(" "); let s = ShowOptions::new() .pane() .option(&selected_option) .output()? .to_string(); s.parse() }
function_block-function_prefix_line
[ { "content": "#[test]\n\nfn from_str() {\n\n use crate::Version;\n\n\n\n let examples = &[\n\n \"tmux next-3.2\",\n\n \"tmux 3.1b\",\n\n \"tmux 3.1a\",\n\n \"tmux 3.1\",\n\n \"tmux 3.0a\",\n\n \"tmux 3.0\",\n\n \"tmux 2.9a\",\n\n \"tmux 2.9\",\n\n ...
Rust
crates/fluvio-storage/src/range_map.rs
bohlmannc/fluvio
b5a3105600b6886c55d76707d369fa59f5d9673b
use std::cmp::max; use std::cmp::min; use std::collections::BTreeMap; use std::ops::Bound::Included; use std::ffi::OsStr; use tracing::debug; use tracing::trace; use tracing::error; use dataplane::Offset; use crate::segment::ReadSegment; use crate::StorageError; use crate::config::ConfigOption; use crate::util::log_path_get_offset; #[derive(Debug)] pub(crate) struct SegmentList { segments: BTreeMap<Offset, ReadSegment>, min_offset: Offset, max_offset: Offset, } impl SegmentList { pub fn new() -> Self { SegmentList { segments: BTreeMap::new(), max_offset: 0, min_offset: -1, } } pub async fn from_dir( option: &ConfigOption, ) -> Result<(SegmentList, Option<Offset>), StorageError> { let dirs = option.base_dir.read_dir()?; debug!("reading segments at: {:#?}", dirs); let files: Vec<_> = dirs.filter_map(|entry| entry.ok()).collect(); let mut offsets: Vec<Offset> = vec![]; for entry in files { if let Ok(metadata) = entry.metadata() { if metadata.is_file() { let path = entry.path(); trace!("scanning file: {:#?}", path); if path.extension() == Some(OsStr::new("log")) { if let Ok(offset) = log_path_get_offset(&path) { trace!("detected valid log: {}", offset); offsets.push(offset); /* match Segment::open(offset,option).await { Ok(segment) => segments.add_segment(segment), Err(err) => error!("error opening segment: {:#?}",err) } } else { debug!("not log, skipping: {:#?}",path); */ } } } } } offsets.sort_unstable(); let last_offset = offsets.pop(); let mut segments = Self::new(); for offset in offsets { match ReadSegment::open_unknown(offset, option).await { Ok(segment) => segments.add_segment(segment), Err(err) => error!("error opening segment: {:#?}", err), } } Ok((segments, last_offset)) } #[allow(dead_code)] pub fn len(&self) -> usize { self.segments.len() } pub fn min_offset(&self) -> Offset { self.min_offset } pub fn add_segment(&mut self, segment: ReadSegment) { debug!( base_offset = segment.get_base_offset(), end_offset = segment.get_end_offset(), "inserting" ); 
self.max_offset = max(self.max_offset, segment.get_end_offset()); self.min_offset = if self.min_offset < 0 { segment.get_base_offset() } else { min(self.min_offset, segment.get_base_offset()) }; self.segments.insert(segment.get_base_offset(), segment); } #[allow(dead_code)] pub fn get_segment(&self, offset: Offset) -> Option<&ReadSegment> { self.segments.get(&offset) } pub fn find_segment(&self, offset: Offset) -> Option<(&Offset, &ReadSegment)> { if offset < self.min_offset { None } else if offset == self.min_offset { (&self.segments) .range((Included(offset), Included(offset))) .next_back() } else if offset >= self.max_offset { None } else { let range = ( Included(offset - self.max_offset + self.min_offset + 1), Included(offset), ); (&self.segments).range(range).next_back() } } } #[cfg(test)] mod tests { use std::env::temp_dir; use std::path::PathBuf; use flv_util::fixture::ensure_new_dir; use dataplane::fixture::create_batch; use dataplane::Offset; use crate::StorageError; use crate::segment::MutableSegment; use crate::segment::ReadSegment; use crate::config::ConfigOption; use super::SegmentList; async fn create_segment( option: &ConfigOption, start: Offset, end_offset: Offset, ) -> Result<ReadSegment, StorageError> { let mut mut_segment = MutableSegment::create(start, option).await?; mut_segment.write_batch(&mut create_batch()).await?; mut_segment.set_end_offset(end_offset); let segment = mut_segment.convert_to_segment().await?; Ok(segment) } fn default_option(base_dir: PathBuf) -> ConfigOption { ConfigOption { segment_max_bytes: 100, base_dir, index_max_bytes: 1000, index_max_interval_bytes: 0, ..Default::default() } } #[fluvio_future::test] async fn test_segment_empty() { let rep_dir = temp_dir().join("segmentlist-read-empty"); ensure_new_dir(&rep_dir).expect("new"); let option = default_option(rep_dir); let (segments, last_segment) = SegmentList::from_dir(&option).await.expect("from"); assert_eq!(segments.len(), 0); assert!(last_segment.is_none()); } 
#[fluvio_future::test] async fn test_segment_single_base_zero() { let rep_dir = temp_dir().join("segmentlist-single-zero"); ensure_new_dir(&rep_dir).expect("new"); let mut list = SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 0, 500).await.expect("create")); println!("segments: {:#?}", list); assert!(list.find_segment(-1).is_none()); assert!(list.find_segment(0).is_some()); assert!(list.find_segment(1).is_some()); assert!(list.find_segment(499).is_some()); assert!(list.find_segment(500).is_none()); assert!(list.find_segment(501).is_none()); } #[fluvio_future::test] async fn test_segment_single_base_some() { let rep_dir = temp_dir().join("segmentlist-single-some"); ensure_new_dir(&rep_dir).expect("new"); let mut list = SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 100, 500).await.expect("create")); println!("segments: {:#?}", list); assert!(list.find_segment(50).is_none()); assert!(list.find_segment(99).is_none()); assert!(list.find_segment(100).is_some()); assert!(list.find_segment(499).is_some()); assert!(list.find_segment(500).is_none()); } #[fluvio_future::test] async fn test_segment_many_zero() { let rep_dir = temp_dir().join("segmentlist-many-zero"); ensure_new_dir(&rep_dir).expect("new"); let mut list = SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 0, 500).await.expect("create")); list.add_segment(create_segment(&option, 500, 2000).await.expect("create")); list.add_segment(create_segment(&option, 2000, 3000).await.expect("create")); list.add_segment(create_segment(&option, 3000, 4000).await.expect("create")); println!("segments: {:#?}", list); assert_eq!(list.len(), 4); assert_eq!(list.find_segment(0).expect("segment").0, &0); assert_eq!(list.find_segment(1).expect("segment").0, &0); assert_eq!(list.find_segment(499).expect("segment").0, &0); assert_eq!(list.find_segment(500).expect("segment").0, &500); 
assert_eq!(list.find_segment(1500).expect("segment").0, &500); assert_eq!(list.find_segment(3000).expect("segment").0, &3000); assert!(list.find_segment(4000).is_none()); assert!(list.find_segment(4001).is_none()); } #[fluvio_future::test] async fn test_segment_many_some() { let rep_dir = temp_dir().join("segmentlist-many-some"); ensure_new_dir(&rep_dir).expect("new"); let mut list = SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 100, 600).await.expect("create")); list.add_segment(create_segment(&option, 600, 4000).await.expect("create")); list.add_segment(create_segment(&option, 4000, 9000).await.expect("create")); println!("segments: {:#?}", list); assert_eq!(list.len(), 3); assert!(list.find_segment(0).is_none()); assert!(list.find_segment(99).is_none()); assert_eq!(list.find_segment(100).expect("segment").0, &100); assert_eq!(list.find_segment(599).expect("segment").0, &100); assert_eq!(list.find_segment(600).expect("segment").0, &600); assert_eq!(list.find_segment(900).expect("segment").0, &600); assert_eq!(list.find_segment(8000).expect("segment").0, &4000); assert!(list.find_segment(9000).is_none()); assert!(list.find_segment(10000).is_none()); } }
use std::cmp::max; use std::cmp::min; use std::collections::BTreeMap; use std::ops::Bound::Included; use std::ffi::OsStr; use tracing::debug; use tracing::trace; use tracing::error; use dataplane::Offset; use crate::segment::ReadSegment; use crate::StorageError; use crate::config::ConfigOption; use crate::util::log_path_get_offset; #[derive(Debug)] pub(crate) struct SegmentList { segments: BTreeMap<Offset, ReadSegment>, min_offset: Offset, max_offset: Offset, } impl SegmentList { pub fn new() -> Self { SegmentList { segments: BTreeMap::new(), max_offset: 0, min_offset: -1, } } pub async fn from_dir( option: &ConfigOption, ) -> Result<(SegmentList, Option<Offset>), StorageError> { let dirs = option.base_dir.read_dir()?; debug!("reading segments at: {:#?}", dirs); let files: Vec<_> = dirs.filter_map(|entry| entry.ok()).collect(); let mut offsets: Vec<Offset> = vec![]; for entry in files { if let Ok(metadata) = entry.metadata() { if metadata.is_file() { let path = entry.path(); trace!("scanning file: {:#?}", path); if path.extension() == Some(OsStr::new("log")) { if let Ok(offset) = log_path_get_offset(&path) { trace!("detected valid log: {}", offset); offsets.push(offset); /* match Segment::open(offset,option).await { Ok(segment) => segments.add_segment(segment), Err(err) => error!("error opening segment: {:#?}",err) } } else { debug!("not log, skipping: {:#?}",path); */ } } } } } offsets.sort_unstable(); let last_offset = offsets.pop(); let mut segments = Self::new(); for offset in offsets { match ReadSegment::open_unknown(offset, option).await { Ok(segment) => segments.add_segment(segment), Err(err) => error!("error opening segment: {:#?}", err), } } Ok((segments, last_offset)) } #[allow(dead_code)] pub fn len(&self) -> usize { self.segments.len() } pub fn min_offset(&self) -> Offset { self.min_offset } pub fn add_segment(&mut self, segment: ReadSegment) { debug!( base_offset = segment.get_base_offset(), end_offset = segment.get_end_offset(), "inserting" ); 
self.max_offset = max(self.max_offset, segment.get_end_offset()); self.min_offset = if self.min_offset < 0 { segment.get_base_offset() } else { min(self.min_offset, segment.get_base_offset()) }; self.segments.insert(segment.get_base_offset(), segment); } #[allow(dead_code)] pub fn get_segment(&self, offset: Offset) -> Option<&ReadSegment> { self.segments.get(&offset) } pub fn find_segment(&self, offset: Offset) -> Option<(&Offset, &ReadSegment)> { if offset < self.min_offset { None } els
} #[cfg(test)] mod tests { use std::env::temp_dir; use std::path::PathBuf; use flv_util::fixture::ensure_new_dir; use dataplane::fixture::create_batch; use dataplane::Offset; use crate::StorageError; use crate::segment::MutableSegment; use crate::segment::ReadSegment; use crate::config::ConfigOption; use super::SegmentList; async fn create_segment( option: &ConfigOption, start: Offset, end_offset: Offset, ) -> Result<ReadSegment, StorageError> { let mut mut_segment = MutableSegment::create(start, option).await?; mut_segment.write_batch(&mut create_batch()).await?; mut_segment.set_end_offset(end_offset); let segment = mut_segment.convert_to_segment().await?; Ok(segment) } fn default_option(base_dir: PathBuf) -> ConfigOption { ConfigOption { segment_max_bytes: 100, base_dir, index_max_bytes: 1000, index_max_interval_bytes: 0, ..Default::default() } } #[fluvio_future::test] async fn test_segment_empty() { let rep_dir = temp_dir().join("segmentlist-read-empty"); ensure_new_dir(&rep_dir).expect("new"); let option = default_option(rep_dir); let (segments, last_segment) = SegmentList::from_dir(&option).await.expect("from"); assert_eq!(segments.len(), 0); assert!(last_segment.is_none()); } #[fluvio_future::test] async fn test_segment_single_base_zero() { let rep_dir = temp_dir().join("segmentlist-single-zero"); ensure_new_dir(&rep_dir).expect("new"); let mut list = SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 0, 500).await.expect("create")); println!("segments: {:#?}", list); assert!(list.find_segment(-1).is_none()); assert!(list.find_segment(0).is_some()); assert!(list.find_segment(1).is_some()); assert!(list.find_segment(499).is_some()); assert!(list.find_segment(500).is_none()); assert!(list.find_segment(501).is_none()); } #[fluvio_future::test] async fn test_segment_single_base_some() { let rep_dir = temp_dir().join("segmentlist-single-some"); ensure_new_dir(&rep_dir).expect("new"); let mut list = 
SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 100, 500).await.expect("create")); println!("segments: {:#?}", list); assert!(list.find_segment(50).is_none()); assert!(list.find_segment(99).is_none()); assert!(list.find_segment(100).is_some()); assert!(list.find_segment(499).is_some()); assert!(list.find_segment(500).is_none()); } #[fluvio_future::test] async fn test_segment_many_zero() { let rep_dir = temp_dir().join("segmentlist-many-zero"); ensure_new_dir(&rep_dir).expect("new"); let mut list = SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 0, 500).await.expect("create")); list.add_segment(create_segment(&option, 500, 2000).await.expect("create")); list.add_segment(create_segment(&option, 2000, 3000).await.expect("create")); list.add_segment(create_segment(&option, 3000, 4000).await.expect("create")); println!("segments: {:#?}", list); assert_eq!(list.len(), 4); assert_eq!(list.find_segment(0).expect("segment").0, &0); assert_eq!(list.find_segment(1).expect("segment").0, &0); assert_eq!(list.find_segment(499).expect("segment").0, &0); assert_eq!(list.find_segment(500).expect("segment").0, &500); assert_eq!(list.find_segment(1500).expect("segment").0, &500); assert_eq!(list.find_segment(3000).expect("segment").0, &3000); assert!(list.find_segment(4000).is_none()); assert!(list.find_segment(4001).is_none()); } #[fluvio_future::test] async fn test_segment_many_some() { let rep_dir = temp_dir().join("segmentlist-many-some"); ensure_new_dir(&rep_dir).expect("new"); let mut list = SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 100, 600).await.expect("create")); list.add_segment(create_segment(&option, 600, 4000).await.expect("create")); list.add_segment(create_segment(&option, 4000, 9000).await.expect("create")); println!("segments: {:#?}", list); assert_eq!(list.len(), 3); assert!(list.find_segment(0).is_none()); 
assert!(list.find_segment(99).is_none()); assert_eq!(list.find_segment(100).expect("segment").0, &100); assert_eq!(list.find_segment(599).expect("segment").0, &100); assert_eq!(list.find_segment(600).expect("segment").0, &600); assert_eq!(list.find_segment(900).expect("segment").0, &600); assert_eq!(list.find_segment(8000).expect("segment").0, &4000); assert!(list.find_segment(9000).is_none()); assert!(list.find_segment(10000).is_none()); } }
e if offset == self.min_offset { (&self.segments) .range((Included(offset), Included(offset))) .next_back() } else if offset >= self.max_offset { None } else { let range = ( Included(offset - self.max_offset + self.min_offset + 1), Included(offset), ); (&self.segments).range(range).next_back() } }
function_block-function_prefixed
[ { "content": "pub fn log_path_get_offset<P>(path: P) -> Result<Offset, OffsetError>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let log_path = path.as_ref();\n\n\n\n match log_path.file_stem() {\n\n None => Err(OffsetError::InvalidPath),\n\n Some(file_name) => {\n\n if file_name.len(...
Rust
neutopia/src/lib.rs
konkers/neutopia
770dbbc3dd6a61d418f38683f43ccfb729ea1039
use std::collections::HashMap; use std::io::{prelude::*, Cursor, SeekFrom}; use failure::{format_err, Error}; pub mod interval; pub mod rom; pub mod rommap; pub mod util; pub mod verify; pub use rom::NeutopiaRom; pub use verify::{verify, RomInfo}; #[derive(Clone, Debug)] pub struct Room { pub warps: Vec<u8>, pub enemies: Vec<u8>, pub objects: Vec<rom::object::TableEntry>, } #[derive(Clone, Debug)] pub struct Area { pub rooms: Vec<Room>, pub chest_table: Vec<rom::Chest>, } #[derive(Clone, Debug)] pub struct Chest { pub info: rom::Chest, pub area: u8, pub room: u8, pub index: u8, } #[derive(Clone, Debug)] pub struct Conditional { pub data: Vec<rom::object::TableEntry>, } pub struct Neutopia { pub areas: Vec<Area>, pub conditionals: HashMap<rom::Chest, Conditional>, pub rom_data: Vec<u8>, n: NeutopiaRom, } impl Neutopia { pub fn new(data: &[u8]) -> Result<Self, Error> { let mut rando = Self { n: NeutopiaRom::new(data)?, areas: Vec::new(), conditionals: HashMap::new(), rom_data: Vec::from(data), }; for area_idx in 0..=0xf { rando.import_area(area_idx)?; } Ok(rando) } fn import_area(&mut self, area_idx: usize) -> Result<(), Error> { let room_info_table = &self.n.room_info_tables[area_idx]; let chest_table = &self.n.chest_tables[&self.n.chest_table_pointers[area_idx]]; let mut rooms = Vec::new(); for room_idx in 0u8..0x40 { let room = &room_info_table[&room_idx]; let mut object_table = rom::object::parse_object_table(&room.object_table)?; if object_table.len() > 2 { let mut i = 0; while (i + 2) < object_table.len() { if let Some(id) = object_table[i].chest_id() { let chest = &chest_table[id as usize]; let next = object_table[i + 1].clone(); let next_next = object_table[i + 2].clone(); if next.is_conditional() { object_table.remove(i + 1); object_table.remove(i + 1); self.conditionals.insert( chest.clone(), Conditional { data: vec![next, next_next], }, ); } } i += 1; } } rooms.push(Room { warps: room.warp_table.clone(), enemies: room.enemy_table.clone(), objects: 
object_table, }); } self.areas.push(Area { rooms, chest_table: chest_table.clone(), }); Ok(()) } pub fn filter_chests(&self, filter: impl Fn(&Chest) -> bool) -> Vec<Chest> { let mut chests = Vec::new(); for (area_idx, area) in self.areas.iter().enumerate() { for (room_idx, room) in area.rooms.iter().enumerate() { let mut chest_index = 0; for entry in &room.objects { if let Some(id) = entry.chest_id() { let chest = Chest { info: area.chest_table[id as usize].clone(), area: area_idx as u8, room: room_idx as u8, index: chest_index, }; chest_index += 1; if filter(&chest) { chests.push(chest); } } } } } chests } fn get_table_id_for_chest(&self, chest: &Chest) -> Result<usize, Error> { let area = &self.areas[chest.area as usize]; let room = &area.rooms[chest.room as usize]; let mut chest_index = 0; for obj_entry in &room.objects { if let Some(id) = obj_entry.chest_id() { if chest_index == chest.index { return Ok(id as usize); } chest_index += 1; } } Err(format_err!("can't find id for chest {:?}", chest)) } pub fn update_chests(&mut self, chests: &[Chest]) -> Result<(), Error> { for chest in chests { let id = self.get_table_id_for_chest(chest)?; let entry = self.areas[chest.area as usize] .chest_table .get_mut(id as usize) .ok_or_else(|| format_err!("incoherent chest id {:02x}", id))?; *entry = chest.info.clone(); } Ok(()) } fn write_area(&self, area_idx: usize, rom_writer: &mut Cursor<Vec<u8>>) -> Result<u32, Error> { let area = &self.areas[area_idx]; let cur_offset = rom_writer.position(); let mut room_ptrs = Cursor::new(Vec::new()); let room_ptrs_offset = cur_offset; let room_data_offset = cur_offset + 0x40 * 3; rom_writer.seek(SeekFrom::Start(room_data_offset as u64))?; for room_idx in 0..0x40 { let room = &area.rooms[room_idx]; let room_offset = rom_writer.position(); room_ptrs.write_all(&util::rom_offset_to_pointer(room_offset as u32))?; let mut object_table = room.objects.clone(); for i in 0..object_table.len() { if let Some(id) = object_table[i].chest_id() { let 
chest = &area.chest_table[id as usize]; let loc = match object_table[i].loc() { Some(loc) => loc, _ => continue, }; if let Some(cond) = self.conditionals.get(&chest) { for j in 0..cond.data.len() { let mut entry = cond.data[j].clone(); if let rom::object::TableEntry::Object(ref mut o) = entry { o.x = loc.0; o.y = loc.1; } object_table.insert(i + j + 1, entry); } break; } } } rom_writer.seek(SeekFrom::Current(3 * 3))?; let warp_table_ptr = rom_writer.position() as u32; rom_writer.write_all(&room.warps)?; let enemy_table_ptr = rom_writer.position() as u32; rom_writer.write_all(&room.enemies)?; rom_writer.write_all(&[0xff])?; let object_table_ptr = rom_writer.position() as u32; for o in &object_table { o.write(rom_writer)?; } rom_writer.write_all(&[0xff])?; let room_end_pos = rom_writer.position(); rom_writer.seek(SeekFrom::Start(room_offset))?; rom_writer.write_all(&util::rom_offset_to_pointer(warp_table_ptr))?; rom_writer.write_all(&util::rom_offset_to_pointer(enemy_table_ptr))?; rom_writer.write_all(&util::rom_offset_to_pointer(object_table_ptr))?; rom_writer.seek(SeekFrom::Start(room_end_pos))?; } let next_offset = rom_writer.position() as u32; rom_writer.seek(SeekFrom::Start(room_ptrs_offset as u64))?; rom_writer.write_all(room_ptrs.get_ref())?; rom_writer.seek(SeekFrom::Start( rommap::AREA_TABLE as u64 + area_idx as u64 * 3, ))?; rom_writer.write_all(&util::rom_offset_to_pointer(room_ptrs_offset as u32))?; Ok(next_offset) } pub fn write(&self) -> Result<Vec<u8>, Error> { let mut rom_writer = Cursor::new(self.rom_data.clone()); let area_range = 4..=0xf; for area_idx in area_range.clone() { let area = &self.areas[area_idx]; let offset = 0x4fe00 + (0x20 * area_idx as u64); rom_writer.seek(SeekFrom::Start(offset))?; for chest in &area.chest_table { chest.write(&mut rom_writer)?; } rom_writer.seek(SeekFrom::Start( rommap::CHEST_TABLE as u64 + 3 * area_idx as u64, ))?; let ptr = util::rom_offset_to_pointer(offset as u32); rom_writer.write_all(&ptr)?; } let mut 
cur_offset = self.n.area_pointers[4]; let mut offset_c = None; for area_idx in area_range { if area_idx == 0xc { offset_c = Some(cur_offset); } rom_writer.seek(SeekFrom::Start(cur_offset as u64))?; cur_offset = self.write_area(area_idx, &mut rom_writer)? } if let Some(offset) = offset_c { rom_writer.seek(SeekFrom::Start(rommap::AREA_TABLE as u64 + 0x10 * 3))?; rom_writer.write_all(&util::rom_offset_to_pointer(offset as u32))?; } Ok(rom_writer.into_inner()) } } pub fn area_name(area: u8) -> &'static str { match area { 0x0 => "Land Sphere", 0x1 => "Subterranean Sphere", 0x2 => "Sea Sphere", 0x3 => "Sky Sphere", 0x4 => "Crypt 1", 0x5 => "Crypt 2", 0x6 => "Crypt 3", 0x7 => "Crypt 4", 0x8 => "Crypt 5", 0x9 => "Crypt 6", 0xa => "Crypt 7", 0xb => "Crypt 8", 0xc => "Land Sphere Rooms", 0xd => "Subterranean Sphere Rooms", 0xe => "Sea Sphere Rooms", 0xf => "Sky Sphere Rooms", _ => "Unknown", } } #[cfg(test)] mod tests {}
use std::collections::HashMap; use std::io::{prelude::*, Cursor, SeekFrom}; use failure::{format_err, Error}; pub mod interval; pub mod rom; pub mod rommap; pub mod util; pub mod verify; pub use rom::NeutopiaRom; pub use verify::{verify, RomInfo}; #[derive(Clone, Debug)] pub struct Room { pub warps: Vec<u8>, pub enemies: Vec<u8>, pub objects: Vec<rom::object::TableEntry>, } #[derive(Clone, Debug)] pub struct Area { pub rooms: Vec<Room>, pub chest_table: Vec<rom::Chest>, } #[derive(Clone, Debug)] pub struct Chest { pub info: rom::Chest, pub area: u8, pub room: u8, pub index: u8, } #[derive(Clone, Debug)] pub struct Conditional { pub data: Vec<rom::object::TableEntry>, } pub struct Neutopia { pub areas: Vec<Area>, pub conditionals: HashMap<rom::Chest, Conditional>, pub rom_data: Vec<u8>, n: NeutopiaRom, } impl Neutopia { pub fn new(data: &[u8]) -> Result<Self, Error> { let mut rando = Self { n: NeutopiaRom::new(data)?, areas: Vec::new(), conditionals: HashMap::new(), rom_data: Vec::from(data), }; for area_idx in 0..=0xf { rando.import_area(area_idx)?; } Ok(rando) } fn import_area(&mut self, area_idx: usize) -> Result<(), Error> { let room_info_table = &self.n.room_info_tables[area_idx]; let chest_table = &self.n.chest_tables[&self.n.chest_table_pointers[area_idx]]; let mut rooms = Vec::new(); for room_idx in 0u8..0x40 { let room = &room_info_table[&room_idx]; let mut object_table = rom::object::parse_object_table(&room.object_table)?; if object_table.len() > 2 { let mut i = 0; while (i + 2) < object_table.len() { if let Some(id) = object_table[i].chest_id() { let chest = &chest_table[id as usize]; let next = object_table[i + 1].clone(); let next_next = object_table[i + 2].clone(); if next.is_conditional() { object_table.remove(i + 1); object_table.remove(i + 1); self.conditionals.insert( chest.clone(), Conditional { data: vec![next, next_next], }, ); } } i += 1; } } rooms.push(Room { warps: room.warp_table.clone(), enemies: room.enemy_table.clone(), objects: 
object_table, }); } self.areas.push(Area { rooms, chest_table: chest_table.clone(), }); Ok(()) } pub fn filter_chests(&self, filter: impl Fn(&Chest) -> bool) -> Vec<Chest> { let mut chests = Vec::new(); for (area_idx, area) in self.areas.iter().enumerate() { for (room_idx, room) in area.rooms.iter().enumerate() { let mut chest_index = 0; for entry in &room.objects { if let Some(id) = entry.chest_id() { let chest = Chest { info: area.chest_table[id as usize].clone(), area: area_idx as u8, room: room_idx as u8, index: chest_index, }; chest_index += 1; if filter(&chest) { chests.push(chest); } } } } } chests } fn get_table_id_for_chest(&self, chest: &Chest) -> Result<usize, Error> { let area = &self.areas[chest.area as usize]; let room = &area.rooms[chest.room as usize]; let mut chest_index = 0; for obj_entry in &room.objects { if let Some(id) = obj_entry.chest_id() { if chest_index == chest.index { return Ok(id as usize); } chest_index += 1; } } Err(format_err!("can't find id for chest {:?}", chest)) }
fn write_area(&self, area_idx: usize, rom_writer: &mut Cursor<Vec<u8>>) -> Result<u32, Error> { let area = &self.areas[area_idx]; let cur_offset = rom_writer.position(); let mut room_ptrs = Cursor::new(Vec::new()); let room_ptrs_offset = cur_offset; let room_data_offset = cur_offset + 0x40 * 3; rom_writer.seek(SeekFrom::Start(room_data_offset as u64))?; for room_idx in 0..0x40 { let room = &area.rooms[room_idx]; let room_offset = rom_writer.position(); room_ptrs.write_all(&util::rom_offset_to_pointer(room_offset as u32))?; let mut object_table = room.objects.clone(); for i in 0..object_table.len() { if let Some(id) = object_table[i].chest_id() { let chest = &area.chest_table[id as usize]; let loc = match object_table[i].loc() { Some(loc) => loc, _ => continue, }; if let Some(cond) = self.conditionals.get(&chest) { for j in 0..cond.data.len() { let mut entry = cond.data[j].clone(); if let rom::object::TableEntry::Object(ref mut o) = entry { o.x = loc.0; o.y = loc.1; } object_table.insert(i + j + 1, entry); } break; } } } rom_writer.seek(SeekFrom::Current(3 * 3))?; let warp_table_ptr = rom_writer.position() as u32; rom_writer.write_all(&room.warps)?; let enemy_table_ptr = rom_writer.position() as u32; rom_writer.write_all(&room.enemies)?; rom_writer.write_all(&[0xff])?; let object_table_ptr = rom_writer.position() as u32; for o in &object_table { o.write(rom_writer)?; } rom_writer.write_all(&[0xff])?; let room_end_pos = rom_writer.position(); rom_writer.seek(SeekFrom::Start(room_offset))?; rom_writer.write_all(&util::rom_offset_to_pointer(warp_table_ptr))?; rom_writer.write_all(&util::rom_offset_to_pointer(enemy_table_ptr))?; rom_writer.write_all(&util::rom_offset_to_pointer(object_table_ptr))?; rom_writer.seek(SeekFrom::Start(room_end_pos))?; } let next_offset = rom_writer.position() as u32; rom_writer.seek(SeekFrom::Start(room_ptrs_offset as u64))?; rom_writer.write_all(room_ptrs.get_ref())?; rom_writer.seek(SeekFrom::Start( rommap::AREA_TABLE as u64 + area_idx as 
u64 * 3, ))?; rom_writer.write_all(&util::rom_offset_to_pointer(room_ptrs_offset as u32))?; Ok(next_offset) } pub fn write(&self) -> Result<Vec<u8>, Error> { let mut rom_writer = Cursor::new(self.rom_data.clone()); let area_range = 4..=0xf; for area_idx in area_range.clone() { let area = &self.areas[area_idx]; let offset = 0x4fe00 + (0x20 * area_idx as u64); rom_writer.seek(SeekFrom::Start(offset))?; for chest in &area.chest_table { chest.write(&mut rom_writer)?; } rom_writer.seek(SeekFrom::Start( rommap::CHEST_TABLE as u64 + 3 * area_idx as u64, ))?; let ptr = util::rom_offset_to_pointer(offset as u32); rom_writer.write_all(&ptr)?; } let mut cur_offset = self.n.area_pointers[4]; let mut offset_c = None; for area_idx in area_range { if area_idx == 0xc { offset_c = Some(cur_offset); } rom_writer.seek(SeekFrom::Start(cur_offset as u64))?; cur_offset = self.write_area(area_idx, &mut rom_writer)? } if let Some(offset) = offset_c { rom_writer.seek(SeekFrom::Start(rommap::AREA_TABLE as u64 + 0x10 * 3))?; rom_writer.write_all(&util::rom_offset_to_pointer(offset as u32))?; } Ok(rom_writer.into_inner()) } } pub fn area_name(area: u8) -> &'static str { match area { 0x0 => "Land Sphere", 0x1 => "Subterranean Sphere", 0x2 => "Sea Sphere", 0x3 => "Sky Sphere", 0x4 => "Crypt 1", 0x5 => "Crypt 2", 0x6 => "Crypt 3", 0x7 => "Crypt 4", 0x8 => "Crypt 5", 0x9 => "Crypt 6", 0xa => "Crypt 7", 0xb => "Crypt 8", 0xc => "Land Sphere Rooms", 0xd => "Subterranean Sphere Rooms", 0xe => "Sea Sphere Rooms", 0xf => "Sky Sphere Rooms", _ => "Unknown", } } #[cfg(test)] mod tests {}
pub fn update_chests(&mut self, chests: &[Chest]) -> Result<(), Error> { for chest in chests { let id = self.get_table_id_for_chest(chest)?; let entry = self.areas[chest.area as usize] .chest_table .get_mut(id as usize) .ok_or_else(|| format_err!("incoherent chest id {:02x}", id))?; *entry = chest.info.clone(); } Ok(()) }
function_block-full_function
[ { "content": "pub fn verify(data: &[u8]) -> Result<RomInfo, Error> {\n\n let expected_size = 384 * 1024;\n\n let header_size = 0x200;\n\n\n\n let (headered, buffer) = if data.len() == expected_size {\n\n (false, &data as &[u8])\n\n } else if data.len() == expected_size + header_size {\n\n ...
Rust
vm/actor/src/util/balance_table.rs
zatoichi-labs/forest
4422cddcf42fab20912d1ad1f92e2b997f1b5bda
use crate::{BytesKey, HAMT_BIT_WIDTH}; use address::Address; use cid::Cid; use ipld_blockstore::BlockStore; use ipld_hamt::{Error, Hamt}; use num_bigint::biguint_ser::BigUintDe; use num_traits::CheckedSub; use vm::TokenAmount; pub struct BalanceTable<'a, BS>(Hamt<'a, BytesKey, BS>); impl<'a, BS> BalanceTable<'a, BS> where BS: BlockStore, { pub fn new(bs: &'a BS) -> Self { Self(Hamt::new_with_bit_width(bs, HAMT_BIT_WIDTH)) } pub fn from_root(bs: &'a BS, cid: &Cid) -> Result<Self, Error> { Ok(Self(Hamt::load_with_bit_width(cid, bs, HAMT_BIT_WIDTH)?)) } #[inline] pub fn root(&mut self) -> Result<Cid, Error> { self.0.flush() } #[inline] pub fn get(&self, key: &Address) -> Result<TokenAmount, String> { Ok(self .0 .get::<_, BigUintDe>(&key.to_bytes())? .ok_or("no key {} in map root")? .0) } #[inline] pub fn has(&self, key: &Address) -> Result<bool, Error> { match self.0.get::<_, BigUintDe>(&key.to_bytes())? { Some(_) => Ok(true), None => Ok(false), } } #[inline] pub fn set(&mut self, key: &Address, value: TokenAmount) -> Result<(), Error> { self.0.set(key.to_bytes().into(), BigUintDe(value)) } pub fn add(&mut self, key: &Address, value: &TokenAmount) -> Result<(), String> { let prev = self.get(key)?; Ok(self.0.set(key.to_bytes().into(), BigUintDe(prev + value))?) } pub fn add_create(&mut self, key: &Address, value: TokenAmount) -> Result<(), String> { let new_val = match self.0.get::<_, BigUintDe>(&key.to_bytes())? { Some(v) => v.0 + value, None => value, }; Ok(self.0.set(key.to_bytes().into(), BigUintDe(new_val))?) 
} pub fn subtract_with_minimum( &mut self, key: &Address, req: &TokenAmount, floor: &TokenAmount, ) -> Result<TokenAmount, String> { let prev = self.get(key)?; let res = prev .checked_sub(req) .unwrap_or_else(|| TokenAmount::from(0u8)); let new_val: &TokenAmount = std::cmp::max(&res, floor); if &prev > new_val { self.0 .set(key.to_bytes().into(), BigUintDe(new_val.clone()))?; Ok(prev - new_val) } else { Ok(TokenAmount::default()) } } pub fn must_subtract(&mut self, key: &Address, req: &TokenAmount) -> Result<(), String> { let sub_amt = self.subtract_with_minimum(key, req, &TokenAmount::from(0u8))?; if &sub_amt != req { return Err(format!( "Couldn't subtract value from address {} (req: {}, available: {})", key, req, sub_amt )); } Ok(()) } pub fn remove(&mut self, key: &Address) -> Result<TokenAmount, String> { let prev = self.get(key)?; self.0.delete(&key.to_bytes())?; Ok(prev) } pub fn total(&self) -> Result<TokenAmount, String> { let mut total = TokenAmount::default(); self.0.for_each(|_, v: BigUintDe| { total += v.0; Ok(()) })?; Ok(total) } }
use crate::{BytesKey, HAMT_BIT_WIDTH}; use address::Address; use cid::Cid; use ipld_blockstore::BlockStore; use ipld_hamt::{Error, Hamt}; use num_bigint::biguint_ser::BigUintDe; use num_traits::CheckedSub; use vm::TokenAmount; pub struct BalanceTable<'a, BS>(Hamt<'a, BytesKey, BS>); impl<'a, BS> BalanceTable<'a, BS> where BS: BlockStore, { pub fn new(bs: &'a BS) -> Self { Self(Hamt::new_with_bit_width(bs, HAMT_BIT_WIDTH)) } pub fn from_root(bs: &'a BS, cid: &Cid) -> Result<Self, Error> { Ok(Self(Hamt::load_with_bit_width(cid, bs, HAMT_BIT_WIDTH)?)) } #[inline] pub fn root(&mut self) -> Result<Cid, Error> { self.0.flush() } #[inline] pub fn get(&self, key: &Address) -> Result<TokenAmount, String> { Ok(self .0 .get::<_, BigUintDe>(&key.to_bytes())? .ok_or("no key {} in map root")? .0) } #[inline] pub fn has(&self, key: &Address) -> Result<bool, Error> { match self.0.get::<_, BigUintDe>(&key.to_bytes())? { Some(_) => Ok(true), None => Ok(false), } } #[inline] pub fn set(&mut self, key: &Address, value: TokenAmount) -> Result<(), Error> { self.0.set(key.to_bytes().into(), BigUintDe(value)) } pub fn add(&mut self, key: &Address, value: &TokenAmount) -> Result<(), String> { let prev = self.get(key)?; Ok(self.0.set(key.to_bytes().into(), BigUintDe(prev + value))?) } pub fn add_create(&mut self, key: &Address, value: TokenAmount) -> Result<(), String> { let new_val = match self.0.get::<_, BigUintDe>(&key.to_bytes())? { Some(v) => v.0 + value, None => value, }; Ok(self.0.set(key.to_bytes().into(), BigUintDe(new_val))?) 
} pub fn subtract_with_minimum( &mut self, key: &Address, req: &TokenAmount, floor: &TokenAmount, ) -> Result<TokenAmount, String> { let prev = self.get(key)?; let res = prev .checked_sub(req) .unwrap_or_else(|| TokenAmount::from(0u8)); let new_val: &TokenAmount = std::cmp::max(&res, floor); if &prev > new_val { self.0 .set(key.to_bytes().into(), BigUintDe(new_val.clone()))?; Ok(prev - new_val) } else { Ok(TokenAmount::default()) } } pub fn must_subtract(&mut self, key: &Address, req: &TokenAmount) -> Result<(), String> { let sub_amt = self.subtract_with_minimum(key, req, &TokenAmount::from(0u8))?; if &su
pub fn remove(&mut self, key: &Address) -> Result<TokenAmount, String> { let prev = self.get(key)?; self.0.delete(&key.to_bytes())?; Ok(prev) } pub fn total(&self) -> Result<TokenAmount, String> { let mut total = TokenAmount::default(); self.0.for_each(|_, v: BigUintDe| { total += v.0; Ok(()) })?; Ok(total) } }
b_amt != req { return Err(format!( "Couldn't subtract value from address {} (req: {}, available: {})", key, req, sub_amt )); } Ok(()) }
function_block-function_prefixed
[ { "content": "fn mutate_balance_table<BS, F>(store: &BS, c: &mut Cid, f: F) -> Result<(), String>\n\nwhere\n\n F: FnOnce(&mut BalanceTable<BS>) -> Result<(), String>,\n\n BS: BlockStore,\n\n{\n\n let mut t = BalanceTable::from_root(store, &c)?;\n\n\n\n f(&mut t)?;\n\n\n\n *c = t.root()?;\n\n O...
Rust
src/client-rs/src/profile/profile_file.rs
vinimin/fluvio
142c050a2f1aaa83aeda19705fedd670fffaf1a1
use std::env; use std::fs::read_to_string; use std::io::Error as IoError; use std::io::ErrorKind; use std::path::{Path, PathBuf}; use dirs::home_dir; use serde::Deserialize; use types::defaults::{CLI_CONFIG_PATH, CLI_DEFAULT_PROFILE, CLI_PROFILES_DIR}; use types::defaults::{CONFIG_FILE_EXTENTION, FLV_FLUVIO_HOME}; use types::socket_helpers::ServerAddress; use super::ProfileConfig; #[derive(Debug, PartialEq, Deserialize)] pub struct ProfileFile { pub version: String, sc: Option<TargetAddr>, spu: Option<TargetAddr>, kf: Option<TargetAddr>, } #[derive(Debug, PartialEq, Deserialize)] struct TargetAddr { pub host: String, pub port: u16, } impl Into<ServerAddress> for TargetAddr { fn into(self) -> ServerAddress { ServerAddress::new(self.host,self.port) } } impl ProfileFile { pub fn from_file<T: AsRef<Path>>(path: T) -> Result<Self, IoError> { let file_str: String = read_to_string(path)?; toml::from_str(&file_str) .map_err(|err| IoError::new(ErrorKind::InvalidData, format!("{}", err))) } } impl From<ProfileFile> for ProfileConfig { fn from(file: ProfileFile) -> ProfileConfig { Self { sc_addr: file.sc.map(|addr| addr.into()), spu_addr: file.spu.map(|addr| addr.into()), kf_addr: file.kf.map( |addr| addr.into()) } } } pub fn build_cli_profile_file_path(profile_name: Option<&String>) -> Result<PathBuf, IoError> { let base_path = match env::var(FLV_FLUVIO_HOME) { Ok(val) => { let mut user_dir = PathBuf::new(); user_dir.push(val); user_dir } Err(_) => { if let Some(mut home_dir) = home_dir() { home_dir.push(CLI_CONFIG_PATH); home_dir } else { return Err(IoError::new( ErrorKind::InvalidInput, "can't get home directory", )); } } }; let mut file_path = base_path.join(CLI_PROFILES_DIR); if profile_name.is_some() { file_path.push(profile_name.unwrap()); } else { file_path.push(CLI_DEFAULT_PROFILE); } file_path.set_extension(CONFIG_FILE_EXTENTION); Ok(file_path) } #[cfg(test)] pub mod test { use super::*; use std::path::PathBuf; #[test] fn test_default_profile_ok() { let mut 
profile_path = PathBuf::new(); profile_path.push("./test-data/profiles/default.toml"); let result = ProfileFile::from_file(profile_path); assert!(result.is_ok()); let expected = ProfileFile { version: "1.0".to_owned(), sc: Some(TargetAddr { host: "127.0.0.1".to_owned(), port: 9033, }), spu: Some(TargetAddr { host: "127.0.0.1".to_owned(), port: 9034, }), kf: Some(TargetAddr { host: "127.0.0.1".to_owned(), port: 9093, }), }; assert_eq!(result.unwrap(), expected); } #[test] fn test_default_profile_not_found() { let mut profile_path = PathBuf::new(); profile_path.push("./test-data/profiles/notfound.toml"); let result = ProfileFile::from_file(profile_path); assert!(result.is_err()); assert_eq!( format!("{}", result.unwrap_err()), "No such file or directory (os error 2)" ); } #[test] fn test_invalid_profile_file() { let mut profile_path = PathBuf::new(); profile_path.push("./test-data/profiles/invalid.toml"); let result = ProfileFile::from_file(profile_path); assert!(result.is_err()); assert!( format!("{}", result.unwrap_err()).contains( "missing field `port` for key `sc`") ); } #[test] fn test_build_default_profile_file_path() { let file_path = build_cli_profile_file_path(None); assert_eq!(file_path.is_ok(), true); let mut expected_file_path = PathBuf::new(); expected_file_path.push(home_dir().unwrap()); expected_file_path.push(".fluvio/profiles/default.toml"); assert_eq!(file_path.unwrap(), expected_file_path); } #[test] fn test_build_custom_cli_profile_file_path() { let file_path = build_cli_profile_file_path(Some(&"custom".to_owned())); assert_eq!(file_path.is_ok(), true); let mut expected_file_path = PathBuf::new(); expected_file_path.push(home_dir().unwrap()); expected_file_path.push(".fluvio/profiles/custom.toml"); assert_eq!(file_path.unwrap(), expected_file_path); } }
use std::env; use std::fs::read_to_string; use std::io::Error as IoError; use std::io::ErrorKind; use std::path::{Path, PathBuf}; use dirs::home_dir; use serde::Deserialize; use types::defaults::{CLI_CONFIG_PATH, CLI_DEFAULT_PROFILE, CLI_PROFILES_DIR}; use types::defaults::{CONFIG_FILE_EXTENTION, FLV_FLUVIO_HOME}; use types::socket_helpers::ServerAddress; use super::ProfileConfig; #[derive(Debug, PartialEq, Deserialize)] pub struct ProfileFile { pub version: String, sc: Option<TargetAddr>, spu: Option<TargetAddr>, kf: Option<TargetAddr>, } #[derive(Debug, PartialEq, Deserialize)] struct TargetAddr { pub host: String, pub port: u16, } impl Into<ServerAddress> for TargetAddr { fn into(self) -> ServerAddress { ServerAddress::new(self.host,self.port) } } impl ProfileFile { pub fn from_file<T: AsRef<Path>>(path: T) -> Result<Self, IoError> { let file_str: String = read_to_string(path)?; toml::from_str(&file_str) .map_err(|err| IoError::new(ErrorKind::InvalidData, format!("{}", err))) } } impl From<ProfileFile> for ProfileConfig { fn from(file: ProfileFile) -> ProfileConfig { Self { sc_addr: file.sc.map(|addr| addr.into()), spu_addr: file.spu.map(|addr| addr.into()), kf_addr: file.kf.map( |addr| addr.into()) } } } pub fn build_cli_profile_file_path(profile_name: Option<&String>) -> Result<PathBuf, IoError> { let base_path = match env::var(FLV_FLUVIO_HOME) { Ok(val) => { let mut user_dir = PathBuf::new(); user_dir.push(val); user_dir } Err(_) =
expected_file_path.push(home_dir().unwrap()); expected_file_path.push(".fluvio/profiles/custom.toml"); assert_eq!(file_path.unwrap(), expected_file_path); } }
> { if let Some(mut home_dir) = home_dir() { home_dir.push(CLI_CONFIG_PATH); home_dir } else { return Err(IoError::new( ErrorKind::InvalidInput, "can't get home directory", )); } } }; let mut file_path = base_path.join(CLI_PROFILES_DIR); if profile_name.is_some() { file_path.push(profile_name.unwrap()); } else { file_path.push(CLI_DEFAULT_PROFILE); } file_path.set_extension(CONFIG_FILE_EXTENTION); Ok(file_path) } #[cfg(test)] pub mod test { use super::*; use std::path::PathBuf; #[test] fn test_default_profile_ok() { let mut profile_path = PathBuf::new(); profile_path.push("./test-data/profiles/default.toml"); let result = ProfileFile::from_file(profile_path); assert!(result.is_ok()); let expected = ProfileFile { version: "1.0".to_owned(), sc: Some(TargetAddr { host: "127.0.0.1".to_owned(), port: 9033, }), spu: Some(TargetAddr { host: "127.0.0.1".to_owned(), port: 9034, }), kf: Some(TargetAddr { host: "127.0.0.1".to_owned(), port: 9093, }), }; assert_eq!(result.unwrap(), expected); } #[test] fn test_default_profile_not_found() { let mut profile_path = PathBuf::new(); profile_path.push("./test-data/profiles/notfound.toml"); let result = ProfileFile::from_file(profile_path); assert!(result.is_err()); assert_eq!( format!("{}", result.unwrap_err()), "No such file or directory (os error 2)" ); } #[test] fn test_invalid_profile_file() { let mut profile_path = PathBuf::new(); profile_path.push("./test-data/profiles/invalid.toml"); let result = ProfileFile::from_file(profile_path); assert!(result.is_err()); assert!( format!("{}", result.unwrap_err()).contains( "missing field `port` for key `sc`") ); } #[test] fn test_build_default_profile_file_path() { let file_path = build_cli_profile_file_path(None); assert_eq!(file_path.is_ok(), true); let mut expected_file_path = PathBuf::new(); expected_file_path.push(home_dir().unwrap()); expected_file_path.push(".fluvio/profiles/default.toml"); assert_eq!(file_path.unwrap(), expected_file_path); } #[test] fn 
test_build_custom_cli_profile_file_path() { let file_path = build_cli_profile_file_path(Some(&"custom".to_owned())); assert_eq!(file_path.is_ok(), true); let mut expected_file_path = PathBuf::new();
random
[ { "content": "// converts a host/port to SocketAddress\n\npub fn host_port_to_socket_addr(host: &str, port: u16) -> Result<SocketAddr, IoError> {\n\n let addr_string = format!(\"{}:{}\", host, port);\n\n string_to_socket_addr(&addr_string)\n\n}\n\n\n", "file_path": "src/types/src/socket_helpers.rs", ...
Rust
src/lib/redis.rs
PavelZX/rust-actix-rest-api-boilerplate
3852a2dd8b941dfe18e5f990fedb72d2665c87fe
use mobc::Pool; use mobc::async_trait; use mobc::Manager; use redis::aio::Connection; use redis::Client; use std::time::Duration; use super::error; pub struct RedisConnectionManager { client: Client, } impl RedisConnectionManager { pub fn new(c: Client) -> Self { Self { client: c } } } #[async_trait] impl Manager for RedisConnectionManager { type Connection = Connection; type Error = redis::RedisError; async fn connect(&self) -> Result<Self::Connection, Self::Error> { let c = self.client.get_async_connection().await?; Ok(c) } async fn check(&self, mut conn: Self::Connection) -> Result<Self::Connection, Self::Error> { redis::cmd("PING").query_async(&mut conn).await?; Ok(conn) } } pub async fn conn(settings: &config::Config) -> Pool<RedisConnectionManager> { let host = settings.get::<String>("redis.host").unwrap(); let port = settings.get::<String>("redis.port").unwrap(); let password = settings.get::<String>("redis.password").unwrap(); let db = settings.get::<String>("redis.db").unwrap(); let pool_get_timeout_seconds = settings.get::<u64>("redis.pool_get_timeout_seconds").unwrap(); let pool_max_open = settings.get::<u64>("redis.pool_max_open").unwrap(); let pool_max_idle = settings.get::<u64>("redis.pool_max_idle").unwrap(); let pool_max_lifetime_seconds = settings.get::<u64>("redis.pool_max_lifetime_seconds").unwrap(); let client = redis::Client::open(&format!("redis://:{}@{}:{}/{}", password, host, port, db)[..]).unwrap(); let manager = RedisConnectionManager::new(client); let pool = Pool::builder() .get_timeout(Some(Duration::from_secs(pool_get_timeout_seconds))) .max_open(pool_max_open) .max_idle(pool_max_idle) .max_lifetime(Some(Duration::from_secs(pool_max_lifetime_seconds))) .build(manager); pool } pub async fn expire(key: String, value: i64, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("EXPIRE").arg(key).arg(value).query_async::<_, i16>(&mut con 
as &mut redis::aio::Connection).await; if let Err(err) = result { error!(log, "{}", err); return Err(error::err500()); } Ok(()) } pub async fn get_expire(key: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<i64, error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("TTL").arg(key).query_async::<_, i64>(&mut con as &mut redis::aio::Connection).await; match result { Ok(v) => Ok(v), Err(e) => { error!(log, "{}", e); Err(error::err500()) } } } pub async fn del(key: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("DEL").arg(key).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await; if let Err(err) = result { error!(log, "{}", err); return Err(error::err500()); } Ok(()) } pub async fn has_key(key: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<bool, error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("EXISTS").arg(key).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await; match result { Ok(v) => { if v > 0 { Ok(true) } else { Ok(false) } }, Err(e) => { error!(log, "{}", e); Err(error::err500()) } } } pub async fn set<T: redis::ToRedisArgs>(key: String, value: T, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("SET").arg(key).arg(value).query_async::<_, String>(&mut con as &mut redis::aio::Connection).await; if let Err(err) = result { error!(log, "{}", err); return Err(error::err500()); } Ok(()) } pub async fn set_with_expire<T: redis::ToRedisArgs>(key: String, value: T, time: i64, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> { let mut con = pool.get().await.unwrap(); let result = 
redis::cmd("SET").arg(key).arg(value).arg("EX").arg(time).query_async::<_, String>(&mut con as &mut redis::aio::Connection).await; if let Err(err) = result { error!(log, "{}", err); return Err(error::err500()); } Ok(()) } pub async fn get<T: redis::FromRedisValue>(key: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<T, error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("GET").arg(key).query_async::<_, T>(&mut con as &mut redis::aio::Connection).await; match result { Ok(v) => Ok(v), Err(e) => { error!(log, "{}", e); Err(error::err500()) } } } pub async fn hset<T: redis::ToRedisArgs>(key: String, item: String, value: T, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("HSET").arg(key).arg(item).arg(value).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await; if let Err(err) = result { error!(log, "{}", err); return Err(error::err500()); } Ok(()) } pub async fn hset_with_expire<T: redis::ToRedisArgs>(key: String, item: String, value: T, time: i64, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("HSET").arg(&key).arg(item).arg(value).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await; if let Err(err) = result { error!(log, "{}", err); return Err(error::err500()); } if time > 0 { expire(key, time, &pool, &log).await?; } Ok(()) } pub async fn hget<T: redis::FromRedisValue>(key: String, item: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<T, error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("HGET").arg(key).arg(item).query_async::<_, T>(&mut con as &mut redis::aio::Connection).await; match result { Ok(v) => Ok(v), Err(e) => { error!(log, "{}", e); Err(error::err500()) } } } pub async fn 
hhas_key(key: String, item: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<bool, error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("HEXISTS").arg(key).arg(item).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await; match result { Ok(v) => { if v > 0 { Ok(true) } else { Ok(false) } }, Err(e) => { error!(log, "{}", e); Err(error::err500()) } } } pub async fn hdel(key: String, item: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("HDEL").arg(key).arg(item).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await; if let Err(err) = result { error!(log, "{}", err); return Err(error::err500()); } Ok(()) }
use mobc::Pool; use mobc::async_trait; use mobc::Manager; use redis::aio::Connection; use redis::Client; use std::time::Duration; use super::error; pub struct RedisConnectionManager { client: Client, } impl RedisConnectionManager { pub fn new(c: Client) -> Self { Self { client: c } } } #[async_trait] impl Manager for RedisConnectionManager { type Connection = Connection; type Error = redis::RedisError; async fn connect(&self) -> Result<Self::Connection, Self::Error> { let c = self.client.get_async_connection().await?; Ok(c) } async fn check(&self, mut conn: Self::Connection) -> Result<Self::Connection, Self::Error> { redis::cmd("PING").query_async(&mut conn).await?; Ok(conn) } } pub async fn conn(settings: &config::Config) -> Pool<RedisConnectionManager> { let host = settings.get::<String>("redis.host").unwrap(); let port = settings.get::<String>("redis.port").unwrap(); let password = settings.get::<String>("redis.password").unwrap(); let db = settings.get::<String>("redis.db").unwrap(); let pool_get_timeout_seconds = settings.get::<u64>("redis.pool_get_timeout_seconds").unwrap(); let pool_max_open = settings.get::<u64>("redis.pool_max_open").unwrap(); let pool_max_idle = settings.get::<u64>("redis.pool_max_idle").unwrap(); let pool_max_lifetime_seconds = settings.get::<u64>("redis.pool_max_lifetime_seconds").unwrap(); let client = redis::Client::open(&format!("redis://:{}@{}:{}/{}", password, host, port, db)[..]).unwrap(); let manager = RedisConnectionManager::new(client); let pool = Pool::builder() .get_timeout(Some(Duration::from_secs(pool_get_timeout_seconds))) .max_open(pool_max_open) .max_idle(pool_max_idle) .max_lifetime(Some(Duration::from_secs(pool_max_lifetime_seconds))) .build(manager); pool } pub async fn expire(key: String, value: i64, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("EXPIRE").arg(key).arg(value).query_async::<_, i16>(&mut con 
as &mut redis::aio::Connection).await; if let Err(err) = result { error!(log, "{}", err); return Err(error::err500()); } Ok(()) } pub async fn get_expire(key: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<i64, error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("TTL").arg(key).query_async::<_, i64>(&mut con as &mut redis::aio::Connection).await; match result { Ok(v) => Ok(v), Err(e) => { error!(log, "{}", e); Err(error::err500()) } } } pub async fn del(key: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("DEL").arg(key).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await; if let Err(err) = result { error!(log, "{}", err); return Err(error::err500()); } Ok(()) } pub async fn has_key(key: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<bool, error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("EXISTS").arg(key).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await; match result { Ok(v) => { if v > 0 { Ok(true) } else { Ok(false) } }, Err(e) => { error!(log, "{}", e); Err(error::err500()) } } } pub async fn set<T: redis::ToRedisArgs>(key: String, value: T, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("SET").arg(key).arg(value).query_async::<_, String>(&mut con as &mut redis::aio::Connection).await; if let Err(err) = result { error!(log, "{}", err); return Err(error::err500()); } Ok(()) } pub async fn set_with_expire<T: redis::ToRedisArgs>(key: String, value: T, time: i64, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> { let mut con = pool.get().await.unwrap(); let result = 
redis::cmd("SET").arg(key).arg(value).arg("EX").arg(time).query_async::<_, String>(&mut con as &mut redis::aio::Connection).await; if let Err(err) = result { error!(log, "{}", err); return Err(error::err500()); } Ok(()) } pub async fn get<T: redis::FromRedisValue>(key: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<T, error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("GET").arg(key).query_async::<_, T>(&mut con as &mut redis::aio::Connection).await;
} pub async fn hset<T: redis::ToRedisArgs>(key: String, item: String, value: T, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("HSET").arg(key).arg(item).arg(value).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await; if let Err(err) = result { error!(log, "{}", err); return Err(error::err500()); } Ok(()) } pub async fn hset_with_expire<T: redis::ToRedisArgs>(key: String, item: String, value: T, time: i64, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("HSET").arg(&key).arg(item).arg(value).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await; if let Err(err) = result { error!(log, "{}", err); return Err(error::err500()); } if time > 0 { expire(key, time, &pool, &log).await?; } Ok(()) } pub async fn hget<T: redis::FromRedisValue>(key: String, item: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<T, error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("HGET").arg(key).arg(item).query_async::<_, T>(&mut con as &mut redis::aio::Connection).await; match result { Ok(v) => Ok(v), Err(e) => { error!(log, "{}", e); Err(error::err500()) } } } pub async fn hhas_key(key: String, item: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<bool, error::Error> { let mut con = pool.get().await.unwrap(); let result = redis::cmd("HEXISTS").arg(key).arg(item).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await; match result { Ok(v) => { if v > 0 { Ok(true) } else { Ok(false) } }, Err(e) => { error!(log, "{}", e); Err(error::err500()) } } } pub async fn hdel(key: String, item: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> { let mut con = pool.get().await.unwrap(); let result = 
redis::cmd("HDEL").arg(key).arg(item).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await; if let Err(err) = result { error!(log, "{}", err); return Err(error::err500()); } Ok(()) }
match result { Ok(v) => Ok(v), Err(e) => { error!(log, "{}", e); Err(error::err500()) } }
if_condition
[ { "content": "pub fn required_str(v: &Option<String>, name: &str) -> Result<String, error::Error> {\n\n not_none(v.as_ref(), name)?;\n\n\n\n let v = v.as_ref().unwrap().to_string();\n\n if v.chars().count() == 0 {\n\n return Err(error::new(400002, &format!(\"{} can not be empty\", name)[..], 422...
Rust
src/osu/versions/no_sliders_no_leniency/mod.rs
RealistikDash/akat-rust-pp
90653f6da82ff981da250a55427dd40aa6ea5b2b
#![cfg(feature = "no_sliders_no_leniency")] use super::super::DifficultyAttributes; mod difficulty_object; mod osu_object; mod skill; mod skill_kind; mod slider_state; use difficulty_object::DifficultyObject; use osu_object::OsuObject; use skill::Skill; use skill_kind::SkillKind; use slider_state::SliderState; use crate::{parse::HitObjectKind, Beatmap, Mods, StarResult, Strains}; const OBJECT_RADIUS: f32 = 64.0; const SECTION_LEN: f32 = 400.0; const DIFFICULTY_MULTIPLIER: f32 = 0.0675; const NORMALIZED_RADIUS: f32 = 52.0; pub fn stars(map: &Beatmap, mods: impl Mods, passed_objects: Option<usize>) -> StarResult { let take = passed_objects.unwrap_or_else(|| map.hit_objects.len()); let attributes = map.attributes().mods(mods); let hitwindow = super::difficulty_range(attributes.od).floor() / attributes.clock_rate; let od = (80.0 - hitwindow) / 6.0; if take < 2 { return StarResult::Osu(DifficultyAttributes { ar: attributes.ar, od, ..Default::default() }); } let radius = OBJECT_RADIUS * (1.0 - 0.7 * (attributes.cs - 5.0) / 5.0) / 2.0; let mut scaling_factor = NORMALIZED_RADIUS / radius; if radius < 30.0 { let small_circle_bonus = (30.0 - radius).min(5.0) / 50.0; scaling_factor *= 1.0 + small_circle_bonus; } let clock_rate = attributes.clock_rate; let mut max_combo = 0; let mut state = SliderState::new(&map); let mut hit_objects = map .hit_objects .iter() .take(take) .filter_map(|h| match &h.kind { HitObjectKind::Circle => { max_combo += 1; Some(OsuObject::new(h.pos, h.start_time, false, clock_rate)) } HitObjectKind::Slider { pixel_len, repeats, .. } => { max_combo += state.count_ticks(h.start_time, *pixel_len, *repeats, &map); Some(OsuObject::new(h.pos, h.start_time, false, clock_rate)) } HitObjectKind::Spinner { .. } => { max_combo += 1; Some(OsuObject::new(h.pos, h.start_time, true, clock_rate)) } HitObjectKind::Hold { .. 
} => None, }); let mut aim = Skill::new(SkillKind::Aim); let mut speed = Skill::new(SkillKind::Speed); let mut prev_prev = None; let mut prev = hit_objects.next().unwrap(); let mut prev_vals = None; let mut current_section_end = (prev.time / SECTION_LEN).ceil() * SECTION_LEN; let curr = hit_objects.next().unwrap(); let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; for curr in hit_objects { let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { aim.save_current_peak(); aim.start_new_section_from(current_section_end); speed.save_current_peak(); speed.start_new_section_from(current_section_end); current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; } aim.save_current_peak(); speed.save_current_peak(); let aim_strain = aim.difficulty_value().sqrt() * DIFFICULTY_MULTIPLIER; let speed_strain = speed.difficulty_value().sqrt() * DIFFICULTY_MULTIPLIER; let stars = aim_strain + speed_strain + (aim_strain - speed_strain).abs() / 2.0; StarResult::Osu(DifficultyAttributes { stars, ar: attributes.ar, od, speed_strain, aim_strain, max_combo, n_circles: map.n_circles as usize, n_spinners: map.n_spinners as usize, }) } pub fn strains(map: &Beatmap, mods: impl Mods) -> Strains { let attributes = map.attributes().mods(mods); if map.hit_objects.len() < 2 { return Strains::default(); } let radius = OBJECT_RADIUS * (1.0 - 0.7 * (attributes.cs - 5.0) / 5.0) / 2.0; let mut scaling_factor = NORMALIZED_RADIUS / radius; if radius < 30.0 { let small_circle_bonus = (30.0 - radius).min(5.0) / 50.0; scaling_factor *= 1.0 + small_circle_bonus; } let clock_rate = attributes.clock_rate; let mut 
hit_objects = map.hit_objects.iter().filter_map(|h| match &h.kind { HitObjectKind::Circle | HitObjectKind::Slider { .. } => { Some(OsuObject::new(h.pos, h.start_time, false, clock_rate)) } HitObjectKind::Spinner { .. } => { Some(OsuObject::new(h.pos, h.start_time, true, clock_rate)) } HitObjectKind::Hold { .. } => None, }); let mut aim = Skill::new(SkillKind::Aim); let mut speed = Skill::new(SkillKind::Speed); let mut prev_prev = None; let mut prev = hit_objects.next().unwrap(); let mut prev_vals = None; let mut current_section_end = (prev.time / SECTION_LEN).ceil() * SECTION_LEN; let curr = hit_objects.next().unwrap(); let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; for curr in hit_objects { let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { aim.save_current_peak(); aim.start_new_section_from(current_section_end); speed.save_current_peak(); speed.start_new_section_from(current_section_end); current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; } aim.save_current_peak(); speed.save_current_peak(); let strains = aim .strain_peaks .into_iter() .zip(speed.strain_peaks.into_iter()) .map(|(aim, speed)| aim + speed) .collect(); Strains { section_length: SECTION_LEN, strains, } }
#![cfg(feature = "no_sliders_no_leniency")] use super::super::DifficultyAttributes; mod difficulty_object; mod osu_object; mod skill; mod skill_kind; mod slider_state; use difficulty_object::DifficultyObject; use osu_object::OsuObject; use skill::Skill; use skill_kind::SkillKind; use slider_state::SliderState; use crate::{parse::HitObjectKind, Beatmap, Mods, StarResult, Strains}; const OBJECT_RADIUS: f32 = 64.0; const SECTION_LEN: f32 = 400.0; const DIFFICULTY_MULTIPLIER: f32 = 0.0675; const NORMALIZED_RADIUS: f32 = 52.0; pub fn stars(map: &Beatmap, mods: impl Mods, passed_objects: Option<usize>) -> StarResult { let take = passed_objects.unwrap_or_else(|| map.hit_objects.len()); let attributes = map.attributes().mods(mods); let hitwindow = super::difficulty_range(attributes.od).floor() / attributes.clock_rate; let od = (80.0 - hitwindow) / 6.0; if take < 2 { return StarResult::Osu(DifficultyAttributes { ar: attributes.ar, od, ..Default::default() }); } let radius = OBJECT_RADIUS * (1.0 - 0.7 * (attributes.cs - 5.0) / 5.0) / 2.0; let mut scaling_factor = NORMALIZED_RADIUS / radius; if radius < 30.0 { let small_circle_bonus = (30.0 - radius).min(5.0) / 50.0; scaling_factor *= 1.0 + small_circle_bonus; } let clock_rate = attributes.clock_rate; let mut max_combo = 0; let mut state = SliderState::new(&map); let mut hit_objects = map .hit_objects .iter() .take(take) .filter_map(|h| match &h.kind { HitObjectKind::Circle => { max_combo += 1; Some(OsuObject::new(h.pos, h.start_time, false, clock_rate)) } HitObjectKind::Slider { pixel_len, repeats, .. } => { max_combo += state.count_ticks(h.start_time, *pixel_len, *repeats, &map); Some(OsuObject::new(h.pos, h.start_time, false, clock_rate)) } HitObjectKind::Spinner { .. } => { max_combo += 1; Some(OsuObject::new(h.pos, h.start_time, true, clock_rate)) } HitObjectKind::Hold { .. 
} => None, }); let mut aim = Skill::new(SkillKind::Aim); let mut speed = Skill::new(SkillKind::Speed); let mut prev_prev = None; let mut prev = hit_objects.next().unwrap(); let mut prev_vals = None; let mut current_section_end = (prev.time / SECTION_LEN).ceil() * SECTION_LEN; let curr = hit_objects.next().unwrap(); let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; for curr in hit_objects { let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { aim.save_current_peak(); aim.start_new_section_from(current_section_end); speed.save_current_peak(); speed.start_new_section_from(current_section_end); current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; } aim.save_current_peak(); speed.save_current_peak(); let aim_strain = aim.difficulty_value().sqrt() * DIFFICULTY_MULTIPLIER; let speed_strain = speed.difficulty_value().sqrt() * DIFFICULTY_MULTIPLIER; let stars = aim_strain + speed_strain + (aim_strain - speed_strain).abs() / 2.0; StarResult::Osu(DifficultyAttributes { stars, ar: attributes.ar, od, speed_strain, aim_strain, max_combo, n_circles: map.n_circles as usize, n_spinners: map.n_spinners as usize, }) } pub fn strains(map: &Beatmap, mods: impl Mods) -> Strains { let attributes = map.attributes().mods(mods);
if map.hit_objects.len() < 2 { return Strains::default(); } let radius = OBJECT_RADIUS * (1.0 - 0.7 * (attributes.cs - 5.0) / 5.0) / 2.0; let mut scaling_factor = NORMALIZED_RADIUS / radius; if radius < 30.0 { let small_circle_bonus = (30.0 - radius).min(5.0) / 50.0; scaling_factor *= 1.0 + small_circle_bonus; } let clock_rate = attributes.clock_rate; let mut hit_objects = map.hit_objects.iter().filter_map(|h| match &h.kind { HitObjectKind::Circle | HitObjectKind::Slider { .. } => { Some(OsuObject::new(h.pos, h.start_time, false, clock_rate)) } HitObjectKind::Spinner { .. } => { Some(OsuObject::new(h.pos, h.start_time, true, clock_rate)) } HitObjectKind::Hold { .. } => None, }); let mut aim = Skill::new(SkillKind::Aim); let mut speed = Skill::new(SkillKind::Speed); let mut prev_prev = None; let mut prev = hit_objects.next().unwrap(); let mut prev_vals = None; let mut current_section_end = (prev.time / SECTION_LEN).ceil() * SECTION_LEN; let curr = hit_objects.next().unwrap(); let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; for curr in hit_objects { let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { aim.save_current_peak(); aim.start_new_section_from(current_section_end); speed.save_current_peak(); speed.start_new_section_from(current_section_end); current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; } aim.save_current_peak(); speed.save_current_peak(); let strains = aim .strain_peaks .into_iter() .zip(speed.strain_peaks.into_iter()) .map(|(aim, speed)| aim + speed) .collect(); Strains { section_length: SECTION_LEN, strains, } }
function_block-function_prefix_line
[]
Rust
src/instruction.rs
kevinheavey/solders
82171e0d34b913efed9f0eb4e5421bc99d3f000e
use pyo3::{basic::CompareOp, prelude::*, types::PyBytes}; use serde::{Deserialize, Serialize}; use solana_sdk::{ instruction::{ AccountMeta as AccountMetaOriginal, CompiledInstruction as CompiledInstructionOriginal, Instruction as InstructionOriginal, }, pubkey::Pubkey as PubkeyOriginal, }; use crate::{handle_py_err, pubkey::Pubkey, RichcmpEqualityOnly}; #[pyclass(module = "solders", subclass)] #[derive(PartialEq, Debug, Clone)] pub struct AccountMeta(AccountMetaOriginal); #[pymethods] impl AccountMeta { #[new] pub fn new(pubkey: &Pubkey, is_signer: bool, is_writable: bool) -> Self { let underlying_pubkey = pubkey.into(); let underlying = if is_writable { AccountMetaOriginal::new(underlying_pubkey, is_signer) } else { AccountMetaOriginal::new_readonly(underlying_pubkey, is_signer) }; underlying.into() } #[getter] pub fn pubkey(&self) -> Pubkey { self.0.pubkey.into() } #[getter] pub fn is_signer(&self) -> bool { self.0.is_signer } #[getter] pub fn is_writable(&self) -> bool { self.0.is_writable } pub fn __repr__(&self) -> String { format!("{:#?}", self) } pub fn __str__(&self) -> String { format!("{:?}", self) } pub fn __richcmp__(&self, other: &Self, op: CompareOp) -> PyResult<bool> { self.richcmp(other, op) } } impl RichcmpEqualityOnly for AccountMeta {} impl From<AccountMetaOriginal> for AccountMeta { fn from(am: AccountMetaOriginal) -> Self { Self(am) } } impl From<AccountMeta> for AccountMetaOriginal { fn from(am: AccountMeta) -> Self { am.0 } } #[pyclass(module = "solders", subclass)] #[derive(PartialEq, Debug, Clone, Serialize, Deserialize)] pub struct Instruction(pub InstructionOriginal); #[pymethods] impl Instruction { #[new] pub fn new(program_id: &Pubkey, data: &[u8], accounts: Vec<AccountMeta>) -> Self { let underlying_accounts: Vec<AccountMetaOriginal> = accounts.into_iter().map(|x| x.0).collect(); let underlying = InstructionOriginal::new_with_bytes(program_id.into(), data, underlying_accounts); underlying.into() } #[getter] pub fn program_id(&self) -> 
Pubkey { self.0.program_id.into() } #[getter] pub fn data<'a>(&self, py: Python<'a>) -> &'a PyBytes { PyBytes::new(py, &self.0.data) } #[getter] pub fn accounts(&self) -> Vec<AccountMeta> { self.0 .accounts .clone() .into_iter() .map(AccountMeta) .collect() } #[setter] pub fn set_accounts(&mut self, accounts: Vec<AccountMeta>) { self.0.accounts = accounts .into_iter() .map(AccountMetaOriginal::from) .collect(); } pub fn __repr__(&self) -> String { format!("{:#?}", self) } pub fn __str__(&self) -> String { format!("{:?}", self) } pub fn __richcmp__(&self, other: &Self, op: CompareOp) -> PyResult<bool> { self.richcmp(other, op) } pub fn serialize<'a>(&self, py: Python<'a>) -> &'a PyBytes { let ser = bincode::serialize(&self).unwrap(); PyBytes::new(py, &ser) } #[staticmethod] pub fn deserialize(data: &[u8]) -> PyResult<Self> { let deser = bincode::deserialize::<Self>(data); handle_py_err(deser) } } impl RichcmpEqualityOnly for Instruction {} impl From<InstructionOriginal> for Instruction { fn from(ix: InstructionOriginal) -> Self { Self(ix) } } impl From<Instruction> for InstructionOriginal { fn from(ix: Instruction) -> InstructionOriginal { ix.0 } } impl AsRef<InstructionOriginal> for Instruction { fn as_ref(&self) -> &InstructionOriginal { &self.0 } } #[pyclass(module = "solders", subclass)] #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] pub struct CompiledInstruction(CompiledInstructionOriginal); #[pymethods] impl CompiledInstruction { #[new] pub fn new(program_id_index: u8, data: &[u8], accounts: &[u8]) -> Self { CompiledInstructionOriginal::new_from_raw_parts( program_id_index, data.to_vec(), accounts.to_vec(), ) .into() } pub fn program_id(&self, program_ids: Vec<Pubkey>) -> Pubkey { let underlying_pubkeys: Vec<PubkeyOriginal> = program_ids.iter().map(PubkeyOriginal::from).collect(); let underlying = *self.0.program_id(&underlying_pubkeys); underlying.into() } #[getter] pub fn program_id_index(&self) -> u8 { self.0.program_id_index } #[getter] 
pub fn accounts<'a>(&self, py: Python<'a>) -> &'a PyBytes { PyBytes::new(py, &self.0.accounts) } #[setter] pub fn set_accounts(&mut self, accounts: Vec<u8>) { self.0.accounts = accounts } #[getter] pub fn data<'a>(&self, py: Python<'a>) -> &'a PyBytes { PyBytes::new(py, &self.0.data) } pub fn __repr__(&self) -> String { format!("{:#?}", self) } pub fn __str__(&self) -> String { format!("{:?}", self) } pub fn __richcmp__(&self, other: &Self, op: CompareOp) -> PyResult<bool> { self.richcmp(other, op) } pub fn serialize<'a>(&self, py: Python<'a>) -> &'a PyBytes { let ser = bincode::serialize(&self).unwrap(); PyBytes::new(py, &ser) } #[staticmethod] pub fn deserialize(data: &[u8]) -> PyResult<Self> { let deser = bincode::deserialize::<Self>(data); handle_py_err(deser) } } impl RichcmpEqualityOnly for CompiledInstruction {} impl From<CompiledInstructionOriginal> for CompiledInstruction { fn from(ix: CompiledInstructionOriginal) -> Self { Self(ix) } } impl From<CompiledInstruction> for CompiledInstructionOriginal { fn from(ix: CompiledInstruction) -> Self { ix.0 } } impl AsRef<CompiledInstructionOriginal> for CompiledInstruction { fn as_ref(&self) -> &CompiledInstructionOriginal { &self.0 } }
use pyo3::{basic::CompareOp, prelude::*, types::PyBytes}; use serde::{Deserialize, Serialize}; use solana_sdk::{ instruction::{ AccountMeta as AccountMetaOriginal, CompiledInstruction as CompiledInstructionOriginal, Instruction as InstructionOriginal, }, pubkey::Pubkey as PubkeyOriginal, }; use crate::{handle_py_err, pubkey::Pubkey, RichcmpEqualityOnly}; #[pyclass(module = "solders", subclass)] #[derive(PartialEq, Debug, Clone)] pub struct AccountMeta(AccountMetaOriginal); #[pymethods] impl AccountMeta { #[new] pub fn new(pubkey: &Pubkey, is_signer: bool, is_writable: bool) -> Self { let underlying_pubkey = pubkey.into(); let underlying = if is_writable { AccountMetaOriginal::new(underlying_pubkey, is_signer) } else { AccountMetaOriginal::new_readonly(underlying_pubkey, is_signer) }; underlying.into() } #[getter] pub fn pubkey(&self) -> Pubkey { self.0.pubkey.into() } #[getter] pub fn is_signer(&self) -> bool { self.0.is_signer } #[getter] pub fn is_writable(&self) -> bool { self.0.is_writable } pub fn __repr__(&self) -> String { format!("{:#?}", self) } pub fn __str__(&self) -> String { format!("{:?}", self) } pub fn __richcmp__(&self, other: &Self, op: CompareOp) -> PyResult<bool> { self.richcmp(other, op) } } impl RichcmpEqualityOnly for AccountMeta {} impl From<AccountMetaOriginal> for AccountMeta { fn from(am: AccountMetaOriginal) -> Self { Self(am) } } impl From<AccountMeta> for AccountMetaOriginal { fn from(am: AccountMeta) -> Self { am.0 } } #[pyclass(module = "solders", subclass)] #[derive(PartialEq, Debug, Clone, Serialize, Deserialize)] pub struct Instruction(pub InstructionOriginal); #[pymethods] impl Instruction { #[new] pub fn new(program_id: &Pubkey, data: &[u8], accounts: Vec<Accoun
#[getter] pub fn program_id(&self) -> Pubkey { self.0.program_id.into() } #[getter] pub fn data<'a>(&self, py: Python<'a>) -> &'a PyBytes { PyBytes::new(py, &self.0.data) } #[getter] pub fn accounts(&self) -> Vec<AccountMeta> { self.0 .accounts .clone() .into_iter() .map(AccountMeta) .collect() } #[setter] pub fn set_accounts(&mut self, accounts: Vec<AccountMeta>) { self.0.accounts = accounts .into_iter() .map(AccountMetaOriginal::from) .collect(); } pub fn __repr__(&self) -> String { format!("{:#?}", self) } pub fn __str__(&self) -> String { format!("{:?}", self) } pub fn __richcmp__(&self, other: &Self, op: CompareOp) -> PyResult<bool> { self.richcmp(other, op) } pub fn serialize<'a>(&self, py: Python<'a>) -> &'a PyBytes { let ser = bincode::serialize(&self).unwrap(); PyBytes::new(py, &ser) } #[staticmethod] pub fn deserialize(data: &[u8]) -> PyResult<Self> { let deser = bincode::deserialize::<Self>(data); handle_py_err(deser) } } impl RichcmpEqualityOnly for Instruction {} impl From<InstructionOriginal> for Instruction { fn from(ix: InstructionOriginal) -> Self { Self(ix) } } impl From<Instruction> for InstructionOriginal { fn from(ix: Instruction) -> InstructionOriginal { ix.0 } } impl AsRef<InstructionOriginal> for Instruction { fn as_ref(&self) -> &InstructionOriginal { &self.0 } } #[pyclass(module = "solders", subclass)] #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] pub struct CompiledInstruction(CompiledInstructionOriginal); #[pymethods] impl CompiledInstruction { #[new] pub fn new(program_id_index: u8, data: &[u8], accounts: &[u8]) -> Self { CompiledInstructionOriginal::new_from_raw_parts( program_id_index, data.to_vec(), accounts.to_vec(), ) .into() } pub fn program_id(&self, program_ids: Vec<Pubkey>) -> Pubkey { let underlying_pubkeys: Vec<PubkeyOriginal> = program_ids.iter().map(PubkeyOriginal::from).collect(); let underlying = *self.0.program_id(&underlying_pubkeys); underlying.into() } #[getter] pub fn program_id_index(&self) -> u8 { 
self.0.program_id_index } #[getter] pub fn accounts<'a>(&self, py: Python<'a>) -> &'a PyBytes { PyBytes::new(py, &self.0.accounts) } #[setter] pub fn set_accounts(&mut self, accounts: Vec<u8>) { self.0.accounts = accounts } #[getter] pub fn data<'a>(&self, py: Python<'a>) -> &'a PyBytes { PyBytes::new(py, &self.0.data) } pub fn __repr__(&self) -> String { format!("{:#?}", self) } pub fn __str__(&self) -> String { format!("{:?}", self) } pub fn __richcmp__(&self, other: &Self, op: CompareOp) -> PyResult<bool> { self.richcmp(other, op) } pub fn serialize<'a>(&self, py: Python<'a>) -> &'a PyBytes { let ser = bincode::serialize(&self).unwrap(); PyBytes::new(py, &ser) } #[staticmethod] pub fn deserialize(data: &[u8]) -> PyResult<Self> { let deser = bincode::deserialize::<Self>(data); handle_py_err(deser) } } impl RichcmpEqualityOnly for CompiledInstruction {} impl From<CompiledInstructionOriginal> for CompiledInstruction { fn from(ix: CompiledInstructionOriginal) -> Self { Self(ix) } } impl From<CompiledInstruction> for CompiledInstructionOriginal { fn from(ix: CompiledInstruction) -> Self { ix.0 } } impl AsRef<CompiledInstructionOriginal> for CompiledInstruction { fn as_ref(&self) -> &CompiledInstructionOriginal { &self.0 } }
tMeta>) -> Self { let underlying_accounts: Vec<AccountMetaOriginal> = accounts.into_iter().map(|x| x.0).collect(); let underlying = InstructionOriginal::new_with_bytes(program_id.into(), data, underlying_accounts); underlying.into() }
function_block-function_prefixed
[ { "content": "#[pyfunction]\n\npub fn transfer_many(from_pubkey: &Pubkey, to_lamports: Vec<(Pubkey, u64)>) -> Vec<Instruction> {\n\n let to_lamports_converted: Vec<(PubkeyOriginal, u64)> = to_lamports\n\n .into_iter()\n\n .map(|x| (PubkeyOriginal::from(x.0), x.1))\n\n .collect();\n\n ...
Rust
alvr/settings-schema-derive/src/lib.rs
SonicZY/ALVR
4beff45eec4af6f0683439948d0e1091ce7130c0
mod higher_order; mod ty; use darling::{ast::Fields, util::Flag, FromDeriveInput, FromField, FromMeta, FromVariant}; use proc_macro::TokenStream; use proc_macro2::TokenStream as TokenStream2; use quote::{quote, ToTokens}; use std::string::ToString; use syn::{DeriveInput, Error, Ident, Lit, Type, Visibility}; type TResult<T = TokenStream2> = Result<T, TokenStream>; fn error<T, TT: ToTokens>(message: &str, tokens: TT) -> TResult<T> { Err( Error::new_spanned(tokens, format!("[SettingsSchema] {}", message)) .to_compile_error() .into(), ) } fn suffix_ident(ty_ident: &Ident, suffix: &str) -> Ident { Ident::new( &format!("{}{}", ty_ident.to_string(), suffix), ty_ident.span(), ) } #[derive(FromField)] #[darling(attributes(schema))] struct FieldMeta { vis: Visibility, ident: Option<Ident>, ty: Type, #[darling(multiple)] #[darling(rename = "placeholder")] placeholders: Vec<String>, #[darling(multiple)] higher_order: Vec<higher_order::HigherOrderSetting>, #[darling(default)] advanced: Flag, #[darling(default)] switch_advanced: Flag, #[darling(default)] min: Option<Lit>, #[darling(default)] max: Option<Lit>, #[darling(default)] step: Option<Lit>, #[darling(default)] gui: Option<ty::NumericGuiType>, } #[derive(FromMeta)] enum ChoiceControlType { Dropdown, ButtonGroup, } #[derive(FromVariant)] #[darling(attributes(schema), supports(unit, newtype, named))] struct VariantMeta { ident: Ident, fields: darling::ast::Fields<FieldMeta>, } #[derive(FromDeriveInput)] #[darling(attributes(schema), supports(struct_named, enum_any))] struct DeriveInputMeta { data: darling::ast::Data<VariantMeta, FieldMeta>, #[darling(default)] gui: Option<ChoiceControlType>, } struct SchemaData { default_fields_ts: TokenStream2, schema_code_ts: TokenStream2, aux_objects_ts: Option<TokenStream2>, } fn named_fields_schema(meta: Vec<FieldMeta>) -> TResult<SchemaData> { let mut vis = vec![]; let mut idents = vec![]; let mut tys_ts = vec![]; let mut keys = vec![]; let mut entry_types_ts = vec![]; for meta in 
meta { for ph in &meta.placeholders { keys.push(ph.clone()); entry_types_ts.push(quote!(settings_schema::EntryType::Placeholder)) } for setting in &meta.higher_order { let higher_order::Entry { key, entry_type_ts } = higher_order::schema(setting)?; keys.push(key); entry_types_ts.push(entry_type_ts); } let ident = meta.ident.as_ref().unwrap().clone(); let advanced = meta.advanced.is_some(); let ty::SchemaData { default_ty_ts, schema_code_ts, } = ty::schema(&meta.ty, &meta)?; vis.push(meta.vis); idents.push(ident.clone()); tys_ts.push(default_ty_ts); keys.push(ident.to_string()); entry_types_ts.push(quote!( EntryType::Data(EntryData { advanced: #advanced, content: { let default = default.#ident; #schema_code_ts } }) )); } Ok(SchemaData { default_fields_ts: quote!(#(#vis #idents: #tys_ts,)*), schema_code_ts: quote!(SchemaNode::Section( vec![#((#keys.into(), #entry_types_ts)),*] )), aux_objects_ts: None, }) } fn variants_schema( gui_type: Option<ChoiceControlType>, vis: &Visibility, ident: &Ident, meta: Vec<VariantMeta>, ) -> TResult<SchemaData> { let mut variants = vec![]; let mut data_variants = vec![]; let mut data_tys_ts = vec![]; let mut keys = vec![]; let mut entry_data_ts = vec![]; let mut aux_variants_structs_ts = vec![]; let gui_ts = match gui_type { None => quote!(None), Some(ChoiceControlType::Dropdown) => { quote!(Some(ChoiceControlType::Dropdown)) } Some(ChoiceControlType::ButtonGroup) => { quote!(Some(ChoiceControlType::ButtonGroup)) } }; for meta in meta { let variant_ident = meta.ident; let snake_case_variant_ident = Ident::new(&variant_ident.to_string(), variant_ident.span()); variants.push(variant_ident.clone()); keys.push(variant_ident.to_string()); match meta.fields.style { darling::ast::Style::Tuple => { let field_meta = &meta.fields.fields[0]; if !field_meta.higher_order.is_empty() { error( "'higher_order' attributes not supported in this position", &variant_ident, )?; } if !field_meta.placeholders.is_empty() { error( "'placeholder' attributes not 
supported in this position", &variant_ident, )?; } let advanced = field_meta.advanced.is_some(); let ty::SchemaData { default_ty_ts, schema_code_ts, } = ty::schema(&field_meta.ty, &field_meta)?; data_variants.push(snake_case_variant_ident.clone()); data_tys_ts.push(default_ty_ts); entry_data_ts.push(quote!(Some(settings_schema::EntryData { advanced: #advanced, content: { let default = default.#snake_case_variant_ident; #schema_code_ts } }))); } darling::ast::Style::Struct => { let default_ty_ts = suffix_ident(&suffix_ident(ident, &variant_ident.to_string()), "Default") .to_token_stream(); let SchemaData { default_fields_ts, schema_code_ts, .. } = named_fields_schema(meta.fields.fields)?; data_variants.push(snake_case_variant_ident.clone()); data_tys_ts.push(default_ty_ts.clone()); entry_data_ts.push(quote!(Some(settings_schema::EntryData { advanced: false, content: { let default = default.#snake_case_variant_ident; #schema_code_ts } }))); aux_variants_structs_ts.push(quote! { #[derive( settings_schema::Serialize, settings_schema::Deserialize, Clone, PartialEq )] #vis struct #default_ty_ts { #default_fields_ts } }); } darling::ast::Style::Unit => { entry_data_ts.push(quote!(None)); } } } let default_variant_ty = suffix_ident(&ident, "DefaultVariant"); Ok(SchemaData { default_fields_ts: quote! { #(#vis #data_variants: #data_tys_ts,)* #vis variant: #default_variant_ty, }, schema_code_ts: quote!(SchemaNode::Choice(SchemaChoice { default: settings_schema::to_json_value(default.variant) .unwrap() .as_str() .unwrap() .into(), variants: vec![#((#keys.into(), #entry_data_ts)),*], gui: #gui_ts })), aux_objects_ts: Some(quote! 
{ #(#aux_variants_structs_ts)* #[derive(settings_schema::Serialize, settings_schema::Deserialize, Clone, PartialEq)] #vis enum #default_variant_ty { #(#variants,)* } }), }) } fn schema(derive_input: DeriveInput) -> TResult { if !derive_input.generics.params.is_empty() { return error("Generics not supported", &derive_input.generics); } let meta: DeriveInputMeta = FromDeriveInput::from_derive_input(&derive_input).map_err(|e| e.write_errors())?; let gui_type = meta.gui; let vis = derive_input.vis; let derive_input_ident = derive_input.ident; let default_ty_ident = suffix_ident(&derive_input_ident, "Default"); let SchemaData { default_fields_ts, schema_code_ts, aux_objects_ts, } = match meta.data { darling::ast::Data::Enum(variants) => { variants_schema(gui_type, &vis, &derive_input_ident, variants)? } darling::ast::Data::Struct(Fields { fields, .. }) => named_fields_schema(fields)?, }; Ok(quote! { #aux_objects_ts #[allow(non_snake_case)] #[derive(settings_schema::Serialize, settings_schema::Deserialize, Clone, PartialEq)] #vis struct #default_ty_ident { #default_fields_ts } impl #derive_input_ident { #vis fn schema(default: #default_ty_ident) -> settings_schema::SchemaNode { use settings_schema::*; #schema_code_ts } } }) } #[proc_macro_derive(SettingsSchema, attributes(schema))] pub fn create_settings_schema_fn_and_default_ty(input: TokenStream) -> TokenStream { let input = syn::parse_macro_input!(input as DeriveInput); match schema(input) { Ok(tokens) => tokens.into(), Err(e) => e, } }
mod higher_order; mod ty; use darling::{ast::Fields, util::Flag, FromDeriveInput, FromField, FromMeta, FromVariant}; use proc_macro::TokenStream; use proc_macro2::TokenStream as TokenStream2; use quote::{quote, ToTokens}; use std::string::ToString; use syn::{DeriveInput, Error, Ident, Lit, Type, Visibility}; type TResult<T = TokenStream2> = Result<T, TokenStream>; fn error<T, TT: ToTokens>(message: &str, tokens: TT) -> TResult<T> { Err( Error::new_spanned(tokens, format!("[SettingsSchema] {}", message)) .to_compile_error() .into(), ) } fn suffix_ident(ty_ident: &Ident, suffix: &str) -> Ident { Ident::new( &format!("{}{}", ty_ident.to_string(), suffix), ty_ident.span(), ) } #[derive(FromField)] #[darling(attributes(schema))] struct FieldMeta { vis: Visibility, ident: Option<Ident>, ty: Type, #[darling(multiple)] #[darling(rename = "placeholder")] placeholders: Vec<String>, #[darling(multiple)] higher_order: Vec<higher_order::HigherOrderSetting>, #[darling(default)] advanced: Flag, #[darling(default)] switch_advanced: Flag, #[darling(default)] min: Option<Lit>, #[darling(default)] max: Option<Lit>, #[darling(default)] step: Option<Lit>, #[darling(default)] gui: Option<ty::NumericGuiType>, } #[derive(FromMeta)] enum ChoiceControlType { Dropdown, ButtonGroup, } #[derive(FromVariant)] #[darling(attributes(schema), supports(unit, newtype, named))] struct VariantMeta { ident: Ident, fields: darling::ast::Fields<FieldMeta>, } #[derive(FromDeriveInput)] #[darling(attributes(schema), supports(struct_named, enum_any))] struct DeriveInputMeta { data: darling::ast::Data<VariantMeta, FieldMeta>, #[darling(default)] gui: Option<ChoiceControlType>, } struct SchemaData { default_fields_ts: TokenStream2, schema_code_ts: TokenStream2, aux_objects_ts: Option<TokenStream2>, } fn named_fields_schema(meta: Vec<FieldMeta>) -> TResult<SchemaData> { let mut vis = vec![]; let mut idents = vec![]; let mut tys_ts = vec![]; let mut keys = vec![]; let mut entry_types_ts = vec![]; for meta in 
meta { for ph in &meta.placeholders { keys.push(ph.clone()); entry_types_ts.push(quote!(settings_schema::EntryType::Placeholder)) } for setting in &meta.higher_order { let higher_order::Entry { key, entry_type_ts } = higher_order::schema(setting)?; keys.push(key); entry_types_ts.push(entry_type_ts); } let ident = meta.ident.as_ref().unwrap().clone(); let advanced = meta.advanced.is_some(); let ty::SchemaData { default_ty_ts, schema_code_ts, } = ty::schema(&meta.ty, &meta)?; vis.push(meta.vis); idents.push(ident.clone()); tys_ts.push(default_ty_ts); keys.push(ident.to_string()); entry_types_ts.push(quote!( EntryType::Data(EntryData { advanced: #advanced, content: { let default = default.#ident; #schema_code_ts } }) )); } Ok(SchemaData { default_fields_ts: quote!(#(#vis #idents: #tys_ts,)*), schema_code_ts: quote!(SchemaNode::Section( vec![#((#keys.into(), #entry_types_ts)),*] )), aux_objects_ts: None, }) } fn variants_schema( gui_type: Option<ChoiceControlType>, vis: &Visibility, ident: &Ident, meta: Vec<VariantMeta>, ) -> TResult<SchemaData> { let mut variants = vec![]; let mut data_variants = vec![]; let mut data_tys_ts = vec![]; let mut keys = vec![]; let mut entry_data_ts = vec![]; let mut aux_variants_structs_ts = vec![]; let gui_ts = match gui_type { None => quote!(None), Some(ChoiceControlType::Dropdown) => { quote!(Some(ChoiceControlType::Dropdown)) } Some(ChoiceControlType::ButtonGroup) => { quote!(Some(ChoiceControlType::ButtonGroup)) } }; for meta in meta { let variant_ident = meta.ident; let snake_case_variant_ident = Ident::new(&variant_ident.to_string(), variant_ident.span()); variants.push(variant_ident.clone()); keys.push(variant_ident.to_string()); match meta.fields.style { darling::ast::Style::Tuple => { let field_meta = &meta.fields.fields[0]; if !field_meta.higher_order.is_empty() { error( "'higher_order' attributes not supported in this position", &variant_ident, )?; } if !field_meta.placeholders.is_empty() { error( "'placeholder' attributes not 
supported in this position", &variant_ident, )?; } let advanced = field_meta.advanced.is_some(); let ty::SchemaData { default_ty_ts, schema_code_ts, } = ty::schema(&field_meta.ty, &field_meta)?; data_variants.push(snake_case_variant_ident.clone()); data_tys_ts.push(default_ty_ts); entry_data_ts.push(quote!(Some(settings_schema::EntryData { advanced: #advanced, content: { let default = default.#snake_case_variant_ident; #schema_code_ts } }))); }
fn schema(derive_input: DeriveInput) -> TResult { if !derive_input.generics.params.is_empty() { return error("Generics not supported", &derive_input.generics); } let meta: DeriveInputMeta = FromDeriveInput::from_derive_input(&derive_input).map_err(|e| e.write_errors())?; let gui_type = meta.gui; let vis = derive_input.vis; let derive_input_ident = derive_input.ident; let default_ty_ident = suffix_ident(&derive_input_ident, "Default"); let SchemaData { default_fields_ts, schema_code_ts, aux_objects_ts, } = match meta.data { darling::ast::Data::Enum(variants) => { variants_schema(gui_type, &vis, &derive_input_ident, variants)? } darling::ast::Data::Struct(Fields { fields, .. }) => named_fields_schema(fields)?, }; Ok(quote! { #aux_objects_ts #[allow(non_snake_case)] #[derive(settings_schema::Serialize, settings_schema::Deserialize, Clone, PartialEq)] #vis struct #default_ty_ident { #default_fields_ts } impl #derive_input_ident { #vis fn schema(default: #default_ty_ident) -> settings_schema::SchemaNode { use settings_schema::*; #schema_code_ts } } }) } #[proc_macro_derive(SettingsSchema, attributes(schema))] pub fn create_settings_schema_fn_and_default_ty(input: TokenStream) -> TokenStream { let input = syn::parse_macro_input!(input as DeriveInput); match schema(input) { Ok(tokens) => tokens.into(), Err(e) => e, } }
darling::ast::Style::Struct => { let default_ty_ts = suffix_ident(&suffix_ident(ident, &variant_ident.to_string()), "Default") .to_token_stream(); let SchemaData { default_fields_ts, schema_code_ts, .. } = named_fields_schema(meta.fields.fields)?; data_variants.push(snake_case_variant_ident.clone()); data_tys_ts.push(default_ty_ts.clone()); entry_data_ts.push(quote!(Some(settings_schema::EntryData { advanced: false, content: { let default = default.#snake_case_variant_ident; #schema_code_ts } }))); aux_variants_structs_ts.push(quote! { #[derive( settings_schema::Serialize, settings_schema::Deserialize, Clone, PartialEq )] #vis struct #default_ty_ts { #default_fields_ts } }); } darling::ast::Style::Unit => { entry_data_ts.push(quote!(None)); } } } let default_variant_ty = suffix_ident(&ident, "DefaultVariant"); Ok(SchemaData { default_fields_ts: quote! { #(#vis #data_variants: #data_tys_ts,)* #vis variant: #default_variant_ty, }, schema_code_ts: quote!(SchemaNode::Choice(SchemaChoice { default: settings_schema::to_json_value(default.variant) .unwrap() .as_str() .unwrap() .into(), variants: vec![#((#keys.into(), #entry_data_ts)),*], gui: #gui_ts })), aux_objects_ts: Some(quote! { #(#aux_variants_structs_ts)* #[derive(settings_schema::Serialize, settings_schema::Deserialize, Clone, PartialEq)] #vis enum #default_variant_ty { #(#variants,)* } }), }) }
function_block-function_prefix_line
[ { "content": "fn custom_leaf_type_schema(ty_ident: &Ident, field: &FieldMeta) -> TResult {\n\n forbid_numeric_attrs(field, \"custom\")?;\n\n\n\n Ok(quote!(#ty_ident::schema(default)))\n\n}\n\n\n\n// Generate a default representation type and corresponding schema instantiation code.\n\n// This function cal...
Rust
src/lib.rs
wngr/libp2p-maybe-transport
395f519d8b9040c050c1441c8f97b36856a49fe1
#![allow(clippy::type_complexity)] use std::{fmt, marker::PhantomData, sync::Arc}; use futures::{ channel::mpsc, future::{self, BoxFuture}, stream::{self, BoxStream}, FutureExt, StreamExt, TryFutureExt, TryStreamExt, }; use libp2p::{ core::{either::EitherOutput, transport::ListenerEvent}, Multiaddr, Transport, TransportError, }; use parking_lot::Mutex; pub struct CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Error: Send + 'static, TBase::Output: 'static, { base: TBase, outer: TOuter, construct_outer: fn(ProxyTransport<TBase>) -> TOuter, proxy: ProxyTransport<TBase>, try_upgrade: MaybeUpgrade<TBase>, map_base_addr_to_outer: fn(Multiaddr) -> Multiaddr, } impl<TBase, TOuter> CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Error: Send + 'static, TBase::Output: 'static, { pub fn new( base: TBase, construct_outer: fn(ProxyTransport<TBase>) -> TOuter, try_upgrade: MaybeUpgrade<TBase>, map_base_addr_to_outer: fn(Multiaddr) -> Multiaddr, ) -> Self { let proxy = ProxyTransport::<TBase>::new(base.clone()); let mut proxy_clone = proxy.clone(); proxy_clone.pending = proxy.pending.clone(); let outer = construct_outer(proxy_clone); Self { base, proxy, outer, construct_outer, try_upgrade, map_base_addr_to_outer, } } } impl<TBase, TOuter> Clone for CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Error: Send + 'static, TBase::Output: 'static, { fn clone(&self) -> Self { Self::new( self.base.clone(), self.construct_outer, self.try_upgrade, self.map_base_addr_to_outer, ) } } type MaybeUpgrade<TBase> = fn( <TBase as Transport>::Output, ) -> BoxFuture<'static, Result<<TBase as Transport>::Output, <TBase as Transport>::Output>>; #[derive(Debug, Copy, Clone)] pub enum CombinedError<Base, Outer> { UpgradedToOuterTransport, Base(Base), Outer(Outer), } impl<A, B> fmt::Display for CombinedError<A, B> where A: fmt::Display, B: fmt::Display, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { 
CombinedError::Base(a) => a.fmt(f), CombinedError::Outer(b) => b.fmt(f), CombinedError::UpgradedToOuterTransport => write!(f, "Upgraded to outer transport"), } } } impl<A, B> std::error::Error for CombinedError<A, B> where A: std::error::Error, B: std::error::Error, { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { match self { CombinedError::Base(a) => a.source(), CombinedError::Outer(b) => b.source(), CombinedError::UpgradedToOuterTransport => None, } } } impl<TBase, TOuter> Transport for CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Listener: Send + 'static, TBase::ListenerUpgrade: Send + 'static, TBase::Error: Send + 'static, TBase::Output: Send + 'static, TBase::Dial: Send + 'static, TOuter: Transport, TOuter::Listener: Send + 'static, TOuter::ListenerUpgrade: Send + 'static, TOuter::Error: 'static, TOuter::Output: 'static, TOuter::Dial: Send + 'static, { type Output = EitherOutput<TBase::Output, TOuter::Output>; type Error = CombinedError<TBase::Error, TOuter::Error>; type Listener = BoxStream<'static, Result<ListenerEvent<Self::ListenerUpgrade, Self::Error>, Self::Error>>; type ListenerUpgrade = BoxFuture<'static, Result<Self::Output, Self::Error>>; type Dial = BoxFuture<'static, Result<Self::Output, Self::Error>>; fn listen_on( self, addr: libp2p::Multiaddr, ) -> Result<Self::Listener, libp2p::TransportError<Self::Error>> where Self: Sized, { let base_listener = self .base .listen_on(addr.clone()) .map_err(|e| e.map(CombinedError::Base))?; let (mut tx, rx) = mpsc::channel(256); let x = self.proxy.pending.lock().replace(rx); debug_assert!(x.is_none()); let outer_listener = self .outer .listen_on((self.map_base_addr_to_outer)(addr)) .map_err(|e| e.map(CombinedError::Outer))?; debug_assert!(self.proxy.pending.lock().is_none()); let upgrader = self.try_upgrade; let combined_listener = stream::select( base_listener .map_ok(move |ev| { let cloned = match &ev { ListenerEvent::NewAddress(a) => 
Some(ListenerEvent::NewAddress(a.clone())), ListenerEvent::AddressExpired(a) => { Some(ListenerEvent::AddressExpired(a.clone())) } ListenerEvent::Error(_) => None, ListenerEvent::Upgrade { .. } => None, }; if let Some(ev) = cloned { tx.start_send(ev).unwrap(); } let ev = match ev { ListenerEvent::Upgrade { upgrade, local_addr, remote_addr, } => { let local_addr_c = local_addr.clone(); let remote_addr_c = remote_addr.clone(); let mut tx_c = tx.clone(); let upgrade = async move { match upgrade.await { Ok(u) => { match upgrader(u).await { Ok(u) => { tx_c.start_send(ListenerEvent::Upgrade { upgrade: future::ok(u).boxed(), local_addr: local_addr_c, remote_addr: remote_addr_c, }) .expect("Out of sync with proxy"); Err(CombinedError::UpgradedToOuterTransport) } Err(u) => { Ok(EitherOutput::First(u)) } } } Err(e) => Err(CombinedError::Base(e)), } } .boxed(); ListenerEvent::Upgrade { local_addr, remote_addr, upgrade, } } ListenerEvent::NewAddress(a) => ListenerEvent::NewAddress(a), ListenerEvent::AddressExpired(a) => ListenerEvent::AddressExpired(a), ListenerEvent::Error(e) => ListenerEvent::Error(e), }; ev.map_err(CombinedError::Base) }) .map_err(CombinedError::Base) .boxed(), outer_listener .map_ok(|ev| { ev.map(|upgrade_fut| { upgrade_fut .map_ok(EitherOutput::Second) .map_err(CombinedError::Outer) .boxed() }) .map_err(CombinedError::Outer) }) .map_err(CombinedError::Outer) .boxed(), ) .boxed(); Ok(combined_listener) } fn dial( self, addr: libp2p::Multiaddr, ) -> Result<Self::Dial, libp2p::TransportError<Self::Error>> where Self: Sized, { let addr = match self.outer.dial(addr) { Ok(connec) => { return Ok(connec .map_ok(EitherOutput::Second) .map_err(CombinedError::Outer) .boxed()) } Err(TransportError::MultiaddrNotSupported(addr)) => addr, Err(TransportError::Other(err)) => { return Err(TransportError::Other(CombinedError::Outer(err))) } }; let addr = match self.base.dial(addr) { Ok(connec) => { return Ok(connec .map_ok(EitherOutput::First) .map_err(CombinedError::Base) 
.boxed()) } Err(TransportError::MultiaddrNotSupported(addr)) => addr, Err(TransportError::Other(err)) => { return Err(TransportError::Other(CombinedError::Base(err))) } }; Err(TransportError::MultiaddrNotSupported(addr)) } fn address_translation( &self, listen: &libp2p::Multiaddr, observed: &libp2p::Multiaddr, ) -> Option<libp2p::Multiaddr> { self.outer .address_translation(listen, observed) .or_else(|| self.base.address_translation(listen, observed)) } } pub struct ProxyTransport<TBase> where Self: Transport, { _marker: PhantomData<TBase>, pub(crate) pending: Arc< Mutex< Option< mpsc::Receiver< ListenerEvent<<Self as Transport>::ListenerUpgrade, <Self as Transport>::Error>, >, >, >, >, base: TBase, } impl<TBase> Clone for ProxyTransport<TBase> where TBase: Transport + Clone, TBase::Output: 'static, TBase::Error: Send + 'static, { fn clone(&self) -> Self { Self { _marker: Default::default(), pending: Default::default(), base: self.base.clone(), } } } impl<TBase> ProxyTransport<TBase> where TBase: Transport + Clone, TBase::Output: 'static, TBase::Error: Send + 'static, { fn new(base: TBase) -> Self { Self { pending: Default::default(), _marker: Default::default(), base, } } } impl<TBase> Transport for ProxyTransport<TBase> where TBase: Transport + Clone, TBase::Output: 'static, TBase::Error: Send + 'static, { type Output = TBase::Output; type Error = TBase::Error; type Listener = BoxStream<'static, Result<ListenerEvent<Self::ListenerUpgrade, Self::Error>, Self::Error>>; type ListenerUpgrade = BoxFuture<'static, Result<Self::Output, Self::Error>>; type Dial = TBase::Dial; fn listen_on( self, _addr: libp2p::Multiaddr, ) -> Result<Self::Listener, libp2p::TransportError<Self::Error>> where Self: Sized, { let listener = self .pending .lock() .take() .expect("Only called after successful base listen"); Ok(listener.map(Ok).boxed()) } fn dial( self, addr: libp2p::Multiaddr, ) -> Result<Self::Dial, libp2p::TransportError<Self::Error>> where Self: Sized, { 
self.base.dial(addr) } fn address_translation( &self, listen: &libp2p::Multiaddr, observed: &libp2p::Multiaddr, ) -> Option<libp2p::Multiaddr> { self.base.address_translation(listen, observed) } }
#![allow(clippy::type_complexity)] use std::{fmt, marker::PhantomData, sync::Arc}; use futures::{ channel::mpsc, future::{self, BoxFuture}, stream::{self, BoxStream}, FutureExt, StreamExt, TryFutureExt, TryStreamExt, }; use libp2p::{ core::{either::EitherOutput, transport::ListenerEvent}, Multiaddr, Transport, TransportError, }; use parking_lot::Mutex; pub struct CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Error: Send + 'static, TBase::Output: 'static, { base: TBase, outer: TOuter, construct_outer: fn(ProxyTransport<TBase>) -> TOuter, proxy: ProxyTransport<TBase>, try_upgrade: MaybeUpgrade<TBase>, map_base_addr_to_outer: fn(Multiaddr) -> Multiaddr, } impl<TBase, TOuter> CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Error: Send + 'static, TBase::Output: 'static, { pub fn new( base: TBase, construct_outer: fn(ProxyTransport<TBase>) -> TOuter, try_upgrade: MaybeUpgrade<TBase>, map_base_addr_to_outer: fn(Multiaddr) -> Multiaddr, ) -> Self { let proxy = ProxyTransport::<TBase>::new(base.clone()); let mut proxy_clone = proxy.clone(); proxy_clone.pending = proxy.pending.clone(); let outer = construct_outer(proxy_clone); Self { base, proxy, outer, construct_outer, try_upgrade, map_base_addr_to_outer, } } } impl<TBase, TOuter> Clone for CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Error: Send + 'static, TBase::Output: 'static, { fn clone(&self) -> Self { Self::new( self.base.clone(), self.construct_outer, self.try_upgrade, self.map_base_addr_to_outer, ) } } type MaybeUpgrade<TBase> = fn( <TBase as Transport>::Output, ) -> BoxFuture<'static, Result<<TBase as Transport>::Output, <TBase as Transport>::Output>>; #[derive(Debug, Copy, Clone)] pub enum CombinedError<Base, Outer> { UpgradedToOuterTransport, Base(Base), Outer(Outer), } impl<A, B> fmt::Display for CombinedError<A, B> where A: fmt::Display, B: fmt::Display, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { 
CombinedError::Base(a) => a.fmt(f), CombinedError::Outer(b) => b.fmt(f), CombinedError::UpgradedToOuterTransport => write!(f, "Upgraded to outer transport"), } } } impl<A, B> std::error::Error for CombinedError<A, B> where A: std::error::Error, B: std::error::Error, { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { match self { CombinedError::Base(a) => a.source(), CombinedError::Outer(b) => b.source(), CombinedError::UpgradedToOuterTransport => None, } } } impl<TBase, TOuter> Transport for CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Listener: Send + 'static, TBase::ListenerUpgrade: Send + 'static, TBase::Error: Send + 'static, TBase::Output: Send + 'static, TBase::Dial: Send + 'static, TOuter: Transport, TOuter::Listener: Send + 'static, TOuter::ListenerUpgrade: Send + 'static, TOuter::Error: 'static, TOuter::Output: 'static, TOuter::Dial: Send + 'static, { type Output = EitherOutput<TBase::Output, TOuter::Output>; type Error = CombinedError<TBase::Error, TOuter::Error>; type Listener = BoxStream<'static, Result<ListenerEvent<Self::ListenerUpgrade, Self::Error>, Self::Error>>; type ListenerUpgrade = BoxFuture<'static, Result<Self::Output, Self::Error>>; type Dial = BoxFuture<'static, Result<Self::Output, Self::Error>>; fn listen_on( self, addr: libp2p::Multiaddr, ) -> Result<Self::Listener, libp2p::TransportError<Self::Error>> where Self: Sized, { let base_listener = self .base .listen_on(addr.clone()) .map_err(|e| e.map(CombinedError::Base))?; let (mut tx, rx) = mpsc::channel(256); let x = self.proxy.pending.lock().replace(rx); debug_assert!(x.is_none()); let outer_listener = self .outer .listen_on((self.map_base_addr_to_outer)(addr)) .map_err(|e| e.map(CombinedError::Outer))?; debug_assert!(self.proxy.pending.lock().is_none()); let upgrader = self.try_upgrade; let combined_listener = stream::select( base_listener .map_ok(move |ev| { let cloned =
; if let Some(ev) = cloned { tx.start_send(ev).unwrap(); } let ev = match ev { ListenerEvent::Upgrade { upgrade, local_addr, remote_addr, } => { let local_addr_c = local_addr.clone(); let remote_addr_c = remote_addr.clone(); let mut tx_c = tx.clone(); let upgrade = async move { match upgrade.await { Ok(u) => { match upgrader(u).await { Ok(u) => { tx_c.start_send(ListenerEvent::Upgrade { upgrade: future::ok(u).boxed(), local_addr: local_addr_c, remote_addr: remote_addr_c, }) .expect("Out of sync with proxy"); Err(CombinedError::UpgradedToOuterTransport) } Err(u) => { Ok(EitherOutput::First(u)) } } } Err(e) => Err(CombinedError::Base(e)), } } .boxed(); ListenerEvent::Upgrade { local_addr, remote_addr, upgrade, } } ListenerEvent::NewAddress(a) => ListenerEvent::NewAddress(a), ListenerEvent::AddressExpired(a) => ListenerEvent::AddressExpired(a), ListenerEvent::Error(e) => ListenerEvent::Error(e), }; ev.map_err(CombinedError::Base) }) .map_err(CombinedError::Base) .boxed(), outer_listener .map_ok(|ev| { ev.map(|upgrade_fut| { upgrade_fut .map_ok(EitherOutput::Second) .map_err(CombinedError::Outer) .boxed() }) .map_err(CombinedError::Outer) }) .map_err(CombinedError::Outer) .boxed(), ) .boxed(); Ok(combined_listener) } fn dial( self, addr: libp2p::Multiaddr, ) -> Result<Self::Dial, libp2p::TransportError<Self::Error>> where Self: Sized, { let addr = match self.outer.dial(addr) { Ok(connec) => { return Ok(connec .map_ok(EitherOutput::Second) .map_err(CombinedError::Outer) .boxed()) } Err(TransportError::MultiaddrNotSupported(addr)) => addr, Err(TransportError::Other(err)) => { return Err(TransportError::Other(CombinedError::Outer(err))) } }; let addr = match self.base.dial(addr) { Ok(connec) => { return Ok(connec .map_ok(EitherOutput::First) .map_err(CombinedError::Base) .boxed()) } Err(TransportError::MultiaddrNotSupported(addr)) => addr, Err(TransportError::Other(err)) => { return Err(TransportError::Other(CombinedError::Base(err))) } }; 
Err(TransportError::MultiaddrNotSupported(addr)) } fn address_translation( &self, listen: &libp2p::Multiaddr, observed: &libp2p::Multiaddr, ) -> Option<libp2p::Multiaddr> { self.outer .address_translation(listen, observed) .or_else(|| self.base.address_translation(listen, observed)) } } pub struct ProxyTransport<TBase> where Self: Transport, { _marker: PhantomData<TBase>, pub(crate) pending: Arc< Mutex< Option< mpsc::Receiver< ListenerEvent<<Self as Transport>::ListenerUpgrade, <Self as Transport>::Error>, >, >, >, >, base: TBase, } impl<TBase> Clone for ProxyTransport<TBase> where TBase: Transport + Clone, TBase::Output: 'static, TBase::Error: Send + 'static, { fn clone(&self) -> Self { Self { _marker: Default::default(), pending: Default::default(), base: self.base.clone(), } } } impl<TBase> ProxyTransport<TBase> where TBase: Transport + Clone, TBase::Output: 'static, TBase::Error: Send + 'static, { fn new(base: TBase) -> Self { Self { pending: Default::default(), _marker: Default::default(), base, } } } impl<TBase> Transport for ProxyTransport<TBase> where TBase: Transport + Clone, TBase::Output: 'static, TBase::Error: Send + 'static, { type Output = TBase::Output; type Error = TBase::Error; type Listener = BoxStream<'static, Result<ListenerEvent<Self::ListenerUpgrade, Self::Error>, Self::Error>>; type ListenerUpgrade = BoxFuture<'static, Result<Self::Output, Self::Error>>; type Dial = TBase::Dial; fn listen_on( self, _addr: libp2p::Multiaddr, ) -> Result<Self::Listener, libp2p::TransportError<Self::Error>> where Self: Sized, { let listener = self .pending .lock() .take() .expect("Only called after successful base listen"); Ok(listener.map(Ok).boxed()) } fn dial( self, addr: libp2p::Multiaddr, ) -> Result<Self::Dial, libp2p::TransportError<Self::Error>> where Self: Sized, { self.base.dial(addr) } fn address_translation( &self, listen: &libp2p::Multiaddr, observed: &libp2p::Multiaddr, ) -> Option<libp2p::Multiaddr> { self.base.address_translation(listen, 
observed) } }
match &ev { ListenerEvent::NewAddress(a) => Some(ListenerEvent::NewAddress(a.clone())), ListenerEvent::AddressExpired(a) => { Some(ListenerEvent::AddressExpired(a.clone())) } ListenerEvent::Error(_) => None, ListenerEvent::Upgrade { .. } => None, }
if_condition
[ { "content": "fn maybe_upgrade(r: TcpStream) -> BoxFuture<'static, Result<TcpStream, TcpStream>> {\n\n async move {\n\n let mut buffer = [0; 3];\n\n if r.0.peek(&mut buffer).await.is_ok() && buffer == *b\"GET\" {\n\n println!(\"It's probably HTTP\");\n\n Ok(r)\n\n }...
Rust
src/gfx.rs
1HPorange/maboy
e8dc60b776edc95d71c234d641aceb3ab0ce1bab
use super::hresult_error::*; use super::window::Window; use maboy::MemPixel; use std::marker::PhantomData; use std::mem::MaybeUninit; use std::pin::Pin; use std::ptr; use winapi::shared::dxgi::*; use winapi::shared::dxgiformat::*; use winapi::shared::minwindef::*; use winapi::shared::winerror::*; use winapi::shared::{dxgi1_2::*, dxgitype::*}; use winapi::um::d3d11::*; use winapi::um::d3dcommon::*; use winapi::um::unknwnbase::IUnknown; use winapi::Interface; use wio::com::ComPtr; pub struct GfxDevice { d: ComPtr<ID3D11Device>, dc: ComPtr<ID3D11DeviceContext>, dxgi_factory: ComPtr<IDXGIFactory2>, } impl GfxDevice { pub fn new() -> Result<GfxDevice, HResultError> { unsafe { let mut flags = D3D11_CREATE_DEVICE_SINGLETHREADED; if cfg!(debug_assertions) { flags |= D3D11_CREATE_DEVICE_DEBUG; } let mut d = ptr::null_mut(); let mut dc = ptr::null_mut(); D3D11CreateDevice( ptr::null_mut(), D3D_DRIVER_TYPE_HARDWARE, ptr::null_mut(), flags, ptr::null(), 0, D3D11_SDK_VERSION, &mut d, ptr::null_mut(), &mut dc, ) .into_result()?; let d = ComPtr::from_raw(d); let dc = ComPtr::from_raw(dc); let mut dxgi_device = ptr::null_mut(); d.QueryInterface(&IDXGIDevice2::uuidof(), &mut dxgi_device) .into_result()?; let dxgi_device = ComPtr::from_raw(dxgi_device as *mut IDXGIDevice2); let mut dxgi_adapter = ptr::null_mut(); dxgi_device.GetAdapter(&mut dxgi_adapter).into_result()?; let dxgi_adapter = ComPtr::from_raw(dxgi_adapter as *mut IDXGIAdapter2); let mut dxgi_factory = ptr::null_mut(); dxgi_adapter .GetParent(&IDXGIFactory2::uuidof(), &mut dxgi_factory) .into_result()?; let dxgi_factory = ComPtr::from_raw(dxgi_factory as *mut IDXGIFactory2); Ok(GfxDevice { d, dc, dxgi_factory, }) } } pub fn create_gfx_window<I: Into<Option<u32>>>( &self, window: &Pin<Box<Window>>, width: I, height: I, ) -> Result<GfxWindow, HResultError> { unsafe { let scd = DXGI_SWAP_CHAIN_DESC1 { Width: width.into().unwrap_or(0), Height: height.into().unwrap_or(0), Format: DXGI_FORMAT_R8G8B8A8_UNORM, Stereo: FALSE, 
SampleDesc: DXGI_SAMPLE_DESC { Count: 1, Quality: 0, }, BufferUsage: DXGI_USAGE_RENDER_TARGET_OUTPUT, BufferCount: 2, Scaling: DXGI_SCALING_STRETCH, SwapEffect: DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL, AlphaMode: DXGI_ALPHA_MODE_UNSPECIFIED, Flags: DXGI_SWAP_CHAIN_FLAG_ALLOW_TEARING, }; let mut swap_chain = ptr::null_mut(); self.dxgi_factory .CreateSwapChainForHwnd( self.d.as_raw() as *mut IUnknown, window.hwnd(), &scd, ptr::null(), ptr::null_mut(), &mut swap_chain, ) .into_result()?; let swap_chain = ComPtr::from_raw(swap_chain); let mut backbuffer = ptr::null_mut(); swap_chain .GetBuffer(0, &ID3D11Texture2D::uuidof(), &mut backbuffer) .into_result()?; let backbuffer = ComPtr::from_raw(backbuffer as *mut ID3D11Texture2D); let mut backbuffer_desc: D3D11_TEXTURE2D_DESC = MaybeUninit::zeroed().assume_init(); backbuffer.GetDesc(&mut backbuffer_desc); let mut viewport: D3D11_VIEWPORT = MaybeUninit::zeroed().assume_init(); viewport.Height = backbuffer_desc.Height as f32; viewport.Width = backbuffer_desc.Width as f32; viewport.MinDepth = 0.0; viewport.MaxDepth = 1.0; let mut backbuffer_rtv = ptr::null_mut(); self.d .CreateRenderTargetView( backbuffer.as_raw() as *mut ID3D11Resource, ptr::null(), &mut backbuffer_rtv, ) .into_result()?; let backbuffer_rtv = ComPtr::from_raw(backbuffer_rtv); Ok(GfxWindow { device_context: self.dc.clone(), swap_chain, backbuffer, backbuffer_rtv, viewport, _window: PhantomData, }) } } } pub struct GfxWindow<'w> { device_context: ComPtr<ID3D11DeviceContext>, swap_chain: ComPtr<IDXGISwapChain1>, backbuffer: ComPtr<ID3D11Texture2D>, backbuffer_rtv: ComPtr<ID3D11RenderTargetView>, viewport: D3D11_VIEWPORT, _window: PhantomData<&'w ()>, } impl<'w> GfxWindow<'w> { pub fn next_frame(&mut self) -> GfxFrame<'_, 'w> { GfxFrame(self) } } pub struct GfxFrame<'a, 'w>(&'a mut GfxWindow<'w>); impl GfxFrame<'_, '_> { pub fn clear(&mut self, color: &[f32; 4]) { unsafe { self.0 .device_context .ClearRenderTargetView(self.0.backbuffer_rtv.as_raw(), color); } } pub fn 
copy_from_slice(&mut self, data: &[MemPixel]) { unsafe { assert_eq!( data.len(), self.0.viewport.Width as usize * self.0.viewport.Height as usize, "Slice does not have the exact number of pixels that the window backbuffer requires" ); self.0.device_context.UpdateSubresource( self.0.backbuffer.as_raw() as *mut ID3D11Resource, 0, ptr::null(), data as *const _ as *const std::ffi::c_void, self.0.viewport.Width as u32 * 4, 0, ); } } pub fn present(self, blocking: bool) -> Result<(), HResultError> { unsafe { let (sync_interval, flags) = if blocking { (1, 0) } else { (0, DXGI_PRESENT_ALLOW_TEARING) }; let result = self .0 .swap_chain .Present(sync_interval, flags) .into_result(); if matches!(result, Err(HResultError(DXGI_ERROR_WAS_STILL_DRAWING))) { return Ok(()); } else { result } } } }
use super::hresult_error::*; use super::window::Window; use maboy::MemPixel; use std::marker::PhantomData; use std::mem::MaybeUninit; use std::pin::Pin; use std::ptr; use winapi::shared::dxgi::*; use winapi::shared::dxgiformat::*; use winapi::shared::minwindef::*; use winapi::shared::winerror::*; use winapi::shared::{dxgi1_2::*, dxgitype::*}; use winapi::um::d3d11::*; use winapi::um::d3dcommon::*; use winapi::um::unknwnbase::IUnknown; use winapi::Interface; use wio::com::ComPtr; pub struct GfxDevice { d: ComPtr<ID3D11Device>, dc: ComPtr<ID3D11DeviceContext>, dxgi_factory: ComPtr<IDXGIFactory2>, } impl GfxDevice { pub fn new() -> Result<GfxDevice, HResultError> { unsafe { let mut flags = D3D11_CREATE_DEVICE_SINGLETHREADED; if cfg!(debug_assertions) { flags |= D3D11_CREATE_DEVICE_DEBUG; } let mut d = ptr::null_mut(); let mut dc = ptr::null_mut(); D3D11CreateDevice( ptr::null_mut(), D3D_DRIVER_TYPE_HARDWARE, ptr::null_mut(), flags, ptr::null(), 0, D3D11_SDK_VERSION, &mut d, ptr::null_mut(), &mut dc, ) .into_result()?; let d = ComPtr::from_raw(d); let d
ckbuffer_rtv, ) .into_result()?; let backbuffer_rtv = ComPtr::from_raw(backbuffer_rtv); Ok(GfxWindow { device_context: self.dc.clone(), swap_chain, backbuffer, backbuffer_rtv, viewport, _window: PhantomData, }) } } } pub struct GfxWindow<'w> { device_context: ComPtr<ID3D11DeviceContext>, swap_chain: ComPtr<IDXGISwapChain1>, backbuffer: ComPtr<ID3D11Texture2D>, backbuffer_rtv: ComPtr<ID3D11RenderTargetView>, viewport: D3D11_VIEWPORT, _window: PhantomData<&'w ()>, } impl<'w> GfxWindow<'w> { pub fn next_frame(&mut self) -> GfxFrame<'_, 'w> { GfxFrame(self) } } pub struct GfxFrame<'a, 'w>(&'a mut GfxWindow<'w>); impl GfxFrame<'_, '_> { pub fn clear(&mut self, color: &[f32; 4]) { unsafe { self.0 .device_context .ClearRenderTargetView(self.0.backbuffer_rtv.as_raw(), color); } } pub fn copy_from_slice(&mut self, data: &[MemPixel]) { unsafe { assert_eq!( data.len(), self.0.viewport.Width as usize * self.0.viewport.Height as usize, "Slice does not have the exact number of pixels that the window backbuffer requires" ); self.0.device_context.UpdateSubresource( self.0.backbuffer.as_raw() as *mut ID3D11Resource, 0, ptr::null(), data as *const _ as *const std::ffi::c_void, self.0.viewport.Width as u32 * 4, 0, ); } } pub fn present(self, blocking: bool) -> Result<(), HResultError> { unsafe { let (sync_interval, flags) = if blocking { (1, 0) } else { (0, DXGI_PRESENT_ALLOW_TEARING) }; let result = self .0 .swap_chain .Present(sync_interval, flags) .into_result(); if matches!(result, Err(HResultError(DXGI_ERROR_WAS_STILL_DRAWING))) { return Ok(()); } else { result } } } }
c = ComPtr::from_raw(dc); let mut dxgi_device = ptr::null_mut(); d.QueryInterface(&IDXGIDevice2::uuidof(), &mut dxgi_device) .into_result()?; let dxgi_device = ComPtr::from_raw(dxgi_device as *mut IDXGIDevice2); let mut dxgi_adapter = ptr::null_mut(); dxgi_device.GetAdapter(&mut dxgi_adapter).into_result()?; let dxgi_adapter = ComPtr::from_raw(dxgi_adapter as *mut IDXGIAdapter2); let mut dxgi_factory = ptr::null_mut(); dxgi_adapter .GetParent(&IDXGIFactory2::uuidof(), &mut dxgi_factory) .into_result()?; let dxgi_factory = ComPtr::from_raw(dxgi_factory as *mut IDXGIFactory2); Ok(GfxDevice { d, dc, dxgi_factory, }) } } pub fn create_gfx_window<I: Into<Option<u32>>>( &self, window: &Pin<Box<Window>>, width: I, height: I, ) -> Result<GfxWindow, HResultError> { unsafe { let scd = DXGI_SWAP_CHAIN_DESC1 { Width: width.into().unwrap_or(0), Height: height.into().unwrap_or(0), Format: DXGI_FORMAT_R8G8B8A8_UNORM, Stereo: FALSE, SampleDesc: DXGI_SAMPLE_DESC { Count: 1, Quality: 0, }, BufferUsage: DXGI_USAGE_RENDER_TARGET_OUTPUT, BufferCount: 2, Scaling: DXGI_SCALING_STRETCH, SwapEffect: DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL, AlphaMode: DXGI_ALPHA_MODE_UNSPECIFIED, Flags: DXGI_SWAP_CHAIN_FLAG_ALLOW_TEARING, }; let mut swap_chain = ptr::null_mut(); self.dxgi_factory .CreateSwapChainForHwnd( self.d.as_raw() as *mut IUnknown, window.hwnd(), &scd, ptr::null(), ptr::null_mut(), &mut swap_chain, ) .into_result()?; let swap_chain = ComPtr::from_raw(swap_chain); let mut backbuffer = ptr::null_mut(); swap_chain .GetBuffer(0, &ID3D11Texture2D::uuidof(), &mut backbuffer) .into_result()?; let backbuffer = ComPtr::from_raw(backbuffer as *mut ID3D11Texture2D); let mut backbuffer_desc: D3D11_TEXTURE2D_DESC = MaybeUninit::zeroed().assume_init(); backbuffer.GetDesc(&mut backbuffer_desc); let mut viewport: D3D11_VIEWPORT = MaybeUninit::zeroed().assume_init(); viewport.Height = backbuffer_desc.Height as f32; viewport.Width = backbuffer_desc.Width as f32; viewport.MinDepth = 0.0; viewport.MaxDepth = 
1.0; let mut backbuffer_rtv = ptr::null_mut(); self.d .CreateRenderTargetView( backbuffer.as_raw() as *mut ID3D11Resource, ptr::null(), &mut ba
random
[ { "content": "pub fn rlca(cpu: &mut CPU) {\n\n let old = cpu.reg.get_r8(R8::A);\n\n cpu.reg.set_r8(R8::A, old.rotate_left(1));\n\n\n\n cpu.reg.flags.remove(Flags::Z | Flags::N | Flags::H);\n\n cpu.reg.flags.set(Flags::C, old.bit(7));\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "ran...
Rust
2020/src/bin/day19.rs
sebnow/adventofcode
9193b7f9181cd2249fd889c8e6723054f4e5b789
use anyhow::{anyhow, Result}; use std::collections::HashMap; use rayon::str::ParallelString; use rayon::iter::ParallelIterator; #[derive(Clone, PartialEq, Debug)] enum Rule { Char(char), Seq(Vec<i64>), Alt(Vec<i64>, Vec<i64>), } impl std::str::FromStr for Rule { type Err = anyhow::Error; fn from_str(s: &str) -> Result<Self, Self::Err> { if s.contains('|') { let mut parts = s.split(" | "); let left = parts .next() .ok_or_else(|| anyhow!("missing left alternate"))? .split(' ') .map(|n| n.parse().unwrap()) .collect(); let right = parts .next() .ok_or_else(|| anyhow!("missing right alternate"))? .split(' ') .map(|n| n.parse().unwrap()) .collect(); Ok(Rule::Alt(left, right)) } else if s.starts_with('"') { let c = s .chars() .nth(1) .ok_or_else(|| anyhow!("missing reference"))?; Ok(Rule::Char(c)) } else { let ids = s .split(' ') .map(|id| { id.parse() .map_err(|err| anyhow!("invalid reference: {}", err)) }) .collect::<Result<_>>()?; Ok(Rule::Seq(ids)) } } } struct RuleEngine { rules: HashMap<i64, Rule>, } impl RuleEngine { pub fn new(rules: HashMap<i64, Rule>) -> Self { RuleEngine { rules } } pub fn matches(&self, s: &str) -> bool { self.match_rule_id(s, 0).contains(&Some("")) } fn match_rule_id<'a>(&self, s: &'a str, rule_id: i64) -> Vec<Option<&'a str>> { let rule = self.rules.get(&rule_id).unwrap(); self.match_rule(s, rule) } fn match_rule<'a>(&self, s: &'a str, rule: &Rule) -> Vec<Option<&'a str>> { match rule { Rule::Char(c) if s.chars().next() == Some(*c) => vec![Some(&s[1..])], Rule::Char(_) => vec![None], Rule::Seq(rs) => self.match_seq(s, rs), Rule::Alt(left, right) => self.match_alt(s, left, right), } } fn match_seq<'a>(&self, s: &'a str, rules: &[i64]) -> Vec<Option<&'a str>> { rules.iter().fold(vec![Some(s)], |ss, r| { ss.iter() .flat_map(|s| match s { Some(s) if !s.is_empty() => self.match_rule_id(s, *r), _ => vec![None], }) .collect() }) } fn match_alt<'a>(&self, s: &'a str, left: &[i64], right: &[i64]) -> Vec<Option<&'a str>> { [left, right] .iter() 
.flat_map(|rs| self.match_seq(s, rs)) .collect() } } impl std::str::FromStr for RuleEngine { type Err = anyhow::Error; fn from_str(s: &str) -> Result<Self, Self::Err> { let rules = s .par_lines() .map(|l| { let mut parts = l.split(": "); Ok(( parts.next().ok_or_else(|| anyhow!("missing id"))?.parse()?, parts .next() .ok_or_else(|| anyhow!("missing definition"))? .parse()?, )) }) .collect::<Result<_>>()?; Ok(RuleEngine::new(rules)) } } fn parse_input<'a>(input: &'a str) -> (RuleEngine, impl ParallelIterator<Item = &'a str> + 'a) { let mut parts = input.split("\n\n"); ( parts.next().unwrap().parse().unwrap(), parts.next().unwrap().par_lines(), ) } fn part_one(input: &str) -> String { let (rules, messages) = parse_input(input); messages.filter(|m| rules.matches(m)).count().to_string() } fn part_two(input: &str) -> String { let input: String = input .lines() .map(|l| { if l.starts_with("8: ") { "8: 42 | 42 8" } else if l.starts_with("11: ") { "11: 42 31 | 42 11 31" } else { l } }) .collect::<Vec<&str>>() .join("\n"); part_one(&input) } fn main() { let input = include_str!("../../input/day19.txt"); println!("Part one: {}", part_one(&input)); println!("Part two: {}", part_two(&input)); } #[cfg(test)] mod test { use super::*; use aocutil::test_example; use std::str::FromStr; test_example!(example_one_1, part_one, 19, 1, 1); test_example!(example_one_2, part_one, 19, 1, 2); test_example!(example_one_3, part_one, 19, 1, 3); test_example!(example_one_4, part_one, 19, 1, 4); test_example!(example_one_5, part_one, 19, 1, 5); test_example!(example_one_6, part_one, 19, 1, 6); test_example!(example_two_1, part_two, 19, 2, 1); test_example!(example_two_2, part_two, 19, 2, 2); test_example!(example_two_3, part_two, 19, 2, 3); test_example!(example_two_4, part_two, 19, 2, 4); test_example!(example_two_5, part_two, 19, 2, 5); #[test] fn parse_char() -> Result<()> { assert_eq!(Rule::from_str("\"a\"")?, Rule::Char('a')); Ok(()) } #[test] fn parse_seq() -> Result<()> { 
assert_eq!(Rule::from_str("1 2")?, Rule::Seq(vec![1, 2])); Ok(()) } #[test] fn parse_alt() -> Result<()> { assert_eq!(Rule::from_str("1 | 2")?, alt(vec![1], vec![2]),); Ok(()) } #[test] fn match_char() { let mut rules = HashMap::new(); rules.insert(0, Rule::Char('a')); assert!(RuleEngine::new(rules).matches("a")); } #[test] fn match_seq() { let mut rules = HashMap::new(); rules.insert(0, Rule::Seq(vec![1, 2])); rules.insert(1, Rule::Char('a')); rules.insert(2, Rule::Char('b')); assert!(RuleEngine::new(rules).matches("ab")); } #[test] fn match_alt() { let mut rules = HashMap::new(); rules.insert(0, alt(vec![1], vec![2])); rules.insert(1, Rule::Char('a')); rules.insert(2, Rule::Char('b')); let engine = RuleEngine::new(rules); assert!(engine.matches("a")); assert!(engine.matches("b")); } #[test] fn match_seq_alt() { let mut rules = HashMap::new(); rules.insert(0, Rule::Seq(vec![1, 1])); rules.insert(1, alt(vec![2], vec![3])); rules.insert(2, Rule::Char('a')); rules.insert(3, Rule::Char('b')); let engine = RuleEngine::new(rules); assert!(engine.matches("aa")); assert!(engine.matches("ab")); assert!(engine.matches("ba")); assert!(engine.matches("bb")); } fn alt(left: Vec<i64>, right: Vec<i64>) -> Rule { Rule::Alt(left, right) } }
use anyhow::{anyhow, Result}; use std::collections::HashMap; use rayon::str::ParallelString; use rayon::iter::ParallelIterator; #[derive(Clone, PartialEq, Debug)] enum Rule { Char(char), Seq(Vec<i64>), Alt(Vec<i64>, Vec<i64>), } impl std::str::FromStr for Rule { type Err = anyhow::Error; fn from_str(s: &str) -> Result<Self, Self::Err> { if s.contains('|') { let mut parts = s.split(" | "); let left = parts .next() .ok_or_else(|| anyhow!("missing left alternate"))? .split(' ') .map(|n| n.parse().unwrap()) .collect(); let right = parts .next() .ok_or_else(|| anyhow!("missing right alternate"))? .split(' ') .map(|n| n.parse().unwrap()) .collect(); Ok(Rule::Alt(left, right)) } else if s.starts_with('"') { let c = s .chars() .nth(1) .ok_or_else(|| anyhow!("missing reference"))?; Ok(Rule::Char(c)) } else { let ids = s .split(' ') .map(|id| { id.parse() .map_err(|err| anyhow!("invalid reference: {}", err)) }) .collect::<Result<_>>()?; Ok(Rule::Seq(ids)) } } } struct RuleEngine { rules: HashMap<i64, Rule>, } impl RuleEngine { pub fn new(rules: HashMap<i64, Rule>) -> Self { RuleEngine { rules } } pub fn matches(&self, s: &str) -> bool { self.match_rule_id(s, 0).contains(&Some("")) } fn match_rule_id<'a>(&self, s: &'a str, rule_id: i64) -> Vec<Option<&'a str>> { let rule = self.rules.get(&rule_id).unwrap(); self.match_rule(s, rule) } fn match_rule<'a>(&self, s: &'a str, rule: &Rule) -> Vec<Option<&'a str>> { match rule { Rule::Char(c) if s.chars().next() == Some(*c) => vec![Some(&s[1..])], Rule::Char(_) => vec![None], Rule::Seq(rs) => self.match_seq(s, rs), Rule::Alt(left, right) => self.match_alt(s, left, right), } } fn match_seq<'a>(&self, s: &'a str, rules: &[i64]) -> Vec<Option<&'a str>> { rules.iter().fold(vec![Some(s)], |ss, r| { ss.iter() .flat_map(|s| match s { Some(s) if !s.is_empty() => self.match_rule_id(s, *r), _ => vec![None], }) .collect() }) } fn match_alt<'a>(&self, s: &'a str, left: &[i64], right: &[i64]) -> Vec<Option<&'a str>> { [left, right] .iter() 
.flat_map(|rs| self.match_seq(s, rs)) .collect() } } impl std::str::FromStr for RuleEngine { type Err = anyhow::Error; fn from_str(s: &str) -> Result<Self, Self::Err> { let rules = s .par_lines() .map(|l| { let mut parts = l.split(": ");
}) .collect::<Result<_>>()?; Ok(RuleEngine::new(rules)) } } fn parse_input<'a>(input: &'a str) -> (RuleEngine, impl ParallelIterator<Item = &'a str> + 'a) { let mut parts = input.split("\n\n"); ( parts.next().unwrap().parse().unwrap(), parts.next().unwrap().par_lines(), ) } fn part_one(input: &str) -> String { let (rules, messages) = parse_input(input); messages.filter(|m| rules.matches(m)).count().to_string() } fn part_two(input: &str) -> String { let input: String = input .lines() .map(|l| { if l.starts_with("8: ") { "8: 42 | 42 8" } else if l.starts_with("11: ") { "11: 42 31 | 42 11 31" } else { l } }) .collect::<Vec<&str>>() .join("\n"); part_one(&input) } fn main() { let input = include_str!("../../input/day19.txt"); println!("Part one: {}", part_one(&input)); println!("Part two: {}", part_two(&input)); } #[cfg(test)] mod test { use super::*; use aocutil::test_example; use std::str::FromStr; test_example!(example_one_1, part_one, 19, 1, 1); test_example!(example_one_2, part_one, 19, 1, 2); test_example!(example_one_3, part_one, 19, 1, 3); test_example!(example_one_4, part_one, 19, 1, 4); test_example!(example_one_5, part_one, 19, 1, 5); test_example!(example_one_6, part_one, 19, 1, 6); test_example!(example_two_1, part_two, 19, 2, 1); test_example!(example_two_2, part_two, 19, 2, 2); test_example!(example_two_3, part_two, 19, 2, 3); test_example!(example_two_4, part_two, 19, 2, 4); test_example!(example_two_5, part_two, 19, 2, 5); #[test] fn parse_char() -> Result<()> { assert_eq!(Rule::from_str("\"a\"")?, Rule::Char('a')); Ok(()) } #[test] fn parse_seq() -> Result<()> { assert_eq!(Rule::from_str("1 2")?, Rule::Seq(vec![1, 2])); Ok(()) } #[test] fn parse_alt() -> Result<()> { assert_eq!(Rule::from_str("1 | 2")?, alt(vec![1], vec![2]),); Ok(()) } #[test] fn match_char() { let mut rules = HashMap::new(); rules.insert(0, Rule::Char('a')); assert!(RuleEngine::new(rules).matches("a")); } #[test] fn match_seq() { let mut rules = HashMap::new(); rules.insert(0, 
Rule::Seq(vec![1, 2])); rules.insert(1, Rule::Char('a')); rules.insert(2, Rule::Char('b')); assert!(RuleEngine::new(rules).matches("ab")); } #[test] fn match_alt() { let mut rules = HashMap::new(); rules.insert(0, alt(vec![1], vec![2])); rules.insert(1, Rule::Char('a')); rules.insert(2, Rule::Char('b')); let engine = RuleEngine::new(rules); assert!(engine.matches("a")); assert!(engine.matches("b")); } #[test] fn match_seq_alt() { let mut rules = HashMap::new(); rules.insert(0, Rule::Seq(vec![1, 1])); rules.insert(1, alt(vec![2], vec![3])); rules.insert(2, Rule::Char('a')); rules.insert(3, Rule::Char('b')); let engine = RuleEngine::new(rules); assert!(engine.matches("aa")); assert!(engine.matches("ab")); assert!(engine.matches("ba")); assert!(engine.matches("bb")); } fn alt(left: Vec<i64>, right: Vec<i64>) -> Rule { Rule::Alt(left, right) } }
Ok(( parts.next().ok_or_else(|| anyhow!("missing id"))?.parse()?, parts .next() .ok_or_else(|| anyhow!("missing definition"))? .parse()?, ))
call_expression
[ { "content": "pub fn answer_2(input: &str) -> Result<i64, failure::Error> {\n\n let map = parse_input(input);\n\n traverse(&map).map(|r| r.1)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn example_1() {\n\n let input = [\n\n \" | \",...
Rust
src/unix.rs
dropbox/rust-subprocess-communicate
f6d5c664d4f4210a60e113f1300230ffa6e8ff4d
#![cfg(unix)] extern crate mio; use std::mem; use mio::*; use std::io; use std::process; use std::cmp; use mio::deprecated::{TryRead, TryWrite}; use mio::deprecated::{PipeReader, PipeWriter}; #[allow(unused_imports)] use std::process::{Command, Stdio, Child}; struct SubprocessClient { stdin: Option<PipeWriter>, stdout: Option<PipeReader>, stderr: Option<PipeReader>, stdin_token : Token, stdout_token : Token, stderr_token : Token, output : Vec<u8>, output_stderr : Vec<u8>, input : Vec<u8>, input_offset : usize, buf : [u8; 65536], stdout_bound : Option<usize>, stderr_bound : Option<usize>, return_on_stdout_fill : bool, has_shutdown : bool, child_shutdown : bool, } impl SubprocessClient { fn new(stdin: Option<PipeWriter>, stdout : Option<PipeReader>, stderr : Option<PipeReader>, data : &[u8], stdout_bound : Option<usize>, stderr_bound : Option<usize>, return_on_stdout_fill : bool) -> SubprocessClient { SubprocessClient { stdin: stdin, stdout: stdout, stderr: stderr, stdin_token : Token(0), stdout_token : Token(1), stderr_token : Token(2), output : Vec::<u8>::new(), output_stderr : Vec::<u8>::new(), buf : [0; 65536], input : data.to_vec(), input_offset : 0, stdout_bound : stdout_bound, stderr_bound : stderr_bound, return_on_stdout_fill : return_on_stdout_fill, has_shutdown : false, child_shutdown : false, } } fn readable(&mut self, poll: &mut Poll) -> io::Result<()> { if self.has_shutdown { return Ok(()); } let mut eof = false; let mut buf_bound : usize = cmp::min(self.stdout_bound.unwrap_or(self.buf.len()), self.buf.len()); if buf_bound == 0 { buf_bound = self.buf.len(); } match self.stdout { None => unreachable!(), Some (ref mut stdout) => match stdout.try_read(&mut self.buf[..buf_bound]) { Ok(Some(r)) => { if r == 0 { eof = true; } else { let do_extend : bool; match self.stdout_bound { None => do_extend = true, Some(ref mut bound) => { if *bound >= r { *bound = *bound - r; do_extend = true; } else { *bound = 0; do_extend = false; if self.return_on_stdout_fill || 
self.stderr.is_none() || self.stderr_bound.unwrap_or(1) == 0 { match self.stderr { Some(ref sub_stderr) => match poll.deregister(sub_stderr){ Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stderr.take()); eof = true; } } }, } if do_extend { self.output.extend(&self.buf[0..r]); } } }, Ok(None) => {}, Err(e) => { return Err(e); } } }; if eof { match self.stdout { Some(ref sub_stdout) => match poll.deregister(sub_stdout) { Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stdout.take()); if self.stderr.is_none() { self.has_shutdown = true; self.child_shutdown = true; } } return Ok(()); } fn readable_stderr(&mut self, poll: &mut Poll) -> io::Result<()> { if self.has_shutdown { return Ok(()); } let mut eof = false; let mut buf_bound : usize = cmp::min(self.stderr_bound.unwrap_or(self.buf.len()), self.buf.len()); if buf_bound == 0 { buf_bound = self.buf.len(); } match self.stderr { None => unreachable!(), Some(ref mut stderr) => match stderr.try_read(&mut self.buf[..buf_bound]) { Ok(None) => { } Ok(Some(r)) => { if r == 0 { eof = true; } else { let do_extend : bool; match self.stderr_bound { None => do_extend = true, Some(ref mut bound) => { if *bound >= r { *bound = *bound - r; do_extend = true; } else { *bound = 0; do_extend = false; if self.stdout.is_none() || self.stdout_bound.unwrap_or(1) == 0 { match self.stdout { Some(ref sub_stdout) => match poll.deregister(sub_stdout){ Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stdout.take()); eof = true; } } }, } if do_extend { self.output_stderr.extend(&self.buf[0..r]); } } } Err(e) => { return Err(e); } } }; if eof { match self.stderr { Some(ref sub_stderr) => match poll.deregister(sub_stderr){ Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stderr.take()); if self.stdout.is_none() { self.has_shutdown = true; self.child_shutdown = true; } } return Ok(()); } fn writable(&mut self, poll: &mut Poll) -> io::Result<()> { if self.has_shutdown { return Ok(()); } let mut ok = true; match 
self.stdin { None => unreachable!(), Some(ref mut stdin) => match stdin.try_write(&(&self.input)[self.input_offset..]) { Ok(None) => { }, Ok(Some(r)) => { if r == 0 { ok = false; } else { self.input_offset += r; } }, Err(_e) => { ok = false; }, } } if self.input_offset == self.input.len() || !ok { match self.stdin { Some(ref sub_stdin) => match poll.deregister(sub_stdin) { Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stdin.take()); match self.stderr { None => match self.stdout { None => { self.has_shutdown = true; self.child_shutdown = true }, Some(_) => {}, }, Some(_) => {}, } } return Ok(()); } fn ready(&mut self, poll: &mut Poll, token: Token, _events: Ready) { if token == self.stderr_token { let _x = self.readable_stderr(poll); } else { let _x = self.readable(poll); } if token == self.stdin_token { let _y = self.writable(poll); } } } pub fn from_stdin(mut stdin: Option<process::ChildStdin>) -> io::Result<Option<PipeWriter> > { match stdin { None => return Ok(None), Some(_) => {}, } Ok(Some(PipeWriter::from_stdin(stdin.take().unwrap()).unwrap())) } pub fn from_stdout(mut stdout: Option<process::ChildStdout>) -> io::Result<Option<PipeReader> > { match stdout { None => return Ok(None), Some(_) => {}, } Ok(Some(PipeReader::from_stdout(stdout.take().unwrap()).unwrap())) } pub fn from_stderr(mut stderr: Option<process::ChildStderr>) -> io::Result<Option<PipeReader> > { match stderr { None => return Ok(None), Some(_) => {}, } Ok(Some(PipeReader::from_stderr(stderr.take().unwrap()).unwrap())) } pub fn subprocess_communicate(process : &mut Child, input : &[u8], stdout_bound : Option<usize>, stderr_bound : Option<usize>, return_on_stdout_fill : bool) -> (Vec<u8>, Vec<u8>, io::Result<()>) { let stdin : Option<PipeWriter>; match from_stdin(process.stdin.take()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(pipe) => stdin = pipe, } let stdout : Option<PipeReader>; match from_stdout(process.stdout.take()) { Err(e) => return 
(Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(pipe) => stdout = pipe, } let stderr : Option<PipeReader>; match from_stderr(process.stderr.take()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(pipe) => stderr = pipe, } let mut subprocess = SubprocessClient::new(stdin, stdout, stderr, input, stdout_bound, stderr_bound, return_on_stdout_fill); let mut poll = Poll::new().unwrap(); match subprocess.stdout { Some(ref sub_stdout) => match poll.register(sub_stdout, subprocess.stdout_token, Ready::readable(), PollOpt::level()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(_) =>{}, }, None => {}, } match subprocess.stderr { Some(ref sub_stderr) => match poll.register(sub_stderr, subprocess.stderr_token, Ready::readable(), PollOpt::level()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(_) => {}, }, None => {}, } match subprocess.stdin { Some (ref sub_stdin) => match poll.register(sub_stdin, subprocess.stdin_token, Ready::writable(), PollOpt::level()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(_) => {}, }, None => {}, } let mut events = Events::with_capacity(1024); while !subprocess.child_shutdown { poll.poll(&mut events, None).unwrap(); for event in events.iter() { subprocess.ready(&mut poll, event.token(), event.kind()) } } let ret_stdout = mem::replace(&mut subprocess.output, Vec::<u8>::new()); let ret_stderr = mem::replace(&mut subprocess.output_stderr, Vec::<u8>::new()); return (ret_stdout, ret_stderr, Ok(())); } #[allow(dead_code)] const TEST_DATA : [u8; 1024 * 4096] = [42; 1024 * 4096]; #[test] fn test_subprocess_pipe() { let mut process = Command::new("/bin/cat") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], None, None, true); process.wait().unwrap(); err.unwrap(); assert_eq!(TEST_DATA.len(), ret_stdout.len()); assert_eq!(0usize, 
ret_stderr.len()); let mut i : usize = 0; for item in TEST_DATA.iter() { assert_eq!(*item, ret_stdout[i]); i += 1; } } #[test] fn test_subprocess_bounded_pipe() { let mut process = Command::new("/bin/cat") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(TEST_DATA.len() - 1), None, true); process.wait().unwrap(); err.unwrap(); assert_eq!(TEST_DATA.len() - 1, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in TEST_DATA[0..TEST_DATA.len() - 1].iter() { assert_eq!(*item, ret_stdout[i]); i += 1; } } #[test] fn test_subprocess_bounded_yes_stderr0() { let mut process = Command::new("/usr/bin/yes") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let bound : usize = 130000; let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(bound), Some(0), false); err.unwrap(); assert_eq!(bound, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in ret_stdout.iter() { let val : u8; if (i & 1) == 1 { val = '\n' as u8; } else { val = 'y' as u8; } assert_eq!(*item, val); i += 1; } } #[test] fn test_subprocess_bounded_yes() { let mut process = Command::new("/usr/bin/yes") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let bound : usize = 130000; let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(bound), Some(bound), true); err.unwrap(); assert_eq!(bound, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in ret_stdout.iter() { let val : u8; if (i & 1) == 1 { val = '\n' as u8; } else { val = 'y' as u8; } assert_eq!(*item, val); i += 1; } } #[test] fn test_subprocess_bounded_yes_no_stderr() { let mut process = Command::new("/usr/bin/yes") .stdin(Stdio::piped()) 
.stdout(Stdio::piped()) .spawn().unwrap(); let bound : usize = 130000; let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(bound), None, false); err.unwrap(); assert_eq!(bound, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in ret_stdout.iter() { let val : u8; if (i & 1) == 1 { val = '\n' as u8; } else { val = 'y' as u8; } assert_eq!(*item, val); i += 1; } }
#![cfg(unix)] extern crate mio; use std::mem; use mio::*; use std::io; use std::process; use std::cmp; use mio::deprecated::{TryRead, TryWrite}; use mio::deprecated::{PipeReader, PipeWriter}; #[allow(unused_imports)] use std::process::{Command, Stdio, Child}; struct SubprocessClient { stdin: Option<PipeWriter>, stdout: Option<PipeReader>, stderr: Option<PipeReader>, stdin_token : Token, stdout_token : Token, stderr_token : Token, output : Vec<u8>, output_stderr : Vec<u8>, input : Vec<u8>, input_offset : usize, buf : [u8; 65536], stdout_bound : Option<usize>, stderr_bound : Option<usize>, return_on_stdout_fill : bool, has_shutdown : bool, child_shutdown : bool, } impl SubprocessClient { fn new(stdin: Option<PipeWriter>, stdout : Option<PipeReader>, stderr : Option<PipeReader>, data : &[u8], stdout_bound : Option<usize>, stderr_bound : Option<usize>, return_on_stdout_fill : bool) -> SubprocessClient { SubprocessClient { stdin: stdin, stdout: stdout, stderr: stderr, stdin_token : Token(0), stdout_token : Token(1), stderr_token : Token(2), output : Vec::<u8>::new(), output_stderr : Vec::<u8>::new(), buf : [0; 65536], input : data.to_vec(), input_offset : 0, stdout_bound : stdout_bound, stderr_bound : stderr_bound, return_on_stdout_fill : return_on_stdout_fill, has_shutdown : false, child_shutdown : false, } } fn readable(&mut self, poll: &mut Poll) -> io::Result<()> { if self.has_shutdown { return Ok(()); } let mut eof = false; let mut buf_bound : usize = cmp::min(self.stdout_bound.unwrap_or(self.buf.len()), self.buf.len()); if buf_bound == 0 { buf_bound = self.buf.len(); } match self.stdout { None => unreachable!(), Some (ref mut stdout) => match stdout.try_read(&mut self.buf[..buf_bound]) { Ok(Some(r)) => { if r == 0 { eof = true; } else { let do_extend : bool; match self.stdout_bound { None => do_extend = true, Some(ref mut bound) => { if *bound >= r { *bound = *bound - r; do_extend = true; } else { *bound = 0; do_extend = false; if self.return_on_stdout_fill || 
self.stderr.is_none() || self.stderr_bound.unwrap_or(1) == 0 { match self.stderr { Some(ref sub_stderr) => match poll.deregister(sub_stderr){ Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stderr.take()); eof = true; } } }, } if do_extend { self.output.extend(&self.buf[0..r]); } } }, Ok(None) => {}, Err(e) => { return Err(e); } } }; if eof { match self.stdout { Some(ref sub_stdout) => match poll.deregister(sub_stdout) { Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stdout.take()); if self.stderr.is_none() { self.has_shutdown = true; self.child_shutdown = true; } } return Ok(()); } fn readable_stderr(&mut self, poll: &mut Poll) -> io::Result<()> { if self.has_shutdown { return Ok(()); } let mut eof = false; let mut buf_bound : usize = cmp::min(self.stderr_bound.unwrap_or(self.buf.len()), self.buf.len()); if buf_bound == 0 { buf_bound = self.buf.len(); } match self.stderr { None => unreachable!(), Some(ref mut stderr) => match stderr.try_read(&mut self.buf[..buf_bound]) { Ok(None) => { } Ok(Some(r)) => { if r == 0 { eof = true; } else { let do_extend : bool; match self.stderr_bound { None => do_extend = true, Some(ref mut bound) => { if *bound >= r { *bound = *bound - r; do_extend = true; }
fn writable(&mut self, poll: &mut Poll) -> io::Result<()> { if self.has_shutdown { return Ok(()); } let mut ok = true; match self.stdin { None => unreachable!(), Some(ref mut stdin) => match stdin.try_write(&(&self.input)[self.input_offset..]) { Ok(None) => { }, Ok(Some(r)) => { if r == 0 { ok = false; } else { self.input_offset += r; } }, Err(_e) => { ok = false; }, } } if self.input_offset == self.input.len() || !ok { match self.stdin { Some(ref sub_stdin) => match poll.deregister(sub_stdin) { Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stdin.take()); match self.stderr { None => match self.stdout { None => { self.has_shutdown = true; self.child_shutdown = true }, Some(_) => {}, }, Some(_) => {}, } } return Ok(()); } fn ready(&mut self, poll: &mut Poll, token: Token, _events: Ready) { if token == self.stderr_token { let _x = self.readable_stderr(poll); } else { let _x = self.readable(poll); } if token == self.stdin_token { let _y = self.writable(poll); } } } pub fn from_stdin(mut stdin: Option<process::ChildStdin>) -> io::Result<Option<PipeWriter> > { match stdin { None => return Ok(None), Some(_) => {}, } Ok(Some(PipeWriter::from_stdin(stdin.take().unwrap()).unwrap())) } pub fn from_stdout(mut stdout: Option<process::ChildStdout>) -> io::Result<Option<PipeReader> > { match stdout { None => return Ok(None), Some(_) => {}, } Ok(Some(PipeReader::from_stdout(stdout.take().unwrap()).unwrap())) } pub fn from_stderr(mut stderr: Option<process::ChildStderr>) -> io::Result<Option<PipeReader> > { match stderr { None => return Ok(None), Some(_) => {}, } Ok(Some(PipeReader::from_stderr(stderr.take().unwrap()).unwrap())) } pub fn subprocess_communicate(process : &mut Child, input : &[u8], stdout_bound : Option<usize>, stderr_bound : Option<usize>, return_on_stdout_fill : bool) -> (Vec<u8>, Vec<u8>, io::Result<()>) { let stdin : Option<PipeWriter>; match from_stdin(process.stdin.take()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(pipe) => 
stdin = pipe, } let stdout : Option<PipeReader>; match from_stdout(process.stdout.take()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(pipe) => stdout = pipe, } let stderr : Option<PipeReader>; match from_stderr(process.stderr.take()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(pipe) => stderr = pipe, } let mut subprocess = SubprocessClient::new(stdin, stdout, stderr, input, stdout_bound, stderr_bound, return_on_stdout_fill); let mut poll = Poll::new().unwrap(); match subprocess.stdout { Some(ref sub_stdout) => match poll.register(sub_stdout, subprocess.stdout_token, Ready::readable(), PollOpt::level()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(_) =>{}, }, None => {}, } match subprocess.stderr { Some(ref sub_stderr) => match poll.register(sub_stderr, subprocess.stderr_token, Ready::readable(), PollOpt::level()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(_) => {}, }, None => {}, } match subprocess.stdin { Some (ref sub_stdin) => match poll.register(sub_stdin, subprocess.stdin_token, Ready::writable(), PollOpt::level()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(_) => {}, }, None => {}, } let mut events = Events::with_capacity(1024); while !subprocess.child_shutdown { poll.poll(&mut events, None).unwrap(); for event in events.iter() { subprocess.ready(&mut poll, event.token(), event.kind()) } } let ret_stdout = mem::replace(&mut subprocess.output, Vec::<u8>::new()); let ret_stderr = mem::replace(&mut subprocess.output_stderr, Vec::<u8>::new()); return (ret_stdout, ret_stderr, Ok(())); } #[allow(dead_code)] const TEST_DATA : [u8; 1024 * 4096] = [42; 1024 * 4096]; #[test] fn test_subprocess_pipe() { let mut process = Command::new("/bin/cat") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], None, None, true); 
process.wait().unwrap(); err.unwrap(); assert_eq!(TEST_DATA.len(), ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in TEST_DATA.iter() { assert_eq!(*item, ret_stdout[i]); i += 1; } } #[test] fn test_subprocess_bounded_pipe() { let mut process = Command::new("/bin/cat") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(TEST_DATA.len() - 1), None, true); process.wait().unwrap(); err.unwrap(); assert_eq!(TEST_DATA.len() - 1, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in TEST_DATA[0..TEST_DATA.len() - 1].iter() { assert_eq!(*item, ret_stdout[i]); i += 1; } } #[test] fn test_subprocess_bounded_yes_stderr0() { let mut process = Command::new("/usr/bin/yes") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let bound : usize = 130000; let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(bound), Some(0), false); err.unwrap(); assert_eq!(bound, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in ret_stdout.iter() { let val : u8; if (i & 1) == 1 { val = '\n' as u8; } else { val = 'y' as u8; } assert_eq!(*item, val); i += 1; } } #[test] fn test_subprocess_bounded_yes() { let mut process = Command::new("/usr/bin/yes") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let bound : usize = 130000; let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(bound), Some(bound), true); err.unwrap(); assert_eq!(bound, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in ret_stdout.iter() { let val : u8; if (i & 1) == 1 { val = '\n' as u8; } else { val = 'y' as u8; } assert_eq!(*item, val); i += 1; } } #[test] fn 
test_subprocess_bounded_yes_no_stderr() { let mut process = Command::new("/usr/bin/yes") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn().unwrap(); let bound : usize = 130000; let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(bound), None, false); err.unwrap(); assert_eq!(bound, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in ret_stdout.iter() { let val : u8; if (i & 1) == 1 { val = '\n' as u8; } else { val = 'y' as u8; } assert_eq!(*item, val); i += 1; } }
else { *bound = 0; do_extend = false; if self.stdout.is_none() || self.stdout_bound.unwrap_or(1) == 0 { match self.stdout { Some(ref sub_stdout) => match poll.deregister(sub_stdout){ Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stdout.take()); eof = true; } } }, } if do_extend { self.output_stderr.extend(&self.buf[0..r]); } } } Err(e) => { return Err(e); } } }; if eof { match self.stderr { Some(ref sub_stderr) => match poll.deregister(sub_stderr){ Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stderr.take()); if self.stdout.is_none() { self.has_shutdown = true; self.child_shutdown = true; } } return Ok(()); }
function_block-function_prefix_line
[ { "content": "#subprocess-communicate\n\n[![crates.io](http://meritbadge.herokuapp.com/subprocess-communicate)](https://crates.io/crates/subprocess-communicate)\n\n[![Build Status](https://travis-ci.org/dropbox/rust-subprocess-communicate.svg?branch=master)](https://travis-ci.org/dropbox/rust-subprocess-communi...
Rust
liblumen_alloc/src/erts/process/alloc.rs
mlwilkerson/lumen
048df6c0840c11496e2d15aa9af2e4a8d07a6e0f
mod heap; mod iter; mod process_heap_alloc; mod semispace; mod stack_alloc; mod stack_primitives; mod term_alloc; mod virtual_alloc; mod virtual_binary_heap; pub use self::heap::{Heap, HeapAlloc}; pub use self::iter::HeapIter; pub use self::process_heap_alloc::ProcessHeapAlloc; pub use self::semispace::{GenerationalHeap, SemispaceHeap}; pub use self::stack_alloc::StackAlloc; pub use self::stack_primitives::StackPrimitives; pub use self::term_alloc::TermAlloc; pub use self::virtual_alloc::{VirtualAlloc, VirtualAllocator, VirtualHeap}; pub use self::virtual_binary_heap::VirtualBinaryHeap; use core::alloc::{AllocErr, Layout}; use core::ffi::c_void; use core::mem::transmute; use core::ptr; use lazy_static::lazy_static; use liblumen_core::sys::dynamic_call::DynamicCallee; use crate::erts::exception::AllocResult; use crate::erts::term::prelude::Term; use super::Frame; pub const DEFAULT_STACK_SIZE: usize = 1; pub const STACK_ALIGNMENT: usize = 16; lazy_static! { static ref PROC_ALLOC: ProcessHeapAlloc = ProcessHeapAlloc::new(); } pub struct Stack { pub base: *mut u8, pub top: *mut u8, pub size: usize, pub end: *mut u8, } impl Stack { fn new(base: *mut u8, pages: usize) -> Self { use liblumen_core::alloc::utils::align_up_to; use liblumen_core::sys::sysconf; let page_size = sysconf::pagesize(); let size = (pages + 1) * page_size; let bottom = unsafe { base.offset(page_size as isize) }; let with_red_zone = unsafe { bottom.offset(128) }; let end = align_up_to(with_red_zone, STACK_ALIGNMENT); let top = unsafe { base.offset(size as isize) }; Self { base, top, size, end, } } pub unsafe fn push_frame(&mut self, frame: &Frame) { let symbol = frame.native().ptr(); let dynamic_callee = transmute::<*const c_void, DynamicCallee>(symbol); self.push64(dynamic_callee as u64) } pub unsafe fn push64(&mut self, value: u64) { let mut top64 = self.top as *mut u64; top64 = top64.offset(-1); top64.write(value); self.top = top64 as *mut u8; } #[inline] pub fn limit(&self) -> *mut u8 { self.end } 
#[inline] pub fn is_guard_page<T>(&self, addr: *mut T) -> bool { use liblumen_core::util::pointer::in_area_inclusive; in_area_inclusive(addr, self.base, self.end) } } impl Default for Stack { fn default() -> Self { Self { base: ptr::null_mut(), top: ptr::null_mut(), size: 0, end: ptr::null_mut(), } } } unsafe impl Sync for Stack {} impl Drop for Stack { fn drop(&mut self) { use liblumen_core::alloc::mmap; use liblumen_core::sys::sysconf; if self.base.is_null() { return; } let page_size = sysconf::pagesize(); let pages = (self.size / page_size) - 1; let (layout, _offset) = Layout::from_size_align(page_size, page_size) .unwrap() .repeat(pages) .unwrap(); unsafe { mmap::unmap(self.base, layout); } } } #[inline] pub fn default_heap() -> AllocResult<(*mut Term, usize)> { let size = default_heap_size(); PROC_ALLOC.alloc(size).map(|ptr| (ptr, size)) } pub fn default_heap_size() -> usize { ProcessHeapAlloc::HEAP_SIZES[ProcessHeapAlloc::MIN_HEAP_SIZE_INDEX] } #[inline] pub fn heap(size: usize) -> AllocResult<*mut Term> { PROC_ALLOC.alloc(size) } #[inline] pub fn stack(num_pages: usize) -> AllocResult<Stack> { use liblumen_core::alloc::mmap; debug_assert!(num_pages > 0, "stack size in pages must be greater than 0"); let ptr = unsafe { mmap::map_stack(num_pages)? }; Ok(Stack::new(ptr.as_ptr(), num_pages)) } #[inline] pub unsafe fn realloc( heap: *mut Term, size: usize, new_size: usize, ) -> Result<*mut Term, AllocErr> { PROC_ALLOC.realloc_in_place(heap, size, new_size) } #[inline] pub unsafe fn free(heap: *mut Term, size: usize) { PROC_ALLOC.dealloc(heap, size) } #[inline] pub fn next_heap_size(size: usize) -> usize { ProcessHeapAlloc::next_heap_size(size) }
mod heap; mod iter; mod process_heap_alloc; mod semispace; mod stack_alloc; mod stack_primitives; mod term_alloc; mod virtual_alloc; mod virtual_binary_heap; pub use self::heap::{Heap, HeapAlloc}; pub use self::iter::HeapIter; pub use self::process_heap_alloc::ProcessHeapAlloc; pub use s
b unsafe fn realloc( heap: *mut Term, size: usize, new_size: usize, ) -> Result<*mut Term, AllocErr> { PROC_ALLOC.realloc_in_place(heap, size, new_size) } #[inline] pub unsafe fn free(heap: *mut Term, size: usize) { PROC_ALLOC.dealloc(heap, size) } #[inline] pub fn next_heap_size(size: usize) -> usize { ProcessHeapAlloc::next_heap_size(size) }
elf::semispace::{GenerationalHeap, SemispaceHeap}; pub use self::stack_alloc::StackAlloc; pub use self::stack_primitives::StackPrimitives; pub use self::term_alloc::TermAlloc; pub use self::virtual_alloc::{VirtualAlloc, VirtualAllocator, VirtualHeap}; pub use self::virtual_binary_heap::VirtualBinaryHeap; use core::alloc::{AllocErr, Layout}; use core::ffi::c_void; use core::mem::transmute; use core::ptr; use lazy_static::lazy_static; use liblumen_core::sys::dynamic_call::DynamicCallee; use crate::erts::exception::AllocResult; use crate::erts::term::prelude::Term; use super::Frame; pub const DEFAULT_STACK_SIZE: usize = 1; pub const STACK_ALIGNMENT: usize = 16; lazy_static! { static ref PROC_ALLOC: ProcessHeapAlloc = ProcessHeapAlloc::new(); } pub struct Stack { pub base: *mut u8, pub top: *mut u8, pub size: usize, pub end: *mut u8, } impl Stack { fn new(base: *mut u8, pages: usize) -> Self { use liblumen_core::alloc::utils::align_up_to; use liblumen_core::sys::sysconf; let page_size = sysconf::pagesize(); let size = (pages + 1) * page_size; let bottom = unsafe { base.offset(page_size as isize) }; let with_red_zone = unsafe { bottom.offset(128) }; let end = align_up_to(with_red_zone, STACK_ALIGNMENT); let top = unsafe { base.offset(size as isize) }; Self { base, top, size, end, } } pub unsafe fn push_frame(&mut self, frame: &Frame) { let symbol = frame.native().ptr(); let dynamic_callee = transmute::<*const c_void, DynamicCallee>(symbol); self.push64(dynamic_callee as u64) } pub unsafe fn push64(&mut self, value: u64) { let mut top64 = self.top as *mut u64; top64 = top64.offset(-1); top64.write(value); self.top = top64 as *mut u8; } #[inline] pub fn limit(&self) -> *mut u8 { self.end } #[inline] pub fn is_guard_page<T>(&self, addr: *mut T) -> bool { use liblumen_core::util::pointer::in_area_inclusive; in_area_inclusive(addr, self.base, self.end) } } impl Default for Stack { fn default() -> Self { Self { base: ptr::null_mut(), top: ptr::null_mut(), size: 0, end: 
ptr::null_mut(), } } } unsafe impl Sync for Stack {} impl Drop for Stack { fn drop(&mut self) { use liblumen_core::alloc::mmap; use liblumen_core::sys::sysconf; if self.base.is_null() { return; } let page_size = sysconf::pagesize(); let pages = (self.size / page_size) - 1; let (layout, _offset) = Layout::from_size_align(page_size, page_size) .unwrap() .repeat(pages) .unwrap(); unsafe { mmap::unmap(self.base, layout); } } } #[inline] pub fn default_heap() -> AllocResult<(*mut Term, usize)> { let size = default_heap_size(); PROC_ALLOC.alloc(size).map(|ptr| (ptr, size)) } pub fn default_heap_size() -> usize { ProcessHeapAlloc::HEAP_SIZES[ProcessHeapAlloc::MIN_HEAP_SIZE_INDEX] } #[inline] pub fn heap(size: usize) -> AllocResult<*mut Term> { PROC_ALLOC.alloc(size) } #[inline] pub fn stack(num_pages: usize) -> AllocResult<Stack> { use liblumen_core::alloc::mmap; debug_assert!(num_pages > 0, "stack size in pages must be greater than 0"); let ptr = unsafe { mmap::map_stack(num_pages)? }; Ok(Stack::new(ptr.as_ptr(), num_pages)) } #[inline] pu
random
[]
Rust
connectorx-python/src/pandas/types.rs
ritchie46/connector-x
89c61beb1c2d782ca07445124caa1ca3db3608df
use chrono::{DateTime, Utc}; use connectorx::errors::{ConnectorAgentError, Result}; use connectorx::impl_typesystem; use fehler::throws; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub enum PandasTypeSystem { F64(bool), I64(bool), Bool(bool), Char(bool), Str(bool), BoxStr(bool), String(bool), Bytes(bool), DateTime(bool), } impl_typesystem! { system = PandasTypeSystem, mappings = { { F64 => f64 } { I64 => i64 } { Bool => bool } { Char => char } { Str => &'r str } { BoxStr => Box<str> } { String => String } { Bytes => Vec<u8> } { DateTime => DateTime<Utc> } } } pub trait PandasDType: Sized { fn dtype(&self) -> &'static str; fn npdtype(&self) -> &'static str; fn parse(ty: &str) -> Result<Self>; fn is_extension(&self) -> bool; fn block_name(&self) -> &'static str; } impl PandasDType for PandasTypeSystem { fn dtype(&self) -> &'static str { match *self { PandasTypeSystem::I64(false) => "int64", PandasTypeSystem::I64(true) => "Int64", PandasTypeSystem::F64(_) => "float64", PandasTypeSystem::Bool(false) => "bool", PandasTypeSystem::Bool(true) => "boolean", PandasTypeSystem::Char(_) => "object", PandasTypeSystem::Str(_) => "object", PandasTypeSystem::BoxStr(_) => "object", PandasTypeSystem::String(_) => "object", PandasTypeSystem::Bytes(_) => "object", PandasTypeSystem::DateTime(_) => "datetime64[ns]", } } fn npdtype(&self) -> &'static str { match *self { PandasTypeSystem::I64(_) => "i8", PandasTypeSystem::F64(_) => "f8", PandasTypeSystem::Bool(_) => "b1", PandasTypeSystem::Char(_) => "O", PandasTypeSystem::Str(_) => "O", PandasTypeSystem::BoxStr(_) => "O", PandasTypeSystem::String(_) => "O", PandasTypeSystem::Bytes(_) => "O", PandasTypeSystem::DateTime(_) => "M8[ns]", } } #[throws(ConnectorAgentError)] fn parse(ty: &str) -> Self { match ty { "int64" => PandasTypeSystem::I64(false), "Int64" => PandasTypeSystem::I64(true), "float64" => PandasTypeSystem::F64(true), "bool" => PandasTypeSystem::Bool(false), "boolean" => PandasTypeSystem::Bool(true), 
"object" => PandasTypeSystem::String(true), "datetime" => PandasTypeSystem::DateTime(true), ty => unimplemented!("{}", ty), } } fn is_extension(&self) -> bool { match *self { PandasTypeSystem::I64(false) => false, PandasTypeSystem::I64(true) => true, PandasTypeSystem::F64(_) => false, PandasTypeSystem::Bool(false) => false, PandasTypeSystem::Bool(true) => true, PandasTypeSystem::Char(_) => false, PandasTypeSystem::Str(_) => false, PandasTypeSystem::BoxStr(_) => false, PandasTypeSystem::String(_) => false, PandasTypeSystem::Bytes(_) => false, PandasTypeSystem::DateTime(_) => false, } } fn block_name(&self) -> &'static str { match *self { PandasTypeSystem::I64(false) => "IntBlock", PandasTypeSystem::I64(true) => "ExtensionBlock", PandasTypeSystem::F64(_) => "FloatBlock", PandasTypeSystem::Bool(false) => "BoolBlock", PandasTypeSystem::Bool(true) => "ExtensionBlock", PandasTypeSystem::Char(_) => "ObjectBlock", PandasTypeSystem::Str(_) => "ObjectBlock", PandasTypeSystem::BoxStr(_) => "ObjectBlock", PandasTypeSystem::String(_) => "ObjectBlock", PandasTypeSystem::Bytes(_) => "ObjectBlock", PandasTypeSystem::DateTime(_) => "DatetimeBlock", } } }
use chrono::{DateTime, Utc}; use connectorx::errors::{ConnectorAgentError, Result}; use connectorx::impl_typesystem; use fehler::throws; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub enum PandasTypeSystem { F64(bool), I64(bool), Bool(bool), Char(bool), Str(bool), BoxStr(bool), String(bool), Bytes(bool), DateTime(bool), } impl_typesystem! { system = PandasTypeSystem, mappings = { { F64 => f64 } { I64 => i64 } { Bool => bool } { Char => char } { Str => &'r str } { BoxStr => Box<str> } { String => String } { Bytes => Vec<u8> } { DateTime => DateTime<Utc> } } } pub trait PandasDType: Sized { fn dtype(&self) -> &'static str; fn npdtype(&self) -> &'static str; fn parse(ty: &str) -> Result<Self>; fn is_extension(&self) -> bool; fn block_name(&self) -> &'static str; } impl PandasDType for PandasTypeSystem { fn dtype(&self) -> &'static str {
} fn npdtype(&self) -> &'static str { match *self { PandasTypeSystem::I64(_) => "i8", PandasTypeSystem::F64(_) => "f8", PandasTypeSystem::Bool(_) => "b1", PandasTypeSystem::Char(_) => "O", PandasTypeSystem::Str(_) => "O", PandasTypeSystem::BoxStr(_) => "O", PandasTypeSystem::String(_) => "O", PandasTypeSystem::Bytes(_) => "O", PandasTypeSystem::DateTime(_) => "M8[ns]", } } #[throws(ConnectorAgentError)] fn parse(ty: &str) -> Self { match ty { "int64" => PandasTypeSystem::I64(false), "Int64" => PandasTypeSystem::I64(true), "float64" => PandasTypeSystem::F64(true), "bool" => PandasTypeSystem::Bool(false), "boolean" => PandasTypeSystem::Bool(true), "object" => PandasTypeSystem::String(true), "datetime" => PandasTypeSystem::DateTime(true), ty => unimplemented!("{}", ty), } } fn is_extension(&self) -> bool { match *self { PandasTypeSystem::I64(false) => false, PandasTypeSystem::I64(true) => true, PandasTypeSystem::F64(_) => false, PandasTypeSystem::Bool(false) => false, PandasTypeSystem::Bool(true) => true, PandasTypeSystem::Char(_) => false, PandasTypeSystem::Str(_) => false, PandasTypeSystem::BoxStr(_) => false, PandasTypeSystem::String(_) => false, PandasTypeSystem::Bytes(_) => false, PandasTypeSystem::DateTime(_) => false, } } fn block_name(&self) -> &'static str { match *self { PandasTypeSystem::I64(false) => "IntBlock", PandasTypeSystem::I64(true) => "ExtensionBlock", PandasTypeSystem::F64(_) => "FloatBlock", PandasTypeSystem::Bool(false) => "BoolBlock", PandasTypeSystem::Bool(true) => "ExtensionBlock", PandasTypeSystem::Char(_) => "ObjectBlock", PandasTypeSystem::Str(_) => "ObjectBlock", PandasTypeSystem::BoxStr(_) => "ObjectBlock", PandasTypeSystem::String(_) => "ObjectBlock", PandasTypeSystem::Bytes(_) => "ObjectBlock", PandasTypeSystem::DateTime(_) => "DatetimeBlock", } } }
match *self { PandasTypeSystem::I64(false) => "int64", PandasTypeSystem::I64(true) => "Int64", PandasTypeSystem::F64(_) => "float64", PandasTypeSystem::Bool(false) => "bool", PandasTypeSystem::Bool(true) => "boolean", PandasTypeSystem::Char(_) => "object", PandasTypeSystem::Str(_) => "object", PandasTypeSystem::BoxStr(_) => "object", PandasTypeSystem::String(_) => "object", PandasTypeSystem::Bytes(_) => "object", PandasTypeSystem::DateTime(_) => "datetime64[ns]", }
if_condition
[ { "content": "/// `TypeSystem` describes all the types a source or destination support\n\n/// using enum variants.\n\n/// The variant can be used to type check with a static type `T` through the `check` method.\n\npub trait TypeSystem: Copy + Clone + Send + Sync {\n\n /// Check whether T is the same type as ...
Rust
src/config.rs
theotherjimmy/lanta
2579205c210b9bc58d0636e1af3538f433eff736
use std::collections::HashMap; use std::fs::File; use std::os::raw::c_uint; use std::path::PathBuf; use std::str::FromStr; use log::warn; use miette::IntoDiagnostic; use serde::de; use serde::{Deserialize, Deserializer}; use crate::keysym::*; use crate::layout::*; use crate::{ self as lanta, cmd::Command, Borders, KeyHandlers, ModKey, WindowId, }; #[derive(Hash, PartialEq, Eq, Debug)] struct KeyInner { mods: Vec<ModKey>, key: c_uint, } impl FromStr for KeyInner { type Err = String; fn from_str(frm: &str) -> Result<Self, String> { let mut iter = frm.rsplit("-"); let key = match iter.next().ok_or(String::from("no key found"))? { "a" => XK_a, "b" => XK_b, "c" => XK_c, "d" => XK_d, "e" => XK_e, "f" => XK_f, "g" => XK_g, "h" => XK_h, "i" => XK_i, "j" => XK_j, "k" => XK_k, "l" => XK_l, "m" => XK_m, "n" => XK_n, "o" => XK_o, "p" => XK_p, "q" => XK_q, "r" => XK_r, "s" => XK_s, "t" => XK_t, "u" => XK_u, "v" => XK_v, "w" => XK_w, "x" => XK_x, "y" => XK_y, "z" => XK_z, "space" => XK_space, "enter" => XK_Return, "tab" => XK_Tab, "down" => XK_Down, "up" => XK_Up, "right" => XK_Right, "left" => XK_Left, "print" => XK_Print, a => Err(format!("Could not match key {}", a))?, }; let mods = iter .map(|mod_key| match mod_key { "C" => Ok(ModKey::Control), "M" => Ok(ModKey::Mod1), "S" => Ok(ModKey::Shift), "H" => Ok(ModKey::Mod4), a => Err(format!("Did not understand modifier {}", a)), }) .collect::<Result<Vec<_>, String>>()?; Ok(KeyInner { mods, key }) } } impl<'de> Deserialize<'de> for KeyInner { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; s.parse().map_err(de::Error::custom) } } #[derive(Deserialize, Debug)] #[serde(tag = "type")] enum LayoutSelectInner { ThreeColumn { #[serde(default)] padding: u32, }, Stack { #[serde(default)] padding: u32, }, } #[derive(Deserialize, Debug)] struct LayoutSelect { name: String, #[serde(flatten)] layout: LayoutSelectInner, } impl Into<Box<dyn 
Layout<WindowId>>> for LayoutSelect { fn into(self) -> Box<dyn Layout<WindowId>> { match self.layout { LayoutSelectInner::ThreeColumn { padding } => { Box::new(ThreeColumn::new(self.name, padding, 40)) } LayoutSelectInner::Stack { padding } => { Box::new(StackLayout::new(self.name, padding)) } } } } #[derive(Deserialize, Debug)] struct Config { keys: HashMap<KeyInner, Command>, layouts: Vec<LayoutSelect>, borders: Borders, } pub fn load_config_yaml(config_path: PathBuf) -> miette::Result<lanta::Config> { let config_file = File::open(config_path).into_diagnostic()?; let Config { keys, layouts, borders, } = serde_yaml::from_reader(config_file).into_diagnostic()?; let keys: KeyHandlers = keys .into_iter() .map(|(k, v)| (k.mods, k.key, v.into())) .collect(); let layouts: Vec<_> = layouts.into_iter().map(|l| l.into()).collect(); Ok(lanta::Config { keys, layouts, borders, }) } pub fn load_state(state_path: PathBuf) -> std::io::Result<lanta::State> { let state_file = File::open(state_path)?; Ok(match serde_yaml::from_reader(state_file) { Ok(state) => state, Err(e) => { warn!("Could not deserialize state: {}", e); Default::default() } }) }
use std::collections::HashMap; use std::fs::File; use std::os::raw::c_uint; use std::path::PathBuf; use std::str::FromStr; use log::warn; use miette::IntoDiagnostic; use serde::de; use serde::{Deserialize, Deserializer}; use crate::keysym::*; use crate::layout::*; use crate::{ self as lanta, cmd::Command, Borders, KeyHandlers, ModKey, WindowId, }; #[derive(Hash, PartialEq, Eq, Debug)] struct KeyInner { mods: Vec<ModKey>, key: c_uint, } impl FromStr for KeyInner { type Err = String; fn from_str(frm: &str) -> Result<Self, String> { let mut iter = frm.rsplit("-"); let key = match iter.next().ok_or(String::from("no key found"))? { "a" => XK_a, "b" => XK_b, "c" => XK_c, "d" => XK_d, "e" => XK_e, "f" => XK_f, "g" => XK_g, "h" => XK_h, "i" => XK_i, "j" => XK_j, "k" => XK_k, "l" => XK_l, "m" => XK_m, "n" => XK_n, "o" => XK_o, "p" => XK_p, "q" => XK_q, "r" => XK_r, "s" => XK_s, "t" => XK_t, "u" => XK_u, "v" => XK_v, "w" => XK_w, "x" => XK_x, "y" => XK_y, "z" => XK_z, "space" => XK_space, "enter" => XK_Return, "tab" => XK_Tab, "down" => XK_Down, "up" => XK_Up, "right" => XK_Right, "left" => XK_Left,
, Command>, layouts: Vec<LayoutSelect>, borders: Borders, } pub fn load_config_yaml(config_path: PathBuf) -> miette::Result<lanta::Config> { let config_file = File::open(config_path).into_diagnostic()?; let Config { keys, layouts, borders, } = serde_yaml::from_reader(config_file).into_diagnostic()?; let keys: KeyHandlers = keys .into_iter() .map(|(k, v)| (k.mods, k.key, v.into())) .collect(); let layouts: Vec<_> = layouts.into_iter().map(|l| l.into()).collect(); Ok(lanta::Config { keys, layouts, borders, }) } pub fn load_state(state_path: PathBuf) -> std::io::Result<lanta::State> { let state_file = File::open(state_path)?; Ok(match serde_yaml::from_reader(state_file) { Ok(state) => state, Err(e) => { warn!("Could not deserialize state: {}", e); Default::default() } }) }
"print" => XK_Print, a => Err(format!("Could not match key {}", a))?, }; let mods = iter .map(|mod_key| match mod_key { "C" => Ok(ModKey::Control), "M" => Ok(ModKey::Mod1), "S" => Ok(ModKey::Shift), "H" => Ok(ModKey::Mod4), a => Err(format!("Did not understand modifier {}", a)), }) .collect::<Result<Vec<_>, String>>()?; Ok(KeyInner { mods, key }) } } impl<'de> Deserialize<'de> for KeyInner { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; s.parse().map_err(de::Error::custom) } } #[derive(Deserialize, Debug)] #[serde(tag = "type")] enum LayoutSelectInner { ThreeColumn { #[serde(default)] padding: u32, }, Stack { #[serde(default)] padding: u32, }, } #[derive(Deserialize, Debug)] struct LayoutSelect { name: String, #[serde(flatten)] layout: LayoutSelectInner, } impl Into<Box<dyn Layout<WindowId>>> for LayoutSelect { fn into(self) -> Box<dyn Layout<WindowId>> { match self.layout { LayoutSelectInner::ThreeColumn { padding } => { Box::new(ThreeColumn::new(self.name, padding, 40)) } LayoutSelectInner::Stack { padding } => { Box::new(StackLayout::new(self.name, padding)) } } } } #[derive(Deserialize, Debug)] struct Config { keys: HashMap<KeyInner
random
[ { "content": "/// A single key, of the same type as the `x11::keysym` constants.\n\ntype Key = c_uint;\n\n\n\n/// A combination of zero or more mods and a key.\n\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\n\npub struct KeyCombo {\n\n pub mod_mask: ModMask,\n\n pub keysym: Key,\n\n}\n\n\n\nimpl KeyCombo...
Rust
migration/src/audio.rs
NEU-DSG/dailp-encoding
3cbfca2538e65ab1b797e120781252368063a755
use dailp::{AudioSlice, DocumentAudioId}; use reqwest::Client; use serde::{Deserialize, Serialize}; extern crate pretty_env_logger; use itertools::Itertools; use log::{error, info}; use serde_json::Value; use std::collections::{HashMap, HashSet}; #[derive(Serialize, Deserialize, Clone, Debug)] struct DrsId(String); #[derive(Serialize, Deserialize, Clone, Debug)] struct ComplexDrsObject(HashMap<String, Value>); #[derive(Serialize, Deserialize, Clone, Debug)] struct DrsRes { pid: DrsId, parent: DrsId, thumbnails: Vec<String>, canonical_object: ComplexDrsObject, } impl DrsRes { pub async fn new(client: &Client, drs_id: &str) -> Result<Self, anyhow::Error> { let drs = "https://repository.library.northeastern.edu/api/v1/files/"; Ok(client .get(format!("{}{}", drs, drs_id)) .send() .await? .json::<DrsRes>() .await?) } } #[derive(Serialize, Deserialize, Clone, Debug)] struct AudioAnnotationRow { layer: Option<String>, start_time: f64, end_time: f64, word: String, } #[non_exhaustive] struct AudioLayer; impl AudioLayer { pub const UNLABELLED: &'static str = ""; pub const DOCUMENT: &'static str = "Document"; pub const WORD: &'static str = "Syllabary Source"; } #[derive(Serialize, Deserialize, Clone, Debug)] pub struct AudioRes { audio_url: String, annotations: String, } impl AudioRes { pub async fn new(audio_drs_id: &str, annotation_drs_id: &str) -> Result<Self, anyhow::Error> { info!("Creating new Audio Resource"); let client = Client::new(); let audio_response = DrsRes::new(&client, audio_drs_id).await?; let annotation_response = DrsRes::new(&client, annotation_drs_id).await?; Ok(Self { audio_url: audio_response .canonical_object .0 .keys() .next() .unwrap() .clone(), annotations: client .get( annotation_response .canonical_object .0 .keys() .next() .unwrap() .clone(), ) .send() .await? 
.text() .await?, }) } pub fn into_document_audio(self) -> AudioSlice { AudioSlice { resource_url: self.audio_url.clone(), parent_track: Some(DocumentAudioId("".to_string())), annotations: Some(self.into_audio_slices()), index: 0, start_time: None, end_time: None, } } pub fn into_audio_slices(self /*from_layer: String*/) -> Vec<AudioSlice> { let mut result: Vec<AudioSlice> = vec![]; use csv::{Error, ReaderBuilder}; let mut reader = ReaderBuilder::new() .delimiter(b'\t') .has_headers(false) .from_reader(self.annotations.as_bytes()); for (annotation_line, i) in reader.deserialize::<AudioAnnotationRow>().zip(0..) { if annotation_line.is_err() { error!("Failed to add line {}", i); result.push(AudioSlice { resource_url: self.audio_url.clone(), parent_track: Some(DocumentAudioId("".to_string())), annotations: None, index: i, start_time: None, end_time: None, }); } else { let annotation = annotation_line.unwrap(); result.push(AudioSlice { resource_url: self.audio_url.clone(), parent_track: Some(DocumentAudioId("".to_string())), annotations: None, index: i, start_time: Some((annotation.start_time * 1000.0) as i32), end_time: Some((annotation.end_time * 1000.0) as i32), }); info!( "Successfully added from line {}.\nURL: {}\nStart:{}ms\nEnd:{}ms", i, self.audio_url.clone(), annotation.start_time * 1000.0, annotation.end_time * 1000.0 ); }; } result } }
use dailp::{AudioSlice, DocumentAudioId}; use reqwest::Client; use serde::{Deserialize, Serialize}; extern crate pretty_env_logger; use itertools::Itertools; use log::{error, info}; use serde_json::Value; use std::collections::{HashMap, HashSet}; #[derive(Serialize, Deserialize, Clone, Debug)] struct DrsId(String); #[derive(Serialize, Deserialize, Clone, Debug)] struct ComplexDrsObject(HashMap<String, Value>); #[derive(Serialize, Deserialize, Clone, Debug)] struct DrsRes { pid: DrsId, parent: DrsId, thumbnails: Vec<String>, canonical_object: ComplexDrsObject, } impl DrsRes { pub async fn new(client: &Client, drs_id: &str) -> Result<Self, anyhow::Error> { let drs = "https://repository.library.northeastern.edu/api/v1/files/"; Ok(client .get(format!("{}{}", drs, drs_id)) .send() .await? .json::<DrsRes>() .await?) } } #[derive(Serialize, Deserialize, Clone, Debug)] struct AudioAnnotationRow { layer: Option<String>, start_time: f64, end_time: f64, word: String, } #[non_exhaustive] struct AudioLayer; impl AudioLayer { pub const UNLABELLED: &'static str = ""; pub const DOCUMENT: &'static str = "Document"; pub const WORD: &'static str = "Syllabary Source"; } #[derive(Serialize, Deserialize, Clone, Debug)] pub struct AudioRes { audio_url: String, annotations: String, } impl AudioRes { pub async fn new(audio_drs_id: &str, annotation_drs_id: &str) -> Result<Self, anyhow::Error> {
pub fn into_document_audio(self) -> AudioSlice { AudioSlice { resource_url: self.audio_url.clone(), parent_track: Some(DocumentAudioId("".to_string())), annotations: Some(self.into_audio_slices()), index: 0, start_time: None, end_time: None, } } pub fn into_audio_slices(self /*from_layer: String*/) -> Vec<AudioSlice> { let mut result: Vec<AudioSlice> = vec![]; use csv::{Error, ReaderBuilder}; let mut reader = ReaderBuilder::new() .delimiter(b'\t') .has_headers(false) .from_reader(self.annotations.as_bytes()); for (annotation_line, i) in reader.deserialize::<AudioAnnotationRow>().zip(0..) { if annotation_line.is_err() { error!("Failed to add line {}", i); result.push(AudioSlice { resource_url: self.audio_url.clone(), parent_track: Some(DocumentAudioId("".to_string())), annotations: None, index: i, start_time: None, end_time: None, }); } else { let annotation = annotation_line.unwrap(); result.push(AudioSlice { resource_url: self.audio_url.clone(), parent_track: Some(DocumentAudioId("".to_string())), annotations: None, index: i, start_time: Some((annotation.start_time * 1000.0) as i32), end_time: Some((annotation.end_time * 1000.0) as i32), }); info!( "Successfully added from line {}.\nURL: {}\nStart:{}ms\nEnd:{}ms", i, self.audio_url.clone(), annotation.start_time * 1000.0, annotation.end_time * 1000.0 ); }; } result } }
info!("Creating new Audio Resource"); let client = Client::new(); let audio_response = DrsRes::new(&client, audio_drs_id).await?; let annotation_response = DrsRes::new(&client, annotation_drs_id).await?; Ok(Self { audio_url: audio_response .canonical_object .0 .keys() .next() .unwrap() .clone(), annotations: client .get( annotation_response .canonical_object .0 .keys() .next() .unwrap() .clone(), ) .send() .await? .text() .await?, }) }
function_block-function_prefix_line
[ { "content": "pub fn simple_phonetics_to_worcester(input: &str) -> String {\n\n use {\n\n lazy_static::lazy_static,\n\n regex::{Captures, Regex},\n\n };\n\n // Convert the t/th consonants to d/t\n\n lazy_static! {\n\n static ref TTH_PATTERN: Regex = Regex::new(r\"(gw|kw|j|ʔ|:)\"...
Rust
src/lib.rs
SolarLiner/buffers
43483f3e5401648babc85faf734907835280e7d3
use std::{fs, io}; use std::io::{Cursor, Error, Read, Write}; pub enum Input { Standard(io::Stdin), Memory(io::Cursor<Vec<u8>>), File(fs::File), } pub enum Output { Standard(io::Stdout), Memory(io::Cursor<Vec<u8>>), File(fs::File), } pub enum InputOutput { Standard(io::Stdin, io::Stdout), Memory(io::Cursor<Vec<u8>>), File(fs::File), } impl Input { pub fn stdin() -> Self { Input::Standard(io::stdin()) } pub fn memory() -> Self { Input::Memory(Cursor::new(vec![])) } pub fn file(path: &str) -> io::Result<Self> { fs::OpenOptions::new() .read(true) .open(path) .map(Input::File) } pub fn from_arg(arg: Option<&str>) -> io::Result<Self> { match arg { None | Some("-") => Ok(Self::stdin()), Some(fname) => Self::file(fname), } } } impl Read for Input { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { match self { Input::Standard(ref mut s) => s.read(buf), Input::Memory(ref mut m) => m.read(buf), Input::File(ref mut f) => f.read(buf), } } } impl Output { pub fn stdout() -> Self { Output::Standard(io::stdout()) } pub fn memory() -> Self { Output::Memory(Cursor::new(vec![])) } pub fn file(path: &str) -> io::Result<Self> { fs::OpenOptions::new() .write(true) .create(true) .open(path) .map(Output::File) } pub fn from_arg(arg: Option<&str>) -> io::Result<Self> { match arg { None | Some("-") => Ok(Self::stdout()), Some(fname) => Self::file(fname), } } } impl Write for Output { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { match self { Output::Standard(ref mut s) => s.write(buf), Output::Memory(ref mut m) => m.write(buf), Output::File(ref mut f) => f.write(buf), } } fn flush(&mut self) -> Result<(), Error> { match self { Output::Standard(ref mut s) => s.flush(), Output::Memory(ref mut m) => m.flush(), Output::File(ref mut f) => f.flush(), } } } impl InputOutput { pub fn stdio() -> InputOutput { InputOutput::Standard(io::stdin(), io::stdout()) } pub fn memory() -> InputOutput { InputOutput::Memory(Cursor::new(vec![])) } pub fn file(path: &str) -> 
io::Result<InputOutput> { fs::OpenOptions::new().read(true).write(true).open(path).map(InputOutput::File) } pub fn from_arg(arg: Option<&str>) -> io::Result<InputOutput> { match arg { None | Some("-") => Ok(Self::stdio()), Some(path) => Self::file(path), } } } impl Read for InputOutput { fn read(&mut self, buf: &mut [u8]) -> Result<usize, Error> { match self { InputOutput::Standard(stdin, _) => stdin.read(buf), InputOutput::Memory(c) => c.read(buf), InputOutput::File(f) => f.read(buf) } } } impl Write for InputOutput { fn write(&mut self, buf: &[u8]) -> Result<usize, Error> { match self { InputOutput::Standard(_, stdout) => stdout.write(buf), InputOutput::Memory(c) => c.write(buf), InputOutput::File(f) => f.write(buf), } } fn flush(&mut self) -> Result<(), Error> { match self { InputOutput::Standard(_, stdout) => stdout.flush(), InputOutput::Memory(m) => m.flush(), InputOutput::File(f) => f.flush() } } }
use std::{fs, io}; use std::io::{Cursor, Error, Read, Write}; pub enum Input { Standard(io::Stdin), Memory(io::Cursor<Vec<u8>>), File(fs::File), } pub enum Output { Standard(io::Stdout), Memory(io::Cursor<Vec<u8>>), File(fs::File), } pub enum InputOutput { Standard(io::Stdin, io::Stdout), Memory(io::Cursor<Vec<u8>>), File(fs::File), } impl Input { pub fn stdin() -> Self { Input::Standard(io::stdin()) } pub fn memory() -> Self { Input::Memory(Cursor::new(vec![])) } pub fn file(path: &str) -> io::Result<Self> { fs::OpenOptions::new() .read(true) .open(path) .map(Input::File) } pub fn from_arg(arg: Option<&str>) -> io::Result<Self> { match arg { None | Some("-") => Ok(Self::stdin()), Some(fname) => Self::file(fname), } } } impl Read for Input { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { match self { Input::Standard(ref mut s) => s.read(buf), Input::Memory(ref mut m) => m.read(buf), Input::File(ref mut f) => f.read(buf), } } } impl Output { pub fn stdout() -> Self { Output::Standard(io::stdout()) } pub fn memory() -> Self { Output::Memory(Cursor::new(vec![])) } pub fn file(path: &str) -> io::Result<Self> { fs::OpenOptions::new() .write(true) .create(true) .open(path) .map(Output::File) } pub fn from_arg(arg: Option<&str>) -> io::Result<Self> { match arg { None | Some("-") => Ok(Self::stdout()), Some(fname) => Self::file(fname), } } } impl Write for Output { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { match self { Output::Standard(ref mut s) => s.write(buf), Output::Memory(ref mut m) => m.write(buf), Output::File(ref mut f) => f.write(buf), } } fn flush(&mut self) -> Result<(), Error> { match self { Output::Standard(ref mut s) => s.flush(), Output::Memory(ref mut m) => m.flush(), Output::File(ref mut f) => f.flush(), } } } impl InputOutput { pub fn stdio() -> InputOutput { InputOutput::Standard(io::stdin(), io::stdout()) } pub fn memory() -> InputOutput { InputOutput::Memory(Cursor::new(vec![])) } pub fn file(path: &str) -> 
io::Result<InputOutput> { fs::OpenOptions::new().read(true).write(true).open(path).map(InputOutput::File) } pub fn from_arg(arg: Option<&str>) -> io::Result<InputOutput> { match arg { None | Some("-") => Ok(Self::stdio()), Some(path) => Self::file(path), } } } impl Read for InputOutput { fn read(&mut self, buf: &mut [u8]) -> Result<usize, Error> { match self { InputOutput::Standard(stdin, _) => stdin.read(buf), InputOutput::Memory(c) => c.read(buf), InputOutput::File(f) => f.read(buf) } } } impl Write for InputOutput { fn write(&mut self, buf: &[u8]) -> Result<usize, Error> { match self { InputOutput::Standard(_, stdout) => stdout.write(buf), InputOutput::Memory(c) => c.write(buf), InputOutput::File(f) => f.write(buf), } } fn flush(&mut self) -> Result<(), Error> {
} }
match self { InputOutput::Standard(_, stdout) => stdout.flush(), InputOutput::Memory(m) => m.flush(), InputOutput::File(f) => f.flush() }
if_condition
[ { "content": "# buffers\n\nCollection of unified buffers from stdio, file and memory buffers.\n\n\n\nThe `buffers` crate unifies standard IO, memory and file buffers into a unified type, allowing\n\nto effectively leave the type of buffer used to the user.\n\n\n\n## How to use\n\n\n\nThe `buffers` crate exposes...
Rust
src/locustdb.rs
virattara/LocustDB
42945df6f4313b9dbded35bcd2d3018a003af003
use std::str; use std::sync::Arc; use std::error::Error; use std::path::{Path, PathBuf}; use futures::channel::oneshot; use num_cpus; use crate::QueryError; use crate::QueryResult; use crate::disk_store::interface::*; use crate::disk_store::noop_storage::NoopStorage; use crate::engine::query_task::QueryTask; use crate::ingest::colgen::GenTable; use crate::ingest::csv_loader::{CSVIngestionTask, Options as LoadOptions}; use crate::mem_store::*; use crate::scheduler::*; use crate::syntax::parser; pub struct LocustDB { inner_locustdb: Arc<InnerLocustDB> } impl LocustDB { pub fn memory_only() -> LocustDB { LocustDB::new(&Options::default()) } pub fn new(opts: &Options) -> LocustDB { let disk_store = opts.db_path.as_ref() .map(|path| LocustDB::persistent_storage(path)) .unwrap_or_else(|| Arc::new(NoopStorage)); let locustdb = Arc::new(InnerLocustDB::new(disk_store, opts)); InnerLocustDB::start_worker_threads(&locustdb); LocustDB { inner_locustdb: locustdb } } pub async fn run_query(&self, query: &str, explain: bool, show: Vec<usize>) -> Result<QueryResult, oneshot::Canceled> { let (sender, receiver) = oneshot::channel(); let query = match parser::parse_query(query) { Ok(query) => query, Err(err) => return Ok(Err(err)), }; let mut data = match self.inner_locustdb.snapshot(&query.table) { Some(data) => data, None => return Ok(Err( QueryError::NotImplemented(format!("Table {} does not exist!", &query.table)))), }; if self.inner_locustdb.opts().seq_disk_read { self.inner_locustdb.disk_read_scheduler() .schedule_sequential_read(&mut data, &query.find_referenced_cols(), self.inner_locustdb.opts().readahead); let ldb = self.inner_locustdb.clone(); let (read_data, _) = Task::from_fn(move || ldb.disk_read_scheduler().service_reads(&ldb)); let _ = self.inner_locustdb.schedule(read_data); } let query_task = QueryTask::new( query, explain, show, data, self.inner_locustdb.disk_read_scheduler().clone(), SharedSender::new(sender) ); match query_task { Ok(task) => { self.schedule(task); 
Ok(receiver.await?) } Err(err) => Ok(Err(err)), } } pub async fn load_csv(&self, options: LoadOptions) -> Result<(), Box<dyn Error>> { let (sender, receiver) = oneshot::channel(); let task = CSVIngestionTask::new( options, self.inner_locustdb.clone(), SharedSender::new(sender)); let _ = self.schedule(task); Ok(receiver.await??) } pub async fn gen_table(&self, opts: GenTable) -> Result<(), oneshot::Canceled> { let mut receivers = Vec::new(); let opts = Arc::new(opts); for partition in 0..opts.partitions { let opts = opts.clone(); let inner = self.inner_locustdb.clone(); let (task, receiver) = Task::from_fn(move || inner.gen_partition(&opts, partition as u64)); let _ = self.schedule(task); receivers.push(receiver); } for receiver in receivers { receiver.await?; } Ok(()) } pub fn ast(&self, query: &str) -> String { match parser::parse_query(query) { Ok(query) => format!("{:#?}", query), Err(err) => format!("{:?}", err), } } pub async fn bulk_load(&self) -> Result<Vec<MemTreeTable>, oneshot::Canceled> { for table in self.inner_locustdb.full_snapshot() { self.inner_locustdb.disk_read_scheduler() .schedule_bulk_load(table, self.inner_locustdb.opts().readahead); } let mut receivers = Vec::new(); for _ in 0..self.inner_locustdb.opts().read_threads { let ldb = self.inner_locustdb.clone(); let (read_data, receiver) = Task::from_fn(move || ldb.disk_read_scheduler().service_reads(&ldb)); let _ = self.inner_locustdb.schedule(read_data); receivers.push(receiver); } for receiver in receivers { receiver.await?; } self.mem_tree(2).await } pub fn recover(&self) { self.inner_locustdb.drop_pending_tasks(); InnerLocustDB::start_worker_threads(&self.inner_locustdb); } pub async fn mem_tree(&self, depth: usize) -> Result<Vec<MemTreeTable>, oneshot::Canceled> { let inner = self.inner_locustdb.clone(); let (task, receiver) = Task::from_fn(move || inner.mem_tree(depth)); let _ = self.schedule(task); receiver.await } pub async fn table_stats(&self) -> Result<Vec<TableStats>, 
oneshot::Canceled> { let inner = self.inner_locustdb.clone(); let (task, receiver) = Task::from_fn(move || inner.stats()); let _ = self.schedule(task); receiver.await } pub fn schedule<T: Task + 'static>(&self, task: T) { self.inner_locustdb.schedule(task) } #[cfg(feature = "enable_rocksdb")] pub fn persistent_storage<P: AsRef<Path>>(db_path: P) -> Arc<dyn DiskStore> { use crate::disk_store::rocksdb; Arc::new(rocksdb::RocksDB::new(db_path)) } #[cfg(not(feature = "enable_rocksdb"))] pub fn persistent_storage<P: AsRef<Path>>(_: P) -> Arc<dyn DiskStore> { panic!("RocksDB storage backend is not enabled in this build of LocustDB. Create db with `memory_only`, or set the `enable_rocksdb` feature.") } } impl Drop for LocustDB { fn drop(&mut self) { self.inner_locustdb.stop(); } } #[derive(Clone)] pub struct Options { pub threads: usize, pub read_threads: usize, pub db_path: Option<PathBuf>, pub mem_size_limit_tables: usize, pub mem_lz4: bool, pub readahead: usize, pub seq_disk_read: bool, } impl Default for Options { fn default() -> Options { Options { threads: num_cpus::get(), read_threads: num_cpus::get(), db_path: None, mem_size_limit_tables: 8 * 1024 * 1024 * 1024, mem_lz4: true, readahead: 256 * 1024 * 1024, seq_disk_read: false, } } }
use std::str; use std::sync::Arc; use std::error::Error; use std::path::{Path, PathBuf}; use futures::channel::oneshot; use num_cpus; use crate::QueryError; use crate::QueryResult; use crate::disk_store::interface::*; use crate::disk_store::noop_storage::NoopStorage; use crate::engine::query_task::QueryTask; use crate::ingest::colgen::GenTable; use crate::ingest::csv_loader::{CSVIngestionTask, Options as LoadOptions}; use crate::mem_store::*; use crate::scheduler::*; use crate::syntax::parser; pub struct LocustDB { inner_locustdb: Arc<InnerLocustDB> } impl LocustDB { pub fn memory_only() -> LocustDB { LocustDB::new(&Options::default()) } pub fn new(opts: &Options) -> LocustDB { let disk_store = opts.db_path.as_ref() .map(|path| LocustDB::persistent_storage(path)) .
pub async fn run_query(&self, query: &str, explain: bool, show: Vec<usize>) -> Result<QueryResult, oneshot::Canceled> { let (sender, receiver) = oneshot::channel(); let query = match parser::parse_query(query) { Ok(query) => query, Err(err) => return Ok(Err(err)), }; let mut data = match self.inner_locustdb.snapshot(&query.table) { Some(data) => data, None => return Ok(Err( QueryError::NotImplemented(format!("Table {} does not exist!", &query.table)))), }; if self.inner_locustdb.opts().seq_disk_read { self.inner_locustdb.disk_read_scheduler() .schedule_sequential_read(&mut data, &query.find_referenced_cols(), self.inner_locustdb.opts().readahead); let ldb = self.inner_locustdb.clone(); let (read_data, _) = Task::from_fn(move || ldb.disk_read_scheduler().service_reads(&ldb)); let _ = self.inner_locustdb.schedule(read_data); } let query_task = QueryTask::new( query, explain, show, data, self.inner_locustdb.disk_read_scheduler().clone(), SharedSender::new(sender) ); match query_task { Ok(task) => { self.schedule(task); Ok(receiver.await?) } Err(err) => Ok(Err(err)), } } pub async fn load_csv(&self, options: LoadOptions) -> Result<(), Box<dyn Error>> { let (sender, receiver) = oneshot::channel(); let task = CSVIngestionTask::new( options, self.inner_locustdb.clone(), SharedSender::new(sender)); let _ = self.schedule(task); Ok(receiver.await??) 
} pub async fn gen_table(&self, opts: GenTable) -> Result<(), oneshot::Canceled> { let mut receivers = Vec::new(); let opts = Arc::new(opts); for partition in 0..opts.partitions { let opts = opts.clone(); let inner = self.inner_locustdb.clone(); let (task, receiver) = Task::from_fn(move || inner.gen_partition(&opts, partition as u64)); let _ = self.schedule(task); receivers.push(receiver); } for receiver in receivers { receiver.await?; } Ok(()) } pub fn ast(&self, query: &str) -> String { match parser::parse_query(query) { Ok(query) => format!("{:#?}", query), Err(err) => format!("{:?}", err), } } pub async fn bulk_load(&self) -> Result<Vec<MemTreeTable>, oneshot::Canceled> { for table in self.inner_locustdb.full_snapshot() { self.inner_locustdb.disk_read_scheduler() .schedule_bulk_load(table, self.inner_locustdb.opts().readahead); } let mut receivers = Vec::new(); for _ in 0..self.inner_locustdb.opts().read_threads { let ldb = self.inner_locustdb.clone(); let (read_data, receiver) = Task::from_fn(move || ldb.disk_read_scheduler().service_reads(&ldb)); let _ = self.inner_locustdb.schedule(read_data); receivers.push(receiver); } for receiver in receivers { receiver.await?; } self.mem_tree(2).await } pub fn recover(&self) { self.inner_locustdb.drop_pending_tasks(); InnerLocustDB::start_worker_threads(&self.inner_locustdb); } pub async fn mem_tree(&self, depth: usize) -> Result<Vec<MemTreeTable>, oneshot::Canceled> { let inner = self.inner_locustdb.clone(); let (task, receiver) = Task::from_fn(move || inner.mem_tree(depth)); let _ = self.schedule(task); receiver.await } pub async fn table_stats(&self) -> Result<Vec<TableStats>, oneshot::Canceled> { let inner = self.inner_locustdb.clone(); let (task, receiver) = Task::from_fn(move || inner.stats()); let _ = self.schedule(task); receiver.await } pub fn schedule<T: Task + 'static>(&self, task: T) { self.inner_locustdb.schedule(task) } #[cfg(feature = "enable_rocksdb")] pub fn persistent_storage<P: AsRef<Path>>(db_path: 
P) -> Arc<dyn DiskStore> { use crate::disk_store::rocksdb; Arc::new(rocksdb::RocksDB::new(db_path)) } #[cfg(not(feature = "enable_rocksdb"))] pub fn persistent_storage<P: AsRef<Path>>(_: P) -> Arc<dyn DiskStore> { panic!("RocksDB storage backend is not enabled in this build of LocustDB. Create db with `memory_only`, or set the `enable_rocksdb` feature.") } } impl Drop for LocustDB { fn drop(&mut self) { self.inner_locustdb.stop(); } } #[derive(Clone)] pub struct Options { pub threads: usize, pub read_threads: usize, pub db_path: Option<PathBuf>, pub mem_size_limit_tables: usize, pub mem_lz4: bool, pub readahead: usize, pub seq_disk_read: bool, } impl Default for Options { fn default() -> Options { Options { threads: num_cpus::get(), read_threads: num_cpus::get(), db_path: None, mem_size_limit_tables: 8 * 1024 * 1024 * 1024, mem_lz4: true, readahead: 256 * 1024 * 1024, seq_disk_read: false, } } }
unwrap_or_else(|| Arc::new(NoopStorage)); let locustdb = Arc::new(InnerLocustDB::new(disk_store, opts)); InnerLocustDB::start_worker_threads(&locustdb); LocustDB { inner_locustdb: locustdb } }
function_block-function_prefix_line
[ { "content": "fn parse_type(field_ident: &Ident, type_def: String) -> Option<(Expr, Option<FnArg>)> {\n\n lazy_static! {\n\n // E.g. `data` in `( t = \"data.nullable\" )`\n\n static ref T: Regex = Regex::new(r#\"t = \"(.*)\"\"#).unwrap();\n\n static ref BASE: Regex = Regex::new(r#\"base=...
Rust
demo/src/components/physics.rs
aclysma/atelier-legion-demo
658d1f6471cd41d48b13f7fc2db2f5ebabdb9429
use serde::{Deserialize, Serialize}; use serde_diff::SerdeDiff; use type_uuid::TypeUuid; use nphysics2d::object::DefaultBodyHandle; use legion_transaction::SpawnFrom; use crate::math::Vec2; use crate::resources::{PhysicsResource, OpenedPrefabState}; use legion::prelude::*; use std::ops::Range; use legion::storage::ComponentStorage; use skulpin_plugin_imgui::imgui; use imgui_inspect_derive::Inspect; use ncollide2d::shape::ShapeHandle; use ncollide2d::shape::{Ball, Cuboid}; use ncollide2d::pipeline::{CollisionGroups, GeometricQueryType}; use legion::index::ComponentIndex; use legion_transaction::iter_components_in_storage; use crate::components::{ Position2DComponent, UniformScale2DComponent, NonUniformScale2DComponent, Rotation2DComponent, }; use ncollide2d::world::CollisionWorld; #[derive(TypeUuid, Serialize, Deserialize, SerdeDiff, Debug, PartialEq, Clone, Inspect, Default)] #[uuid = "fa518c0a-a65a-44c8-9d35-3f4f336b4de4"] pub struct RigidBodyBallComponentDef { pub radius: f32, pub is_static: bool, } legion_prefab::register_component_type!(RigidBodyBallComponentDef); #[derive(TypeUuid, Serialize, Deserialize, SerdeDiff, Debug, PartialEq, Clone, Inspect, Default)] #[uuid = "36df3006-a5ad-4997-9ccc-0860f49195ad"] pub struct RigidBodyBoxComponentDef { #[serde_diff(opaque)] pub half_extents: Vec2, pub is_static: bool, } legion_prefab::register_component_type!(RigidBodyBoxComponentDef); pub struct RigidBodyComponent { pub handle: DefaultBodyHandle, delete_body_tx: crossbeam_channel::Sender<DefaultBodyHandle>, } impl Drop for RigidBodyComponent { fn drop(&mut self) { self.delete_body_tx.send(self.handle); } } fn transform_shape_to_rigid_body( physics: &mut PhysicsResource, into: &mut std::mem::MaybeUninit<RigidBodyComponent>, src_position: Option<&Position2DComponent>, src_rotation: Option<&Rotation2DComponent>, shape_handle: ShapeHandle<f32>, is_static: bool, ) { let position = if let Some(position) = src_position { position.position } else { Vec2::zero() }; let mut 
collider_offset = Vec2::zero(); let rigid_body_handle = if is_static { *collider_offset += *position; physics.bodies.insert(nphysics2d::object::Ground::new()) } else { physics.bodies.insert( nphysics2d::object::RigidBodyDesc::new() .translation(position.into()) .build(), ) }; let collider = nphysics2d::object::ColliderDesc::new(shape_handle.clone()) .density(1.0) .translation(collider_offset.into()) .build(nphysics2d::object::BodyPartHandle(rigid_body_handle, 0)); physics.colliders.insert(collider); *into = std::mem::MaybeUninit::new(RigidBodyComponent { handle: rigid_body_handle, delete_body_tx: physics.delete_body_tx().clone(), }) } impl SpawnFrom<RigidBodyBallComponentDef> for RigidBodyComponent { fn spawn_from( _src_world: &World, src_component_storage: &ComponentStorage, src_component_storage_indexes: Range<ComponentIndex>, resources: &Resources, _src_entities: &[Entity], _dst_entities: &[Entity], from: &[RigidBodyBallComponentDef], into: &mut [std::mem::MaybeUninit<Self>], ) { let mut physics = resources.get_mut::<PhysicsResource>().unwrap(); let position_components = iter_components_in_storage::<Position2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let uniform_scale_components = iter_components_in_storage::<UniformScale2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let rotation_components = iter_components_in_storage::<Rotation2DComponent>( src_component_storage, src_component_storage_indexes, ); for (src_position, src_uniform_scale, src_rotation, from, into) in izip!( position_components, uniform_scale_components, rotation_components, from, into ) { let mut radius = from.radius; if let Some(src_uniform_scale) = src_uniform_scale { radius *= src_uniform_scale.uniform_scale; } let shape_handle = ShapeHandle::new(Ball::new(radius.max(0.01))); transform_shape_to_rigid_body( &mut physics, into, src_position, src_rotation, shape_handle, from.is_static, ); } } } impl 
crate::selection::EditorSelectableTransformed<RigidBodyComponent> for RigidBodyBallComponentDef { fn create_editor_selection_world( &self, collision_world: &mut CollisionWorld<f32, Entity>, resources: &Resources, opened_prefab: &OpenedPrefabState, prefab_world: &World, prefab_entity: Entity, transformed_world: &World, transformed_entity: Entity, transformed_component: &RigidBodyComponent, ) { if let Some(position) = prefab_world.get_component::<Position2DComponent>(prefab_entity) { let mut radius = self.radius; if let Some(uniform_scale) = prefab_world.get_component::<UniformScale2DComponent>(prefab_entity) { radius *= uniform_scale.uniform_scale; } let shape_handle = ShapeHandle::new(Ball::new(radius.max(0.01))); collision_world.add( ncollide2d::math::Isometry::new(position.position.into(), 0.0), shape_handle, CollisionGroups::new(), GeometricQueryType::Proximity(0.001), transformed_entity, ); } } } impl SpawnFrom<RigidBodyBoxComponentDef> for RigidBodyComponent { fn spawn_from( _src_world: &World, src_component_storage: &ComponentStorage, src_component_storage_indexes: Range<ComponentIndex>, resources: &Resources, _src_entities: &[Entity], _dst_entities: &[Entity], from: &[RigidBodyBoxComponentDef], into: &mut [std::mem::MaybeUninit<Self>], ) { let mut physics = resources.get_mut::<PhysicsResource>().unwrap(); let position_components = iter_components_in_storage::<Position2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let uniform_scale_components = iter_components_in_storage::<UniformScale2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let non_uniform_scale_components = iter_components_in_storage::<NonUniformScale2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let rotation_components = iter_components_in_storage::<Rotation2DComponent>( src_component_storage, src_component_storage_indexes, ); for (src_position, src_uniform_scale, src_non_uniform_scale, src_rotation, from, 
into) in izip!( position_components, uniform_scale_components, non_uniform_scale_components, rotation_components, from, into ) { let mut half_extents = *from.half_extents; if let Some(src_uniform_scale) = src_uniform_scale { half_extents *= glam::Vec2::splat(src_uniform_scale.uniform_scale); } if let Some(src_non_uniform_scale) = src_non_uniform_scale { half_extents *= *src_non_uniform_scale.non_uniform_scale; } let shape_handle = ShapeHandle::new(Cuboid::new(crate::math::vec2_glam_to_glm(half_extents))); transform_shape_to_rigid_body( &mut physics, into, src_position, src_rotation, shape_handle, from.is_static, ); } } } impl crate::selection::EditorSelectableTransformed<RigidBodyComponent> for RigidBodyBoxComponentDef { fn create_editor_selection_world( &self, collision_world: &mut CollisionWorld<f32, Entity>, resources: &Resources, opened_prefab: &OpenedPrefabState, prefab_world: &World, prefab_entity: Entity, transformed_world: &World, transformed_entity: Entity, transformed_component: &RigidBodyComponent, ) { if let Some(position) = prefab_world.get_component::<Position2DComponent>(prefab_entity) { let mut half_extents = *self.half_extents; if let Some(uniform_scale) = prefab_world.get_component::<UniformScale2DComponent>(prefab_entity) { half_extents *= uniform_scale.uniform_scale; } if let Some(non_uniform_scale) = prefab_world.get_component::<NonUniformScale2DComponent>(prefab_entity) { half_extents *= *non_uniform_scale.non_uniform_scale; } let mut rotation = 0.0; if let Some(rotation_component) = prefab_world.get_component::<Rotation2DComponent>(prefab_entity) { rotation = rotation_component.rotation; } let shape_handle = ShapeHandle::new(Cuboid::new(crate::math::vec2_glam_to_glm(half_extents))); collision_world.add( ncollide2d::math::Isometry::new(position.position.into(), rotation), shape_handle, CollisionGroups::new(), GeometricQueryType::Proximity(0.001), transformed_entity, ); } } }
use serde::{Deserialize, Serialize}; use serde_diff::SerdeDiff; use type_uuid::TypeUuid; use nphysics2d::object::DefaultBodyHandle; use legion_transaction::SpawnFrom; use crate::math::Vec2; use crate::resources::{PhysicsResource, OpenedPrefabState}; use legion::prelude::*; use std::ops::Range; use legion::storage::ComponentStorage; use skulpin_plugin_imgui::imgui; use imgui_inspect_derive::Inspect; use ncollide2d::shape::ShapeHandle; use ncollide2d::shape::{Ball, Cuboid}; use ncollide2d::pipeline::{CollisionGroups, GeometricQueryType}; use legion::index::ComponentIndex; use legion_transaction::iter_components_in_storage; use crate::components::{ Position2DComponent, UniformScale2DComponent, NonUniformScale2DComponent, Rotation2DComponent, }; use ncollide2d::world::CollisionWorld; #[derive(TypeUuid, Serialize, Deserialize, SerdeDiff, Debug, PartialEq, Clone, Inspect, Default)] #[uuid = "fa518c0a-a65a-44c8-9d35-3f4f336b4de4"] pub struct RigidBodyBallComponentDef { pub radius: f32, pub is_static: bool, } legion_prefab::register_component_type!(RigidBodyBallComponentDef); #[derive(TypeUuid, Serialize, Deserialize, SerdeDiff, Debug, PartialEq, Clone, Inspect, Default)] #[uuid = "36df3006-a5ad-4997-9ccc-0860f49195ad"] pub struct RigidBodyBoxComponentDef { #[serde_diff(opaque)] pub half_extents: Vec2, pub is_static: bool, } legion_prefab::register_component_type!(RigidBodyBoxComponentDef); pub struct RigidBodyComponent { pub handle: DefaultBodyHandle, delete_body_tx: crossbeam_channel::Sender<DefaultBodyHandle>, } impl Drop for RigidBodyComponent { fn drop(&mut self) { self.delete_body_tx.send(self.handle); } } fn transform_shape_to_rigid_body( physics: &mut PhysicsResource, into: &mut std::mem::MaybeUninit<RigidBodyComponent>, src_position: Option<&Position2DComponent>, src_rotation: Option<&Rotation2DComponent>, shape_handle: ShapeHandle<f32>, is_static: bool, ) { let position = if let Some(position) = src_position { position.position } else { Vec2::zero() }; let mut 
collider_offset = Vec2::zero(); let rigid_body_handle = if is_static { *collider_offset += *position; physics.bodies.insert(nphysics2d::object::Ground::new()) } else { physics.bodies.insert( nphysics2d::object::RigidBodyDesc::new() .translation(position.into()) .build(), ) }; let collider = nphysics2d::object::ColliderDesc::new(shape_handle.clone()) .density(1.0) .translation(collider_offset.into()) .build(nphysics2d::object::BodyPartHandle(rigid_body_handle, 0)); physics.colliders.insert(collider); *into = std::mem::MaybeUninit::new(RigidBodyComponent { handle: rigid_body_handle, delete_body_tx: physics.delete_body_tx().clone(), }) } impl SpawnFrom<RigidBodyBallComponentDef> for RigidBodyComponent { fn spawn_from( _src_world: &World, src_component_storage: &ComponentStorage, src_component_storage_indexes: Range<ComponentIndex>, resources: &Resources, _src_entities: &[Entity], _dst_entities: &[Entity], from: &[RigidBodyBallComponentDef], into: &mut [std::mem::MaybeUninit<Self>], ) { let mut physics = resources.get_mut::<PhysicsResource>().unwrap(); let position_components = iter_components_in_storage::<Position2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let uniform_scale_components = iter_components_in_storage::<UniformScale2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let rotation_components = iter_components_in_storage::<Rotation2DComponent>( src_component_storage, src_component_storage_indexes, ); for (src_position, src_uniform_scale, src_rotation, from, into) in izip!( position_components, uniform_scale_components, rotation_components, from, into ) { let mut radius = from.radius; if let Some(src_uniform_scale) = src_uniform_scale { radius *= src_uniform_scale.uniform_scale; } let shape_handle = ShapeHandle::new(Ball::new(radius.max(0.01))); transform_shape_to_rigid_body( &mut physics, into, src_position, src_rotation, shape_handle, from.is_static, ); } } } impl 
crate::selection::EditorSelectableTransformed<RigidBodyComponent> for RigidBodyBallComponentDef { fn create_editor_selection_world( &self, collision_world: &mut CollisionWorld<f32, Entity>, resources: &Resources, opened_prefab: &OpenedPrefabState, prefab_world: &World, prefab_entity: Entity, transformed_world: &World, transformed_entity: Entity, transformed_component: &RigidBodyComponent, ) { if let Some(position) = prefab_world.get_component::<Position2DComponent>(prefab_entity) { let mut radius = self.radius; if let Some(uniform_scale) = prefab_world.get_component::<UniformScale2DComponent>(prefab_entity) { radius *= uniform_scale.uniform_scale; } let shape_handle = ShapeHandle::new(Ball::new(radius.max(0.01))); collision_world.add( ncollide2d::math::Isometry::new(position.position.into(), 0.0), shape_handle, CollisionGroups::new(), GeometricQueryType::Proximity(0.001), transformed_entity, ); } } } impl SpawnFrom<RigidBodyBoxComponentDef> for RigidBodyComponent { fn spawn_from( _src_world: &World, src_component_storage: &ComponentStorage, src_component_storage_indexes: Range<ComponentIndex>, resources: &Resources, _src_entities: &[Entity], _dst_entities: &[Entity], from: &[RigidBodyBoxComponentDef], into: &mut [std::mem::MaybeUninit<Self>], ) { let mut physics = resources.get_mut::<PhysicsResource>().unwrap(); let position_components = iter_components_in_storage::<Position2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let uniform_scale_components = iter_components_in_storage::<UniformScale2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let non_uniform_scale_components = iter_components_in_storage::<NonUniformScale2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let rotation_components = iter_components_in_storage::<Rotation2DComponent>( src_component_storage, src_component_storage_indexes, ); for (src_position, src_uniform_scale, src_non_uniform_scale, src_rotation, from, 
into) in izip!( position_components, uniform_scale_components, non_uniform_scale_components, rotation_components, from, into ) { let mut half_extents = *from.half_extents; if let Some(src_uniform_scale) = src_uniform_scale { half_extents *= glam::Vec2::splat(src_uniform_scale.uniform_scale); } if let Some(src_non_uniform_scale) = src_non_uniform_scale { half_extents *= *src_non_uniform_scale.non_uniform_scale; } let shape_handle = ShapeHandle::new(Cuboid::new(crate::math::vec2_glam_to_glm(half_extents))); transform_shape_to_rigid_body( &mut physics, into, src_position, src_rotation, shape_handle, from.is_static, ); } } } impl crate::selection::EditorSelectableTransformed<RigidBodyComponent> for RigidBodyBoxComponentDef { fn create_editor_selection_world( &self, collision_world: &mut CollisionWorld<f32, Entity>, resources: &Resources, opened_prefab: &OpenedPrefabState, prefab_world: &World, prefab_entity: Entity, transformed_world: &World, transformed_entity: Entity, transformed_component: &RigidBodyComponent, ) { if let Some(position) = prefab_world.get_component::<Position2DComponent>(prefab_entity) { let mut half_extents = *self.half_extents; if let Some(uniform_scale) = prefab_world.get_component::<UniformScale2DComponent>(prefab_entity) { half_extents *= uniform_scale.uniform_scale; }
}
if let Some(non_uniform_scale) = prefab_world.get_component::<NonUniformScale2DComponent>(prefab_entity) { half_extents *= *non_uniform_scale.non_uniform_scale; } let mut rotation = 0.0; if let Some(rotation_component) = prefab_world.get_component::<Rotation2DComponent>(prefab_entity) { rotation = rotation_component.rotation; } let shape_handle = ShapeHandle::new(Cuboid::new(crate::math::vec2_glam_to_glm(half_extents))); collision_world.add( ncollide2d::math::Isometry::new(position.position.into(), rotation), shape_handle, CollisionGroups::new(), GeometricQueryType::Proximity(0.001), transformed_entity, ); } }
function_block-function_prefix_line
[ { "content": "pub fn winit_position_to_glam(position: PhysicalPosition<f64>) -> glam::Vec2 {\n\n glam::Vec2::new(position.x as f32, position.y as f32)\n\n}\n\n\n\n#[derive(Copy, Clone, Serialize, Deserialize, Debug, PartialEq, Default)]\n\n#[repr(transparent)]\n\n#[serde(transparent)]\n\npub struct Vec2 {\n\...
Rust
src/view/root.rs
PENGUINLIONG/Writus
63f47f0730380f83cdfda69899b0aa6427c0c15a
use std::sync::Arc; use serde_json::Value as JsonValue; use pulldown_cmark::Parser; use pulldown_cmark::{Options as ParserOptions, OPTION_ENABLE_TABLES}; use writium::prelude::*; use writium_cache::{Cache, DumbCacheSource}; use api::index::Index; use super::template::*; pub struct RootView { index_template: Template, digest_template: Template, post_cache: Arc<Cache<String>>, metadata_cache: Arc<Cache<JsonValue>>, index: Index, entries_per_request: usize, } impl RootView { pub fn new() -> RootView { RootView { index_template: Template::default(), digest_template: Template::default(), post_cache: Arc::new(Cache::new(0, DumbCacheSource::new())), metadata_cache: Arc::new(Cache::new(0, DumbCacheSource::new())), index: Index::default(), entries_per_request: 5, } } pub fn set_post_cache(&mut self, cache: Arc<Cache<String>>) { self.post_cache = cache; } pub fn set_metadata_cache(&mut self, cache: Arc<Cache<JsonValue>>) { self.metadata_cache = cache; } pub fn set_digest_template(&mut self, template: Template) { self.digest_template = template; } pub fn set_index_template(&mut self, template: Template) { self.index_template = template; } pub fn set_index(&mut self, index: Index) { self.index = index; } pub fn set_entries_per_request(&mut self, epr: usize) { self.entries_per_request = epr; } fn render_digest(&self, id: &str, post: &str, meta: &JsonValue) -> String { fn get_digest(full_text: &str) -> (String, String) { let mut lines = full_text.lines(); let title = lines .next() .unwrap_or_default() .chars() .skip_while(|ch| ch == &'#') .skip_while(|ch| ch == &' ') .collect(); let mut content = String::new(); lines .skip_while(|line| line.trim().len() == 0) .take_while(|line| line.trim().len() > 0) .for_each(|x| content += x); (title, content) } fn md_to_html(md: &str) -> String { let mut buf = String::with_capacity(md.len()); let mut opts = ParserOptions::empty(); opts.insert(OPTION_ENABLE_TABLES); let parser = Parser::new_ext(&md, opts); 
::pulldown_cmark::html::push_html(&mut buf, parser); buf } let path = format!("/posts/{}", id); let (title, content) = get_digest(&post); self.digest_template.render(meta, &[ ("link", &path), ("id", &id), ("title", &title), ("content", &md_to_html(&content)), ]) } fn render_index(&self, req: &mut Request) -> ApiResult { use self::header::ContentType; #[derive(Deserialize)] struct Param { page: Option<usize>, } let param = req.to_param::<Param>()?; let guard = self.index.read().unwrap(); let max_page = { let len = guard.len(); if len % self.entries_per_request == 0 { len / self.entries_per_request } else { len / self.entries_per_request + 1 } }; let page = param.page.unwrap_or_default() .min(max_page) .max(1); let skip = (page - 1) * self.entries_per_request; let take = self.entries_per_request; let ids = guard.get_range(skip, take); let mut digests = String::new(); for id in ids { let post_cache = self.post_cache.get(&id)?; let post_guard = post_cache.read().unwrap(); let post: &str = post_guard.as_ref(); let metadata_cache = self.metadata_cache.get(&id)?; let metadata_guard = metadata_cache.read().unwrap(); let metadata: &JsonValue = &metadata_guard; digests.push_str(&self.render_digest(&id, post, metadata)); } let current = page.to_string(); let (prev, prev_link) = if page - 1 > 0 { ((page - 1).to_string(), format!("?page={}", page - 1)) } else { (String::new(), String::new()) }; let (next, next_link) = if page + 1 <= max_page { ((page + 1).to_string(), format!("?page={}", page + 1)) } else { (String::new(), String::new()) }; let res = Response::new() .with_header(ContentType( "text/html; charset=UTF-8".parse().unwrap()) ) .with_body(self.index_template.render(&JsonValue::Null, &[ ("digests", &digests), ("current", &current), ("previous_link", &prev_link), ("previous", &prev), ("next_link", &next_link), ("next", &next), ])); Ok(res) } } impl Api for RootView { fn name(&self) -> &[&str] { &[] } fn route(&self, req: &mut Request) -> ApiResult { use 
self::header::{Allow, Location}; match req.method() { Method::Get => { if req.path_segs().len() == 0 || req.path_segs()[0] == "" { self.render_index(req) } else { let mut loc = "/api/v1/resources".to_owned(); for seg in req.path_segs() { loc.push('/'); loc.push_str(seg); } let res = Response::new() .with_status(StatusCode::MovedPermanently) .with_header(Location::new(loc)); Ok(res) } }, Method::Options => { let res = Response::new() .with_header(Allow(vec![Method::Options, Method::Get])); Ok(res) }, _ => Err(Error::method_not_allowed()) } } }
use std::sync::Arc; use serde_json::Value as JsonValue; use pulldown_cmark::Parser; use pulldown_cmark::{Options as ParserOptions, OPTION_ENABLE_TABLES}; use writium::prelude::*; use writium_cache::{Cache, DumbCacheSource}; use api::index::Index; use super::template::*; pub struct RootView { index_template: Template, digest_template: Template, post_cache: Arc<Cache<String>>, metadata_cache: Arc<Cache<JsonValue>>, index: Index, entries_per_request: usize, } impl RootView { pub fn new() -> RootView { RootView { index_template: Template::default(), digest_template: Template::default(), post_cache: Arc::new(Cache::new(0, DumbCacheSource::new())), metadata_cache: Arc::new(Cache::new(0, DumbCacheSource::new())), index: Index::default(), entries_per_request: 5, } } pub fn set_post_cache(&mut self, cache: Arc<Cache<String>>) { self.post_cache = cache; } pub fn set_metadata_cache(&mut self, cache: Arc<Cache<JsonValue>>) { self.metadata_cache = cache; } pub fn set_digest_template(&mut self, template: Template) { self.digest_template = template; } pub fn set_index_template(&mut self, template: Template) { self.index_template = template; } pub fn set_index(&mut self, index: Index) { self.index = index; } pub fn set_entries_per_request(&mut self, epr: usize) { self.entries_per_request = epr; } fn render_digest(&self, id: &str, post: &str, meta: &JsonValue) -> String { fn get_digest(full_text: &str) -> (String, String) { let mut lines = full_text.lines(); let title = lines .next() .unwrap_or_default()
ew(); lines .skip_while(|line| line.trim().len() == 0) .take_while(|line| line.trim().len() > 0) .for_each(|x| content += x); (title, content) } fn md_to_html(md: &str) -> String { let mut buf = String::with_capacity(md.len()); let mut opts = ParserOptions::empty(); opts.insert(OPTION_ENABLE_TABLES); let parser = Parser::new_ext(&md, opts); ::pulldown_cmark::html::push_html(&mut buf, parser); buf } let path = format!("/posts/{}", id); let (title, content) = get_digest(&post); self.digest_template.render(meta, &[ ("link", &path), ("id", &id), ("title", &title), ("content", &md_to_html(&content)), ]) } fn render_index(&self, req: &mut Request) -> ApiResult { use self::header::ContentType; #[derive(Deserialize)] struct Param { page: Option<usize>, } let param = req.to_param::<Param>()?; let guard = self.index.read().unwrap(); let max_page = { let len = guard.len(); if len % self.entries_per_request == 0 { len / self.entries_per_request } else { len / self.entries_per_request + 1 } }; let page = param.page.unwrap_or_default() .min(max_page) .max(1); let skip = (page - 1) * self.entries_per_request; let take = self.entries_per_request; let ids = guard.get_range(skip, take); let mut digests = String::new(); for id in ids { let post_cache = self.post_cache.get(&id)?; let post_guard = post_cache.read().unwrap(); let post: &str = post_guard.as_ref(); let metadata_cache = self.metadata_cache.get(&id)?; let metadata_guard = metadata_cache.read().unwrap(); let metadata: &JsonValue = &metadata_guard; digests.push_str(&self.render_digest(&id, post, metadata)); } let current = page.to_string(); let (prev, prev_link) = if page - 1 > 0 { ((page - 1).to_string(), format!("?page={}", page - 1)) } else { (String::new(), String::new()) }; let (next, next_link) = if page + 1 <= max_page { ((page + 1).to_string(), format!("?page={}", page + 1)) } else { (String::new(), String::new()) }; let res = Response::new() .with_header(ContentType( "text/html; charset=UTF-8".parse().unwrap()) ) 
.with_body(self.index_template.render(&JsonValue::Null, &[ ("digests", &digests), ("current", &current), ("previous_link", &prev_link), ("previous", &prev), ("next_link", &next_link), ("next", &next), ])); Ok(res) } } impl Api for RootView { fn name(&self) -> &[&str] { &[] } fn route(&self, req: &mut Request) -> ApiResult { use self::header::{Allow, Location}; match req.method() { Method::Get => { if req.path_segs().len() == 0 || req.path_segs()[0] == "" { self.render_index(req) } else { let mut loc = "/api/v1/resources".to_owned(); for seg in req.path_segs() { loc.push('/'); loc.push_str(seg); } let res = Response::new() .with_status(StatusCode::MovedPermanently) .with_header(Location::new(loc)); Ok(res) } }, Method::Options => { let res = Response::new() .with_header(Allow(vec![Method::Options, Method::Get])); Ok(res) }, _ => Err(Error::method_not_allowed()) } } }
.chars() .skip_while(|ch| ch == &'#') .skip_while(|ch| ch == &' ') .collect(); let mut content = String::n
function_block-random_span
[ { "content": "fn make_index(dir: &str, key: &str, index: &mut IndexCollection) {\n\n info!(\"Indexing files with key '{}'.\", key);\n\n for entry in WalkDir::new(&dir)\n\n .into_iter()\n\n .filter_map(|x| x.ok()) {\n\n // Seek for `content.md`.\n\n if !entry.file_type().is_file...
Rust
host/src/fs/cache.rs
manasrivastava/tinychain
e6082f587ac089307ca9264d90d20c3f0991da52
use std::convert::{TryFrom, TryInto}; use std::path::PathBuf; #[cfg(feature = "tensor")] use afarray::Array; use async_trait::async_trait; use destream::IntoStream; use freqache::Entry; use futures::{Future, TryFutureExt}; use log::{debug, error, info, warn}; use tokio::fs; use tokio::io::AsyncWrite; use tokio::sync::mpsc; use uplock::{RwLock, RwLockReadGuard, RwLockWriteGuard}; use tc_btree::Node; use tc_error::*; use tc_transact::fs::BlockData; use crate::chain::ChainBlock; use crate::scalar::Value; use super::{create_parent, io_err, TMP}; struct Policy; #[async_trait] impl freqache::Policy<PathBuf, CacheBlock> for Policy { fn can_evict(&self, block: &CacheBlock) -> bool { block.ref_count() <= 1 } async fn evict(&self, path: PathBuf, block: &CacheBlock) { debug!("evict block at {:?} from cache", path); let size = persist(&path, block) .await .expect("persist cache block to disk"); debug!("block at {:?} evicted, wrote {} bytes to disk", path, size); } } type LFU = freqache::LFUCache<PathBuf, CacheBlock, Policy>; #[derive(Clone)] pub enum CacheBlock { BTree(CacheLock<Node>), Chain(CacheLock<ChainBlock>), Value(CacheLock<Value>), #[cfg(feature = "tensor")] Tensor(CacheLock<Array>), } impl CacheBlock { async fn persist<W: AsyncWrite + Send + Unpin>(&self, sink: &mut W) -> TCResult<u64> { match self { Self::BTree(block) => { let contents = block.read().await; contents.persist(sink).await } Self::Chain(block) => { let contents = block.read().await; contents.persist(sink).await } Self::Value(block) => { let contents = block.read().await; contents.persist(sink).await } #[cfg(feature = "tensor")] Self::Tensor(block) => { let contents = block.read().await; contents.persist(sink).await } } } fn ref_count(&self) -> usize { match self { Self::BTree(block) => block.ref_count(), Self::Chain(block) => block.ref_count(), Self::Value(block) => block.ref_count(), #[cfg(feature = "tensor")] Self::Tensor(block) => block.ref_count(), } } } impl Entry for CacheBlock { fn weight(&self) 
-> u64 { match self { Self::BTree(_) => Node::max_size(), Self::Chain(_) => ChainBlock::max_size(), Self::Value(_) => Value::max_size(), #[cfg(feature = "tensor")] Self::Tensor(_) => Array::max_size(), } } } #[cfg(feature = "tensor")] impl From<CacheLock<Array>> for CacheBlock { fn from(lock: CacheLock<Array>) -> CacheBlock { Self::Tensor(lock) } } impl From<CacheLock<ChainBlock>> for CacheBlock { fn from(lock: CacheLock<ChainBlock>) -> CacheBlock { Self::Chain(lock) } } impl From<CacheLock<Node>> for CacheBlock { fn from(lock: CacheLock<Node>) -> CacheBlock { Self::BTree(lock) } } impl From<CacheLock<Value>> for CacheBlock { fn from(lock: CacheLock<Value>) -> CacheBlock { Self::Value(lock) } } #[cfg(feature = "tensor")] impl TryFrom<CacheBlock> for CacheLock<Array> { type Error = TCError; fn try_from(block: CacheBlock) -> TCResult<Self> { match block { CacheBlock::Tensor(block) => Ok(block), _ => Err(TCError::unsupported("unexpected block type")), } } } impl TryFrom<CacheBlock> for CacheLock<ChainBlock> { type Error = TCError; fn try_from(block: CacheBlock) -> TCResult<Self> { match block { CacheBlock::Chain(block) => Ok(block), _ => Err(TCError::unsupported("unexpected block type")), } } } impl TryFrom<CacheBlock> for CacheLock<Node> { type Error = TCError; fn try_from(block: CacheBlock) -> TCResult<Self> { match block { CacheBlock::BTree(block) => Ok(block), _ => Err(TCError::unsupported("unexpected block type")), } } } impl TryFrom<CacheBlock> for CacheLock<Value> { type Error = TCError; fn try_from(block: CacheBlock) -> TCResult<Self> { match block { CacheBlock::Value(block) => Ok(block), _ => Err(TCError::unsupported("unexpected block type")), } } } pub struct CacheLock<T> { lock: RwLock<T>, } impl<T> CacheLock<T> { fn new(value: T) -> Self { Self { lock: RwLock::new(value), } } pub async fn read(&self) -> RwLockReadGuard<T> { debug!( "CacheLock got read lock request on a lock with {} refs...", self.lock.ref_count() ); self.lock.read().await } pub async fn 
write(&self) -> RwLockWriteGuard<T> { debug!( "CacheLock got write lock request on a lock with {} refs...", self.lock.ref_count() ); self.lock.write().await } pub fn ref_count(&self) -> usize { self.lock.ref_count() } } impl<T> Clone for CacheLock<T> { fn clone(&self) -> Self { Self { lock: self.lock.clone(), } } } struct Evict; #[derive(Clone)] pub struct Cache { tx: mpsc::Sender<Evict>, lfu: RwLock<LFU>, } impl Cache { pub fn new(max_size: u64) -> Self { assert!(max_size > 0); let (tx, rx) = mpsc::channel(1024); let cache = Self { tx, lfu: RwLock::new(LFU::new(max_size, Policy)), }; spawn_cleanup_thread(cache.lfu.clone(), rx); cache } async fn _read_and_insert<B: BlockData>( mut cache: RwLockWriteGuard<LFU>, path: PathBuf, ) -> TCResult<CacheLock<B>> where CacheLock<B>: TryFrom<CacheBlock, Error = TCError>, CacheBlock: From<CacheLock<B>>, { let block_file = read_file(&path).await?; let block = B::load(block_file).await?; debug!("cache insert: {:?}", path); let block = CacheLock::new(block); cache.insert(path, block.clone().into()).await; Ok(block) } pub async fn read<B: BlockData>(&self, path: &PathBuf) -> TCResult<Option<CacheLock<B>>> where CacheLock<B>: TryFrom<CacheBlock, Error = TCError>, CacheBlock: From<CacheLock<B>>, { debug!("Cache::read {:?}", path); let mut cache = self.lfu.write().await; if let Some(block) = cache.get(path).await { debug!("cache hit: {:?}", path); let block = block.clone().try_into()?; return Ok(Some(block)); } else if !path.exists() { return Ok(None); } else { info!("cache miss: {:?}", path); } Self::_read_and_insert(cache, path.clone()) .map_ok(Some) .await } pub async fn delete(&self, path: &PathBuf) -> Option<CacheBlock> { debug!("Cache::delete {:?}", path); let mut cache = self.lfu.write().await; cache.remove(path).await } pub async fn delete_and_sync(&self, path: PathBuf) -> TCResult<()> { debug!("Cache::delete_and_sync {:?}", path); let mut cache = self.lfu.write().await; cache.remove(&path).await; let tmp = 
path.with_extension(TMP); if tmp.exists() { tokio::fs::remove_file(&tmp) .map_err(|e| io_err(e, &tmp)) .await?; } if path.exists() { tokio::fs::remove_file(&path) .map_err(|e| io_err(e, &path)) .await?; } Ok(()) } pub async fn delete_dir(&self, path: PathBuf) -> TCResult<()> { let _lock = self.lfu.write().await; tokio::fs::remove_dir_all(&path) .map_err(|e| io_err(e, &path)) .await } async fn _sync(cache: &mut LFU, path: &PathBuf) -> TCResult<bool> { debug!("sync block at {:?} with filesystem", &path); if let Some(block) = cache.get(path).await { let size = persist(path, &block).await?; debug!("sync'd block at {:?}, wrote {} bytes", path, size); Ok(true) } else { info!("cache sync miss: {:?}", path); Ok(path.exists()) } } pub async fn sync(&self, path: &PathBuf) -> TCResult<bool> { debug!("sync block at {:?} with filesystem", &path); let mut cache = self.lfu.write().await; Self::_sync(&mut cache, path).await } pub async fn sync_and_copy<'en, B: BlockData + IntoStream<'en> + 'en>( &self, source: PathBuf, dest: PathBuf, ) -> TCResult<CacheLock<B>> where CacheLock<B>: TryFrom<CacheBlock, Error = TCError>, CacheBlock: From<CacheLock<B>>, { debug!("cache sync + copy from {:?} to {:?}", source, dest); let mut cache = self.lfu.write().await; Self::_sync(&mut cache, &source).await?; tokio::fs::copy(&source, &dest) .map_err(|e| io_err(e, format!("copy from {:?} to {:?}", source, dest))) .await?; Self::_read_and_insert(cache, dest).await } async fn _write<'en, B: BlockData + IntoStream<'en> + 'en>( cache: &mut LFU, tx: &mpsc::Sender<Evict>, path: PathBuf, block: CacheLock<B>, ) where CacheBlock: From<CacheLock<B>>, { cache.insert(path, block.into()).await; if cache.is_full() { info!("the block cache is full ({} occupied out of {} capacity), triggering garbage collection...", cache.occupied(), cache.capacity()); if let Err(err) = tx.send(Evict).await { error!("the cache cleanup thread is dead! 
{}", err); } } } pub async fn write<'en, B: BlockData + IntoStream<'en> + 'en>( &self, path: PathBuf, block: B, ) -> TCResult<CacheLock<B>> where CacheBlock: From<CacheLock<B>>, { debug!("cache insert: {:?}", &path); let block = CacheLock::new(block); let mut cache = self.lfu.write().await; Self::_write(&mut cache, &self.tx, path, block.clone()).await; Ok(block) } } fn spawn_cleanup_thread(cache: RwLock<LFU>, mut rx: mpsc::Receiver<Evict>) { tokio::spawn(async move { info!("cache cleanup thread is running..."); while rx.recv().await.is_some() { let lfu = cache.read().await; debug!( "got Evict message, cache has {} entries (capacity {} bytes)", lfu.len(), lfu.capacity() ); if lfu.is_full() { let mut lfu = lfu.upgrade().await; debug!("running cache eviction with {} entries...", lfu.len()); lfu.evict().await; debug!("cache eviction complete, {} entries remain", lfu.len()); } else { debug!("cache eviction already ran, ignoring redundant Evict message"); } } warn!("cache cleanup thread shutting down"); }); } async fn persist(path: &PathBuf, block: &CacheBlock) -> TCResult<u64> { let tmp = path.with_extension(TMP); let size = { let mut tmp_file = if tmp.exists() { write_file(&tmp).await? } else { create_parent(&tmp).await?; create_file(&tmp).await? }; let size = block.persist(&mut tmp_file).await?; tmp_file.sync_all().map_err(|e| io_err(e, &tmp)).await?; size }; tokio::fs::rename(&tmp, path) .map_err(|e| io_err(e, &tmp)) .await?; Ok(size) } #[inline] fn create_file(path: &PathBuf) -> impl Future<Output = TCResult<fs::File>> + '_ { tokio::fs::File::create(path).map_err(move |e| io_err(e, path)) } #[inline] fn read_file(path: &PathBuf) -> impl Future<Output = TCResult<fs::File>> + '_ { fs::File::open(path).map_err(move |e| io_err(e, path)) } async fn write_file(path: &PathBuf) -> TCResult<fs::File> { fs::OpenOptions::new() .truncate(true) .write(true) .open(path) .map_err(move |e| io_err(e, path)) .await }
use std::convert::{TryFrom, TryInto}; use std::path::PathBuf; #[cfg(feature = "tensor")] use afarray::Array; use async_trait::async_trait; use destream::IntoStream; use freqache::Entry; use futures::{Future, TryFutureExt}; use log::{debug, error, info, warn}; use tokio::fs; use tokio::io::AsyncWrite; use tokio::sync::mpsc; use uplock::{RwLock, RwLockReadGuard, RwLockWriteGuard}; use tc_btree::Node; use tc_error::*; use tc_transact::fs::BlockData; use crate::chain::ChainBlock; use crate::scalar::Value; use super::{create_parent, io_err, TMP}; struct Policy; #[async_trait] impl freqache::Policy<PathBuf, CacheBlock> for Policy { fn can_evict(&self, block: &CacheBlock) -> bool { block.ref_count() <= 1 } async fn evict(&self, path: PathBuf, block: &CacheBlock) { debug!("evict block at {:?} from cache", path); let size = persist(&path, block) .await .expect("persist cache block to disk"); debug!("block at {:?} evicted, wrote {} bytes to disk", path, size); } } type LFU = freqache::LFUCache<PathBuf, CacheBlock, Policy>; #[derive(Clone)] pub enum CacheBlock { BTree(CacheLock<Node>), Chain(CacheLock<ChainBlock>), Value(CacheLock<Value>), #[cfg(feature = "tensor")] Tensor(CacheLock<Array>), } impl CacheBlock { async fn persist<W: AsyncWrite + Send + Unpin>(&self, sink: &mut W) -> TCResult<u64> { match self { Self::BTree(block) => { let contents = block.read().await; contents.persist(sink).await } Self::Chain(block) => { let contents = block.read().await; contents.persist(sink).await } Self::Value(block) => { let contents = block.read().await; contents.persist(sink).await } #[cfg(feature = "tensor")] Self::Tensor(block) => { let contents = block.read().await; contents.persist(sink).await } } } fn ref_count(&self) -> usize { match self { Self::BTree(block) => block.ref_count(), Self::Chain(block) => block.ref_count(), Self::Value(block) => block.ref_count(), #[cfg(feature = "tensor")] Self::Tensor(block) => block.ref_count(), } } } impl Entry for CacheBlock { fn weight(&self) 
-> u64 { match self { Self::BTree(_) => Node::max_size(), Self::Chain(_) => ChainBlock::max_size(), Self::Value(_) => Value::max_size(), #[cfg(feature = "tensor")] Self::Tensor(_) => Array::max_size(), } } } #[cfg(feature = "tensor")] impl From<CacheLock<Array>> for CacheBlock { fn from(lock: CacheLock<Array>) -> CacheBlock { Self::Tensor(lock) } } impl From<CacheLock<ChainBlock>> for CacheBlock { fn from(lock: CacheLock<ChainBlock>) -> CacheBlock { Self::Chain(lock) } } impl From<CacheLock<Node>> for CacheBlock { fn from(lock: CacheLock<Node>) -> CacheBlock { Self::BTree(lock) } } impl From<CacheLock<Value>> for CacheBlock { fn from(lock: CacheLock<Value>) -> CacheBlock { Self::Value(lock) } } #[cfg(feature = "tensor")] impl TryFrom<CacheBlock> for CacheLock<Array> { type Error = TCError; fn try_from(block: CacheBlock) -> TCResult<Self> { match block { CacheBlock::Tensor(block) => Ok(block), _ => Err(TCError::unsupported("unexpected block type")), } } } impl TryFrom<CacheBlock> for CacheLock<ChainBlock> { type Error = TCError; fn try_from(block: CacheBlock) -> TCResult<Self> { match block { CacheBlock::Chain(block) => Ok(block), _ => Err(TCError::unsupported("unexpected block type")), } } } impl TryFrom<CacheBlock> for CacheLock<Node> { type Error = TCError; fn try_from(block: CacheBlock) -> TCResult<Self> { match block { CacheBlock::BTree(block) => Ok(block), _ => Err(TCError::unsupported("unexpected block type")), } } } impl TryFrom<CacheBlock> for CacheLock<Value> { type Error = TCError; fn try_from(block: CacheBlock) -> TCResult<Self> { match block { CacheBlock::Value(block) => Ok(block), _ => Err(TCError::unsupported("unexpected block type")), } } } pub struct CacheLock<T> { lock: RwLock<T>, } impl<T> CacheLock<T> { fn new(value: T) -> Self { Self { lock: RwLock::new(value), } } pub async fn read(&self) -> RwLockReadGuard<T> { debug!( "CacheLock got read lock request on a lock with {} refs...", self.lock.ref_count() ); self.lock.read().await } pub async fn 
write(&self) -> RwLockWriteGuard<T> { debug!( "CacheLock got write lock request on a lock with {} refs...", self.lock.ref_count() ); self.lock.write().await } pub fn ref_count(&self) -> usize { self.lock.ref_count() } } impl<T> Clone for CacheLock<T> { fn clone(&self) -> Self { Self { lock: self.lock.clone(), } } } struct Evict; #[derive(Clone)] pub struct Cache { tx: mpsc::Sender<Evict>, lfu: RwLock<LFU>, } impl Cache { pub fn new(max_size: u64) -> Self { assert!(max_size > 0); let (tx, rx) = mpsc::channel(1024); let cache = Self { tx, lfu: RwLock::new(LFU::new(max_size, Policy)), }; spawn_cleanup_thread(cache.lfu.clone(), rx); cache } async fn _read_and_insert<B: BlockData>( mut cache: RwLockWriteGuard<LFU>, path: PathBuf, ) -> TCResult<CacheLock<B>> where CacheLock<B>: TryFrom<CacheBlock, Error = TCError>, CacheBlock: From<CacheLock<B>>, { let block_file = read_file(&path).await?; let block = B::load(block_file).await?; debug!("cache insert: {:?}", path); let block = CacheLock::new(block); cache.insert(path, block.clone().into()).await; Ok(block) } pub async fn read<B: Block
pub async fn delete(&self, path: &PathBuf) -> Option<CacheBlock> { debug!("Cache::delete {:?}", path); let mut cache = self.lfu.write().await; cache.remove(path).await } pub async fn delete_and_sync(&self, path: PathBuf) -> TCResult<()> { debug!("Cache::delete_and_sync {:?}", path); let mut cache = self.lfu.write().await; cache.remove(&path).await; let tmp = path.with_extension(TMP); if tmp.exists() { tokio::fs::remove_file(&tmp) .map_err(|e| io_err(e, &tmp)) .await?; } if path.exists() { tokio::fs::remove_file(&path) .map_err(|e| io_err(e, &path)) .await?; } Ok(()) } pub async fn delete_dir(&self, path: PathBuf) -> TCResult<()> { let _lock = self.lfu.write().await; tokio::fs::remove_dir_all(&path) .map_err(|e| io_err(e, &path)) .await } async fn _sync(cache: &mut LFU, path: &PathBuf) -> TCResult<bool> { debug!("sync block at {:?} with filesystem", &path); if let Some(block) = cache.get(path).await { let size = persist(path, &block).await?; debug!("sync'd block at {:?}, wrote {} bytes", path, size); Ok(true) } else { info!("cache sync miss: {:?}", path); Ok(path.exists()) } } pub async fn sync(&self, path: &PathBuf) -> TCResult<bool> { debug!("sync block at {:?} with filesystem", &path); let mut cache = self.lfu.write().await; Self::_sync(&mut cache, path).await } pub async fn sync_and_copy<'en, B: BlockData + IntoStream<'en> + 'en>( &self, source: PathBuf, dest: PathBuf, ) -> TCResult<CacheLock<B>> where CacheLock<B>: TryFrom<CacheBlock, Error = TCError>, CacheBlock: From<CacheLock<B>>, { debug!("cache sync + copy from {:?} to {:?}", source, dest); let mut cache = self.lfu.write().await; Self::_sync(&mut cache, &source).await?; tokio::fs::copy(&source, &dest) .map_err(|e| io_err(e, format!("copy from {:?} to {:?}", source, dest))) .await?; Self::_read_and_insert(cache, dest).await } async fn _write<'en, B: BlockData + IntoStream<'en> + 'en>( cache: &mut LFU, tx: &mpsc::Sender<Evict>, path: PathBuf, block: CacheLock<B>, ) where CacheBlock: From<CacheLock<B>>, { 
cache.insert(path, block.into()).await; if cache.is_full() { info!("the block cache is full ({} occupied out of {} capacity), triggering garbage collection...", cache.occupied(), cache.capacity()); if let Err(err) = tx.send(Evict).await { error!("the cache cleanup thread is dead! {}", err); } } } pub async fn write<'en, B: BlockData + IntoStream<'en> + 'en>( &self, path: PathBuf, block: B, ) -> TCResult<CacheLock<B>> where CacheBlock: From<CacheLock<B>>, { debug!("cache insert: {:?}", &path); let block = CacheLock::new(block); let mut cache = self.lfu.write().await; Self::_write(&mut cache, &self.tx, path, block.clone()).await; Ok(block) } } fn spawn_cleanup_thread(cache: RwLock<LFU>, mut rx: mpsc::Receiver<Evict>) { tokio::spawn(async move { info!("cache cleanup thread is running..."); while rx.recv().await.is_some() { let lfu = cache.read().await; debug!( "got Evict message, cache has {} entries (capacity {} bytes)", lfu.len(), lfu.capacity() ); if lfu.is_full() { let mut lfu = lfu.upgrade().await; debug!("running cache eviction with {} entries...", lfu.len()); lfu.evict().await; debug!("cache eviction complete, {} entries remain", lfu.len()); } else { debug!("cache eviction already ran, ignoring redundant Evict message"); } } warn!("cache cleanup thread shutting down"); }); } async fn persist(path: &PathBuf, block: &CacheBlock) -> TCResult<u64> { let tmp = path.with_extension(TMP); let size = { let mut tmp_file = if tmp.exists() { write_file(&tmp).await? } else { create_parent(&tmp).await?; create_file(&tmp).await? 
}; let size = block.persist(&mut tmp_file).await?; tmp_file.sync_all().map_err(|e| io_err(e, &tmp)).await?; size }; tokio::fs::rename(&tmp, path) .map_err(|e| io_err(e, &tmp)) .await?; Ok(size) } #[inline] fn create_file(path: &PathBuf) -> impl Future<Output = TCResult<fs::File>> + '_ { tokio::fs::File::create(path).map_err(move |e| io_err(e, path)) } #[inline] fn read_file(path: &PathBuf) -> impl Future<Output = TCResult<fs::File>> + '_ { fs::File::open(path).map_err(move |e| io_err(e, path)) } async fn write_file(path: &PathBuf) -> TCResult<fs::File> { fs::OpenOptions::new() .truncate(true) .write(true) .open(path) .map_err(move |e| io_err(e, path)) .await }
Data>(&self, path: &PathBuf) -> TCResult<Option<CacheLock<B>>> where CacheLock<B>: TryFrom<CacheBlock, Error = TCError>, CacheBlock: From<CacheLock<B>>, { debug!("Cache::read {:?}", path); let mut cache = self.lfu.write().await; if let Some(block) = cache.get(path).await { debug!("cache hit: {:?}", path); let block = block.clone().try_into()?; return Ok(Some(block)); } else if !path.exists() { return Ok(None); } else { info!("cache miss: {:?}", path); } Self::_read_and_insert(cache, path.clone()) .map_ok(Some) .await }
function_block-function_prefixed
[ { "content": "fn io_err<I: fmt::Debug + Send>(err: io::Error, info: I) -> TCError {\n\n match err.kind() {\n\n io::ErrorKind::NotFound => {\n\n TCError::internal(format!(\"host filesystem has no such entry {:?}\", info))\n\n }\n\n io::ErrorKind::PermissionDenied => TCError::in...
Rust
gcs-cxx/src/ecs_world.rs
Beliaar/godot-component-system
2e2bd9186a968853b53f447cffa34703b5ddc2a6
use std::string::String; use cxx::{type_id, ExternType}; use gcs::world::ecs_world::{create_ecs_world, ECSWorld}; use crate::component::component_data::create_component_data; use crate::component::component_data::CXXComponentData; use crate::component::component_definition::CXXComponentDefinition; use crate::component::component_info::create_component_info; use crate::component::component_info::CXXComponentInfo; use crate::entity::create_entity; use crate::entity::entity_id_from_string; use crate::entity::CXXEntityId; use crate::entity::EntityIdResult; use crate::godot::error::GCSResult; #[cxx::bridge(namespace = gcs::ffi)] pub mod ffi { extern "Rust" { type UnitResult; fn is_error(&self) -> bool; fn get_error(&self) -> String; } extern "Rust" { type StringVecResult; fn is_error(&self) -> bool; fn get_result(&self) -> Vec<String>; fn get_error(&self) -> String; } extern "Rust" { type EntityIdResult; fn is_error(&self) -> bool; fn get_result(&self) -> Box<CXXEntityId>; fn get_error(&self) -> String; } extern "Rust" { #[cxx_name = "ComponentInfo"] type CXXComponentInfo; } extern "Rust" { #[cxx_name = "ComponentData"] type CXXComponentData; fn get_field(self: &CXXComponentData, field: String) -> &ComponentValue; fn set_field(self: &mut CXXComponentData, field: String, value: &ComponentValue); fn create_component_data(entity: &CXXEntityId) -> Box<CXXComponentData>; } extern "Rust" { #[cxx_name = "EntityId"] type CXXEntityId; fn create_entity() -> Box<CXXEntityId>; fn as_string(&self) -> String; fn entity_id_from_string(id: String) -> Box<EntityIdResult>; } extern "Rust" { type ComponentInfoResult; fn is_error(&self) -> bool; fn get_result(&self) -> Box<CXXComponentInfo>; fn get_error(&self) -> String; } extern "Rust" { #[cxx_name = "ECSWorld"] type CXXECSWorld; pub(crate) fn create_component_info(hash: u64) -> Box<CXXComponentInfo>; fn register_component( self: &mut CXXECSWorld, name: String, component_definition: &ComponentDefinition, ) -> Box<ComponentInfoResult>; fn 
register_entity(&mut self, id: &CXXEntityId) -> Box<UnitResult>; pub fn set_component_data( &mut self, entity_id: &CXXEntityId, component: String, data: &CXXComponentData, ) -> Box<UnitResult>; fn is_component_added_to_entity(&self, entity_id: &CXXEntityId, component: String) -> bool; fn get_components_of_entity(&self, entity_id: &CXXEntityId) -> Box<StringVecResult>; fn create_entity(self: &mut CXXECSWorld) -> Box<CXXEntityId>; #[cxx_name = "create_ecs_world"] pub fn create_cxx_ecs_world() -> Box<CXXECSWorld>; } extern "C++" { include!("rust/cxx.h"); include!("gcs-cxx/include/godot/variant.h"); include!("gcs-cxx/src/component/component_definition.rs.h"); include!("gcs-cxx/src/component/component_value.rs.h"); type ComponentDefinition = crate::component::component_definition::CXXComponentDefinition; type ComponentValue = crate::component::component_value::CXXComponentValue; } } type ComponentInfoResult = GCSResult<Box<CXXComponentInfo>>; type UnitResult = GCSResult<()>; type StringVecResult = GCSResult<Vec<String>>; pub(crate) struct CXXECSWorld(ECSWorld<CXXComponentDefinition, CXXComponentData, CXXComponentInfo>); impl CXXECSWorld { fn register_component( self: &mut CXXECSWorld, name: String, component_definition: &CXXComponentDefinition, ) -> Box<ComponentInfoResult> { let result = self .0 .register_component(name, component_definition.clone()); Box::new(match result { Ok(info) => ComponentInfoResult::new_result(Box::new(info)), Err(error) => ComponentInfoResult::new_error(error), }) } fn register_entity(self: &mut CXXECSWorld, id: &CXXEntityId) -> Box<UnitResult> { let result = self.0.register_entity(id); Box::new(match result { Ok(_) => UnitResult::new_result(()), Err(err) => UnitResult::new_error(err.to_string()), }) } fn set_component_data( self: &mut CXXECSWorld, entity_id: &CXXEntityId, component: String, data: &CXXComponentData, ) -> Box<UnitResult> { let result = self.0.set_component_data(entity_id, component, data); Box::new(match result { Ok(_) => 
UnitResult::new_result(()), Err(err) => UnitResult::new_error(err.to_string()), }) } fn get_components_of_entity( self: &CXXECSWorld, entity_id: &CXXEntityId, ) -> Box<StringVecResult> { let result = self.0.get_components_of_entity(entity_id); Box::new(match result { Ok(value) => StringVecResult::new_result(value), Err(err) => StringVecResult::new_error(err.to_string()), }) } fn is_component_added_to_entity( self: &CXXECSWorld, entity_id: &CXXEntityId, component: String, ) -> bool { self.0.is_component_added_to_entity(entity_id, component) } fn create_entity(self: &mut CXXECSWorld) -> Box<CXXEntityId> { self.0.create_entity() } } pub(crate) fn create_cxx_ecs_world() -> Box<CXXECSWorld> { Box::new(CXXECSWorld(create_ecs_world::< CXXComponentDefinition, CXXComponentData, CXXComponentInfo, >())) } unsafe impl ExternType for CXXECSWorld { type Id = type_id!("gcs::ffi::ECSWorld"); type Kind = cxx::kind::Trivial; }
use std::string::String; use cxx::{type_id, ExternType}; use gcs::world::ecs_world::{create_ecs_world, ECSWorld}; use crate::component::component_data::create_component_data; use crate::component::component_data::CXXComponentData; use crate::component::component_definition::CXXComponentDefinition; use crate::component::component_info::create_component_info; use crate::component::component_info::CXXComponentInfo; use crate::entity::create_entity; use crate::entity::entity_id_from_string; use crate::entity::CXXEntityId; use crate::entity::EntityIdResult; use crate::godot::error::GCSResult; #[cxx::bridge(namespace = gcs::ffi)] pub mod ffi { extern "Rust" { type UnitResult; fn is_error(&self) -> bool; fn get_error(&self) -> String; } extern "Rust" { type StringVecResult; fn is_error(&self) -> bool; fn get_result(&self) -> Vec<String>; fn get_error(&self) -> String; } extern "Rust" { type EntityIdResult; fn is_error(&self) -> bool; fn get_result(&self) -> Box<CXXEntityId>; fn get_error(&self) -> String; } extern "Rust" { #[cxx_name = "ComponentInfo"] type CXXComponentInfo; } extern "Rust" { #[cxx_name = "ComponentData"] type CXXComponentData; fn get_field(self: &CXXComponentData, field: String) -> &ComponentValue; fn set_field(self: &mut CXXComponentData, field: String, value: &ComponentValue); fn create_component_data(entity: &CXXEntityId) -> Box<CXXComponentData>; } extern "Rust" { #[cxx_name = "EntityId"] type CXXEntityId; fn create_entity() -> Box<CXXEntityId>; fn as_string(&self) -> String; fn entity_id_from_string(id: String) -> Box<EntityIdResult>; } extern "Rust" { type ComponentInfoResult; fn is_error(&self) -> bool; fn get_result(&self) -> Box<CXXComponentInfo>; fn get_error(&self) -> String; } extern "Rust" { #[cxx_name = "ECSWorld"] type CXXECSWorld; pub(crate) fn create_component_info(hash: u64) -> Box<CXXComponentInfo>; fn register_component( self: &mut CXXECSWorld, name: String, component_definition: &ComponentDefinition, ) -> Box<ComponentInfoResult>; fn 
register_entity(&mut self, id: &CXXEntityId) -> Box<UnitResult>; pub fn set_component_data( &mut self, entity_id: &CXXEntityId, component: String, data: &CXXComponentData, ) -> Box<UnitResult>; fn is_component_added_to_entity(&self, entity_id: &CXXEntityId, component: String) -> bool; fn get_components_of_entity(&self, entity_id: &CXXEntityId) -> Box<StringVecResult>; fn create_entity(self: &mut CXXECSWorld) -> Box<CXXEntityId>; #[cxx_name = "create_ecs_world"] pub fn create_cxx_ecs_world() -> Box<CXXECSWorld>; } extern "C++" { include!("rust/cxx.h"); include!("gcs-cxx/include/godot/variant.h"); include!("gcs-cxx/src/component/component_definition.rs.h"); include!("gcs-cxx/src/component/component_value.rs.h"); type ComponentDefinition = crate::component::component_definition::CXXComponentDefinition; type ComponentValue = crate::component::component_value::CXXComponentValue; } } type ComponentInfoResult = GCSResult<Box<CXXComponentInfo>>; type UnitResult = GCSResult<()>; type StringVecResult = GCSResult<Vec<String>>; pub(crate) struct CXXECSWorld(ECSWorld<CXXComponentDefinition, CXXComponentData, CXXComponentInfo>); impl CXXECSWorld { fn register_component( self: &mut CXXECSWorld, name: String, component_definition: &CXXComponentDefinition, ) -> Box<ComponentInfoResult> { let result = self .0 .register_component(name, component_definition.clone()); Box::new(match result { Ok(info) => ComponentInfoResult::new_result(Box::new(info)), Err(error) => ComponentInfoResult::new_error(error), }) }
fn set_component_data( self: &mut CXXECSWorld, entity_id: &CXXEntityId, component: String, data: &CXXComponentData, ) -> Box<UnitResult> { let result = self.0.set_component_data(entity_id, component, data); Box::new(match result { Ok(_) => UnitResult::new_result(()), Err(err) => UnitResult::new_error(err.to_string()), }) } fn get_components_of_entity( self: &CXXECSWorld, entity_id: &CXXEntityId, ) -> Box<StringVecResult> { let result = self.0.get_components_of_entity(entity_id); Box::new(match result { Ok(value) => StringVecResult::new_result(value), Err(err) => StringVecResult::new_error(err.to_string()), }) } fn is_component_added_to_entity( self: &CXXECSWorld, entity_id: &CXXEntityId, component: String, ) -> bool { self.0.is_component_added_to_entity(entity_id, component) } fn create_entity(self: &mut CXXECSWorld) -> Box<CXXEntityId> { self.0.create_entity() } } pub(crate) fn create_cxx_ecs_world() -> Box<CXXECSWorld> { Box::new(CXXECSWorld(create_ecs_world::< CXXComponentDefinition, CXXComponentData, CXXComponentInfo, >())) } unsafe impl ExternType for CXXECSWorld { type Id = type_id!("gcs::ffi::ECSWorld"); type Kind = cxx::kind::Trivial; }
fn register_entity(self: &mut CXXECSWorld, id: &CXXEntityId) -> Box<UnitResult> { let result = self.0.register_entity(id); Box::new(match result { Ok(_) => UnitResult::new_result(()), Err(err) => UnitResult::new_error(err.to_string()), }) }
function_block-full_function
[ { "content": "pub fn create_component_field_definition() -> ffi::CXXComponentFieldDefinition {\n\n ffi::CXXComponentFieldDefinition::default()\n\n}\n\n\n\n#[derive(Hash, Eq, PartialEq, Clone, Default)]\n\npub struct CXXComponentDefinition {\n\n pub fields: Vec<ffi::CXXComponentFieldDefinition>,\n\n}\n\n\n...
Rust
rust/src/eddsa/utils.rs
hermeznetwork/hermez_flutter_sdk
c165ba3cdb6ecfc5f7cc476e658ff3265e178312
extern crate num; extern crate num_bigint; extern crate num_traits; use num_bigint::{BigInt, ToBigInt}; use num_traits::{One, Zero}; pub fn modulus(a: &BigInt, m: &BigInt) -> BigInt { ((a % m) + m) % m } pub fn modinv(a: &BigInt, q: &BigInt) -> Result<BigInt, String> { let big_zero: BigInt = Zero::zero(); if a == &big_zero { return Err("no mod inv of Zero".to_string()); } let mut mn = (q.clone(), a.clone()); let mut xy: (BigInt, BigInt) = (Zero::zero(), One::one()); while mn.1 != big_zero { xy = (xy.1.clone(), xy.0 - (mn.0.clone() / mn.1.clone()) * xy.1); mn = (mn.1.clone(), modulus(&mn.0, &mn.1)); } while xy.0 < Zero::zero() { xy.0 = modulus(&xy.0, q); } Ok(xy.0) } /* pub fn modinv_v2(a0: &BigInt, m0: &BigInt) -> BigInt { if m0 == &One::one() { return One::one(); } let (mut a, mut m, mut x0, mut inv): (BigInt, BigInt, BigInt, BigInt) = (a0.clone(), m0.clone(), Zero::zero(), One::one()); while a > One::one() { inv = inv - (&a / m.clone()) * x0.clone(); a = a % m.clone(); std::mem::swap(&mut a, &mut m); std::mem::swap(&mut x0, &mut inv); } if inv < Zero::zero() { inv += m0.clone() } inv } pub fn modinv_v3(a: &BigInt, q: &BigInt) -> BigInt { let mut aa: BigInt = a.clone(); let mut qq: BigInt = q.clone(); if qq < Zero::zero() { qq = -qq; } if aa < Zero::zero() { aa = -aa; } let d = num::Integer::gcd(&aa, &qq); if d != One::one() { println!("ERR no mod_inv"); } let res: BigInt; if d < Zero::zero() { res = d + qq; } else { res = d; } res } pub fn modinv_v4(x: &BigInt, q: &BigInt) -> BigInt { let (gcd, inverse, _) = extended_gcd(x.clone(), q.clone()); let one: BigInt = One::one(); if gcd == one { modulus(&inverse, q) } else { panic!("error: gcd!=one") } } pub fn extended_gcd(a: BigInt, b: BigInt) -> (BigInt, BigInt, BigInt) { let (mut s, mut old_s) = (BigInt::zero(), BigInt::one()); let (mut t, mut old_t) = (BigInt::one(), BigInt::zero()); let (mut r, mut old_r) = (b, a); while r != BigInt::zero() { let quotient = &old_r / &r; old_r -= &quotient * &r; std::mem::swap(&mut 
old_r, &mut r); old_s -= &quotient * &s; std::mem::swap(&mut old_s, &mut s); old_t -= quotient * &t; std::mem::swap(&mut old_t, &mut t); } let _quotients = (t, s); // == (a, b) / gcd (old_r, old_s, old_t) } */ pub fn concatenate_arrays<T: Clone>(x: &[T], y: &[T]) -> Vec<T> { x.iter().chain(y).cloned().collect() } pub fn modsqrt(a: &BigInt, q: &BigInt) -> Result<BigInt, String> { let zero: BigInt = Zero::zero(); let one: BigInt = One::one(); if legendre_symbol(&a, q) != 1 { return Err("not a mod p square".to_string()); } else if a == &zero { return Err("not a mod p square".to_string()); } else if q == &2.to_bigint().unwrap() { return Err("not a mod p square".to_string()); } else if q % 4.to_bigint().unwrap() == 3.to_bigint().unwrap() { let r = a.modpow(&((q + one) / 4), &q); return Ok(r); } let mut s = q - &one; let mut e: BigInt = Zero::zero(); while &s % 2 == zero { s = s >> 1; e = e + &one; } let mut n: BigInt = 2.to_bigint().unwrap(); while legendre_symbol(&n, q) != -1 { n = &n + &one; } let mut y = a.modpow(&((&s + &one) >> 1), q); let mut b = a.modpow(&s, q); let mut g = n.modpow(&s, q); let mut r = e; loop { let mut t = b.clone(); let mut m: BigInt = Zero::zero(); while &t != &one { t = modulus(&(&t * &t), q); m = m + &one; } if m == zero { return Ok(y.clone()); } t = g.modpow(&(2.to_bigint().unwrap().modpow(&(&r - &m - 1), q)), q); g = g.modpow(&(2.to_bigint().unwrap().modpow(&(r - &m), q)), q); y = modulus(&(y * t), q); b = modulus(&(b * &g), q); r = m.clone(); } } #[allow(dead_code)] pub fn modsqrt_v2(a: &BigInt, q: &BigInt) -> Result<BigInt, String> { let zero: BigInt = Zero::zero(); let one: BigInt = One::one(); if legendre_symbol(&a, q) != 1 { return Err("not a mod p square".to_string()); } else if a == &zero { return Err("not a mod p square".to_string()); } else if q == &2.to_bigint().unwrap() { return Err("not a mod p square".to_string()); } else if q % 4.to_bigint().unwrap() == 3.to_bigint().unwrap() { let r = a.modpow(&((q + one) / 4), &q); return 
Ok(r); } let mut p = q - &one; let mut s: BigInt = Zero::zero(); while &p % 2.to_bigint().unwrap() == zero { s = s + &one; p = p >> 1; } let mut z: BigInt = One::one(); while legendre_symbol(&z, q) != -1 { z = &z + &one; } let mut c = z.modpow(&p, q); let mut x = a.modpow(&((&p + &one) >> 1), q); let mut t = a.modpow(&p, q); let mut m = s; while &t != &one { let mut i: BigInt = One::one(); let mut e: BigInt = 2.to_bigint().unwrap(); while i < m { if t.modpow(&e, q) == one { break; } e = e * 2.to_bigint().unwrap(); i = i + &one; } let b = c.modpow(&(2.to_bigint().unwrap().modpow(&(&m - &i - 1), q)), q); x = modulus(&(x * &b), q); t = modulus(&(t * &b * &b), q); c = modulus(&(&b * &b), q); m = i.clone(); } return Ok(x); } pub fn legendre_symbol(a: &BigInt, q: &BigInt) -> i32 { let one: BigInt = One::one(); let ls: BigInt = a.modpow(&((q - &one) >> 1), &q); if &(ls) == &(q - one) { return -1; } 1 } #[cfg(test)] mod tests { use super::*; #[test] fn test_mod_inverse() { let a = BigInt::parse_bytes(b"123456789123456789123456789123456789123456789", 10).unwrap(); let b = BigInt::parse_bytes(b"12345678", 10).unwrap(); assert_eq!( modinv(&a, &b).unwrap(), BigInt::parse_bytes(b"641883", 10).unwrap() ); } #[test] fn test_sqrtmod() { let a = BigInt::parse_bytes( b"6536923810004159332831702809452452174451353762940761092345538667656658715568", 10, ) .unwrap(); let q = BigInt::parse_bytes( b"7237005577332262213973186563042994240857116359379907606001950938285454250989", 10, ) .unwrap(); assert_eq!( (modsqrt(&a, &q).unwrap()).to_string(), "5464794816676661649783249706827271879994893912039750480019443499440603127256" ); assert_eq!( (modsqrt_v2(&a, &q).unwrap()).to_string(), "5464794816676661649783249706827271879994893912039750480019443499440603127256" ); } }
extern crate num; extern crate num_bigint; extern crate num_traits; use num_bigint::{BigInt, ToBigInt}; use num_traits::{One, Zero}; pub fn modulus(a: &BigInt, m: &BigInt) -> BigInt { ((a % m) + m) % m } pub fn modinv(a: &BigInt, q: &BigInt) -> Result<BigInt, String> { let big_zero: BigInt = Zero::zero(); if a == &big_zero { return Err("no mod inv of Zero".to_string()); } let mut mn = (q.clone(), a.clone()); let mut xy: (BigInt, BigInt) = (Zero::zero(), One::one()); while mn.1 != big_zero { xy = (xy.1.clone(), xy.0 - (mn.0.clone() / mn.1.clone()) * xy.1); mn = (mn.1.clone(), modulus(&mn.0, &mn.1)); } while xy.0 < Zero::zero() { xy.0 = modulus(&xy.0, q); } Ok(xy.0) } /* pub fn modinv_v2(a0: &BigInt, m0: &BigInt) -> BigInt { if m0 == &One::one() { return One::one(); } let (mut a, mut m, mut x0, mut inv): (BigInt, BigInt, BigInt, BigInt) = (a0.clone(), m0.clone(), Zero::zero(), One::one()); while a > One::one() { inv = inv - (&a / m.clone()) * x0.clone(); a = a % m.clone(); std::mem::swap(&mut a, &mut m); std::mem::swap(&mut x0, &mut inv); } if inv < Zero::zero() { inv += m0.clone() } inv } pub fn modinv_v3(a: &BigInt, q: &BigInt) -> BigInt { let mut aa: BigInt = a.clone(); let mut qq: BigInt = q.clone(); if qq < Zero::zero() { qq = -qq; } if aa < Zero::zero() { aa = -aa; } let d = num::Integer::gcd(&aa, &qq); if d != One::one() { println!("ERR no mod_inv"); } let res: BigInt; if d < Zero::zero() { res = d + qq; } else { res = d; } res } pub fn modinv_v4(x: &BigInt, q: &BigInt) -> BigInt { let (gcd, inverse, _) = extended_gcd(x.clone(), q.clone()); let one: BigInt = One::one(); if gcd == one { modulus(&inverse, q) } else { panic!("error: gcd!=one") } } pub fn extended_gcd(a: BigInt, b: BigInt) -> (BigInt, BigInt, BigInt) { let (mut s, mut old_s) = (BigInt::zero(), BigInt::one()); let (mut t, mut old_t) = (BigInt::one(), BigInt::zero()); let (mut r, mut old_r) = (b, a); while r != BigInt::zero() { let quotient = &old_r / &r; old_r -= &quotient * &r; std::mem::swap(&mut
egendre_symbol(&a, q) != 1 { return Err("not a mod p square".to_string()); } else if a == &zero { return Err("not a mod p square".to_string()); } else if q == &2.to_bigint().unwrap() { return Err("not a mod p square".to_string()); } else if q % 4.to_bigint().unwrap() == 3.to_bigint().unwrap() { let r = a.modpow(&((q + one) / 4), &q); return Ok(r); } let mut s = q - &one; let mut e: BigInt = Zero::zero(); while &s % 2 == zero { s = s >> 1; e = e + &one; } let mut n: BigInt = 2.to_bigint().unwrap(); while legendre_symbol(&n, q) != -1 { n = &n + &one; } let mut y = a.modpow(&((&s + &one) >> 1), q); let mut b = a.modpow(&s, q); let mut g = n.modpow(&s, q); let mut r = e; loop { let mut t = b.clone(); let mut m: BigInt = Zero::zero(); while &t != &one { t = modulus(&(&t * &t), q); m = m + &one; } if m == zero { return Ok(y.clone()); } t = g.modpow(&(2.to_bigint().unwrap().modpow(&(&r - &m - 1), q)), q); g = g.modpow(&(2.to_bigint().unwrap().modpow(&(r - &m), q)), q); y = modulus(&(y * t), q); b = modulus(&(b * &g), q); r = m.clone(); } } #[allow(dead_code)] pub fn modsqrt_v2(a: &BigInt, q: &BigInt) -> Result<BigInt, String> { let zero: BigInt = Zero::zero(); let one: BigInt = One::one(); if legendre_symbol(&a, q) != 1 { return Err("not a mod p square".to_string()); } else if a == &zero { return Err("not a mod p square".to_string()); } else if q == &2.to_bigint().unwrap() { return Err("not a mod p square".to_string()); } else if q % 4.to_bigint().unwrap() == 3.to_bigint().unwrap() { let r = a.modpow(&((q + one) / 4), &q); return Ok(r); } let mut p = q - &one; let mut s: BigInt = Zero::zero(); while &p % 2.to_bigint().unwrap() == zero { s = s + &one; p = p >> 1; } let mut z: BigInt = One::one(); while legendre_symbol(&z, q) != -1 { z = &z + &one; } let mut c = z.modpow(&p, q); let mut x = a.modpow(&((&p + &one) >> 1), q); let mut t = a.modpow(&p, q); let mut m = s; while &t != &one { let mut i: BigInt = One::one(); let mut e: BigInt = 2.to_bigint().unwrap(); while i < m { 
if t.modpow(&e, q) == one { break; } e = e * 2.to_bigint().unwrap(); i = i + &one; } let b = c.modpow(&(2.to_bigint().unwrap().modpow(&(&m - &i - 1), q)), q); x = modulus(&(x * &b), q); t = modulus(&(t * &b * &b), q); c = modulus(&(&b * &b), q); m = i.clone(); } return Ok(x); } pub fn legendre_symbol(a: &BigInt, q: &BigInt) -> i32 { let one: BigInt = One::one(); let ls: BigInt = a.modpow(&((q - &one) >> 1), &q); if &(ls) == &(q - one) { return -1; } 1 } #[cfg(test)] mod tests { use super::*; #[test] fn test_mod_inverse() { let a = BigInt::parse_bytes(b"123456789123456789123456789123456789123456789", 10).unwrap(); let b = BigInt::parse_bytes(b"12345678", 10).unwrap(); assert_eq!( modinv(&a, &b).unwrap(), BigInt::parse_bytes(b"641883", 10).unwrap() ); } #[test] fn test_sqrtmod() { let a = BigInt::parse_bytes( b"6536923810004159332831702809452452174451353762940761092345538667656658715568", 10, ) .unwrap(); let q = BigInt::parse_bytes( b"7237005577332262213973186563042994240857116359379907606001950938285454250989", 10, ) .unwrap(); assert_eq!( (modsqrt(&a, &q).unwrap()).to_string(), "5464794816676661649783249706827271879994893912039750480019443499440603127256" ); assert_eq!( (modsqrt_v2(&a, &q).unwrap()).to_string(), "5464794816676661649783249706827271879994893912039750480019443499440603127256" ); } }
old_r, &mut r); old_s -= &quotient * &s; std::mem::swap(&mut old_s, &mut s); old_t -= quotient * &t; std::mem::swap(&mut old_t, &mut t); } let _quotients = (t, s); // == (a, b) / gcd (old_r, old_s, old_t) } */ pub fn concatenate_arrays<T: Clone>(x: &[T], y: &[T]) -> Vec<T> { x.iter().chain(y).cloned().collect() } pub fn modsqrt(a: &BigInt, q: &BigInt) -> Result<BigInt, String> { let zero: BigInt = Zero::zero(); let one: BigInt = One::one(); if l
random
[ { "content": "pub fn decompress_signature(b: &[u8; 64]) -> Result<Signature, String> {\n\n let r_b8_bytes: [u8; 32] = *array_ref!(b[..32], 0, 32);\n\n let s: BigInt = BigInt::from_bytes_le(Sign::Plus, &b[32..]);\n\n let r_b8 = decompress_point(r_b8_bytes);\n\n match r_b8 {\n\n Result::Err(err...
Rust
src/packages/string_more.rs
jonnyboyC/rhai
91963d10dc6fb5ab1a0e4ffc62f5ecc2643dfff8
#![allow(non_snake_case)] use crate::any::Dynamic; use crate::def_package; use crate::engine::Engine; use crate::fn_native::FnPtr; use crate::parser::{ImmutableString, INT}; use crate::plugin::*; use crate::utils::StaticVec; #[cfg(not(feature = "unchecked"))] use crate::{result::EvalAltResult, token::Position}; use crate::stdlib::{ any::TypeId, boxed::Box, format, mem, string::String, string::ToString, vec::Vec, }; macro_rules! gen_concat_functions { ($root:ident => $($arg_type:ident),+ ) => { pub mod $root { $(pub mod $arg_type { use super::super::*; #[export_fn] #[inline] pub fn append_func(x: &mut ImmutableString, y: $arg_type) -> String { format!("{}{}", x, y) } #[export_fn] #[inline] pub fn prepend_func(x: &mut $arg_type, y: ImmutableString) -> String { format!("{}{}", x, y) } })* } } } macro_rules! reg_functions { ($mod_name:ident += $root:ident ; $($arg_type:ident),+) => { $( set_exported_fn!($mod_name, "+", $root::$arg_type::append_func); set_exported_fn!($mod_name, "+", $root::$arg_type::prepend_func); )* } } def_package!(crate:MoreStringPackage:"Additional string utilities, including string building.", lib, { reg_functions!(lib += basic; INT, bool, char, FnPtr); #[cfg(not(feature = "only_i32"))] #[cfg(not(feature = "only_i64"))] { reg_functions!(lib += numbers; i8, u8, i16, u16, i32, i64, u32, u64); #[cfg(not(target_arch = "wasm32"))] reg_functions!(lib += num_128; i128, u128); } #[cfg(not(feature = "no_float"))] reg_functions!(lib += float; f32, f64); lib.combine_flatten(exported_module!(string_functions)); lib.set_raw_fn( "pad", &[TypeId::of::<ImmutableString>(), TypeId::of::<INT>(), TypeId::of::<char>()], |_engine: &Engine, _: &Module, args: &mut [&mut Dynamic]| { let len = *args[1].read_lock::<INT>().unwrap(); #[cfg(not(feature = "unchecked"))] if _engine.limits.max_string_size > 0 && len > 0 && (len as usize) > _engine.limits.max_string_size { return EvalAltResult::ErrorDataTooLarge( "Length of string".to_string(), _engine.limits.max_string_size, len 
as usize, Position::none(), ).into(); } if len > 0 { let ch = mem::take(args[2]).cast::<char>(); let mut s = args[0].write_lock::<ImmutableString>().unwrap(); let orig_len = s.chars().count(); if len as usize > orig_len { let p = s.make_mut(); for _ in 0..(len as usize - orig_len) { p.push(ch); } #[cfg(not(feature = "unchecked"))] if _engine.limits.max_string_size > 0 && s.len() > _engine.limits.max_string_size { return EvalAltResult::ErrorDataTooLarge( "Length of string".to_string(), _engine.limits.max_string_size, s.len(), Position::none(), ).into(); } } } Ok(()) }, ); lib.set_iter( TypeId::of::<ImmutableString>(), |arr| Box::new( arr.cast::<ImmutableString>().chars().collect::<Vec<_>>().into_iter().map(Into::into) ) as Box<dyn Iterator<Item = Dynamic>>, ); }); gen_concat_functions!(basic => INT, bool, char, FnPtr); #[cfg(not(feature = "only_i32"))] #[cfg(not(feature = "only_i64"))] gen_concat_functions!(numbers => i8, u8, i16, u16, i32, i64, u32, u64); #[cfg(not(feature = "only_i32"))] #[cfg(not(feature = "only_i64"))] #[cfg(not(target_arch = "wasm32"))] gen_concat_functions!(num_128 => i128, u128); #[cfg(not(feature = "no_float"))] gen_concat_functions!(float => f32, f64); #[export_module] mod string_functions { #[rhai_fn(name = "+")] #[inline(always)] pub fn add_append_unit(s: ImmutableString, _x: ()) -> ImmutableString { s } #[rhai_fn(name = "+")] #[inline(always)] pub fn add_prepend_unit(_x: (), s: ImmutableString) -> ImmutableString { s } #[rhai_fn(name = "+=")] #[inline(always)] pub fn append_char(s: &mut ImmutableString, ch: char) { *s += ch; } #[rhai_fn(name = "+=")] #[inline(always)] pub fn append_string(s: &mut ImmutableString, add: ImmutableString) { *s += &add; } #[inline(always)] pub fn len(s: &mut ImmutableString) -> INT { s.chars().count() as INT } #[rhai_fn(get = "len")] #[inline(always)] pub fn len_prop(s: &mut ImmutableString) -> INT { len(s) } #[inline(always)] pub fn clear(s: &mut ImmutableString) { s.make_mut().clear(); } pub fn truncate(s: 
&mut ImmutableString, len: INT) { if len > 0 { let chars: StaticVec<_> = s.chars().collect(); let copy = s.make_mut(); copy.clear(); copy.extend(chars.into_iter().take(len as usize)); } else { s.make_mut().clear(); } } pub fn trim(s: &mut ImmutableString) { let trimmed = s.trim(); if trimmed.len() < s.len() { *s = trimmed.to_string().into(); } } #[rhai_fn(name = "contains")] #[inline(always)] pub fn contains_char(s: &mut ImmutableString, ch: char) -> bool { s.contains(ch) } #[rhai_fn(name = "contains")] #[inline(always)] pub fn contains_string(s: &mut ImmutableString, find: ImmutableString) -> bool { s.contains(find.as_str()) } #[rhai_fn(name = "index_of")] pub fn index_of_char_starting_from(s: &mut ImmutableString, ch: char, start: INT) -> INT { let start = if start < 0 { 0 } else if (start as usize) >= s.chars().count() { return -1 as INT; } else { s.chars().take(start as usize).collect::<String>().len() }; s[start..] .find(ch) .map(|index| s[0..start + index].chars().count() as INT) .unwrap_or(-1 as INT) } #[rhai_fn(name = "index_of")] pub fn index_of_char(s: &mut ImmutableString, ch: char) -> INT { s.find(ch) .map(|index| s[0..index].chars().count() as INT) .unwrap_or(-1 as INT) } #[rhai_fn(name = "index_of")] pub fn index_of_string_starting_from( s: &mut ImmutableString, find: ImmutableString, start: INT, ) -> INT { let start = if start < 0 { 0 } else if (start as usize) >= s.chars().count() { return -1 as INT; } else { s.chars().take(start as usize).collect::<String>().len() }; s[start..] 
.find(find.as_str()) .map(|index| s[0..start + index].chars().count() as INT) .unwrap_or(-1 as INT) } #[rhai_fn(name = "index_of")] pub fn index_of_string(s: &mut ImmutableString, find: ImmutableString) -> INT { s.find(find.as_str()) .map(|index| s[0..index].chars().count() as INT) .unwrap_or(-1 as INT) } #[rhai_fn(name = "sub_string")] pub fn sub_string(s: ImmutableString, start: INT, len: INT) -> ImmutableString { let offset = if s.is_empty() || len <= 0 { return "".to_string().into(); } else if start < 0 { 0 } else if (start as usize) >= s.chars().count() { return "".to_string().into(); } else { start as usize }; let chars: StaticVec<_> = s.chars().collect(); let len = if offset + (len as usize) > chars.len() { chars.len() - offset } else { len as usize }; chars .iter() .skip(offset) .take(len) .cloned() .collect::<String>() .into() } #[rhai_fn(name = "sub_string")] #[inline(always)] pub fn sub_string_starting_from(s: ImmutableString, start: INT) -> ImmutableString { let len = s.len() as INT; sub_string(s, start, len) } #[rhai_fn(name = "crop")] pub fn crop_string(s: &mut ImmutableString, start: INT, len: INT) { let offset = if s.is_empty() || len <= 0 { s.make_mut().clear(); return; } else if start < 0 { 0 } else if (start as usize) >= s.chars().count() { s.make_mut().clear(); return; } else { start as usize }; let chars: StaticVec<_> = s.chars().collect(); let len = if offset + (len as usize) > chars.len() { chars.len() - offset } else { len as usize }; let copy = s.make_mut(); copy.clear(); copy.extend(chars.iter().skip(offset).take(len)); } #[rhai_fn(name = "crop")] #[inline(always)] pub fn crop_string_starting_from(s: &mut ImmutableString, start: INT) { crop_string(s, start, s.len() as INT); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn replace_string(s: &mut ImmutableString, find: ImmutableString, sub: ImmutableString) { *s = s.replace(find.as_str(), sub.as_str()).into(); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn 
replace_string_with_char(s: &mut ImmutableString, find: ImmutableString, sub: char) { *s = s.replace(find.as_str(), &sub.to_string()).into(); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn replace_char_with_string(s: &mut ImmutableString, find: char, sub: ImmutableString) { *s = s.replace(&find.to_string(), sub.as_str()).into(); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn replace_char(s: &mut ImmutableString, find: char, sub: char) { *s = s.replace(&find.to_string(), &sub.to_string()).into(); } #[cfg(not(feature = "no_index"))] pub mod arrays { use crate::engine::Array; #[rhai_fn(name = "+")] #[inline] pub fn append(x: &mut ImmutableString, y: Array) -> String { format!("{}{:?}", x, y) } #[rhai_fn(name = "+")] #[inline] pub fn prepend(x: &mut Array, y: ImmutableString) -> String { format!("{:?}{}", x, y) } } }
#![allow(non_snake_case)] use crate::any::Dynamic; use crate::def_package; use crate::engine::Engine; use crate::fn_native::FnPtr; use crate::parser::{ImmutableString, INT}; use crate::plugin::*; use crate::utils::StaticVec; #[cfg(not(feature = "unchecked"))] use crate::{result::EvalAltResult, token::Position}; use crate::stdlib::{ any::TypeId, boxed::Box, format, mem, string::String, string::ToString, vec::Vec, }; macro_rules! gen_concat_functions { ($root:ident => $($arg_type:ident),+ ) => { pub mod $root { $(pub mod $arg_type { use super::super::*; #[export_fn] #[inline] pub fn append_func(x: &mut ImmutableString, y: $arg_type) -> String { format!("{}{}", x, y) } #[export_fn] #[inline] pub fn prepend_func(x: &mut $arg_type, y: ImmutableString) -> String { format!("{}{}", x, y) } })* } } } macro_rules! reg_functions { ($mod_name:ident += $root:ident ; $($arg_type:ident),+) => { $( set_exported_fn!($mod_name, "+", $root::$arg_type::append_func); set_exported_fn!($mod_name, "+", $root::$arg_type::prepend_func); )* } } def_package!(crate:MoreStringPackage:"Additional string utilities, including string building.", lib, { reg_functions!(lib += basic; INT, bool, char, FnPtr); #[cfg(not(feature = "only_i32"))] #[cfg(not(feature = "only_i64"))] { reg_functions!(lib += numbers; i8, u8, i16, u16, i32, i64, u32, u64); #[cfg(not(target_arch = "wasm32"))] reg_functions!(lib += num_128; i128, u128); } #[cfg(not(feature = "no_float"))] reg_functions!(lib += float; f32, f64); lib.combine_flatten(exported_module!(string_functions)); lib.set_raw_fn( "pad", &[TypeId::of::<ImmutableString>(), TypeId::of::<INT>(), TypeId::of::<char>()], |_engine: &Engine, _: &Module, args: &mut [&mut Dynamic]| { let len = *args[1].read_lock::<INT>().unwrap(); #[cfg(not(feature = "unchecked"))] if _engine.limits.max_string_size > 0 && len > 0 && (len as usize) > _engine.limits.max_string_size { return EvalAltResult::ErrorDataTooLarge( "Length of string".to_string(), _engine.limits.max_string_size, len 
as usize, Position::none(), ).into(); } if len > 0 { let ch = mem::take(args[2]).cast::<char>(); let mut s = args[0].write_lock::<ImmutableString>().unwrap(); let orig_len = s.chars().count(); if len as usize > orig_len { let p = s.make_mut(); for _ in 0..(len as usize - orig_len) { p.push(ch); } #[cfg(not(feature = "unchecked"))] if _engine.limits.max_string_size > 0 && s.len() > _engine.limits.max_string_size { return EvalAltResult::ErrorDataTooLarge( "Length of string".to_string(), _engine.limits.max_string_size, s.len(), Position::none(), ).into(); } } } Ok(()) }, ); lib.set_iter( TypeId::of::<ImmutableString>(), |arr| Box::new( arr.cast::<ImmutableString>().chars().collect::<Vec<_>>().into_iter().map(Into::into) ) as Box<dyn Iterator<Item = Dynamic>>, ); }); gen_concat_functions!(basic => INT, bool, char, FnPtr); #[cfg(not(feature = "only_i32"))] #[cfg(not(feature = "only_i64"))] gen_concat_functions!(numbers => i8, u8, i16, u16, i32, i64, u32, u64); #[cfg(not(feature = "only_i32"))] #[cfg(not(feature = "only_i64"))] #[cfg(not(target_arch = "wasm32"))] gen_concat_functions!(num_128 => i128, u128); #[cfg(not(feature = "no_float"))] gen_concat_functions!(float => f32, f64); #[export_module] mod string_functions { #[rhai_fn(name = "+")] #[inline(always)] pub fn add_append_unit(s: ImmutableString, _x: ()) -> ImmutableString { s } #[rhai_fn(name = "+")] #[inline(always)] pub fn add_prepend_unit(_x: (), s: ImmutableString) -> ImmutableString { s } #[rhai_fn(name = "+=")] #[inline(always)] pub fn append_char(s: &mut ImmutableString, ch: char) { *s += ch; } #[rhai_fn(name = "+=")] #[inline(always)] pub fn append_string(s: &mut ImmutableString, add: ImmutableString) { *s += &add; } #[inline(always)] pub fn len(s: &mut ImmutableString) -> INT { s.chars().count() as INT } #[rhai_fn(get = "len")] #[inline(always)] pub fn len_prop(s: &mut ImmutableString) -> INT { len(s) } #[inline(always)] pub fn clear(s: &mut ImmutableString) { s.make_mut().clear(); } pub fn truncate(s: 
&mut ImmutableString, len: INT) { if len > 0 { let chars: StaticVec<_> = s.chars().collect(); let copy = s.make_mut(); copy.clear(); copy.extend(chars.into_iter().take(len as usize)); } else { s.make_mut().clear(); } } pub fn trim(s: &mut ImmutableString) { let trimmed = s.trim(); if trimmed.len() < s.len() { *s = trimmed.to_string().into(); } } #[rhai_fn(name = "contains")] #[inline(always)] pub fn contains_char(s: &mut ImmutableString, ch: char) -> bool { s.contains(ch) } #[rhai_fn(name = "contains")] #[inline(always)] pub fn contains_string(s: &mut ImmutableString, find: ImmutableString) -> bool { s.contains(find.as_str()) } #[rhai_fn(name = "index_of")] pub fn index_of_char_starting_from(s: &mut ImmutableString, ch: char, start: INT) -> INT { let start = if start < 0 { 0 } else if (start as usize) >= s.chars().count() { return -1 as INT; } else { s.chars().take(start as usize).collect::<String>().len() }; s[start..] .find(ch) .map(|index| s[0..start + index].chars().count() as INT) .unwrap_or(-1 as INT) } #[rhai_fn(name = "index_of")] pub fn index_of_char(s: &mut ImmutableString, ch: char) -> INT { s.find(ch) .map(|index| s[0..index].chars().count() as INT) .unwrap_or(-1 as INT) } #[rhai_fn(name = "index_of")]
#[rhai_fn(name = "index_of")] pub fn index_of_string(s: &mut ImmutableString, find: ImmutableString) -> INT { s.find(find.as_str()) .map(|index| s[0..index].chars().count() as INT) .unwrap_or(-1 as INT) } #[rhai_fn(name = "sub_string")] pub fn sub_string(s: ImmutableString, start: INT, len: INT) -> ImmutableString { let offset = if s.is_empty() || len <= 0 { return "".to_string().into(); } else if start < 0 { 0 } else if (start as usize) >= s.chars().count() { return "".to_string().into(); } else { start as usize }; let chars: StaticVec<_> = s.chars().collect(); let len = if offset + (len as usize) > chars.len() { chars.len() - offset } else { len as usize }; chars .iter() .skip(offset) .take(len) .cloned() .collect::<String>() .into() } #[rhai_fn(name = "sub_string")] #[inline(always)] pub fn sub_string_starting_from(s: ImmutableString, start: INT) -> ImmutableString { let len = s.len() as INT; sub_string(s, start, len) } #[rhai_fn(name = "crop")] pub fn crop_string(s: &mut ImmutableString, start: INT, len: INT) { let offset = if s.is_empty() || len <= 0 { s.make_mut().clear(); return; } else if start < 0 { 0 } else if (start as usize) >= s.chars().count() { s.make_mut().clear(); return; } else { start as usize }; let chars: StaticVec<_> = s.chars().collect(); let len = if offset + (len as usize) > chars.len() { chars.len() - offset } else { len as usize }; let copy = s.make_mut(); copy.clear(); copy.extend(chars.iter().skip(offset).take(len)); } #[rhai_fn(name = "crop")] #[inline(always)] pub fn crop_string_starting_from(s: &mut ImmutableString, start: INT) { crop_string(s, start, s.len() as INT); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn replace_string(s: &mut ImmutableString, find: ImmutableString, sub: ImmutableString) { *s = s.replace(find.as_str(), sub.as_str()).into(); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn replace_string_with_char(s: &mut ImmutableString, find: ImmutableString, sub: char) { *s = s.replace(find.as_str(), 
&sub.to_string()).into(); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn replace_char_with_string(s: &mut ImmutableString, find: char, sub: ImmutableString) { *s = s.replace(&find.to_string(), sub.as_str()).into(); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn replace_char(s: &mut ImmutableString, find: char, sub: char) { *s = s.replace(&find.to_string(), &sub.to_string()).into(); } #[cfg(not(feature = "no_index"))] pub mod arrays { use crate::engine::Array; #[rhai_fn(name = "+")] #[inline] pub fn append(x: &mut ImmutableString, y: Array) -> String { format!("{}{:?}", x, y) } #[rhai_fn(name = "+")] #[inline] pub fn prepend(x: &mut Array, y: ImmutableString) -> String { format!("{:?}{}", x, y) } } }
pub fn index_of_string_starting_from( s: &mut ImmutableString, find: ImmutableString, start: INT, ) -> INT { let start = if start < 0 { 0 } else if (start as usize) >= s.chars().count() { return -1 as INT; } else { s.chars().take(start as usize).collect::<String>().len() }; s[start..] .find(find.as_str()) .map(|index| s[0..start + index].chars().count() as INT) .unwrap_or(-1 as INT) }
function_block-full_function
[ { "content": "#[export_fn]\n\npub fn test_fn(input: &mut Clonable) -> &mut bool {\n\n &mut input.d\n\n}\n\n\n", "file_path": "codegen/ui_tests/return_mut_ref.rs", "rank": 0, "score": 353556.9496434691 }, { "content": "#[export_fn]\n\npub fn add_together(x: INT, y: INT) -> INT {\n\n x +...
Rust
examples/linked_timer_rtic.rs
akashihi/stm32l0xx-hal
d53ec21dc02348ecc8351f0578ec4eee08a447cf
#![no_main] #![no_std] extern crate panic_halt; use core::fmt::Write; use rtic::app; use stm32l0xx_hal::prelude::*; use stm32l0xx_hal::{ pac, rcc::Config, serial::{self, Serial}, time, timer::{LinkedTimer, LinkedTimerPair, Timer}, }; const LOGGER_FREQUENCY: u32 = 2; #[app(device = stm32l0xx_hal::pac, peripherals = true)] const APP: () = { struct Resources { serial: Serial<pac::USART1>, timer: Timer<pac::TIM6>, linked_tim2_tim3: LinkedTimerPair<pac::TIM2, pac::TIM3>, linked_tim21_tim22: LinkedTimerPair<pac::TIM21, pac::TIM22>, } #[init] fn init(ctx: init::Context) -> init::LateResources { let cp: cortex_m::Peripherals = ctx.core; let dp: pac::Peripherals = ctx.device; let mut rcc = dp.RCC.freeze(Config::hsi16()); let mut delay = cp.SYST.delay(rcc.clocks); let gpiob = dp.GPIOB.split(&mut rcc); let mut serial = Serial::usart1( dp.USART1, gpiob.pb6.into_floating_input(), gpiob.pb7.into_floating_input(), serial::Config::default(), &mut rcc, ) .unwrap(); writeln!(serial, "Starting example").ok(); writeln!(serial, "Init TIM2/TIM3...").ok(); let linked_tim2_tim3 = LinkedTimerPair::tim2_tim3(dp.TIM2, dp.TIM3, &mut rcc); delay.delay_ms(1000u16); writeln!(serial, "Init TIM21/TIM22...").ok(); let linked_tim21_tim22 = LinkedTimerPair::tim21_tim22(dp.TIM21, dp.TIM22, &mut rcc); let mut timer = dp.TIM6.timer(LOGGER_FREQUENCY.hz(), &mut rcc); timer.listen(); init::LateResources { serial, timer, linked_tim2_tim3, linked_tim21_tim22, } } #[task(binds = TIM6, resources = [serial, timer, linked_tim2_tim3, linked_tim21_tim22])] fn logger(ctx: logger::Context) { static mut PREV_TIM2_TIM3: u32 = 0; static mut PREV_TIM21_TIM22: u32 = 0; static mut TIMES_UNTIL_RESET: u32 = 3 * LOGGER_FREQUENCY; ctx.resources.timer.clear_irq(); if *TIMES_UNTIL_RESET > 1 { *TIMES_UNTIL_RESET -= 1; } else if *TIMES_UNTIL_RESET == 1 { writeln!(ctx.resources.serial, "Reset",).ok(); ctx.resources.linked_tim2_tim3.reset(); ctx.resources.linked_tim21_tim22.reset(); *TIMES_UNTIL_RESET -= 1; } print_timer( 
"TIM2/TIM3 ", ctx.resources.linked_tim2_tim3, ctx.resources.serial, PREV_TIM2_TIM3, ); print_timer( "TIM21/TIM22 ", ctx.resources.linked_tim21_tim22, ctx.resources.serial, PREV_TIM21_TIM22, ); } }; fn print_timer( name: &'static str, timer: &impl LinkedTimer, serial: &mut Serial<pac::USART1>, previous: &mut u32, ) { let cnt = timer.get_counter(); let delta = cnt - *previous; let freq = delta * LOGGER_FREQUENCY / 1000; writeln!( serial, "{} count {:>10} (msb={} lsb={} Δ{} {} kHz)", name, cnt, (cnt & 0xffff0000) >> 16, cnt & 0xffff, delta, freq, ) .ok(); *previous = cnt; }
#![no_main] #![no_std] extern crate panic_halt; use core::fmt::Write; use rtic::app; use stm32l0xx_hal::prelude::*; use stm32l0xx_hal::{ pac, rcc::Config, serial::{self, Serial}, time, timer::{LinkedTimer, LinkedTimerPair, Timer}, }; const LOGGER_FREQUENCY: u32 = 2; #[app(device = stm32l0xx_hal::pac, peripherals = true)] const APP: () = { struct Resources { serial: Serial<pac::USART1>, timer: Timer<pac::TIM6>, linked_tim2_tim3: LinkedTimerPair<pac::TIM2, pac::TIM3>, linked_tim21_tim22: LinkedTimerPair<pac::TIM21, pac::TIM22>, } #[init] fn init(ctx: init::Context) -> init::LateResources { let cp: cortex_m::Peripherals = ctx.core; let dp: pac::Peripherals = ctx.device; let mut rcc = dp.RCC.freeze(Config::hsi16()); let mut delay = cp.SYST.delay(rcc.clocks); let gpiob = dp.GPIOB.split(&mut rcc); let mut serial = Serial::usart1( dp.USART1, gpiob.pb6.into_floating_input(), gpiob.pb7.into_floating_input(), serial::Config::default(), &mut rcc, ) .unwrap(); writeln!(serial, "Starting example").ok(); writeln!(serial, "Init TIM2/TIM3...").ok(); let linked_tim2_tim3 = LinkedTimerPair::tim2_tim3(dp.T
#[task(binds = TIM6, resources = [serial, timer, linked_tim2_tim3, linked_tim21_tim22])] fn logger(ctx: logger::Context) { static mut PREV_TIM2_TIM3: u32 = 0; static mut PREV_TIM21_TIM22: u32 = 0; static mut TIMES_UNTIL_RESET: u32 = 3 * LOGGER_FREQUENCY; ctx.resources.timer.clear_irq(); if *TIMES_UNTIL_RESET > 1 { *TIMES_UNTIL_RESET -= 1; } else if *TIMES_UNTIL_RESET == 1 { writeln!(ctx.resources.serial, "Reset",).ok(); ctx.resources.linked_tim2_tim3.reset(); ctx.resources.linked_tim21_tim22.reset(); *TIMES_UNTIL_RESET -= 1; } print_timer( "TIM2/TIM3 ", ctx.resources.linked_tim2_tim3, ctx.resources.serial, PREV_TIM2_TIM3, ); print_timer( "TIM21/TIM22 ", ctx.resources.linked_tim21_tim22, ctx.resources.serial, PREV_TIM21_TIM22, ); } }; fn print_timer( name: &'static str, timer: &impl LinkedTimer, serial: &mut Serial<pac::USART1>, previous: &mut u32, ) { let cnt = timer.get_counter(); let delta = cnt - *previous; let freq = delta * LOGGER_FREQUENCY / 1000; writeln!( serial, "{} count {:>10} (msb={} lsb={} Δ{} {} kHz)", name, cnt, (cnt & 0xffff0000) >> 16, cnt & 0xffff, delta, freq, ) .ok(); *previous = cnt; }
IM2, dp.TIM3, &mut rcc); delay.delay_ms(1000u16); writeln!(serial, "Init TIM21/TIM22...").ok(); let linked_tim21_tim22 = LinkedTimerPair::tim21_tim22(dp.TIM21, dp.TIM22, &mut rcc); let mut timer = dp.TIM6.timer(LOGGER_FREQUENCY.hz(), &mut rcc); timer.listen(); init::LateResources { serial, timer, linked_tim2_tim3, linked_tim21_tim22, } }
function_block-function_prefixed
[ { "content": "fn delay() {\n\n // We can't use `Delay`, as that requires a frequency of at least one MHz.\n\n // Given our clock selection, the following loop should give us a nice delay\n\n // when compiled in release mode.\n\n for _ in 0..1_000 {\n\n asm::nop()\n\n }\n\n}\n", "file_p...
Rust
src/lib.rs
msakuta/rotate-enum
4a4b64a3b28bd30f961688f1462f9db57dbd6502
use core::panic; use proc_macro::TokenStream; use quote::quote; use syn::{parse_macro_input, Data, DeriveInput}; #[proc_macro_derive(RotateEnum)] pub fn rotate_enum(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as DeriveInput); let name = input.ident; let variants = if let Data::Enum(data) = &input.data { data.variants.iter().collect::<Vec<_>>() } else { panic!("derive(RotateEnum) must be applied to an enum"); }; let nexts = variants .iter() .skip(1) .chain(variants.get(0)) .map(|v| (&v.ident)) .collect::<Vec<_>>(); let tokens = quote! { impl #name{ pub fn next(self) -> Self { match self { #(Self::#variants => Self::#nexts, )* } } pub fn prev(self) -> Self { match self { #(Self::#nexts => Self::#variants, )* } } } }; tokens.into() } #[proc_macro_derive(ShiftEnum)] pub fn shift_enum(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as DeriveInput); let name = input.ident; let variants = if let Data::Enum(data) = &input.data { data.variants.iter().collect::<Vec<_>>() } else { panic!("derive(RotateEnum) must be applied to an enum"); }; let nexts = variants .iter() .skip(1) .map(|v| quote! { Some(Self::#v) }) .chain(Some(quote! { None })) .collect::<Vec<_>>(); let none_quote = Some(quote! { None }); let prevs = variants .iter() .take(variants.len() - 1) .map(|v| quote! { Some(Self::#v) }) .collect::<Vec<_>>(); let prevs = none_quote.iter().chain(&prevs).collect::<Vec<_>>(); let tokens = quote! 
{ impl #name{ pub fn next(self) -> Option<Self> { match self { #(Self::#variants => #nexts, )* } } pub fn prev(self) -> Option<Self> { match self { #(Self::#variants => #prevs, )* } } } }; tokens.into() } #[proc_macro_derive(IterEnum)] pub fn iter_enum(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as DeriveInput); let name = input.ident; let variants = if let Data::Enum(data) = &input.data { data.variants.iter().collect::<Vec<_>>() } else { panic!("derive(RotateEnum) must be applied to an enum"); }; let first_variant = variants .first() .expect("derive(IterEnum) expects at least one variant in enum"); let nexts = variants .iter() .skip(1) .map(|v| quote! { Some(#name::#v) }) .chain(Some(quote! { None })) .collect::<Vec<_>>(); let iterator_name = syn::Ident::new(&(name.to_string() + "Iterator"), name.span()); let tokens = quote! { struct #iterator_name(Option<#name>); impl #iterator_name { fn new() -> Self { Self(Some(#name::#first_variant)) } } impl Iterator for #iterator_name { type Item = #name; fn next(&mut self) -> Option<Self::Item> { let ret = self.0.clone(); self.0 = match self.0 { #(Some(#name::#variants) => #nexts, )* None => None, }; ret } } impl #name { fn iter(&self) -> #iterator_name { #iterator_name(Some(self.clone())) } } }; tokens.into() }
use core::panic; use proc_macro::TokenStream; use quote::quote; use syn::{parse_macro_input, Data, DeriveInput}; #[proc_macro_derive(RotateEnum)] pub fn rotate_enum(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as DeriveInput); let name = input.ident;
let nexts = variants .iter() .skip(1) .chain(variants.get(0)) .map(|v| (&v.ident)) .collect::<Vec<_>>(); let tokens = quote! { impl #name{ pub fn next(self) -> Self { match self { #(Self::#variants => Self::#nexts, )* } } pub fn prev(self) -> Self { match self { #(Self::#nexts => Self::#variants, )* } } } }; tokens.into() } #[proc_macro_derive(ShiftEnum)] pub fn shift_enum(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as DeriveInput); let name = input.ident; let variants = if let Data::Enum(data) = &input.data { data.variants.iter().collect::<Vec<_>>() } else { panic!("derive(RotateEnum) must be applied to an enum"); }; let nexts = variants .iter() .skip(1) .map(|v| quote! { Some(Self::#v) }) .chain(Some(quote! { None })) .collect::<Vec<_>>(); let none_quote = Some(quote! { None }); let prevs = variants .iter() .take(variants.len() - 1) .map(|v| quote! { Some(Self::#v) }) .collect::<Vec<_>>(); let prevs = none_quote.iter().chain(&prevs).collect::<Vec<_>>(); let tokens = quote! { impl #name{ pub fn next(self) -> Option<Self> { match self { #(Self::#variants => #nexts, )* } } pub fn prev(self) -> Option<Self> { match self { #(Self::#variants => #prevs, )* } } } }; tokens.into() } #[proc_macro_derive(IterEnum)] pub fn iter_enum(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as DeriveInput); let name = input.ident; let variants = if let Data::Enum(data) = &input.data { data.variants.iter().collect::<Vec<_>>() } else { panic!("derive(RotateEnum) must be applied to an enum"); }; let first_variant = variants .first() .expect("derive(IterEnum) expects at least one variant in enum"); let nexts = variants .iter() .skip(1) .map(|v| quote! { Some(#name::#v) }) .chain(Some(quote! { None })) .collect::<Vec<_>>(); let iterator_name = syn::Ident::new(&(name.to_string() + "Iterator"), name.span()); let tokens = quote! 
{ struct #iterator_name(Option<#name>); impl #iterator_name { fn new() -> Self { Self(Some(#name::#first_variant)) } } impl Iterator for #iterator_name { type Item = #name; fn next(&mut self) -> Option<Self::Item> { let ret = self.0.clone(); self.0 = match self.0 { #(Some(#name::#variants) => #nexts, )* None => None, }; ret } } impl #name { fn iter(&self) -> #iterator_name { #iterator_name(Some(self.clone())) } } }; tokens.into() }
let variants = if let Data::Enum(data) = &input.data { data.variants.iter().collect::<Vec<_>>() } else { panic!("derive(RotateEnum) must be applied to an enum"); };
assignment_statement
[ { "content": "#[test]\n\nfn test_shift() {\n\n let up = Direction::Up;\n\n let left = Direction::Left;\n\n let down = Direction::Down;\n\n let right = Direction::Right;\n\n\n\n let mut iter = up.iter();\n\n assert!(iter.next() == Some(up));\n\n assert!(iter.next() == Some(left));\n\n ass...
Rust
src/ping.rs
FrozenDroid/esp-idf-svc
d394bc67d288b5a3b8dcdeb896adcdf1ba7f1533
use core::{mem, ptr, time::Duration}; use ::log::*; #[cfg(feature = "std")] use std::sync::*; use embedded_svc::ipv4; use embedded_svc::mutex::Mutex; use embedded_svc::ping::*; use esp_idf_sys::*; use crate::private::common::*; #[derive(Debug)] pub struct EspPing(u32); unsafe impl Send for EspPing {} unsafe impl Sync for EspPing {} impl Default for EspPing { fn default() -> Self { Self(0) } } impl EspPing { pub fn new(interface_index: u32) -> Self { Self(interface_index) } fn run_ping<F: Fn(&Summary, &Reply)>( &self, ip: ipv4::Ipv4Addr, conf: &Configuration, tracker: &mut Tracker<F>, ) -> Result<(), EspError> { #[allow(clippy::needless_update)] let config = esp_ping_config_t { count: conf.count, interval_ms: conf.interval.as_millis() as u32, timeout_ms: conf.timeout.as_millis() as u32, data_size: conf.data_size, tos: conf.tos, target_addr: ip_addr_t { u_addr: ip_addr__bindgen_ty_1 { ip4: Newtype::<ip4_addr_t>::from(ip).0, }, type_: 0, }, task_stack_size: 4096, task_prio: 2, interface: self.0, ..Default::default() }; let callbacks = esp_ping_callbacks_t { on_ping_success: Some(EspPing::on_ping_success::<F>), on_ping_timeout: Some(EspPing::on_ping_timeout::<F>), on_ping_end: Some(EspPing::on_ping_end::<F>), cb_args: tracker as *mut Tracker<F> as *mut c_types::c_void, }; let mut handle: esp_ping_handle_t = ptr::null_mut(); let handle_ref = &mut handle; esp!(unsafe { esp_ping_new_session(&config, &callbacks, handle_ref as *mut *mut c_types::c_void) })?; if handle.is_null() { return Err(EspError::from(ESP_ERR_INVALID_ARG as _).unwrap()); } info!("Ping session established, got handle {:?}", handle); #[allow(clippy::mutex_atomic)] tracker.running.with_lock(|running| *running = true); esp!(unsafe { esp_ping_start(handle) })?; info!("Ping session started"); info!("Waiting for the ping session to complete"); #[cfg(feature = "std")] { #[allow(clippy::mutex_atomic)] let _running = tracker .cvar .wait_while(tracker.running.lock().unwrap(), |running| *running) .unwrap(); } 
#[cfg(not(feature = "std"))] { while tracker.running.with_lock(|running| *running) { unsafe { vTaskDelay(500) }; } } esp!(unsafe { esp_ping_stop(handle) })?; info!("Ping session stopped"); esp!(unsafe { esp_ping_delete_session(handle) })?; info!("Ping session {:?} removed", &handle); Ok(()) } unsafe extern "C" fn on_ping_success<F: Fn(&Summary, &Reply)>( handle: esp_ping_handle_t, args: *mut c_types::c_void, ) { info!("Ping success callback invoked"); let tracker_ptr: *mut Tracker<F> = args as _; let tracker = tracker_ptr.as_mut().unwrap(); let mut seqno: c_types::c_ushort = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_SEQNO, &mut seqno as *mut c_types::c_ushort as *mut c_types::c_void, mem::size_of_val(&seqno) as u32, ); let mut ttl: c_types::c_uchar = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_TTL, &mut ttl as *mut c_types::c_uchar as *mut c_types::c_void, mem::size_of_val(&ttl) as u32, ); let mut target_addr_raw = [0_u8; mem::size_of::<ip_addr_t>()]; let target_addr: &mut ip_addr_t = mem::transmute(&mut target_addr_raw); esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_IPADDR, target_addr as *mut ip_addr_t as *mut c_types::c_void, mem::size_of::<ip_addr_t>() as _, ); let mut elapsed_time: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_TIMEGAP, &mut elapsed_time as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&elapsed_time) as u32, ); let mut recv_len: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_SIZE, &mut recv_len as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&recv_len) as u32, ); let addr = ipv4::Ipv4Addr::from(Newtype(target_addr.u_addr.ip4)); info!( "From {} icmp_seq={} ttl={} time={}ms bytes={}", addr, seqno, ttl, elapsed_time, recv_len ); if let Some(reply_callback) = tracker.reply_callback { Self::update_summary(handle, &mut tracker.summary); reply_callback( &tracker.summary, 
&Reply::Success(Info { addr, seqno: seqno as u32, ttl: ttl as u8, recv_len: recv_len as u32, elapsed_time: Duration::from_millis(elapsed_time as u64), }), ); } } unsafe extern "C" fn on_ping_timeout<F: Fn(&Summary, &Reply)>( handle: esp_ping_handle_t, args: *mut c_types::c_void, ) { info!("Ping timeout callback invoked"); let tracker_ptr: *mut Tracker<F> = args as _; let tracker = tracker_ptr.as_mut().unwrap(); let mut seqno: c_types::c_ushort = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_SEQNO, &mut seqno as *mut c_types::c_ushort as *mut c_types::c_void, mem::size_of_val(&seqno) as u32, ); let mut target_addr_raw = [0_u8; mem::size_of::<ip_addr_t>()]; let target_addr: &mut ip_addr_t = mem::transmute(&mut target_addr_raw); esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_IPADDR, target_addr as *mut ip_addr_t as *mut c_types::c_void, mem::size_of::<ip_addr_t>() as _, ); info!("From {} icmp_seq={} timeout", "???", seqno); if let Some(reply_callback) = tracker.reply_callback { Self::update_summary(handle, &mut tracker.summary); reply_callback(&tracker.summary, &Reply::Timeout); } } #[allow(clippy::mutex_atomic)] unsafe extern "C" fn on_ping_end<F: Fn(&Summary, &Reply)>( handle: esp_ping_handle_t, args: *mut c_types::c_void, ) { info!("Ping end callback invoked"); let tracker_ptr: *mut Tracker<F> = args as _; let tracker = tracker_ptr.as_mut().unwrap(); Self::update_summary(handle, &mut tracker.summary); info!( "{} packets transmitted, {} received, time {}ms", tracker.summary.transmitted, tracker.summary.received, tracker.summary.time.as_millis() ); #[cfg(feature = "std")] { *tracker.running.lock().unwrap() = false; tracker.cvar.notify_one(); } #[cfg(not(feature = "std"))] tracker.running.with_lock(|running| *running = false); } unsafe fn update_summary(handle: esp_ping_handle_t, summary: &mut Summary) { let mut transmitted: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_REQUEST, &mut transmitted 
as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&transmitted) as u32, ); let mut received: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_REPLY, &mut received as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&received) as u32, ); let mut total_time: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_DURATION, &mut total_time as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&total_time) as u32, ); summary.transmitted = transmitted; summary.received = received; summary.time = Duration::from_millis(total_time as u64); } } impl Ping for EspPing { type Error = EspError; fn ping(&mut self, ip: ipv4::Ipv4Addr, conf: &Configuration) -> Result<Summary, Self::Error> { info!( "About to run a summary ping {} with configuration {:?}", ip, conf ); let mut tracker = Tracker::new(Some(&nop_callback)); self.run_ping(ip, conf, &mut tracker)?; Ok(tracker.summary) } fn ping_details<F: Fn(&Summary, &Reply)>( &mut self, ip: ipv4::Ipv4Addr, conf: &Configuration, reply_callback: &F, ) -> Result<Summary, Self::Error> { info!( "About to run a detailed ping {} with configuration {:?}", ip, conf ); let mut tracker = Tracker::new(Some(reply_callback)); self.run_ping(ip, conf, &mut tracker)?; Ok(tracker.summary) } } struct Tracker<'a, F: Fn(&Summary, &Reply)> { summary: Summary, #[cfg(feature = "std")] cvar: Condvar, #[cfg(feature = "std")] running: std::sync::Mutex<bool>, #[cfg(not(feature = "std"))] running: EspMutex<bool>, reply_callback: Option<&'a F>, } impl<'a, F: Fn(&Summary, &Reply)> Tracker<'a, F> { #[allow(clippy::mutex_atomic)] pub fn new(reply_callback: Option<&'a F>) -> Self { Self { summary: Default::default(), #[cfg(feature = "std")] cvar: Condvar::new(), #[cfg(feature = "std")] running: std::sync::Mutex::new(false), #[cfg(not(feature = "std"))] running: EspMutex::new(false), reply_callback, } } } fn nop_callback(_summary: &Summary, _reply: &Reply) {}
use core::{mem, ptr, time::Duration}; use ::log::*; #[cfg(feature = "std")] use std::sync::*; use embedded_svc::ipv4; use embedded_svc::mutex::Mutex; use embedded_svc::ping::*; use esp_idf_sys::*; use crate::private::common::*; #[derive(Debug)] pub struct EspPing(u32); unsafe impl Send for EspPing {} unsafe impl Sync for EspPing {} impl Default for EspPing { fn default() -> Self { Self(0) } } impl EspPing { pub fn new(interface_index: u32) -> Self { Self(interface_index) } fn run_ping<F: Fn(&Summary, &Reply)>( &self, ip: ipv4::Ipv4Addr, conf: &Configuration, tracker: &mut Tracker<F>, ) -> Result<(), EspError> { #[allow(clippy::needless_update)] let config = esp_ping_config_t { count: conf.count, interval_ms: conf.interval.as_millis() as u32, timeout_ms: conf.timeout.as_millis() as u32, data_size: conf.data_size, tos: conf.tos, target_addr: ip_addr_t { u_addr: ip_addr__bindgen_ty_1 { ip4: Newtype::<ip4_addr_t>::from(ip).0, }, type_: 0, }, task_stack_size: 4096, task_prio: 2, interface: self.0, ..Default::default() }; let callbacks = esp_ping_callbacks_t { on_ping_success: Some(EspPing::on_ping_success::<F>), on_ping_timeout: Some(EspPing::on_ping_timeout::<F>), on_ping_end: Some(EspPing::on_ping_end::<F>), cb_args: tracker as *mut Tracker<F> as *mut c_types::c_void, }; let mut handle: esp_ping_handle_t = ptr::null_mut(); let handle_ref = &mut handle; esp!(unsafe { esp_ping_new_session(&config, &callbacks, handle_ref as *mut *mut c_types::c_void) })?; if handle.is_null() { return Err(EspError::from(ESP_ERR_INVALID_ARG as _).unwrap()); } info!("Ping session established, got handle {:?}", handle); #[allow(clippy::mutex_atomic)] tracker.running.with_lock(|running| *running = true); esp!(unsafe { esp_ping_start(handle) })?; info!("Ping session started"); info!("Waiting for the ping session to complete"); #[cfg(feature = "std")] { #[allow(clippy::mutex_atomic)] let _running = tracker .cvar .wait_while(tracker.running.lock().unwrap(), |running| *running) .unwrap(); } 
#[cfg(not(feature = "std"))] { while tracker.running.with_lock(|running| *running) { unsafe { vTaskDelay(500) }; } } esp!(unsafe { esp_ping_stop(handle) })?; info!("Ping session stopped"); esp!(unsafe { esp_ping_delete_session(handle) })?; info!("Ping session {:?} removed", &handle); Ok(()) } unsafe extern "C" fn on_ping_success<F: Fn(&Summary, &Reply)>( handle: esp_ping_handle_t, args: *mut c_types::c_void, ) { info!("Ping success callback invoked"); let tracker_ptr: *mut Tracker<F> = args as _; let tracker = tracker_ptr.as_mut().unwrap(); let mut seqno: c_types::c_ushort = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_SEQNO, &mut seqno as *mut c_types::c_ushort as *mut c_types::c_void, mem::size_of_val(&seqno) as u32, ); let mut ttl: c_types::c_uchar = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_TTL, &mut ttl as *mut c_types::c_uchar as *mut c_types::c_void, mem::size_of_val(&ttl) as u32, ); let mut target_addr_raw = [0_u8; mem::size_of::<ip_addr_t>()]; let target_addr: &mut ip_addr_t = mem::transmute(&mut target_addr_raw); esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_IPADDR, target_addr as *mut ip_addr_t as *mut c_types::c_void, mem::size_of::<ip_addr_t>() as _, ); let mut elapsed_time: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_TIMEGAP, &mut elapsed_time as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&e
id, mem::size_of_val(&total_time) as u32, ); summary.transmitted = transmitted; summary.received = received; summary.time = Duration::from_millis(total_time as u64); } } impl Ping for EspPing { type Error = EspError; fn ping(&mut self, ip: ipv4::Ipv4Addr, conf: &Configuration) -> Result<Summary, Self::Error> { info!( "About to run a summary ping {} with configuration {:?}", ip, conf ); let mut tracker = Tracker::new(Some(&nop_callback)); self.run_ping(ip, conf, &mut tracker)?; Ok(tracker.summary) } fn ping_details<F: Fn(&Summary, &Reply)>( &mut self, ip: ipv4::Ipv4Addr, conf: &Configuration, reply_callback: &F, ) -> Result<Summary, Self::Error> { info!( "About to run a detailed ping {} with configuration {:?}", ip, conf ); let mut tracker = Tracker::new(Some(reply_callback)); self.run_ping(ip, conf, &mut tracker)?; Ok(tracker.summary) } } struct Tracker<'a, F: Fn(&Summary, &Reply)> { summary: Summary, #[cfg(feature = "std")] cvar: Condvar, #[cfg(feature = "std")] running: std::sync::Mutex<bool>, #[cfg(not(feature = "std"))] running: EspMutex<bool>, reply_callback: Option<&'a F>, } impl<'a, F: Fn(&Summary, &Reply)> Tracker<'a, F> { #[allow(clippy::mutex_atomic)] pub fn new(reply_callback: Option<&'a F>) -> Self { Self { summary: Default::default(), #[cfg(feature = "std")] cvar: Condvar::new(), #[cfg(feature = "std")] running: std::sync::Mutex::new(false), #[cfg(not(feature = "std"))] running: EspMutex::new(false), reply_callback, } } } fn nop_callback(_summary: &Summary, _reply: &Reply) {}
lapsed_time) as u32, ); let mut recv_len: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_SIZE, &mut recv_len as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&recv_len) as u32, ); let addr = ipv4::Ipv4Addr::from(Newtype(target_addr.u_addr.ip4)); info!( "From {} icmp_seq={} ttl={} time={}ms bytes={}", addr, seqno, ttl, elapsed_time, recv_len ); if let Some(reply_callback) = tracker.reply_callback { Self::update_summary(handle, &mut tracker.summary); reply_callback( &tracker.summary, &Reply::Success(Info { addr, seqno: seqno as u32, ttl: ttl as u8, recv_len: recv_len as u32, elapsed_time: Duration::from_millis(elapsed_time as u64), }), ); } } unsafe extern "C" fn on_ping_timeout<F: Fn(&Summary, &Reply)>( handle: esp_ping_handle_t, args: *mut c_types::c_void, ) { info!("Ping timeout callback invoked"); let tracker_ptr: *mut Tracker<F> = args as _; let tracker = tracker_ptr.as_mut().unwrap(); let mut seqno: c_types::c_ushort = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_SEQNO, &mut seqno as *mut c_types::c_ushort as *mut c_types::c_void, mem::size_of_val(&seqno) as u32, ); let mut target_addr_raw = [0_u8; mem::size_of::<ip_addr_t>()]; let target_addr: &mut ip_addr_t = mem::transmute(&mut target_addr_raw); esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_IPADDR, target_addr as *mut ip_addr_t as *mut c_types::c_void, mem::size_of::<ip_addr_t>() as _, ); info!("From {} icmp_seq={} timeout", "???", seqno); if let Some(reply_callback) = tracker.reply_callback { Self::update_summary(handle, &mut tracker.summary); reply_callback(&tracker.summary, &Reply::Timeout); } } #[allow(clippy::mutex_atomic)] unsafe extern "C" fn on_ping_end<F: Fn(&Summary, &Reply)>( handle: esp_ping_handle_t, args: *mut c_types::c_void, ) { info!("Ping end callback invoked"); let tracker_ptr: *mut Tracker<F> = args as _; let tracker = tracker_ptr.as_mut().unwrap(); Self::update_summary(handle, &mut 
tracker.summary); info!( "{} packets transmitted, {} received, time {}ms", tracker.summary.transmitted, tracker.summary.received, tracker.summary.time.as_millis() ); #[cfg(feature = "std")] { *tracker.running.lock().unwrap() = false; tracker.cvar.notify_one(); } #[cfg(not(feature = "std"))] tracker.running.with_lock(|running| *running = false); } unsafe fn update_summary(handle: esp_ping_handle_t, summary: &mut Summary) { let mut transmitted: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_REQUEST, &mut transmitted as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&transmitted) as u32, ); let mut received: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_REPLY, &mut received as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&received) as u32, ); let mut total_time: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_DURATION, &mut total_time as *mut c_types::c_uint as *mut c_types::c_vo
random
[ { "content": "#[cfg(feature = \"alloc\")]\n\npub fn from_cstr_ptr(ptr: *const i8) -> alloc::string::String {\n\n unsafe { CStr::from_ptr(ptr) }.to_string_lossy().to_string()\n\n}\n\n\n", "file_path": "src/private/cstr.rs", "rank": 2, "score": 71492.7575833215 }, { "content": "#[cfg(featur...
Rust
src/model/wrapper.rs
MaxOhn/bathbot-cache
c9f5f406d32bc99d31d618d69ed0db84aba4833b
use serde::ser::{Serialize, SerializeStruct, Serializer}; use twilight_model::{ channel::{ thread::{PrivateThread, PublicThread}, GuildChannel, TextChannel, }, gateway::payload::incoming::MemberUpdate, guild::{Guild, Member, PartialGuild, PartialMember, Role}, id::{ChannelId, GuildId}, user::{CurrentUser, User}, }; pub struct GuildWrapper<'g>(pub &'g Guild); impl<'g> From<&'g Guild> for GuildWrapper<'g> { fn from(guild: &'g Guild) -> Self { Self(guild) } } impl<'g> Serialize for GuildWrapper<'g> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 3 + self.0.icon.is_some() as usize; let mut guild = s.serialize_struct("CachedGuild", len)?; if let Some(ref icon) = self.0.icon { guild.serialize_field("a", icon)?; } guild.serialize_field("b", &self.0.id)?; guild.serialize_field("c", &self.0.name)?; guild.serialize_field("d", &self.0.owner_id)?; guild.end() } } pub struct PartialGuildWrapper<'g>(pub &'g PartialGuild); impl<'g> From<&'g PartialGuild> for PartialGuildWrapper<'g> { fn from(guild: &'g PartialGuild) -> Self { Self(guild) } } impl<'g> Serialize for PartialGuildWrapper<'g> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 3 + self.0.icon.is_some() as usize; let mut guild = s.serialize_struct("CachedGuild", len)?; if let Some(ref icon) = self.0.icon { guild.serialize_field("a", icon)?; } guild.serialize_field("b", &self.0.id)?; guild.serialize_field("c", &self.0.name)?; guild.serialize_field("d", &self.0.owner_id)?; guild.end() } } pub struct CurrentUserWrapper<'u>(pub &'u CurrentUser); impl<'u> From<&'u CurrentUser> for CurrentUserWrapper<'u> { fn from(user: &'u CurrentUser) -> Self { Self(user) } } impl<'u> Serialize for CurrentUserWrapper<'u> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 3 + self.0.avatar.is_some() as usize; let mut user = s.serialize_struct("CachedCurrentUser", len)?; if let Some(ref avatar) = self.0.avatar { user.serialize_field("a", avatar)?; 
} user.serialize_field("b", &self.0.discriminator)?; user.serialize_field("c", &self.0.id)?; user.serialize_field("d", &self.0.name)?; user.end() } } pub struct RoleWrapper<'r>(pub &'r Role); impl<'r> From<&'r Role> for RoleWrapper<'r> { fn from(role: &'r Role) -> Self { Self(role) } } impl<'r> Serialize for RoleWrapper<'r> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut role = s.serialize_struct("CachedRole", 4)?; role.serialize_field("a", &self.0.id)?; role.serialize_field("b", &self.0.name)?; role.serialize_field("c", &self.0.permissions)?; role.serialize_field("d", &self.0.position)?; role.end() } } pub struct MemberWrapper<'m>(pub &'m Member); impl<'m> From<&'m Member> for MemberWrapper<'m> { fn from(member: &'m Member) -> Self { Self(member) } } impl<'m> Serialize for MemberWrapper<'m> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.nick.is_some() as usize + !self.0.roles.is_empty() as usize; let mut member = s.serialize_struct("CachedMember", len)?; member.serialize_field("a", &self.0.guild_id)?; if let Some(ref nick) = self.0.nick { member.serialize_field("b", nick)?; } if !self.0.roles.is_empty() { member.serialize_field("c", &self.0.roles)?; } member.serialize_field("d", &self.0.user.id)?; member.end() } } pub struct PartialMemberWrapper<'m> { guild: GuildId, member: &'m PartialMember, user: &'m User, } impl<'m> From<(&'m PartialMember, GuildId, &'m User)> for PartialMemberWrapper<'m> { fn from((member, guild, user): (&'m PartialMember, GuildId, &'m User)) -> Self { Self { member, guild, user, } } } impl<'m> Serialize for PartialMemberWrapper<'m> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.member.nick.is_some() as usize + !self.member.roles.is_empty() as usize; let mut member = s.serialize_struct("CachedMember", len)?; member.serialize_field("a", &self.guild)?; if let Some(ref nick) = self.member.nick { member.serialize_field("b", 
nick)?; } if !self.member.roles.is_empty() { member.serialize_field("c", &self.member.roles)?; } member.serialize_field("d", &self.user.id)?; member.end() } } pub struct MemberUpdateWrapper<'m>(&'m MemberUpdate); impl<'m> From<&'m MemberUpdate> for MemberUpdateWrapper<'m> { fn from(member: &'m MemberUpdate) -> Self { Self(member) } } impl<'m> Serialize for MemberUpdateWrapper<'m> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.nick.is_some() as usize + !self.0.roles.is_empty() as usize; let mut member = s.serialize_struct("CachedMember", len)?; member.serialize_field("a", &self.0.guild_id)?; if let Some(ref nick) = self.0.nick { member.serialize_field("b", nick)?; } if !self.0.roles.is_empty() { member.serialize_field("c", &self.0.roles)?; } member.serialize_field("d", &self.0.user.id)?; member.end() } } pub struct TextChannelWrapper<'c>(pub &'c TextChannel); impl<'c> From<&'c TextChannel> for TextChannelWrapper<'c> { fn from(channel: &'c TextChannel) -> Self { Self(channel) } } impl<'c> Serialize for TextChannelWrapper<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.guild_id.is_some() as usize + !self.0.permission_overwrites.is_empty() as usize; let mut channel = s.serialize_struct("CachedTextChannel", len)?; if let Some(ref guild) = self.0.guild_id { channel.serialize_field("a", guild)?; } channel.serialize_field("b", &self.0.id)?; channel.serialize_field("c", &self.0.name)?; if !self.0.permission_overwrites.is_empty() { channel.serialize_field("d", &self.0.permission_overwrites)?; } channel.end() } } pub struct PublicThreadWrapper<'c>(pub &'c PublicThread); impl<'c> From<&'c PublicThread> for PublicThreadWrapper<'c> { fn from(channel: &'c PublicThread) -> Self { Self(channel) } } impl<'c> Serialize for PublicThreadWrapper<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.guild_id.is_some() as usize + 
self.0.parent_id.is_some() as usize; let mut channel = s.serialize_struct("CachedThread", len)?; if let Some(ref guild) = self.0.guild_id { channel.serialize_field("a", guild)?; } channel.serialize_field("b", &self.0.id)?; channel.serialize_field("c", &self.0.name)?; if let Some(ref parent_id) = self.0.parent_id { channel.serialize_field("d", parent_id)?; } channel.end() } } pub struct PrivateThreadWrapper<'c>(pub &'c PrivateThread); impl<'c> From<&'c PrivateThread> for PrivateThreadWrapper<'c> { fn from(channel: &'c PrivateThread) -> Self { Self(channel) } } impl<'c> Serialize for PrivateThreadWrapper<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.guild_id.is_some() as usize + self.0.parent_id.is_some() as usize; let mut channel = s.serialize_struct("CachedThread", len)?; if let Some(ref guild) = self.0.guild_id { channel.serialize_field("a", guild)?; } channel.serialize_field("b", &self.0.id)?; channel.serialize_field("c", &self.0.name)?; if let Some(ref parent_id) = self.0.parent_id { channel.serialize_field("d", parent_id)?; } channel.end() } } pub enum BasicGuildChannel<'c> { PrivateThread(&'c PrivateThread), PublicThread(&'c PublicThread), Text(&'c TextChannel), } impl<'c> BasicGuildChannel<'c> { pub const fn guild_id(&self) -> Option<GuildId> { match self { Self::PrivateThread(c) => c.guild_id, Self::PublicThread(c) => c.guild_id, Self::Text(c) => c.guild_id, } } pub const fn id(&self) -> ChannelId { match self { Self::PrivateThread(c) => c.id, Self::PublicThread(c) => c.id, Self::Text(c) => c.id, } } pub fn from(channel: &'c GuildChannel) -> Option<Self> { match channel { GuildChannel::PrivateThread(c) => Some(Self::PrivateThread(c)), GuildChannel::PublicThread(c) => Some(Self::PublicThread(c)), GuildChannel::Text(c) => Some(Self::Text(c)), _ => None, } } } impl<'c> Serialize for BasicGuildChannel<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { match self { 
BasicGuildChannel::PrivateThread(c) => { s.serialize_newtype_variant("CachedGuildChannel", 0, "a", &PrivateThreadWrapper(c)) } BasicGuildChannel::PublicThread(c) => { s.serialize_newtype_variant("CachedGuildChannel", 1, "b", &PublicThreadWrapper(c)) } BasicGuildChannel::Text(c) => { s.serialize_newtype_variant("CachedGuildChannel", 2, "c", &TextChannelWrapper(c)) } } } }
use serde::ser::{Serialize, SerializeStruct, Serializer}; use twilight_model::{ channel::{ thread::{PrivateThread, PublicThread}, GuildChannel, TextChannel, }, gateway::payload::incoming::MemberUpdate, guild::{Guild, Member, PartialGuild, PartialMember, Role}, id::{ChannelId, GuildId}, user::{CurrentUser, User}, }; pub struct GuildWrapper<'g>(pub &'g Guild); impl<'g> From<&'g Guild> for GuildWrapper<'g> { fn from(guild: &'g Guild) -> Self { Self(guild) } } impl<'g> Serialize for GuildWrapper<'g> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 3 + self.0.icon.is_some() as usize; let mut guild = s.serialize_struct("CachedGuild", len)?; if let Some(ref icon) = self.0.icon { guild.serialize_field("a", icon)?; } guild.serialize_field("b", &self.0.id)?; guild.serialize_field("c", &self.0.name)?; guild.serialize_field("d", &self.0.owner_id)?; guild.end() } } pub struct PartialGuildWrapper<'g>(pub &'g PartialGuild); impl<'g> From<&'g PartialGuild> for PartialGuildWrapper<'g> { fn from(guild: &'g PartialGuild) -> Self { Self(guild) } } impl<'g> Serialize for PartialGuildWrapper<'g> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 3 + self.0.icon.is_some() as usize; let mut guild = s.serialize_struct("CachedGuild", len)?; if let Some(ref icon) = self.0.icon { guild.serialize_field("a", icon)?; } guild.serialize_field("b", &self.0.id)?; guild.serialize_field("c", &self.0.name)?; guild.serialize_field("d", &self.0.owner_id)?; guild.end() } } pub struct CurrentUserWrapper<'u>(pub &'u CurrentUser); impl<'u> From<&'u CurrentUser> for CurrentUserWrapper<'u> { fn from(user: &'u CurrentUser) -> Self { Self(user) } } impl<'u> Serialize for CurrentUserWrapper<'u> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 3 + self.0.avatar.is_some() as usize; let mut user = s.serialize_struct("CachedCurrentUser", len)?; if let Some(ref avatar) = self.0.avatar { user.serialize_field("a", avatar)?; 
} user.serialize_field("b", &self.0.discriminator)?; user.serialize_field("c", &self.0.id)?; user.serialize_field("d", &self.0.name)?; user.end() } } pub struct RoleWrapper<'r>(pub &'r Role); impl<'r> From<&'r Role> for RoleWrapper<'r> { fn from(role: &'r Role) -> Self { Self(role) } } impl<'r> Serialize for RoleWrapper<'r> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut role = s.serialize_struct("CachedRole", 4)?; role.serialize_field("a", &self.0.id)?; role.serialize_field("b", &self.0.name)?; role.serialize_field("c", &self.0.permissions)?; role.serialize_field("d", &self.0.position)?; role.end() } } pub struct MemberWrapper<'m>(pub &'m Member); impl<'m> From<&'m Member> for MemberWrapper<'m> { fn from(member: &'m Member) -> Self { Self(member) } } impl<'m> Serialize for MemberWrapper<'m> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.nick.is_some() as usize + !self.0.roles.is_empty() as usize; let mut member = s.serialize_struct("CachedMember", len)?; member.serialize_field("a", &self.0.guild_id)?; if let Some(ref nick) = self.0.nick { member.serialize_field("b", nick)?; } if !self.0.
member.end() } } pub struct PartialMemberWrapper<'m> { guild: GuildId, member: &'m PartialMember, user: &'m User, } impl<'m> From<(&'m PartialMember, GuildId, &'m User)> for PartialMemberWrapper<'m> { fn from((member, guild, user): (&'m PartialMember, GuildId, &'m User)) -> Self { Self { member, guild, user, } } } impl<'m> Serialize for PartialMemberWrapper<'m> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.member.nick.is_some() as usize + !self.member.roles.is_empty() as usize; let mut member = s.serialize_struct("CachedMember", len)?; member.serialize_field("a", &self.guild)?; if let Some(ref nick) = self.member.nick { member.serialize_field("b", nick)?; } if !self.member.roles.is_empty() { member.serialize_field("c", &self.member.roles)?; } member.serialize_field("d", &self.user.id)?; member.end() } } pub struct MemberUpdateWrapper<'m>(&'m MemberUpdate); impl<'m> From<&'m MemberUpdate> for MemberUpdateWrapper<'m> { fn from(member: &'m MemberUpdate) -> Self { Self(member) } } impl<'m> Serialize for MemberUpdateWrapper<'m> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.nick.is_some() as usize + !self.0.roles.is_empty() as usize; let mut member = s.serialize_struct("CachedMember", len)?; member.serialize_field("a", &self.0.guild_id)?; if let Some(ref nick) = self.0.nick { member.serialize_field("b", nick)?; } if !self.0.roles.is_empty() { member.serialize_field("c", &self.0.roles)?; } member.serialize_field("d", &self.0.user.id)?; member.end() } } pub struct TextChannelWrapper<'c>(pub &'c TextChannel); impl<'c> From<&'c TextChannel> for TextChannelWrapper<'c> { fn from(channel: &'c TextChannel) -> Self { Self(channel) } } impl<'c> Serialize for TextChannelWrapper<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.guild_id.is_some() as usize + !self.0.permission_overwrites.is_empty() as usize; let mut channel = 
s.serialize_struct("CachedTextChannel", len)?; if let Some(ref guild) = self.0.guild_id { channel.serialize_field("a", guild)?; } channel.serialize_field("b", &self.0.id)?; channel.serialize_field("c", &self.0.name)?; if !self.0.permission_overwrites.is_empty() { channel.serialize_field("d", &self.0.permission_overwrites)?; } channel.end() } } pub struct PublicThreadWrapper<'c>(pub &'c PublicThread); impl<'c> From<&'c PublicThread> for PublicThreadWrapper<'c> { fn from(channel: &'c PublicThread) -> Self { Self(channel) } } impl<'c> Serialize for PublicThreadWrapper<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.guild_id.is_some() as usize + self.0.parent_id.is_some() as usize; let mut channel = s.serialize_struct("CachedThread", len)?; if let Some(ref guild) = self.0.guild_id { channel.serialize_field("a", guild)?; } channel.serialize_field("b", &self.0.id)?; channel.serialize_field("c", &self.0.name)?; if let Some(ref parent_id) = self.0.parent_id { channel.serialize_field("d", parent_id)?; } channel.end() } } pub struct PrivateThreadWrapper<'c>(pub &'c PrivateThread); impl<'c> From<&'c PrivateThread> for PrivateThreadWrapper<'c> { fn from(channel: &'c PrivateThread) -> Self { Self(channel) } } impl<'c> Serialize for PrivateThreadWrapper<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.guild_id.is_some() as usize + self.0.parent_id.is_some() as usize; let mut channel = s.serialize_struct("CachedThread", len)?; if let Some(ref guild) = self.0.guild_id { channel.serialize_field("a", guild)?; } channel.serialize_field("b", &self.0.id)?; channel.serialize_field("c", &self.0.name)?; if let Some(ref parent_id) = self.0.parent_id { channel.serialize_field("d", parent_id)?; } channel.end() } } pub enum BasicGuildChannel<'c> { PrivateThread(&'c PrivateThread), PublicThread(&'c PublicThread), Text(&'c TextChannel), } impl<'c> BasicGuildChannel<'c> { pub const fn guild_id(&self) 
-> Option<GuildId> { match self { Self::PrivateThread(c) => c.guild_id, Self::PublicThread(c) => c.guild_id, Self::Text(c) => c.guild_id, } } pub const fn id(&self) -> ChannelId { match self { Self::PrivateThread(c) => c.id, Self::PublicThread(c) => c.id, Self::Text(c) => c.id, } } pub fn from(channel: &'c GuildChannel) -> Option<Self> { match channel { GuildChannel::PrivateThread(c) => Some(Self::PrivateThread(c)), GuildChannel::PublicThread(c) => Some(Self::PublicThread(c)), GuildChannel::Text(c) => Some(Self::Text(c)), _ => None, } } } impl<'c> Serialize for BasicGuildChannel<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { match self { BasicGuildChannel::PrivateThread(c) => { s.serialize_newtype_variant("CachedGuildChannel", 0, "a", &PrivateThreadWrapper(c)) } BasicGuildChannel::PublicThread(c) => { s.serialize_newtype_variant("CachedGuildChannel", 1, "b", &PublicThreadWrapper(c)) } BasicGuildChannel::Text(c) => { s.serialize_newtype_variant("CachedGuildChannel", 2, "c", &TextChannelWrapper(c)) } } } }
roles.is_empty() { member.serialize_field("c", &self.0.roles)?; } member.serialize_field("d", &self.0.user.id)?;
function_block-random_span
[ { "content": "fn populate_members(key: &RedisKey, members: &mut RedisMembers) {\n\n match key {\n\n RedisKey::Channel { guild, .. } => {\n\n populate_member(CHANNEL_KEYS, *key, members);\n\n\n\n if let Some(guild) = guild {\n\n populate_member(format!(\"{}:{}\", GU...
Rust
crates/modor/src/system_params/mod.rs
modor-engine/modor
447ae453030de44ed93a2ab03a66261080304ce4
use crate::storages::archetypes::EntityLocation; use crate::storages::components::ComponentTypeIdx; use crate::storages::core::CoreStorage; use crate::storages::systems::SystemProperties; use crate::system_params::internal::{QuerySystemParamWithLifetime, SystemParamWithLifetime}; use crate::{SystemData, SystemInfo}; pub(crate) mod components; pub(crate) mod components_mut; pub(crate) mod entity; pub(crate) mod optional_components; pub(crate) mod optional_components_mut; pub(crate) mod optional_singletons; pub(crate) mod optional_singletons_mut; pub(crate) mod queries; pub(crate) mod singletons; pub(crate) mod singletons_mut; pub(crate) mod tuples; pub(crate) mod world; pub trait SystemParam: for<'a> SystemParamWithLifetime<'a> { #[doc(hidden)] type Tuple: SystemParam; #[doc(hidden)] type InnerTuple: SystemParam; #[doc(hidden)] fn properties(core: &mut CoreStorage) -> SystemProperties; #[doc(hidden)] fn lock<'a>( data: SystemData<'a>, info: SystemInfo<'a>, ) -> <Self as SystemParamWithLifetime<'a>>::Guard; #[doc(hidden)] fn borrow_guard<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::Guard, ) -> <Self as SystemParamWithLifetime<'a>>::GuardBorrow where 'b: 'a; #[doc(hidden)] fn stream<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, ) -> <Self as SystemParamWithLifetime<'a>>::Stream where 'b: 'a; #[doc(hidden)] fn stream_next<'a, 'b>( stream: &'a mut <Self as SystemParamWithLifetime<'b>>::Stream, ) -> Option<<Self as SystemParamWithLifetime<'a>>::Param> where 'b: 'a; } pub trait QuerySystemParam: SystemParam + for<'a> QuerySystemParamWithLifetime<'a> { #[doc(hidden)] fn filtered_component_type_idxs(data: SystemData<'_>) -> Vec<ComponentTypeIdx>; #[doc(hidden)] fn query_iter<'a, 'b>( guard: &'a <Self as SystemParamWithLifetime<'b>>::GuardBorrow, ) -> <Self as QuerySystemParamWithLifetime<'a>>::Iter where 'b: 'a; #[doc(hidden)] fn query_iter_mut<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, ) -> 
<Self as QuerySystemParamWithLifetime<'a>>::IterMut where 'b: 'a; #[doc(hidden)] fn get<'a, 'b>( guard: &'a <Self as SystemParamWithLifetime<'b>>::GuardBorrow, location: EntityLocation, ) -> Option<<Self as QuerySystemParamWithLifetime<'a>>::ConstParam> where 'b: 'a; #[doc(hidden)] fn get_mut<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, location: EntityLocation, ) -> Option<<Self as SystemParamWithLifetime<'a>>::Param> where 'b: 'a; #[doc(hidden)] fn get_both_mut<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, location1: EntityLocation, location2: EntityLocation, ) -> ( Option<<Self as SystemParamWithLifetime<'a>>::Param>, Option<<Self as SystemParamWithLifetime<'a>>::Param>, ) where 'b: 'a; } pub(crate) mod internal { use crate::SystemParam; use std::any::Any; pub trait SystemParamWithLifetime<'a> { type Param: 'a; type Guard: 'a; type GuardBorrow: 'a; type Stream: 'a; } pub trait QuerySystemParamWithLifetime<'a>: SystemParamWithLifetime<'a> { type ConstParam: 'a + SystemParamWithLifetime<'a>; type Iter: 'a + Sync + Send + Iterator<Item = <Self::ConstParam as SystemParamWithLifetime<'a>>::Param> + DoubleEndedIterator + ExactSizeIterator; type IterMut: 'a + Sync + Send + Iterator<Item = <Self as SystemParamWithLifetime<'a>>::Param> + DoubleEndedIterator + ExactSizeIterator; } pub trait LockableSystemParam: SystemParam { type LockedType: Any; type Mutability: Mutability; } #[allow(unreachable_pub)] pub trait Mutability {} pub struct Const; impl Mutability for Const {} pub struct Mut; impl Mutability for Mut {} } pub(crate) mod utils { use crate::storages::archetypes::{ArchetypeEntityPos, ArchetypeIdx, EntityLocation}; use typed_index_collections::TiVec; pub(crate) fn get_both_mut<T>( data: &mut TiVec<ArchetypeIdx, TiVec<ArchetypeEntityPos, T>>, location1: EntityLocation, location2: EntityLocation, ) -> (Option<&mut T>, Option<&mut T>) { if location1.idx == location2.idx { if location1.idx >= data.next_key() 
{ (None, None) } else { get_both_mut_internal(&mut data[location1.idx], location1.pos, location2.pos) } } else { let (sub_data1, sub_data2) = get_both_mut_internal(data, location1.idx, location2.idx); ( sub_data1.and_then(|d| d.get_mut(location1.pos)), sub_data2.and_then(|d| d.get_mut(location2.pos)), ) } } fn get_both_mut_internal<K, T>( data: &mut TiVec<K, T>, key1: K, key2: K, ) -> (Option<&mut T>, Option<&mut T>) where K: Ord + From<usize> + Copy, usize: From<K>, { if key2 >= data.next_key() { (data.get_mut(key1), None) } else if key1 >= data.next_key() { (None, data.get_mut(key2)) } else if key1 > key2 { let (left, right) = data.split_at_mut(key1); (Some(&mut right[K::from(0)]), Some(&mut left[key2])) } else { let (left, right) = data.split_at_mut(key2); (Some(&mut left[key1]), Some(&mut right[K::from(0)])) } } }
use crate::storages::archetypes::EntityLocation; use crate::storages::components::ComponentTypeIdx; use crate::storages::core::CoreStorage; use crate::storages::systems::SystemProperties; use crate::system_params::internal::{QuerySystemParamWithLifetime, SystemParamWithLifetime}; use crate::{SystemData, SystemInfo}; pub(crate) mod components; pub(crate) mod components_mut; pub(crate) mod entity; pub(crate) mod optional_components; pub(crate) mod optional_components_mut; pub(crate) mod optional_singletons; pub(crate) mod optional_singletons_mut; pub(crate) mod queries; pub(crate) mod singletons; pub(crate) mod singletons_mut; pub(crate) mod tuples; pub(crate) mod world; pub trait SystemParam: for<'a> SystemParamWithLifetime<'a> { #[doc(hidden)] type Tuple: SystemParam; #[doc(hidden)] type InnerTuple: SystemParam; #[doc(hidden)] fn properties(core: &mut CoreStorage) -> SystemProperties; #[doc(hidden)] fn lock<'a>( data: SystemData<'a>, info: SystemInfo<'a>, ) -> <Self as SystemParamWithLifetime<'a>>::Guard; #[doc(hidden)] fn borrow_guard<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::Guard, ) -> <Self as SystemParamWithLifetime<'a>>::GuardBorrow where 'b: 'a; #[doc(hidden)] fn stream<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, ) -> <Self as SystemParamWithLifetime<'a>>::Stream where 'b: 'a; #[doc(hidden)] fn stream_next<'a, 'b>( stream: &'a mut <Self as SystemParamWithLifetime<'b>>::Stream, ) -> Option<<Self as SystemParamWithLifetime<'a>>::Param> where 'b: 'a; } pub trait QuerySystemParam: SystemParam + for<'a> QuerySystemParamWithLifetime<'a> { #[doc(hidden)] fn filtered_component_type_idxs(data: SystemData<'_>) -> Vec<ComponentTypeIdx>; #[doc(hidden)] fn query_iter<'a, 'b>( guard: &'a <Self as SystemParamWithLifetime<'b>>::GuardBorrow, ) -> <Self as QuerySystemParamWithLifetime<'a>>::Iter where 'b: 'a; #[doc(hidden)] fn query_iter_mut<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, ) -> 
<Self as QuerySystemParamWithLifetime<'a>>::IterMut where 'b: 'a; #[doc(hidden)] fn get<'a, 'b>( guard: &'a <Self as SystemParamWithLifetime<'b>>::GuardBorrow, location: EntityLocation, ) -> Option<<Self as QuerySystemParamWithLifetime<'a>>::ConstParam> where 'b: 'a; #[doc(hidden)] fn get_mut<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, location: EntityLocation, ) -> Option<<Self as SystemParamWithLifetime<'a>>::Param> where 'b: 'a; #[doc(hidden)] fn get_both_mut<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, location1: EntityLocation, location2: EntityLocation, ) -> ( Option<<Self as SystemParamWithLifetime<'a>>::Param>, Option<<Self as SystemParamWithLifetime<'a>>::Param>, ) where 'b: 'a; } pub(crate) mod internal { use crate::SystemParam; use std::any::Any; pub trait SystemParamWithLifetime<'a> { type Param: 'a; type Guard: 'a; type GuardBorrow: 'a; type Stream: 'a; } pub trait QuerySystemParamWithLifetime<'a>: SystemParamWithLifetime<'a> { type ConstParam: 'a + SystemParamWithLifetime<'a>; type Iter: 'a + Sync + Send + Iterator<Item = <Self::ConstParam as SystemParamWithLifetime<'a>>::Param> + DoubleEndedIterator + ExactSizeIterator; type IterMut: 'a + Sync + Send + Iterator<Item = <Self as SystemParamWithLifetime<'a>>::Param> + DoubleEndedIterator + ExactSizeIterator; } pub trait LockableSystemParam: SystemParam { type LockedType: Any; type Mutability: Mutability; } #[allow(unreachable_pub)] pub trait Mutability {} pub struct Const; impl Mutability for Const {} pub struct Mut; impl Mutability for Mut {} } pub(crate) mod utils { use crate::storages::archetypes::{ArchetypeEntityPos, ArchetypeIdx, EntityLocation}; use typed_index_collections::TiVec; pub(crate) fn get_both_mut<T>( data: &mut TiVec<ArchetypeIdx, TiVec<ArchetypeEntityPos, T>>, location1: EntityLocation, location2: EntityLocation, ) -> (Option<&mut T>, Option<&mut T>) { if location1.idx == location2.idx { if location1.idx >= data.next_key() 
{ (None, None) } else { get_both_mut_internal(&mut data[location1.idx], location1.pos, location2.pos) } } else { let (sub_data1, sub_data2) = get_both_mut_internal(data, location1.idx, location2.idx); (
fn get_both_mut_internal<K, T>( data: &mut TiVec<K, T>, key1: K, key2: K, ) -> (Option<&mut T>, Option<&mut T>) where K: Ord + From<usize> + Copy, usize: From<K>, { if key2 >= data.next_key() { (data.get_mut(key1), None) } else if key1 >= data.next_key() { (None, data.get_mut(key2)) } else if key1 > key2 { let (left, right) = data.split_at_mut(key1); (Some(&mut right[K::from(0)]), Some(&mut left[key2])) } else { let (left, right) = data.split_at_mut(key2); (Some(&mut left[key1]), Some(&mut right[K::from(0)])) } } }
sub_data1.and_then(|d| d.get_mut(location1.pos)), sub_data2.and_then(|d| d.get_mut(location2.pos)), ) } }
function_block-function_prefix_line
[ { "content": "/// A trait for defining the main component of an entity type.\n\n///\n\n/// This trait shouldn't be directly implemented.<br>\n\n/// Instead, you can use [`entity`](macro@crate::entity) and [`singleton`](macro@crate::singleton)\n\n/// proc macros.\n\npub trait EntityMainComponent: Sized + Any + S...
Rust
src/net.rs
dejano-with-tie/stun_codec
4f15041b761eb57bb3c6aef09610ec61603c9663
use crate::constants::MAGIC_COOKIE; use crate::TransactionId; use bytecodec::bytes::{BytesDecoder, BytesEncoder}; use bytecodec::combinator::Peekable; use bytecodec::fixnum::{U16beDecoder, U16beEncoder, U8Decoder, U8Encoder}; use bytecodec::{ByteCount, Decode, Encode, Eos, ErrorKind, Result, SizedEncode}; use std::net::{IpAddr, SocketAddr}; const FAMILY_IPV4: u8 = 1; const FAMILY_IPV6: u8 = 2; pub fn socket_addr_xor(addr: SocketAddr, transaction_id: TransactionId) -> SocketAddr { let xor_port = addr.port() ^ (MAGIC_COOKIE >> 16) as u16; match addr.ip() { IpAddr::V4(ip) => { let mut octets = ip.octets(); for (i, b) in octets.iter_mut().enumerate() { *b ^= (MAGIC_COOKIE >> (24 - i * 8)) as u8; } let xor_ip = From::from(octets); SocketAddr::new(IpAddr::V4(xor_ip), xor_port) } IpAddr::V6(ip) => { let mut octets = ip.octets(); for (i, b) in octets.iter_mut().enumerate().take(4) { *b ^= (MAGIC_COOKIE >> (24 - i * 8)) as u8; } for (i, b) in octets.iter_mut().enumerate().take(16).skip(4) { *b ^= transaction_id.as_bytes()[i - 4]; } let xor_ip = From::from(octets); SocketAddr::new(IpAddr::V6(xor_ip), xor_port) } } } #[derive(Debug, Default)] pub struct SocketAddrDecoder { unused: U8Decoder, family: Peekable<U8Decoder>, port: U16beDecoder, ip: BytesDecoder<IpBytes>, } impl SocketAddrDecoder { pub fn new() -> Self { Self::default() } } impl Decode for SocketAddrDecoder { type Item = SocketAddr; fn decode(&mut self, buf: &[u8], eos: Eos) -> Result<usize> { let mut offset = 0; if !self.family.is_idle() { bytecodec_try_decode!(self.unused, offset, buf, eos); bytecodec_try_decode!(self.family, offset, buf, eos); let family = self.family.peek().expect("never fails"); match *family { FAMILY_IPV4 => self.ip.set_bytes(IpBytes::V4([0; 4])), FAMILY_IPV6 => self.ip.set_bytes(IpBytes::V6([0; 16])), _ => track_panic!( ErrorKind::InvalidInput, "Unknown address family: {}", family ), } } bytecodec_try_decode!(self.port, offset, buf, eos); bytecodec_try_decode!(self.ip, offset, buf, eos); 
Ok(offset) } fn finish_decoding(&mut self) -> Result<Self::Item> { let _ = track!(self.unused.finish_decoding())?; let _ = track!(self.family.finish_decoding())?; let port = track!(self.port.finish_decoding())?; let ip = match track!(self.ip.finish_decoding())? { IpBytes::V4(b) => IpAddr::V4(b.into()), IpBytes::V6(b) => IpAddr::V6(b.into()), }; Ok(SocketAddr::new(ip, port)) } fn requiring_bytes(&self) -> ByteCount { self.unused .requiring_bytes() .add_for_decoding(self.family.requiring_bytes()) .add_for_decoding(self.port.requiring_bytes()) .add_for_decoding(self.ip.requiring_bytes()) } fn is_idle(&self) -> bool { self.port.is_idle() && self.ip.is_idle() } } #[derive(Debug, Default)] pub struct SocketAddrEncoder { unused: U8Encoder, family: U8Encoder, port: U16beEncoder, ip: BytesEncoder<IpBytes>, } impl SocketAddrEncoder { pub fn new() -> Self { Self::default() } } impl Encode for SocketAddrEncoder { type Item = SocketAddr; fn encode(&mut self, buf: &mut [u8], eos: Eos) -> Result<usize> { let mut offset = 0; bytecodec_try_encode!(self.unused, offset, buf, eos); bytecodec_try_encode!(self.family, offset, buf, eos); bytecodec_try_encode!(self.port, offset, buf, eos); bytecodec_try_encode!(self.ip, offset, buf, eos); Ok(offset) } fn start_encoding(&mut self, item: Self::Item) -> Result<()> { track!(self.unused.start_encoding(0))?; if item.ip().is_ipv4() { track!(self.family.start_encoding(FAMILY_IPV4))?; } else { track!(self.family.start_encoding(FAMILY_IPV6))?; } track!(self.port.start_encoding(item.port()))?; track!(self.ip.start_encoding(IpBytes::new(item.ip())))?; Ok(()) } fn requiring_bytes(&self) -> ByteCount { ByteCount::Finite(self.exact_requiring_bytes()) } fn is_idle(&self) -> bool { self.ip.is_idle() } } impl SizedEncode for SocketAddrEncoder { fn exact_requiring_bytes(&self) -> u64 { self.unused.exact_requiring_bytes() + self.family.exact_requiring_bytes() + self.port.exact_requiring_bytes() + self.ip.exact_requiring_bytes() } } #[derive(Debug)] enum 
IpBytes { V4([u8; 4]), V6([u8; 16]), } impl IpBytes { fn new(ip: IpAddr) -> Self { match ip { IpAddr::V4(ip) => IpBytes::V4(ip.octets()), IpAddr::V6(ip) => IpBytes::V6(ip.octets()), } } } impl AsRef<[u8]> for IpBytes { fn as_ref(&self) -> &[u8] { match self { IpBytes::V4(bytes) => bytes, IpBytes::V6(bytes) => bytes, } } } impl AsMut<[u8]> for IpBytes { fn as_mut(&mut self) -> &mut [u8] { match self { IpBytes::V4(bytes) => bytes, IpBytes::V6(bytes) => bytes, } } } #[cfg(test)] mod tests { use bytecodec::{DecodeExt, EncodeExt}; use super::*; #[test] fn socket_addr_xor_works() { let transaction_id = TransactionId::new([ 0xb7, 0xe7, 0xa7, 0x01, 0xbc, 0x34, 0xd6, 0x86, 0xfa, 0x87, 0xdf, 0xae, ]); let addr: SocketAddr = "192.0.2.1:32853".parse().unwrap(); assert_eq!( socket_addr_xor(addr, transaction_id), "225.18.166.67:41287".parse().unwrap() ); let addr: SocketAddr = "[2001:db8:1234:5678:11:2233:4455:6677]:32853" .parse() .unwrap(); assert_eq!( socket_addr_xor(addr, transaction_id), "[113:a9fa:a5d3:f179:bc25:f4b5:bed2:b9d9]:41287" .parse() .unwrap() ); } #[test] fn socket_addr_encoder_works() { let mut encoder = SocketAddrEncoder::new(); let v4addr = "127.0.0.1:80".parse().unwrap(); let bytes = encoder.encode_into_bytes(v4addr).unwrap(); assert_eq!(bytes, [0, 1, 0, 80, 127, 0, 0, 1]); let v6addr = "[::]:90".parse().unwrap(); let bytes = encoder.encode_into_bytes(v6addr).unwrap(); assert_eq!( bytes, [0, 2, 0, 90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] ); } #[test] fn socket_addr_decoder_works() { let mut decoder = SocketAddrDecoder::new(); let v4addr = decoder .decode_from_bytes(&[0, 1, 0, 80, 127, 0, 0, 1]) .unwrap(); assert_eq!(v4addr.to_string(), "127.0.0.1:80"); let v6addr = decoder .decode_from_bytes(&[0, 2, 0, 90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]) .unwrap(); assert_eq!(v6addr.to_string(), "[::]:90"); } }
use crate::constants::MAGIC_COOKIE; use crate::TransactionId; use bytecodec::bytes::{BytesDecoder, BytesEncoder}; use bytecodec::combinator::Peekable; use bytecodec::fixnum::{U16beDecoder, U16beEncoder, U8Decoder, U8Encoder}; use bytecodec::{ByteCount, Decode, Encode, Eos, ErrorKind, Result, SizedEncode}; use std::net::{IpAddr, SocketAddr}; const FAMILY_IPV4: u8 = 1; const FAMILY_IPV6: u8 = 2; pub fn socket_addr_xor(addr: SocketAddr, transaction_id: TransactionId) -> SocketAddr { let xor_port = addr.port() ^ (MAGIC_COOKIE >> 16) as u16; match addr.ip() { IpAddr::V4(ip) => { let mut octets = ip.octets(); for (i, b) in octets.iter_mut().enumerate() { *b ^= (MAGIC_COOKIE >> (24 - i * 8)) as u8; } let xor_ip = From::from(octets); SocketAddr::new(IpAddr::V4(xor_ip), xor_port) } IpAddr::V6(ip) => { let mut octets = ip.octets(); for (i, b) in octets.iter_mut().enumerate().take(4) { *b ^= (MAGIC_COOKIE >> (24 - i * 8)) as u8; } for (i, b) in octets.iter_mut().enumerate().take(16).skip(4) { *b ^= transaction_id.as_bytes()[i - 4]; } let xor_ip = From::from(octets); SocketAddr::new(IpAddr::V6(xor_ip), xor_port) } } } #[derive(Debug, Default)] pub struct SocketAddrDecoder { unused: U8Decoder, family: Peekable<U8Decoder>, port: U16beDecoder, ip: BytesDecoder<IpBytes>, } impl SocketAddrDecoder { pub fn new() -> Self { Self::default() } } impl Decode for SocketAddrDecoder { type Item = SocketAddr; fn decode(&mut self, buf: &[u8], eos: Eos) -> Result<usize> { let mut offset = 0; if !self.family.is_idle() { bytecodec_try_decode!(self.unused, offset, buf, eos); bytecodec_try_decode!(self.family, offset, buf, eos); let family = self.family.peek().expect("never fails"); match *family { FAMILY_IPV4 => self.ip.set_bytes(IpBytes::V4([0; 4])), FAMILY_IPV6 => self.ip.set_bytes(IpBytes::V6([0; 16])), _ => track_panic!( ErrorKind::InvalidInput, "Unknown address family: {}", family ), } } bytecodec_try_decode!(self.port, offset, buf, eos); bytecodec_try_decode!(self.ip, offset, buf, eos); 
Ok(offset) } fn finish_decoding(&mut self) -> Result<Self::Item> { let _ = track!(self.unused.finish_decoding())?; let _ = track!(self.family.finish_decoding())?; let port = track!(self.port.finish_decoding())?; let ip = match track!(self.ip.finish_decoding())? { IpBytes::V4(b) => IpAddr::V4(b.into()), IpBytes::V6(b) => IpAddr::V6(b.into()), }; Ok(SocketAddr::new(ip, port)) } fn requiring_bytes(&self) -> ByteCount { self.unused .requiring_bytes() .add_for_decoding(self.family.requiring_bytes()) .add_for_decoding(self.port.requiring_bytes()) .add_for_decoding(self.ip.requiring_bytes()) } fn is_idle(&self) -> bool { self.port.is_idle() && self.ip.is_idle() } } #[derive(Debug, Default)] pub struct SocketAddrEncoder { unused: U8Encoder, family: U8Encoder, port: U16beEncoder, ip: BytesEncoder<IpBytes>, } impl SocketAddrEncoder { pub fn new() -> Self { Self::default() } } impl Encode for SocketAddrEncoder { type Item = SocketAddr; fn encode(&mut self, buf: &mut [u8], eos: Eos) -> Result<usize> { let mut offset = 0; bytecodec_try_encode!(self.unused, offset, buf, eos); bytecodec_try_encode!(self.family, offset, buf, eos); bytecodec_try_encode!(self.port, offset, buf, eos); bytecodec_try_encode!(self.ip, offset, buf, eos); Ok(offset) } fn start_encoding(&mut self, item: Self::Item) -> Result<()> { track!(self.unused.start_encoding(0))?; if item.ip().is_ipv4() { track!(self.family.start_encoding(FAMILY_IPV4))?; } else { track!(self.family.start_encoding(FAMILY_IPV6))?; } track!(self.port.start_encoding(item.port()))?; track!(self.ip.start_encoding(IpBytes::new(item.ip())))?; Ok(()) } fn requiring_bytes(&self) -> ByteCount { ByteCount::Finite(self.exact_requiring_bytes()) } fn is_idle(&self) -> bool { self.ip.is_idle() } } impl SizedEncode for SocketAddrEncoder { fn exact_requiring_bytes(&self) -> u64 { self.unused.exact_requiring_bytes() + self.family.exact_requiring_bytes() + self.port.exact_requiring_bytes() + self.ip.exact_requiring_bytes() } } #[derive(Debug)] enum 
IpBytes { V4([u8; 4]), V6([u8; 16]), } impl IpBytes { fn new(ip: IpAddr) -> Self { match ip { IpAddr::V4(ip) => IpBytes::V4(ip.octets()), IpAddr::V6(ip) => IpBytes::V6(ip.octets()), } } } impl AsRef<[u8]> for IpBytes { fn as_ref(&self) -> &[u8] { match self { IpBytes::V4(bytes) => bytes, IpBytes::V6(bytes) => bytes, } } } impl AsMut<[u8]> for IpBytes {
} #[cfg(test)] mod tests { use bytecodec::{DecodeExt, EncodeExt}; use super::*; #[test] fn socket_addr_xor_works() { let transaction_id = TransactionId::new([ 0xb7, 0xe7, 0xa7, 0x01, 0xbc, 0x34, 0xd6, 0x86, 0xfa, 0x87, 0xdf, 0xae, ]); let addr: SocketAddr = "192.0.2.1:32853".parse().unwrap(); assert_eq!( socket_addr_xor(addr, transaction_id), "225.18.166.67:41287".parse().unwrap() ); let addr: SocketAddr = "[2001:db8:1234:5678:11:2233:4455:6677]:32853" .parse() .unwrap(); assert_eq!( socket_addr_xor(addr, transaction_id), "[113:a9fa:a5d3:f179:bc25:f4b5:bed2:b9d9]:41287" .parse() .unwrap() ); } #[test] fn socket_addr_encoder_works() { let mut encoder = SocketAddrEncoder::new(); let v4addr = "127.0.0.1:80".parse().unwrap(); let bytes = encoder.encode_into_bytes(v4addr).unwrap(); assert_eq!(bytes, [0, 1, 0, 80, 127, 0, 0, 1]); let v6addr = "[::]:90".parse().unwrap(); let bytes = encoder.encode_into_bytes(v6addr).unwrap(); assert_eq!( bytes, [0, 2, 0, 90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] ); } #[test] fn socket_addr_decoder_works() { let mut decoder = SocketAddrDecoder::new(); let v4addr = decoder .decode_from_bytes(&[0, 1, 0, 80, 127, 0, 0, 1]) .unwrap(); assert_eq!(v4addr.to_string(), "127.0.0.1:80"); let v6addr = decoder .decode_from_bytes(&[0, 2, 0, 90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]) .unwrap(); assert_eq!(v6addr.to_string(), "[::]:90"); } }
fn as_mut(&mut self) -> &mut [u8] { match self { IpBytes::V4(bytes) => bytes, IpBytes::V6(bytes) => bytes, } }
function_block-full_function
[ { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\nstruct Type {\n\n class: MessageClass,\n\n method: Method,\n\n}\n\nimpl Type {\n\n fn as_u16(self) -> u16 {\n\n let class = self.class as u16;\n\n let method = self.method.as_u16();\n\n (method & 0b0000_0000_1111)\...
Rust
zandbox/src/zandbox/main.rs
tpscrpt/zinc
35307d3da96377b76425e03aefca97c5c10c5565
mod arguments; mod error; use std::collections::HashMap; use std::str::FromStr; use actix_web::middleware; use actix_web::web; use actix_web::App; use actix_web::HttpServer; use colored::Colorize; use rayon::iter::IntoParallelIterator; use rayon::iter::ParallelIterator; use zksync_eth_signer::PrivateKeySigner; use zksync_types::AccountId; use zinc_build::Application as BuildApplication; use zandbox::ContractSelectAllOutput; use zandbox::ContractStorage; use zandbox::DatabaseClient; use zandbox::FieldSelectInput; use zandbox::SharedData; use zandbox::SharedDataContract; use self::arguments::Arguments; use self::error::Error; #[actix_rt::main] async fn main() -> Result<(), Error> { let args = Arguments::new(); zinc_logger::initialize(zinc_const::app_name::ZANDBOX, args.verbosity); log::info!("Zandbox server started"); let network = zksync::Network::from_str(args.network.as_str()).map_err(Error::InvalidNetwork)?; log::info!("Initializing the PostgreSQL client"); let postgresql = DatabaseClient::new(args.postgresql_uri.as_str()).await?; log::info!("Loading the compiled contracts from the database"); let database_data: Vec<ContractSelectAllOutput> = postgresql .select_contracts() .await? 
.into_par_iter() .collect(); let mut contracts = HashMap::with_capacity(database_data.len()); for contract in database_data.into_iter() { let eth_address = zinc_zksync::eth_address_from_vec(contract.eth_address); let eth_private_key = zinc_zksync::eth_private_key_from_vec(contract.eth_private_key); log::info!( "{} instance `{}` of the contract `{} v{}` with address {}", "Loaded".bright_green(), contract.instance, contract.name, contract.version, serde_json::to_string(&eth_address).expect(zinc_const::panic::DATA_CONVERSION), ); let application = BuildApplication::try_from_slice(contract.bytecode.as_slice()) .expect(zinc_const::panic::VALIDATED_DURING_DATABASE_POPULATION); let build = match application { BuildApplication::Circuit(_circuit) => { panic!(zinc_const::panic::VALIDATED_DURING_DATABASE_POPULATION) } BuildApplication::Contract(contract) => contract, }; let provider = zksync::Provider::new(network); let wallet_credentials = zksync::WalletCredentials::from_eth_signer( eth_address, PrivateKeySigner::new(eth_private_key), network, ) .await?; let wallet = zksync::Wallet::new(provider, wallet_credentials).await?; let database_fields = postgresql .select_fields(FieldSelectInput::new(contract.account_id as AccountId)) .await?; let storage = ContractStorage::new_with_data( database_fields, build.storage.as_slice(), eth_address, &wallet, ) .await?; contracts.insert( eth_address, SharedDataContract::new( eth_address, contract.name, contract.version, contract.instance, contract.source_code, contract.bytecode, contract.verifying_key, Some(contract.account_id as AccountId), eth_private_key, build, storage, ), ); } let data = SharedData::new(postgresql, contracts).wrap(); HttpServer::new(move || { App::new() .wrap(middleware::Logger::default()) .wrap(middleware::DefaultHeaders::new().content_type()) .wrap(actix_cors::Cors::default()) .app_data(web::JsonConfig::default().limit(zinc_const::limit::JSON_PAYLOAD)) .data(data.clone()) .configure(zandbox::configure) }) 
.bind(format!( "{}:{}", zinc_const::zandbox::HOST, args.http_port.unwrap_or(zinc_const::zandbox::PORT) )) .map_err(Error::ServerBinding)? .run() .await .map_err(Error::ServerRuntime)?; log::info!("Zandbox server finished"); Ok(()) }
mod arguments; mod error; use std::collections::HashMap; use std::str::FromStr; use actix_web::middleware; use actix_web::web; use actix_web::App; use actix_web::HttpServer; use colored::Colorize; use rayon::iter::IntoParallelIterator; use rayon::iter::ParallelIterator; use zksync_eth_signer::PrivateKeySigner; use zksync_types::AccountId; use zinc_build::Application as BuildApplication; use zandbox::ContractSelectAllOutput; use zandbox::ContractStorage; use zandbox::DatabaseClient; use zandbox::FieldSelectInput; use zandbox::SharedData; use zandbox::SharedDataContract; use self::arguments::Arguments; use self::error::Error; #[actix_rt::main]
async fn main() -> Result<(), Error> { let args = Arguments::new(); zinc_logger::initialize(zinc_const::app_name::ZANDBOX, args.verbosity); log::info!("Zandbox server started"); let network = zksync::Network::from_str(args.network.as_str()).map_err(Error::InvalidNetwork)?; log::info!("Initializing the PostgreSQL client"); let postgresql = DatabaseClient::new(args.postgresql_uri.as_str()).await?; log::info!("Loading the compiled contracts from the database"); let database_data: Vec<ContractSelectAllOutput> = postgresql .select_contracts() .await? .into_par_iter() .collect(); let mut contracts = HashMap::with_capacity(database_data.len()); for contract in database_data.into_iter() { let eth_address = zinc_zksync::eth_address_from_vec(contract.eth_address); let eth_private_key = zinc_zksync::eth_private_key_from_vec(contract.eth_private_key); log::info!( "{} instance `{}` of the contract `{} v{}` with address {}", "Loaded".bright_green(), contract.instance, contract.name, contract.version, serde_json::to_string(&eth_address).expect(zinc_const::panic::DATA_CONVERSION), ); let application = BuildApplication::try_from_slice(contract.bytecode.as_slice()) .expect(zinc_const::panic::VALIDATED_DURING_DATABASE_POPULATION); let build = match application { BuildApplication::Circuit(_circuit) => { panic!(zinc_const::panic::VALIDATED_DURING_DATABASE_POPULATION) } BuildApplication::Contract(contract) => contract, }; let provider = zksync::Provider::new(network); let wallet_credentials = zksync::WalletCredentials::from_eth_signer( eth_address, PrivateKeySigner::new(eth_private_key), network, ) .await?; let wallet = zksync::Wallet::new(provider, wallet_credentials).await?; let database_fields = postgresql .select_fields(FieldSelectInput::new(contract.account_id as AccountId)) .await?; let storage = ContractStorage::new_with_data( database_fields, build.storage.as_slice(), eth_address, &wallet, ) .await?; contracts.insert( eth_address, SharedDataContract::new( eth_address, 
contract.name, contract.version, contract.instance, contract.source_code, contract.bytecode, contract.verifying_key, Some(contract.account_id as AccountId), eth_private_key, build, storage, ), ); } let data = SharedData::new(postgresql, contracts).wrap(); HttpServer::new(move || { App::new() .wrap(middleware::Logger::default()) .wrap(middleware::DefaultHeaders::new().content_type()) .wrap(actix_cors::Cors::default()) .app_data(web::JsonConfig::default().limit(zinc_const::limit::JSON_PAYLOAD)) .data(data.clone()) .configure(zandbox::configure) }) .bind(format!( "{}:{}", zinc_const::zandbox::HOST, args.http_port.unwrap_or(zinc_const::zandbox::PORT) )) .map_err(Error::ServerBinding)? .run() .await .map_err(Error::ServerRuntime)?; log::info!("Zandbox server finished"); Ok(()) }
function_block-full_function
[ { "content": "///\n\n/// The auxiliary `main` function to facilitate the `?` error conversion operator.\n\n///\n\nfn main_inner() -> Result<(), Error> {\n\n let args = Arguments::new();\n\n\n\n zinc_logger::initialize(zinc_const::app_name::COMPILER, args.verbosity);\n\n\n\n let manifest = Manifest::try...
Rust
src/scene/save_scene.rs
atsisy/subterranean
8c844e95b47e441c43709bd7f6aefa2c15da880b
use ggez::graphics as ggraphics; use torifune::core::Clock; use torifune::graphics::drawable::*; use torifune::graphics::object::*; use crate::core::{FontID, SavableData, SoundID, SuzuContext, TextureID, TileBatchTextureID}; use crate::object::effect_object; use crate::object::save_scene_object::*; use crate::object::util_object::*; use crate::scene::*; use crate::flush_delay_event; pub struct SaveScene { background: UniTexture, exit_button: SelectButton, event_list: DelayEventList<Self>, scene_transition_effect: Option<effect_object::ScreenTileEffect>, save_entry_table: SaveEntryTable, scene_transition: SceneID, scene_transition_type: SceneTransition, clock: Clock, } impl SaveScene { pub fn new<'a>(ctx: &mut SuzuContext<'a>) -> Self { let save_data_list = (1..=4) .map(|slot_index| match SavableData::new_load(slot_index) { Ok(savable_data) => Some(savable_data), Err(_) => None, }) .collect(); let save_entry_table = SaveEntryTable::new( ctx, numeric::Rect::new(50.0, 50.0, 1248.0, 672.0), save_data_list, 0, ); let background = UniTexture::new( ctx.ref_texture(TextureID::JpHouseTexture), numeric::Point2f::new(0.0, 0.0), numeric::Vector2f::new(1.0, 1.0), 0.0, 0, ); let scene_transition_effect = Some(effect_object::ScreenTileEffect::new( ctx, TileBatchTextureID::Shoji, numeric::Rect::new( 0.0, 0.0, crate::core::WINDOW_SIZE_X as f32, crate::core::WINDOW_SIZE_Y as f32, ), 30, effect_object::SceneTransitionEffectType::Open, effect_object::TilingEffectType::WholeTile, -128, 0, )); let texture = Box::new(TextButtonTexture::new( ctx, numeric::Point2f::new(0.0, 0.0), "戻る".to_string(), FontInformation::new( ctx.resource.get_font(FontID::Cinema), numeric::Vector2f::new(24.0, 24.0), ggraphics::Color::from_rgba_u32(0xf6e1d5ff), ), 10.0, ggraphics::Color::from_rgba_u32(0x5a4f3fff), 0, )); let exit_button = SelectButton::new( ctx, numeric::Rect::new( 1050.0, (crate::core::WINDOW_SIZE_Y as f32) - 120.0, 100.0, 50.0, ), texture, ); let mut event_list = DelayEventList::new(); 
event_list.add_event( Box::new(move |slf: &mut Self, _, _| { slf.scene_transition_effect = None; }), 31, ); SaveScene { background: background, event_list: event_list, exit_button: exit_button, scene_transition_effect: scene_transition_effect, save_entry_table: save_entry_table, scene_transition: SceneID::Save, scene_transition_type: SceneTransition::Keep, clock: 0, } } fn exit_scene_poping<'a>(&mut self, ctx: &mut SuzuContext<'a>, t: Clock) { self.scene_transition_effect = Some(effect_object::ScreenTileEffect::new( ctx, TileBatchTextureID::Shoji, numeric::Rect::new( 0.0, 0.0, crate::core::WINDOW_SIZE_X as f32, crate::core::WINDOW_SIZE_Y as f32, ), 30, effect_object::SceneTransitionEffectType::Close, effect_object::TilingEffectType::WholeTile, -128, t, )); self.event_list.add_event( Box::new(move |slf: &mut Self, _, _| { slf.scene_transition = SceneID::Scenario; slf.scene_transition_type = SceneTransition::PoppingTransition; }), 31, ); if let Some(save_data) = ctx.savable_data.as_mut() { let _ = save_data.get_scenario_save_data(); } } fn load_and_scene_swap<'a>(&mut self, ctx: &mut SuzuContext<'a>, slot: u8, t: Clock) { match SavableData::new_load(slot) { Ok(data) => { ctx.savable_data.replace(data); } Err(_) => return, } self.scene_transition_effect = Some(effect_object::ScreenTileEffect::new( ctx, TileBatchTextureID::Shoji, numeric::Rect::new( 0.0, 0.0, crate::core::WINDOW_SIZE_X as f32, crate::core::WINDOW_SIZE_Y as f32, ), 30, effect_object::SceneTransitionEffectType::Close, effect_object::TilingEffectType::WholeTile, -128, t, )); self.event_list.add_event( Box::new(move |slf: &mut Self, ctx, _| { slf.scene_transition = SceneID::Scenario; slf.scene_transition_type = SceneTransition::SwapTransition; ctx.resource.stop_bgm(ctx.context, SoundID::Title); }), 31, ); } } impl SceneManager for SaveScene { fn mouse_button_up_event<'a>( &mut self, ctx: &mut SuzuContext<'a>, _button: ginput::mouse::MouseButton, point: numeric::Point2f, ) { let t = 
self.get_current_clock(); match self.save_entry_table.click_handler(ctx, point) { SaveDataOperation::Loading(slot) => { self.load_and_scene_swap(ctx, slot, t); } _ => (), } if self.exit_button.contains(ctx.context, point) { self.exit_scene_poping(ctx, t); } } fn pre_process<'a>(&mut self, ctx: &mut SuzuContext<'a>) { let t = self.get_current_clock(); if let Some(transition_effect) = self.scene_transition_effect.as_mut() { transition_effect.effect(ctx.context, t); ctx.process_utility.redraw(); } if flush_delay_event!(self, self.event_list, ctx, self.get_current_clock()) > 0 { ctx.process_utility.redraw(); } } fn drawing_process(&mut self, ctx: &mut ggez::Context) { self.background.draw(ctx).unwrap(); self.save_entry_table.draw(ctx).unwrap(); self.exit_button.draw(ctx).unwrap(); if let Some(transition_effect) = self.scene_transition_effect.as_mut() { transition_effect.draw(ctx).unwrap(); } } fn post_process<'a>(&mut self, _ctx: &mut SuzuContext<'a>) -> SceneTransition { self.update_current_clock(); self.scene_transition_type } fn transition(&self) -> SceneID { self.scene_transition } fn get_current_clock(&self) -> Clock { self.clock } fn update_current_clock(&mut self) { self.clock += 1; } }
use ggez::graphics as ggraphics; use torifune::core::Clock; use torifune::graphics::drawable::*; use torifune::graphics::object::*; use crate::core::{FontID, SavableData, SoundID, SuzuContext, TextureID, TileBatchTextureID}; use crate::object::effect_object; use crate::object::save_scene_object::*; use crate::object::util_object::*; use crate::scene::*; use crate::flush_delay_event; pub struct SaveScene { background: UniTexture, exit_button: SelectButton, event_list: DelayEventList<Self>, scene_transition_effect: Option<effect_object::ScreenTileEffect>, save_entry_table: SaveEntryTable, scene_transition: SceneID, scene_transition_type: SceneTransition, clock: Clock, } impl SaveScene { pub fn new<'a>(ctx: &mut SuzuContext<'a>) -> Self { let save_data_list = (1..=4) .map(|slot_index| match SavableData::new_load(slot_index) { Ok(savable_data) => Some(savable_data), Err(_) => None, }) .collect(); let save_entry_table = SaveEntryTable::new( ctx, numeric::Rect::new(50.0, 50.0, 1248.0, 672.0), save_data_list, 0, ); let background = UniTexture::new( ctx.ref_texture(TextureID::JpHouseTexture), numeric::Point2f::new(0.0, 0.0), numeric::Vector2f::new(1.0, 1.0), 0.0, 0, ); let scene_transition_effect = Some(effect_object::ScreenTileEffect::new( ctx, TileBatchTextureID::Shoji, numeric::Rect::new( 0.0, 0.0, crate::core::WINDOW_SIZE_X as f32, crate::core::WINDOW_SIZE_Y as f32, ), 30, effect_object::SceneTransitionEffectType::Open, effect_object::TilingEffectType::WholeTile, -128, 0, )); let texture = Box::new(TextButtonTexture::new( ctx, numeric::Point2f::new(0.0, 0.0), "戻る".to_string(), FontInformation::new( ctx.resource.get_font(FontID::Cinema), numeric::Vector2f::new(24.0, 24.0), ggraphics::Color::from_rgba_u32(0xf6e1d5ff), ), 10.0, ggraphics::Color::from_rgba_u32(0x5a4f3fff), 0, )); let exit_button = SelectButton::new( ctx, numeric::Rect::new( 1050.0, (crate::core::WINDOW_SIZE_Y as f32) - 120.0, 100.0, 50.0, ), texture, ); let mut event_list = DelayEventList::new(); 
event_list.add_event( Box::new(move |slf: &mut Self, _, _| { slf.scene_transition_effect = None; }), 31, ); SaveScene { background: background, event_list: event_list, exit_button: exit_button, scene_transition_effect: scene_transition_effect, save_entry_table: save_entry_table, scene_transition: SceneID::Save, scene_transition_type: SceneTransition::Keep, clock: 0, } } fn exit_scene_poping<'a>(&mut self, ctx: &mut SuzuContext<'a>, t: Clock) { self.scene_transition_effect = Some(effect_object::ScreenTileEffect::new( ctx, TileBatchTextureID::Shoji, numeric::Rect::new( 0.0, 0.0, crate::core::WINDOW_SIZE_X as f32, crate::core::WINDOW_SIZE_Y as f32, ), 30, effect_object::SceneTransitionEffectType::Close, effect_object::TilingEffectType::WholeTile, -128, t, )); self.event_list.add_event( Box::new(move |slf: &mut Self, _, _| { slf.scene_transition = SceneID::Scenario; slf.scene_transition_type = SceneTransition::PoppingTransition; }), 31, ); if let Some(save_data) = ctx.savable_data.as_mut() { let _ = save_data.get_scenario_save_data(); } } fn load_and_scene_swap<'a>(&mut self, ctx: &mut SuzuContext<'a>, slot: u8, t: Clock) { match SavableData::new_load(slot) { Ok(data) => { ctx.savable_data.replace(data); } Err(_) => return, } self.scene_transition_effect = Some(effect_object::ScreenTileEffect::new( ctx, TileBatchTextureID::Shoji, numeric::Rect::new( 0.0, 0.0, crate::core::WINDOW_SIZE_X as f32, crate::core::WINDOW_SIZE_Y as f32, ), 30, effect_object::SceneTransitionEffectType::Close, effect_object::TilingEffectType::WholeTile, -128, t, )); self.event_list.add_event( Box::new(move |slf: &mut Self, ctx, _| { slf.scene_transition = SceneID::Scenario; slf.scene_transition_type = SceneTransition::SwapTransition; ctx.resource.stop_bgm(ctx.context, SoundID::Title); }), 31, ); } } impl SceneManager for SaveScene { fn mouse_button_up_event<'a>( &mut self, ctx: &mut SuzuContext<'a>, _button: ginput::mouse::MouseButton, point: numeric::Point2f, ) { let t = 
self.get_current_clock(); match self.save_entry_table.click_handler(ctx, point) { SaveDataOperation::Loading(slot) => { self.load_and_scene_swap(ctx, slot, t); } _ => (), } if self.exit_button.contains(ctx.context, point) { self.exit_scene_poping(ctx, t); } } fn pre_process<'a>(&mut self, ctx: &mut SuzuContext<'a>) { let t = self.get_current_clock(); if let Some(transition_effect) = self.scene_transition_effect.as_mut() { transition_effect.effect(ctx.context, t); ctx.process_utility.redraw(); } if flush_delay_event!(self, self.event_list, ctx, self.get_current_clock()) > 0 { ctx.process_utility.redraw(); } } fn drawing_process(&mut self, ctx: &mut ggez::Context) { self.background.draw(ctx).unwrap(); self.save_entry_table.draw(ctx).unwrap(); self.exit_button.draw(ctx).unwrap();
} fn post_process<'a>(&mut self, _ctx: &mut SuzuContext<'a>) -> SceneTransition { self.update_current_clock(); self.scene_transition_type } fn transition(&self) -> SceneID { self.scene_transition } fn get_current_clock(&self) -> Clock { self.clock } fn update_current_clock(&mut self) { self.clock += 1; } }
if let Some(transition_effect) = self.scene_transition_effect.as_mut() { transition_effect.draw(ctx).unwrap(); }
if_condition
[ { "content": "pub fn clock_needle_angle(hour: u8, minute: u8) -> (f32, f32) {\n\n let hour = hour % 12;\n\n\n\n let angle_per_hour = 2.0 * std::f32::consts::PI / (12.0 * 60.0);\n\n let angle_per_minute = 2.0 * std::f32::consts::PI / 60.0;\n\n\n\n (\n\n ((hour as f32 * 60.0) + minute as f32) *...
Rust
src/lib.rs
tosus/ranpaman-core
d65943b4cb8f9d644927277c6c33d2752b559b02
use num_bigint::BigUint; use num_traits::cast::{FromPrimitive, ToPrimitive}; use orion::aead; #[allow(dead_code)] use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::io::prelude::*; use blake2::{Blake2b, Digest}; use zeroize::Zeroize; const ENCRYPTION_SALT: [u8; 64] = [ 0xe3, 0x1a, 0x0c, 0x9b, 0x6b, 0x01, 0xbe, 0x19, 0xc5, 0x44, 0x7f, 0xb9, 0x2f, 0x79, 0x94, 0x91, 0xcf, 0xae, 0xb6, 0xda, 0x09, 0x0c, 0x24, 0xf3, 0x0f, 0xab, 0x2b, 0xf2, 0x4a, 0x1c, 0x39, 0xf7, 0xc1, 0xfc, 0xdc, 0x61, 0xc3, 0xf3, 0x15, 0xcf, 0x64, 0x76, 0x96, 0x25, 0xf9, 0xe6, 0xb1, 0x18, 0x62, 0xbd, 0x03, 0x6a, 0x67, 0x2d, 0xbb, 0x42, 0x1c, 0xbb, 0xb3, 0x24, 0x83, 0x5f, 0x7e, 0x53, ]; const MASTER_PASS_SALT: [u8; 64] = [ 0xa1, 0x48, 0x48, 0x5a, 0x76, 0x31, 0xe5, 0x45, 0x65, 0xf4, 0xde, 0xb0, 0xbb, 0x3a, 0x8f, 0xcc, 0xaa, 0x35, 0xff, 0x87, 0x7c, 0xd5, 0xcd, 0x4c, 0x4a, 0xbb, 0xbe, 0x21, 0x56, 0x5b, 0xe2, 0x7e, 0x60, 0x70, 0xd6, 0x5c, 0x0e, 0x3a, 0xa6, 0x02, 0xf9, 0xa1, 0xc9, 0x37, 0x88, 0x2a, 0xe0, 0xdc, 0x06, 0xcc, 0x25, 0xa6, 0x05, 0x8d, 0x75, 0x91, 0xc5, 0xdb, 0x0d, 0x90, 0xdb, 0xf3, 0x05, 0x8f, ]; type Result<T> = std::result::Result<T, Box<std::error::Error>>; #[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct Ranpaman { master_password: Vec<u8>, encryption_key: Vec<u8>, file_path: Option<String>, data: HashMap<(String, String), Settings>, } #[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct Settings { include_special_characters: bool, revision: u32, password_length: u32, } impl Default for Settings { fn default() -> Settings { Settings { include_special_characters: true, revision: 0, password_length: 30, } } } impl Drop for Ranpaman { fn drop(&mut self) { self.master_password.zeroize(); self.encryption_key.zeroize(); } } impl Ranpaman { pub fn new(mut master_password: String, file_path: Option<String>) -> Ranpaman { let config = argon2::Config::default(); let pw = argon2::hash_raw(&master_password.as_bytes(), &MASTER_PASS_SALT, &config).unwrap(); 
let key = argon2::hash_raw(&master_password.as_bytes(), &ENCRYPTION_SALT, &config).unwrap(); master_password.zeroize(); Ranpaman { master_password: pw, encryption_key: key, file_path, data: HashMap::new(), } } pub fn add_account( &mut self, login: String, service_name: String, settings: Settings, ) -> Result<()> { if service_name.is_empty() || login.is_empty() || settings.password_length < 4 { } let key = (service_name, login); if self.data.contains_key(&key) { } else { self.data.insert(key, settings); } Ok(()) } pub fn get_password(&self, login: String, service_name: String) -> Result<String> { match self .data .get(&(service_name.to_string(), login.to_string())) { Some(settings) => { let salt: &[u8] = &[ login.as_bytes(), service_name.as_bytes(), &settings.revision.to_le_bytes(), ] .concat(); let argon_config = argon2::Config::default(); let hash = argon2::hash_raw(&self.master_password, salt, &argon_config).unwrap(); let char_sets = generate_character_sets(settings); return encode_password(&hash, char_sets, settings.password_length as usize); } None => { Ok(String::from("")) } } } pub fn change_file_path(&mut self, new_path: Option<String>) -> Result<()> { match new_path { None => { if let Some(old_path) = &self.file_path { std::fs::remove_file(old_path)?; self.file_path = None; } } Some(new_path) => { let mut new_file = std::fs::File::create(&new_path)?; if let Some(old_path) = &self.file_path { std::fs::remove_file(old_path)?; } self.file_path = Some(new_path); let encoded_self = bincode::serialize(&self).unwrap(); let encrypted_self = aead::seal( &aead::SecretKey::from_slice(&self.encryption_key).unwrap(), &encoded_self, ) .unwrap(); new_file.write(&encrypted_self)?; } } Ok(()) } pub fn write_to_file(&self) -> Result<()> { let encoded_self = bincode::serialize(&self).unwrap(); let encrypted_self = aead::seal( &aead::SecretKey::from_slice(&self.encryption_key).unwrap(), &encoded_self, ) .unwrap(); std::fs::write( self.file_path.as_ref().ok_or("No file path 
specified")?, encrypted_self, )?; Ok(()) } pub fn read_from_file(mut master_password: String, path: &str) -> Result<Ranpaman> { let read = std::fs::read(path)?; let config = argon2::Config::default(); let key = argon2::hash_raw(&master_password.as_bytes(), &ENCRYPTION_SALT, &config).unwrap(); master_password.zeroize(); let decrypted = aead::open(&aead::SecretKey::from_slice(&key).unwrap(), &read).unwrap(); Ok(bincode::deserialize(&decrypted)?) } pub fn get_file_path(&self) -> Option<&String> { self.file_path.as_ref() } } fn generate_character_sets(settings: &Settings) -> Vec<Vec<char>> { let mut char_sets = Vec::new(); char_sets.push((b'A'..=b'Z').map(char::from).collect()); char_sets.push((b'a'..=b'z').map(char::from).collect()); if settings.include_special_characters { char_sets.push(vec!['1', '2', '3', '4', '5', '6', '7', '8', '9']); char_sets.push(vec!['%', '&', '#', '$', '+', '-', '@']); } char_sets } fn encode_password( raw_password: &[u8], char_sets: Vec<Vec<char>>, length: usize, ) -> Result<String> { if char_sets.iter().any(|set| set.is_empty()) { } let mut entropy = BigUint::from_bytes_le(raw_password); let mut char_set_use_flags: Vec<bool> = char_sets.iter().map(|_| false).collect(); let set_length = char_sets.iter().map(|set| set.len()).sum(); let mut encoded_password = String::new(); while encoded_password.len() < length { if entropy < BigUint::from_usize(set_length).unwrap() { } let new_char: usize = (entropy.clone() % set_length).to_usize().unwrap(); entropy /= set_length; let mut collective_length = 0; for (index, set) in char_sets.iter().enumerate() { if new_char < set.len() + collective_length { encoded_password.push(set[new_char - collective_length]); char_set_use_flags[index] = true; break; } collective_length += set.len(); } } if char_set_use_flags.into_iter().all(|flag| flag){ return Ok(encoded_password); }else{ let mut hasher = Blake2b::new(); hasher.input(raw_password); return encode_password(&hasher.result(), char_sets, length); } } 
#[cfg(test)] mod tests { use super::*; #[test] fn service_password_generation() { let mut ranpaman = Ranpaman::new("masterpass".to_string(), None); let site = String::from("somesite.com"); let mail = String::from("someone@somemail.com"); let settings = Settings::default(); ranpaman .add_account(site.clone(), mail.clone(), settings) .unwrap(); let password = ranpaman.get_password(site, mail).unwrap(); assert_eq!("#DnLScQHt4zu%QDLqP$7VD535UjExb", password); } #[test] fn key_generation() { let ranpaman = Ranpaman::new("masterpass".to_string(), None); assert_eq!( ranpaman.master_password, [ 223, 108, 222, 141, 127, 89, 120, 143, 166, 127, 41, 255, 155, 5, 5, 195, 198, 186, 182, 18, 209, 221, 182, 64, 164, 34, 27, 230, 196, 48, 187, 237 ] ); assert_eq!( ranpaman.encryption_key, [ 110, 249, 117, 224, 82, 86, 66, 21, 42, 235, 243, 204, 137, 226, 46, 12, 116, 161, 243, 48, 201, 170, 187, 179, 80, 147, 37, 111, 124, 108, 191, 182 ] ); } #[test] fn read_write() { let path = "read_write_test_file"; let ranpaman = Ranpaman::new("masterpass".to_string(), Some(path.to_string())); ranpaman.write_to_file().unwrap(); let decoded = Ranpaman::read_from_file("masterpass".to_string(), path).unwrap(); std::fs::remove_file(path).unwrap(); assert_eq!(ranpaman, decoded); } #[test] fn change_file_path() { let path = "change_file_path_test_file"; let ranpaman = Ranpaman::new("masterpass".to_string(), Some(path.to_string())); ranpaman.write_to_file().unwrap(); let mut decoded = Ranpaman::read_from_file("masterpass".to_string(), path).unwrap(); let new_path = "change_file_path_other_test_file"; decoded .change_file_path(Some(new_path.to_string())) .unwrap(); let mut decoded = Ranpaman::read_from_file("masterpass".to_string(), new_path).unwrap(); decoded.change_file_path(Some(path.to_string())).unwrap(); std::fs::remove_file(path).unwrap(); assert_eq!(ranpaman, decoded); } #[test] fn get_file_path() { let path = "get_file_path_test_file"; let ranpaman = Ranpaman::new("masterpass".to_string(), 
Some(path.to_string())); assert_eq!(ranpaman.get_file_path(), Some(&path.to_string())); } }
use num_bigint::BigUint; use num_traits::cast::{FromPrimitive, ToPrimitive}; use orion::aead; #[allow(dead_code)] use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::io::prelude::*; use blake2::{Blake2b, Digest}; use zeroize::Zeroize; const ENCRYPTION_SALT: [u8; 64] = [ 0xe3, 0x1a, 0x0c, 0x9b, 0x6b, 0x01, 0xbe, 0x19, 0xc5, 0x44, 0x7f, 0xb9, 0x2f, 0x79, 0x94, 0x91, 0xcf, 0xae, 0xb6, 0xda, 0x09, 0x0c, 0x24, 0xf3, 0x0f, 0xab, 0x2b, 0xf2, 0x4a, 0x1c, 0x39, 0xf7, 0xc1, 0xfc, 0xdc, 0x61, 0xc3, 0xf3, 0x15, 0xcf, 0x64, 0x76, 0x96, 0x25, 0xf9, 0xe6, 0xb1, 0x18, 0x62, 0xbd, 0x03, 0x6a, 0x67, 0x2d, 0xbb, 0x42, 0x1c, 0xbb, 0xb3, 0x24, 0x83, 0x5f, 0x7e, 0x53, ]; const MASTER_PASS_SALT: [u8; 64] = [ 0xa1, 0x48, 0x48, 0x5a, 0x76, 0x31, 0xe5, 0x45, 0x65, 0xf4, 0xde, 0xb0, 0xbb, 0x3a, 0x8f, 0xcc, 0xaa, 0x35, 0xff, 0x87, 0x7c, 0xd5, 0xcd, 0x4c, 0x4a, 0xbb, 0xbe, 0x21, 0x56, 0x5b, 0xe2, 0x7e, 0x60, 0x70, 0xd6, 0x5c, 0x0e, 0x3a, 0xa6, 0x02, 0xf9, 0xa1, 0xc9, 0x37, 0x88, 0x2a, 0xe0, 0xdc, 0x06, 0xcc, 0x25, 0xa6, 0x05, 0x8d, 0x75, 0x91, 0xc5, 0xdb, 0x0d, 0x90, 0xdb, 0xf3, 0x05, 0x8f, ]; type Result<T> = std::result::Result<T, Box<std::error::Error>>; #[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct Ranpaman { master_password: Vec<u8>, encryption_key: Vec<u8>, file_path: Option<String>, data: HashMap<(String, String), Settings>, } #[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct Settings { include_special_characters: bool, revision: u32, password_length: u32, } impl Default for Settings { fn default() -> Settings { Settings { include_special_characters: true, revision: 0, password_length: 30, } } } impl Drop for Ranpaman { fn drop(&mut self) { self.master_password.zeroize(); self.encryption_key.zeroize(); } } impl Ranpaman { pub fn new(mut master_password: String, file_path: Option<String>) -> Ranpaman { let config = argon2::Config::default(); let pw = argon2::hash_raw(&master_password.as_bytes(), &MASTER_PASS_SALT, &config).unwrap(); 
let key = argon2::hash_raw(&master_password.as_bytes(), &ENCRYPTION_SALT, &config).unwrap(); master_password.zeroize(); Ranpaman { master_password: pw, encryption_key: key, file_path, data: HashMap::new(), } }
pub fn get_password(&self, login: String, service_name: String) -> Result<String> { match self .data .get(&(service_name.to_string(), login.to_string())) { Some(settings) => { let salt: &[u8] = &[ login.as_bytes(), service_name.as_bytes(), &settings.revision.to_le_bytes(), ] .concat(); let argon_config = argon2::Config::default(); let hash = argon2::hash_raw(&self.master_password, salt, &argon_config).unwrap(); let char_sets = generate_character_sets(settings); return encode_password(&hash, char_sets, settings.password_length as usize); } None => { Ok(String::from("")) } } } pub fn change_file_path(&mut self, new_path: Option<String>) -> Result<()> { match new_path { None => { if let Some(old_path) = &self.file_path { std::fs::remove_file(old_path)?; self.file_path = None; } } Some(new_path) => { let mut new_file = std::fs::File::create(&new_path)?; if let Some(old_path) = &self.file_path { std::fs::remove_file(old_path)?; } self.file_path = Some(new_path); let encoded_self = bincode::serialize(&self).unwrap(); let encrypted_self = aead::seal( &aead::SecretKey::from_slice(&self.encryption_key).unwrap(), &encoded_self, ) .unwrap(); new_file.write(&encrypted_self)?; } } Ok(()) } pub fn write_to_file(&self) -> Result<()> { let encoded_self = bincode::serialize(&self).unwrap(); let encrypted_self = aead::seal( &aead::SecretKey::from_slice(&self.encryption_key).unwrap(), &encoded_self, ) .unwrap(); std::fs::write( self.file_path.as_ref().ok_or("No file path specified")?, encrypted_self, )?; Ok(()) } pub fn read_from_file(mut master_password: String, path: &str) -> Result<Ranpaman> { let read = std::fs::read(path)?; let config = argon2::Config::default(); let key = argon2::hash_raw(&master_password.as_bytes(), &ENCRYPTION_SALT, &config).unwrap(); master_password.zeroize(); let decrypted = aead::open(&aead::SecretKey::from_slice(&key).unwrap(), &read).unwrap(); Ok(bincode::deserialize(&decrypted)?) 
} pub fn get_file_path(&self) -> Option<&String> { self.file_path.as_ref() } } fn generate_character_sets(settings: &Settings) -> Vec<Vec<char>> { let mut char_sets = Vec::new(); char_sets.push((b'A'..=b'Z').map(char::from).collect()); char_sets.push((b'a'..=b'z').map(char::from).collect()); if settings.include_special_characters { char_sets.push(vec!['1', '2', '3', '4', '5', '6', '7', '8', '9']); char_sets.push(vec!['%', '&', '#', '$', '+', '-', '@']); } char_sets } fn encode_password( raw_password: &[u8], char_sets: Vec<Vec<char>>, length: usize, ) -> Result<String> { if char_sets.iter().any(|set| set.is_empty()) { } let mut entropy = BigUint::from_bytes_le(raw_password); let mut char_set_use_flags: Vec<bool> = char_sets.iter().map(|_| false).collect(); let set_length = char_sets.iter().map(|set| set.len()).sum(); let mut encoded_password = String::new(); while encoded_password.len() < length { if entropy < BigUint::from_usize(set_length).unwrap() { } let new_char: usize = (entropy.clone() % set_length).to_usize().unwrap(); entropy /= set_length; let mut collective_length = 0; for (index, set) in char_sets.iter().enumerate() { if new_char < set.len() + collective_length { encoded_password.push(set[new_char - collective_length]); char_set_use_flags[index] = true; break; } collective_length += set.len(); } } if char_set_use_flags.into_iter().all(|flag| flag){ return Ok(encoded_password); }else{ let mut hasher = Blake2b::new(); hasher.input(raw_password); return encode_password(&hasher.result(), char_sets, length); } } #[cfg(test)] mod tests { use super::*; #[test] fn service_password_generation() { let mut ranpaman = Ranpaman::new("masterpass".to_string(), None); let site = String::from("somesite.com"); let mail = String::from("someone@somemail.com"); let settings = Settings::default(); ranpaman .add_account(site.clone(), mail.clone(), settings) .unwrap(); let password = ranpaman.get_password(site, mail).unwrap(); assert_eq!("#DnLScQHt4zu%QDLqP$7VD535UjExb", 
password); } #[test] fn key_generation() { let ranpaman = Ranpaman::new("masterpass".to_string(), None); assert_eq!( ranpaman.master_password, [ 223, 108, 222, 141, 127, 89, 120, 143, 166, 127, 41, 255, 155, 5, 5, 195, 198, 186, 182, 18, 209, 221, 182, 64, 164, 34, 27, 230, 196, 48, 187, 237 ] ); assert_eq!( ranpaman.encryption_key, [ 110, 249, 117, 224, 82, 86, 66, 21, 42, 235, 243, 204, 137, 226, 46, 12, 116, 161, 243, 48, 201, 170, 187, 179, 80, 147, 37, 111, 124, 108, 191, 182 ] ); } #[test] fn read_write() { let path = "read_write_test_file"; let ranpaman = Ranpaman::new("masterpass".to_string(), Some(path.to_string())); ranpaman.write_to_file().unwrap(); let decoded = Ranpaman::read_from_file("masterpass".to_string(), path).unwrap(); std::fs::remove_file(path).unwrap(); assert_eq!(ranpaman, decoded); } #[test] fn change_file_path() { let path = "change_file_path_test_file"; let ranpaman = Ranpaman::new("masterpass".to_string(), Some(path.to_string())); ranpaman.write_to_file().unwrap(); let mut decoded = Ranpaman::read_from_file("masterpass".to_string(), path).unwrap(); let new_path = "change_file_path_other_test_file"; decoded .change_file_path(Some(new_path.to_string())) .unwrap(); let mut decoded = Ranpaman::read_from_file("masterpass".to_string(), new_path).unwrap(); decoded.change_file_path(Some(path.to_string())).unwrap(); std::fs::remove_file(path).unwrap(); assert_eq!(ranpaman, decoded); } #[test] fn get_file_path() { let path = "get_file_path_test_file"; let ranpaman = Ranpaman::new("masterpass".to_string(), Some(path.to_string())); assert_eq!(ranpaman.get_file_path(), Some(&path.to_string())); } }
pub fn add_account( &mut self, login: String, service_name: String, settings: Settings, ) -> Result<()> { if service_name.is_empty() || login.is_empty() || settings.password_length < 4 { } let key = (service_name, login); if self.data.contains_key(&key) { } else { self.data.insert(key, settings); } Ok(()) }
function_block-full_function
[ { "content": "# ranpaman-core\n\nA library for creating MasterPassword-style password managers. Written in Rust and uses Argon2.\n\n\n\nCurrently a proof of concept and obviously insecure.\n", "file_path": "README.md", "rank": 3, "score": 7357.13440566655 } ]
Rust
server/src/server/mod.rs
JAD3N/mc-server
ce1ad57f2417f722d6c00ae21fdc3cd0efdadd7a
mod settings; mod status; mod executor; pub use settings::*; pub use status::*; pub use executor::*; use crate::core::Registries; use crate::chat::component::TextComponent; use crate::world::level::Level; use crate::network::{Listener, Connection}; use std::collections::HashMap; use std::net::SocketAddr; use std::sync::Arc; use std::path::Path; use tokio::sync::Mutex; use tokio::runtime; use futures::future; use flume::{Sender, Receiver}; pub static VERSION_NAME: &str = "1.15.2"; pub static VERSION_STABLE: bool = true; pub static WORLD_VERSION: u32 = 2230; pub static PROTOCOL_VERSION: u32 = 578; pub static PACK_VERSION: u32 = 5; pub static RELEASE_TARGET: &str = "1.15.2"; pub enum ServerRequest { Connected(Connection), } pub struct ServerShared { pub registries: Arc<Registries>, pub settings: Arc<ServerSettings>, pub status: Arc<Mutex<ServerStatus>>, } pub struct Server { pub shared: Arc<ServerShared>, pub connections: Vec<Arc<Mutex<Connection>>>, pub levels: HashMap<String, Arc<Mutex<Level>>>, pub tx: Sender<ServerRequest>, pub rx: Receiver<ServerRequest>, } impl Server { pub fn get_level(&self, dimension: &String) -> Option<&Arc<Mutex<Level>>> { self.levels.get(dimension) } pub async fn tick(&mut self) -> anyhow::Result<()> { for request in self.rx.try_iter() { match request { ServerRequest::Connected(connection) => { if !connection.is_connected() { continue; } let connection = Arc::new(Mutex::new(connection)); self.connections.push(connection); }, } } let mut disconnected = vec![]; for (i, connection) in self.connections.iter().enumerate() { let mut connection = connection.lock().await; connection.tick(); if !connection.is_connected() { disconnected.push(i); } } for &i in disconnected.iter().rev() { self.connections.remove(i); } Ok(()) } } pub struct ServerContainer { pub server: Arc<Mutex<Server>>, pub shared: Arc<ServerShared>, } impl ServerContainer { pub fn new(registries: Registries, settings: ServerSettings) -> Self { let mut status = ServerStatus { 
description: TextComponent::new(settings.motd()).into(), version: ServerStatusVersion { name: String::from(VERSION_NAME), protocol: PROTOCOL_VERSION, }, players: ServerStatusPlayers { max_players: settings.max_players(), num_players: 0, sample: vec![], }, favicon: None, }; let favicon_path = Path::new("server-icon.png"); if !favicon_path.is_file() { } if favicon_path.is_file() { if let Err(e) = status.load_favicon(favicon_path) { error!("Couldn't load server icon: {}", e); } } let shared = Arc::new(ServerShared { registries: Arc::new(registries), settings: Arc::new(settings), status: Arc::new(Mutex::new(status)), }); let (tx, rx) = flume::unbounded(); let server = Arc::new(Mutex::new(Server { shared: shared.clone(), connections: vec![], levels: HashMap::new(), tx, rx, })); Self { server, shared } } async fn load_levels(&self) -> anyhow::Result<()> { info!("loading levels"); let mut server = self.server.lock().await; server.levels.insert( String::from("level_1"), Arc::new(Mutex::new(Level { name: String::from("Level 1"), server: self.server.clone(), })) ); info!("loaded levels"); Ok(()) } async fn bind(&self, addr: SocketAddr) -> anyhow::Result<Listener> { let shared = self.shared.clone(); let server = self.server.clone(); let server_tx = server.lock().await .tx.clone(); let listener = Listener::bind( server_tx, shared, addr, ).await?; Ok(listener) } async fn execute(&self) -> anyhow::Result<()> { let mut executor = ServerExecutor::new(self.server.clone()); loop { executor.execute().await?; executor.wait().await; } } pub fn start(&self) -> anyhow::Result<()> { let addr = self.shared.settings.addr().parse::<SocketAddr>()?; let mut network_rt = runtime::Builder::new() .thread_name("network") .core_threads(2) .threaded_scheduler() .enable_all() .build() .unwrap(); let mut server_rt = runtime::Builder::new() .thread_name("server") .threaded_scheduler() .enable_all() .build() .unwrap(); match network_rt.block_on(self.bind(addr)) { Err(e) => error!("Network error: {}", 
e), Ok(listener) => { let (load_levels, stop_handle_1) = future::abortable(self.load_levels()); let (execute, stop_handle_2) = future::abortable(self.execute()); ctrlc::set_handler(move || { stop_handle_1.abort(); stop_handle_2.abort(); }).ok(); if let Err(e) = server_rt.block_on(load_levels) { error!("Fatal error loading levels: {}", e); } else { network_rt.spawn(listener.listen()); server_rt.block_on(execute).ok(); drop(network_rt); } } }; info!("Server shutdown."); Ok(()) } }
mod settings; mod status; mod executor; pub use settings::*; pub use status::*; pub use executor::*; use crate::core::Registries; use crate::chat::component::TextComponent; use crate::world::level::Level; use crate::network::{Listener, Connection}; use std::collections::HashMap; use std::net::SocketAddr; use std::sync::Arc; use std::path::Path; use tokio::sync::Mutex; use tokio::runtime; use futures::future; use flume::{Sender, Receiver}; pub static VERSION_NAME: &str = "1.15.2"; pub static VERSION_STABLE: bool = true; pub static WORLD_VERSION: u32 = 2230; pub static PROTOCOL_VERSION: u32 = 578; pub static PACK_VERSION: u32 = 5; pub static RELEASE_TARGET: &str = "1.15.2"; pub enum ServerRequest { Connected(Connection), } pub struct ServerShared { pub registries: Arc<Registries>, pub settings: Arc<ServerSettings>, pub status: Arc<Mutex<ServerStatus>>, } pub struct Server { pub shared: Arc<ServerShared>, pub connections: Vec<Arc<Mutex<Connection>>>, pub levels: HashMap<String, Arc<Mutex<Level>>>, pub tx: Sender<ServerRequest>, pub rx: Receiver<ServerRequest>, } impl Server { pub fn get_level(&self, dimension: &String) -> Option<&Arc<Mutex<Level>>> { self.levels.get(dimension) } pub async fn tick(&mut self) -> anyhow::Result<()> { for request in self.rx.try_iter() { match request { ServerRequest::Connected(connection) => { if !connection.is_connected() { continue; } let connection = Arc::new(Mutex::new(connection)); self.connections.push(connection); }, } } let mut disconnected = vec![]; for (i, connection) in self.connections.iter().enumerate() { let mut connection = connection.lock().await; connection.tick(); if !connection.is_connected() { disconnected.push(i); } } for &i in disconnected.iter().rev() { self.connections.remove(i); } Ok(()) } } pub struct ServerContainer { pub server: Arc<Mutex<Server>>, pub shared: Arc<ServerShared>, } impl ServerContainer { pub fn new(registries: Registries, settings: ServerSettings) -> Self { let mut status = ServerStatus { 
description: TextComponent::new(settings.motd()).into(), version: ServerStatusVersion { name: String::from(VERSION_NAME), protocol: PROTOCOL_VERSION, }, players: ServerStatusPlayers { max_players: settings.max_players(), num_players: 0, sample: vec![], }, favicon: None, }; let favicon_path = Path::new("server-icon.png"); if !favicon_path.is_file() { } if favicon_path.is_file() { if let Err(e) = status.load_favicon(favicon_path) { error!("Couldn't load server icon: {}", e); } } let shared = Arc::new(ServerShared { registries: Arc::new(registries), settings: Arc::new(settings), status: Arc::new(Mutex::new(status)), }); let (tx, rx) = flume::unbounded(); let server = Arc::new(Mutex::new(Server { shared: shared.clone(), connections: vec![], levels: HashMap::new(), tx, rx, })); Self { server, shared } } async fn load_levels(&self) -> anyhow::Result<()> { info!("loading levels"); let mut server = self.server.lock().await; server.levels.insert( String::from("level_1"), Arc::new(Mutex::new(Level { name: String::from("Level 1"), server: self.server.clon
async fn bind(&self, addr: SocketAddr) -> anyhow::Result<Listener> { let shared = self.shared.clone(); let server = self.server.clone(); let server_tx = server.lock().await .tx.clone(); let listener = Listener::bind( server_tx, shared, addr, ).await?; Ok(listener) } async fn execute(&self) -> anyhow::Result<()> { let mut executor = ServerExecutor::new(self.server.clone()); loop { executor.execute().await?; executor.wait().await; } } pub fn start(&self) -> anyhow::Result<()> { let addr = self.shared.settings.addr().parse::<SocketAddr>()?; let mut network_rt = runtime::Builder::new() .thread_name("network") .core_threads(2) .threaded_scheduler() .enable_all() .build() .unwrap(); let mut server_rt = runtime::Builder::new() .thread_name("server") .threaded_scheduler() .enable_all() .build() .unwrap(); match network_rt.block_on(self.bind(addr)) { Err(e) => error!("Network error: {}", e), Ok(listener) => { let (load_levels, stop_handle_1) = future::abortable(self.load_levels()); let (execute, stop_handle_2) = future::abortable(self.execute()); ctrlc::set_handler(move || { stop_handle_1.abort(); stop_handle_2.abort(); }).ok(); if let Err(e) = server_rt.block_on(load_levels) { error!("Fatal error loading levels: {}", e); } else { network_rt.spawn(listener.listen()); server_rt.block_on(execute).ok(); drop(network_rt); } } }; info!("Server shutdown."); Ok(()) } }
e(), })) ); info!("loaded levels"); Ok(()) }
function_block-function_prefixed
[ { "content": "fn init_resource_registry<T: 'static>(name: &str) -> Arc<ResourceRegistry<T>> {\n\n let mut event = RegisterEvent(ResourceRegistry::new());\n\n\n\n // send event to all subscribers to add to registry\n\n dispatch_event!(\"main\", &mut event);\n\n\n\n // log completion\n\n info!(\"re...
Rust
sf/src/twin.rs
SetTheorist/rust-special-functions
a03ea7f07677a79dc8c80c468e90c6525b64bf96
use std::ops::{Add,Sub,Mul,Div,Rem}; use std::ops::{AddAssign,SubAssign,MulAssign,DivAssign,RemAssign}; use std::ops::{Neg}; #[derive(Clone,Copy,Debug,Default,PartialEq,PartialOrd)] pub struct Twin<F>{hi:F, lo:F} pub trait Base: Sized + Copy + Add<Output=Self> + Sub<Output=Self> + Mul<Output=Self> + Div<Output=Self> + Neg<Output=Self> + PartialOrd + PartialEq + Default { fn SPLIT() -> Self; fn mul_add(self, b:Self, c:Self) -> Self; fn HAS_MUL_ADD() -> bool; fn recip(self) -> Self; fn sqrt(self) -> Self; fn cbrt(self) -> Self; fn ceil(self) -> Self; fn floor(self) -> Self; fn round(self) -> Self; fn trunc(self) -> Self; fn abs(self) -> Self; fn ci(c:isize) -> Self; fn cf(c:f64) -> Self; fn to64(self) -> f64; fn epsilon() -> Self; } impl Base for f32 { #[inline] fn SPLIT() -> Self { 4097.0 } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { self.mul_add(b, c) } #[inline] fn HAS_MUL_ADD() -> bool { true } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) -> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { c as f32 } #[inline] fn cf(c:f64) -> Self { c as f32 } #[inline] fn to64(self) -> f64 { self as f64 } #[inline] fn epsilon() -> Self { f32::EPSILON } } impl Base for f64 { #[inline] fn SPLIT() -> Self { 134217729.0 } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { self.mul_add(b, c) } #[inline] fn HAS_MUL_ADD() -> bool { true } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) 
-> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { c as f64 } #[inline] fn cf(c:f64) -> Self { c as f64 } #[inline] fn to64(self) -> f64 { self } #[inline] fn epsilon() -> Self { f64::EPSILON } } impl<F:Base> Base for Twin<F> { #[inline] fn SPLIT() -> Twin<F> { Twin::new((F::SPLIT()-F::ci(1))*(F::SPLIT()-F::ci(1)), F::ci(1)) } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { unimplemented!() } #[inline] fn HAS_MUL_ADD() -> bool { false } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) -> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { Twin::new(F::ci(c),F::default()) } #[inline] fn cf(c:f64) -> Self { Twin::new(F::cf(c),F::default()) } #[inline] fn to64(self) -> f64 { self.hi.to64() } #[inline] fn epsilon() -> Self { Twin{hi:F::epsilon(),lo:F::default()}*Twin{hi:F::epsilon(),lo:F::default()} } } use crate::f128::*; impl Base for f128 { #[inline] fn SPLIT() -> Self { f128::from_bits(0x4037_0000_0000_0000__0100_0000_0000_0000) } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { unimplemented!() } #[inline] fn HAS_MUL_ADD() -> bool { false } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) -> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { f128::from(c) } #[inline] fn cf(c:f64) -> Self { f128::from(c) } #[inline] fn to64(self) -> f64 { f64::from(self) } #[inline] fn 
epsilon() -> Self { f128::from(f64::EPSILON).sqr() } } #[inline] fn qtsum<F:Base>(a:F, b:F) -> (F, F) { let s = a + b; let e = b + (a - s); (s, e) } #[inline] fn ddsum<F:Base>(a:F, b:F) -> (F, F) { let s = a + b; let v = s - a; let e = (a + (v - s)) + (b - v); (s, e) } #[inline] fn split<F:Base>(a:F) -> (F, F) { let t = F::SPLIT() * a; let ahi = t - (t - a); let alo = a - ahi; (ahi, alo) } #[inline] fn ddprod<F:Base>(a:F, b:F) -> (F, F) { if F::HAS_MUL_ADD() { let p = a * b; let e = a.mul_add(b, -p); (p, e) } else { let (ahi, alo) = split(a); let (bhi, blo) = split(b); let p = a * b; let e = (((ahi * bhi - p) + ahi * blo) + alo * bhi) + alo * blo; (p, e) } } #[inline] fn qdadd<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, y:F) -> Twin<F> { let (shi, slo) = ddsum(y, xhi); let (hhi, hlo) = qtsum(shi, slo + xlo); let (hi, lo) = qtsum(hhi, hlo); Twin{hi, lo} } #[inline] fn dqadd<F:Base>(x:F, y:Twin<F>) -> Twin<F> { qdadd(y, x) } #[inline] fn qqadd<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let (hs, he) = ddsum(xhi, yhi); let (ls, le) = ddsum(xlo, ylo); let (h, k) = qtsum(hs, he + ls); let (hi, lo) = qtsum(h, le + k); Twin{hi, lo} } #[inline] fn qnegate<F:Base>(Twin{hi, lo}:Twin<F>) -> Twin<F> { Twin{hi:-hi, lo:-lo} } #[inline] fn qdprod<F:Base>(Twin{hi:xhi, lo:xlo}: Twin<F>, y:F) -> Twin<F> { let (thi, tlo) = ddprod(xhi, y); let (hi, lo) = qtsum(thi, tlo + y * xlo); Twin{hi, lo} } #[inline] fn dqprod<F:Base>(x:F, y: Twin<F>) -> Twin<F> { qdprod(y, x) } #[inline] fn qqprod<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let (p, e) = ddprod(xhi, yhi); let (hi, lo) = qtsum(p, e + (xhi * ylo + xlo * yhi)); Twin{hi, lo} } #[inline] fn qqdivide<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let cc = xhi / yhi; let (uu, u) = ddprod(cc, yhi); let c = ((((xhi - uu) - u) + xlo) - cc * ylo) / yhi; let (hi, lo) = qtsum(cc, c); Twin{hi, lo} } #[inline] fn dqdivide<F:Base>(x:F, 
Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let cc = x / yhi; let (uu, u) = ddprod(cc, yhi); let c = (((x - uu) - u) - cc * ylo) / yhi; let (hi, lo) = qtsum(cc, c); Twin{hi, lo} } #[inline] fn qddivide<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, y:F) -> Twin<F> { let xdy = xhi / y; let (uu, u) = ddprod(xdy, y); let c = (((xhi - uu) - u) + xlo) / y; let (hi, lo) = qtsum(xdy, c); Twin{hi, lo} } impl<F:Base> Twin<F> { #[inline] pub fn new(a:F, b:F) -> Self { let (hi, lo) = ddsum(a, b); Twin{hi, lo} } #[inline] pub unsafe fn new_raw(a:F, b:F) -> Self { Twin{hi:a, lo:b} } #[inline] pub fn parts(Twin{hi, lo}: Self) -> (F, F) { (hi, lo) } #[inline] pub fn hi(self) -> F { self.hi } #[inline] pub fn lo(self) -> F { self.lo } #[inline] pub fn recip(self) -> Self { Self::new(F::ci(1),F::default()) / self } #[inline] pub fn sqr(self) -> Self { self*self } pub fn sqrt(self) -> Self { let q0 = self.hi.sqrt(); let x = Self::new(q0, F::default()); let x = (x+self/x)*F::cf(0.5); x } pub fn sqrt_recip(self) -> Self { let z = F::default(); let c3 = F::ci(3); let c1_2 = F::cf(0.5); let q0 = self.hi.sqrt().recip(); let x = Self::new(q0, z); let x = x*(-self*x.sqr() + c3)*c1_2; x } pub fn cbrt(self) -> Self { let z = F::default(); let c2 = F::ci(2); let c3 = F::ci(3); let q0 = self.hi.cbrt(); let x = Self::new(q0, z); let x = (x*c2 + self/x.sqr())/c3; x } pub fn cbrt_recip(self) -> Self { let z = F::default(); let c3 = F::ci(3); let c4 = F::ci(4); let q0 = self.hi.cbrt().recip(); let x = Self::new(q0, z); let x = x*(-self*x*x.sqr() + c4)/c3; let x = x*(-self*x*x.sqr() + c4)/c3; x } pub fn floor(self) -> Self { let xhi = self.hi.floor(); if self.hi == xhi { let xlo = self.lo.floor(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { Twin{hi:xhi, lo:F::default()} } } pub fn ceil(self) -> Self { let xhi = self.hi.ceil(); if self.hi == xhi { let xlo = self.lo.ceil(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { Twin{hi:xhi, lo:F::default()} } } pub fn round(self) -> Self { let xhi = 
self.hi.round(); if self.hi == xhi { let xlo = self.lo.round(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { if (xhi-self.hi).abs()==F::cf(0.5) && self.lo < F::default() { Twin{hi:xhi-F::ci(1), lo:F::default()} } else { Twin{hi:xhi, lo:F::default()} } } } pub fn trunc(self) -> Self { let xhi = self.hi.trunc(); if self.hi == xhi { let xlo = self.lo.trunc(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { Twin{hi:xhi, lo:F::default()} } } #[inline] pub fn abs(self) -> Self { if self < Self::default() { -self } else { self } } } impl<F:Base> Neg for Twin<F> { type Output = Self; fn neg(self) -> Self { qnegate(self) } } impl<F:Base> Add<Self> for Twin<F> { type Output = Self; fn add(self, y: Self) -> Self { qqadd(self, y) } } impl<F:Base> Sub<Self> for Twin<F> { type Output = Self; fn sub(self, y: Self) -> Self { qqadd(self, -y) } } impl<F:Base> Mul<Self> for Twin<F> { type Output = Self; fn mul(self, y: Self) -> Self { qqprod(self, y) } } impl<F:Base> Div<Self> for Twin<F> { type Output = Self; fn div(self, y: Self) -> Self { qqdivide(self, y) } } impl<F:Base> Add<F> for Twin<F> { type Output = Self; fn add(self, y:F) -> Self { qdadd(self, y) } } impl<F:Base> Sub<F> for Twin<F> { type Output = Self; fn sub(self, y:F) -> Self { qdadd(self, -y) } } impl<F:Base> Mul<F> for Twin<F> { type Output = Self; fn mul(self, y:F) -> Self { qdprod(self, y) } } impl<F:Base> Div<F> for Twin<F> { type Output = Self; fn div(self, y:F) -> Self { qddivide(self, y) } } /* // Rust restrictions block these (generic) implementation, sigh impl<F:Base> Add<Twin<F>> for F { type Output = Twin<F>; fn add(self, y:Twin<F>) -> Self { dqadd(self, y) } } impl<F:Base> Sub<Twin<F>> for F { type Output = Twin<F>; fn sub(self, y:Twin<F>) -> Self { dqadd(self, -y) } } impl<F:Base> Mul<Twin<F>> for F { type Output = Twin<F>; fn mul(self, y:Twin<F>) -> Self { dqprod(self, y) } } impl<F:Base> Div<Twin<F>> for F { type Output = Twin<F>; fn div(self, y:Twin<F>) -> Self { dqdivide(self, y) } } */ 
impl<F:Base> std::fmt::Display for Twin<F> { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let ZERO = Self::default(); let ONE = Self::ci(1); let TEN = Self::ci(10); let mut z = *self; if z < ZERO { z = -z; write!(f, "-")?; } let mut e = 0; if z == ZERO { while z >= TEN { e += 1; z = z / TEN; } while z < ONE { e -= 1; z = z * TEN; } } let digs = ((Self::epsilon().to64().recip()*1.1).log10().ceil() as isize) + 4; println!("{}", digs); for n in 0..digs { if n == 1 { write!(f, ".")?; } let d = z.floor().to64(); if d<0.0 || d>=10.0 { eprintln!("<<{}>>", d); } let dd = ((d as u8) + b'0') as char; write!(f, "{}", dd)?; let d0 = Self::cf(d); z = (z - d0) * TEN; } if e != 0 { write!(f, "e{}", e)?; } write!(f, "") } }
use std::ops::{Add,Sub,Mul,Div,Rem}; use std::ops::{AddAssign,SubAssign,MulAssign,DivAssign,RemAssign}; use std::ops::{Neg}; #[derive(Clone,Copy,Debug,Default,PartialEq,PartialOrd)] pub struct Twin<F>{hi:F, lo:F} pub trait Base: Sized + Copy + Add<Output=Self> + Sub<Output=Self> + Mul<Output=Self> + Div<Output=Self> + Neg<Output=Self> + PartialOrd + PartialEq + Default { fn SPLIT() -> Self; fn mul_add(self, b:Self, c:Self) -> Self; fn HAS_MUL_ADD() -> bool; fn recip(self) -> Self; fn sqrt(self) -> Self; fn cbrt(self) -> Self; fn ceil(self) -> Self; fn floor(self) -> Self; fn round(self) -> Self; fn trunc(self) -> Self; fn abs(self) -> Self; fn ci(c:isize) -> Self; fn cf(c:f64) -> Self; fn to64(self) -> f64; fn epsilon() -> Self; } impl Base for f32 { #[inline] fn SPLIT() -> Self { 4097.0 } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { self.mul_add(b, c) } #[inline] fn HAS_MUL_ADD() -> bool { true } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) -> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { c as f32 } #[inline] fn cf(c:f64) -> Self { c as f32 } #[inline] fn to64(self) -> f64 { self as f64 } #[inline] fn epsilon() -> Self { f32::EPSILON } } impl Base for f64 { #[inline] fn SPLIT() -> Self { 134217729.0 } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { self.mul_add(b, c) } #[inline] fn HAS_MUL_ADD() -> bool { true } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) 
-> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { c as f64 } #[inline] fn cf(c:f64) -> Self { c as f64 } #[inline] fn to64(self) -> f64 { self } #[inline] fn epsilon() -> Self { f64::EPSILON } } impl<F:Base> Base for Twin<F> { #[inline] fn SPLIT() -> Twin<F> { Twin::new((F::SPLIT()-F::ci(1))*(F::SPLIT()-F::ci(1)), F::ci(1)) } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { unimplemented!() } #[inline] fn HAS_MUL_ADD() -> bool { false } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) -> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { Twin::new(F::ci(c),F::default()) } #[inline] fn cf(c:f64) -> Self { Twin::new(F::cf(c),F::default()) } #[inline] fn to64(self) -> f64 { self.hi.to64() } #[inline] fn epsilon() -> Self { Twin{hi:F::epsilon(),lo:F::default()}*Twin{hi:F::epsilon(),lo:F::default()} } } use crate::f128::*; impl Base for f128 { #[inline] fn SPLIT() -> Self { f128::from_bits(0x4037_0000_0000_0000__0100_0000_0000_0000) } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { unimplemented!() } #[inline] fn HAS_MUL_ADD() -> bool { false } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) -> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { f128::from(c) } #[inline] fn cf(c:f64) -> Self { f128::from(c) } #[inline] fn to64(self) -> f64 { f64::from(self) } #[inline] fn 
epsilon() -> Self { f128::from(f64::EPSILON).sqr() } } #[inline] fn qtsum<F:Base>(a:F, b:F) -> (F, F) { let s = a + b; let e = b + (a - s); (s, e) } #[inline] fn ddsum<F:Base>(a:F, b:F) -> (F, F) { let s = a + b; let v = s - a; let e = (a + (v - s)) + (b - v); (s, e) } #[inline] fn split<F:Base>(a:F) -> (F, F) { let t = F::SPLIT() * a; let ahi = t - (t - a); let alo = a - ahi; (ahi, alo) } #[inline] fn ddprod<F:Base>(a:F, b:F) -> (F, F) {
} #[inline] fn qdadd<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, y:F) -> Twin<F> { let (shi, slo) = ddsum(y, xhi); let (hhi, hlo) = qtsum(shi, slo + xlo); let (hi, lo) = qtsum(hhi, hlo); Twin{hi, lo} } #[inline] fn dqadd<F:Base>(x:F, y:Twin<F>) -> Twin<F> { qdadd(y, x) } #[inline] fn qqadd<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let (hs, he) = ddsum(xhi, yhi); let (ls, le) = ddsum(xlo, ylo); let (h, k) = qtsum(hs, he + ls); let (hi, lo) = qtsum(h, le + k); Twin{hi, lo} } #[inline] fn qnegate<F:Base>(Twin{hi, lo}:Twin<F>) -> Twin<F> { Twin{hi:-hi, lo:-lo} } #[inline] fn qdprod<F:Base>(Twin{hi:xhi, lo:xlo}: Twin<F>, y:F) -> Twin<F> { let (thi, tlo) = ddprod(xhi, y); let (hi, lo) = qtsum(thi, tlo + y * xlo); Twin{hi, lo} } #[inline] fn dqprod<F:Base>(x:F, y: Twin<F>) -> Twin<F> { qdprod(y, x) } #[inline] fn qqprod<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let (p, e) = ddprod(xhi, yhi); let (hi, lo) = qtsum(p, e + (xhi * ylo + xlo * yhi)); Twin{hi, lo} } #[inline] fn qqdivide<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let cc = xhi / yhi; let (uu, u) = ddprod(cc, yhi); let c = ((((xhi - uu) - u) + xlo) - cc * ylo) / yhi; let (hi, lo) = qtsum(cc, c); Twin{hi, lo} } #[inline] fn dqdivide<F:Base>(x:F, Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let cc = x / yhi; let (uu, u) = ddprod(cc, yhi); let c = (((x - uu) - u) - cc * ylo) / yhi; let (hi, lo) = qtsum(cc, c); Twin{hi, lo} } #[inline] fn qddivide<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, y:F) -> Twin<F> { let xdy = xhi / y; let (uu, u) = ddprod(xdy, y); let c = (((xhi - uu) - u) + xlo) / y; let (hi, lo) = qtsum(xdy, c); Twin{hi, lo} } impl<F:Base> Twin<F> { #[inline] pub fn new(a:F, b:F) -> Self { let (hi, lo) = ddsum(a, b); Twin{hi, lo} } #[inline] pub unsafe fn new_raw(a:F, b:F) -> Self { Twin{hi:a, lo:b} } #[inline] pub fn parts(Twin{hi, lo}: Self) -> (F, F) { (hi, lo) } #[inline] pub fn hi(self) -> F { self.hi 
} #[inline] pub fn lo(self) -> F { self.lo } #[inline] pub fn recip(self) -> Self { Self::new(F::ci(1),F::default()) / self } #[inline] pub fn sqr(self) -> Self { self*self } pub fn sqrt(self) -> Self { let q0 = self.hi.sqrt(); let x = Self::new(q0, F::default()); let x = (x+self/x)*F::cf(0.5); x } pub fn sqrt_recip(self) -> Self { let z = F::default(); let c3 = F::ci(3); let c1_2 = F::cf(0.5); let q0 = self.hi.sqrt().recip(); let x = Self::new(q0, z); let x = x*(-self*x.sqr() + c3)*c1_2; x } pub fn cbrt(self) -> Self { let z = F::default(); let c2 = F::ci(2); let c3 = F::ci(3); let q0 = self.hi.cbrt(); let x = Self::new(q0, z); let x = (x*c2 + self/x.sqr())/c3; x } pub fn cbrt_recip(self) -> Self { let z = F::default(); let c3 = F::ci(3); let c4 = F::ci(4); let q0 = self.hi.cbrt().recip(); let x = Self::new(q0, z); let x = x*(-self*x*x.sqr() + c4)/c3; let x = x*(-self*x*x.sqr() + c4)/c3; x } pub fn floor(self) -> Self { let xhi = self.hi.floor(); if self.hi == xhi { let xlo = self.lo.floor(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { Twin{hi:xhi, lo:F::default()} } } pub fn ceil(self) -> Self { let xhi = self.hi.ceil(); if self.hi == xhi { let xlo = self.lo.ceil(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { Twin{hi:xhi, lo:F::default()} } } pub fn round(self) -> Self { let xhi = self.hi.round(); if self.hi == xhi { let xlo = self.lo.round(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { if (xhi-self.hi).abs()==F::cf(0.5) && self.lo < F::default() { Twin{hi:xhi-F::ci(1), lo:F::default()} } else { Twin{hi:xhi, lo:F::default()} } } } pub fn trunc(self) -> Self { let xhi = self.hi.trunc(); if self.hi == xhi { let xlo = self.lo.trunc(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { Twin{hi:xhi, lo:F::default()} } } #[inline] pub fn abs(self) -> Self { if self < Self::default() { -self } else { self } } } impl<F:Base> Neg for Twin<F> { type Output = Self; fn neg(self) -> Self { qnegate(self) } } impl<F:Base> Add<Self> for Twin<F> { 
type Output = Self; fn add(self, y: Self) -> Self { qqadd(self, y) } } impl<F:Base> Sub<Self> for Twin<F> { type Output = Self; fn sub(self, y: Self) -> Self { qqadd(self, -y) } } impl<F:Base> Mul<Self> for Twin<F> { type Output = Self; fn mul(self, y: Self) -> Self { qqprod(self, y) } } impl<F:Base> Div<Self> for Twin<F> { type Output = Self; fn div(self, y: Self) -> Self { qqdivide(self, y) } } impl<F:Base> Add<F> for Twin<F> { type Output = Self; fn add(self, y:F) -> Self { qdadd(self, y) } } impl<F:Base> Sub<F> for Twin<F> { type Output = Self; fn sub(self, y:F) -> Self { qdadd(self, -y) } } impl<F:Base> Mul<F> for Twin<F> { type Output = Self; fn mul(self, y:F) -> Self { qdprod(self, y) } } impl<F:Base> Div<F> for Twin<F> { type Output = Self; fn div(self, y:F) -> Self { qddivide(self, y) } } /* // Rust restrictions block these (generic) implementation, sigh impl<F:Base> Add<Twin<F>> for F { type Output = Twin<F>; fn add(self, y:Twin<F>) -> Self { dqadd(self, y) } } impl<F:Base> Sub<Twin<F>> for F { type Output = Twin<F>; fn sub(self, y:Twin<F>) -> Self { dqadd(self, -y) } } impl<F:Base> Mul<Twin<F>> for F { type Output = Twin<F>; fn mul(self, y:Twin<F>) -> Self { dqprod(self, y) } } impl<F:Base> Div<Twin<F>> for F { type Output = Twin<F>; fn div(self, y:Twin<F>) -> Self { dqdivide(self, y) } } */ impl<F:Base> std::fmt::Display for Twin<F> { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let ZERO = Self::default(); let ONE = Self::ci(1); let TEN = Self::ci(10); let mut z = *self; if z < ZERO { z = -z; write!(f, "-")?; } let mut e = 0; if z == ZERO { while z >= TEN { e += 1; z = z / TEN; } while z < ONE { e -= 1; z = z * TEN; } } let digs = ((Self::epsilon().to64().recip()*1.1).log10().ceil() as isize) + 4; println!("{}", digs); for n in 0..digs { if n == 1 { write!(f, ".")?; } let d = z.floor().to64(); if d<0.0 || d>=10.0 { eprintln!("<<{}>>", d); } let dd = ((d as u8) + b'0') as char; write!(f, "{}", dd)?; let d0 = Self::cf(d); z = (z - d0) 
* TEN; } if e != 0 { write!(f, "e{}", e)?; } write!(f, "") } }
if F::HAS_MUL_ADD() { let p = a * b; let e = a.mul_add(b, -p); (p, e) } else { let (ahi, alo) = split(a); let (bhi, blo) = split(b); let p = a * b; let e = (((ahi * bhi - p) + ahi * blo) + alo * bhi) + alo * blo; (p, e) }
if_condition
[ { "content": "pub trait Ordered: Base + PartialOrd<Self> {\n\n #[inline]\n\n fn min(self, b: Self) -> Self {\n\n if self < b { self } else { b }\n\n }\n\n #[inline]\n\n fn max(self, b: Self) -> Self {\n\n if self > b { self } else { b }\n\n }\n\n\n\n fn floor(self) -> Self;\n\n fn ceil(self) -> Se...
Rust
operators/src/processing/circle_merging_quadtree/grid.rs
koerberm/geoengine
61e0ec7a0c1136b4360b0f9c6306c34198e8ac3a
use geoengine_datatypes::primitives::{AxisAlignedRectangle, BoundingBox2D, Coordinate2D}; use super::{ circle_of_points::CircleOfPoints, circle_radius_model::CircleRadiusModel, hash_map::SeparateChainingHashMap, }; #[derive(Clone, Debug)] pub struct Grid<C: CircleRadiusModel> { offset: Coordinate2D, cell_width: f64, number_of_horizontal_cells: usize, cells: SeparateChainingHashMap<u16, CircleOfPoints>, radius_model: C, } impl<C: CircleRadiusModel> Grid<C> { pub fn new(bbox: BoundingBox2D, radius_model: C) -> Self { let cell_width = (2. * radius_model.min_radius() + radius_model.delta()) / std::f64::consts::SQRT_2; let map_width = bbox.size_x(); let map_height = bbox.size_y(); let mut number_of_horizontal_cells = (map_width / cell_width).ceil() as usize; let number_of_vertical_cells = (map_height / cell_width).ceil() as usize; if (number_of_horizontal_cells * number_of_vertical_cells) > 256 * 256 { number_of_horizontal_cells = number_of_horizontal_cells.max(256); } let offset_x = (bbox.lower_left().x / cell_width).floor() * cell_width; let offset_y = (bbox.lower_left().y / cell_width).floor() * cell_width; Self { offset: Coordinate2D { x: offset_x, y: offset_y, }, cell_width, number_of_horizontal_cells, cells: SeparateChainingHashMap::new(), radius_model, } } pub fn insert(&mut self, circle_of_points: CircleOfPoints) { let grid_x = ((circle_of_points.circle.x() - self.offset.x) / self.cell_width) as usize; let grid_y = ((circle_of_points.circle.y() - self.offset.x) / self.cell_width) as usize; let grid_pos = grid_y * self.number_of_horizontal_cells + grid_x; match self.cells.entry(grid_pos as u16) { super::hash_map::ValueRef::Vacant(entry_pos) => { self.cells.insert_unchecked(entry_pos, circle_of_points); } super::hash_map::ValueRef::Occupied(matched_circle_of_points) => { matched_circle_of_points.merge(&circle_of_points, &self.radius_model); } } } pub fn drain(self) -> impl Iterator<Item = CircleOfPoints> { self.cells.into_iter() } pub fn radius_model(&self) -> &C 
{ &self.radius_model } } #[cfg(test)] mod tests { use geoengine_datatypes::primitives::{Circle, TimeInterval}; use crate::processing::circle_merging_quadtree::circle_radius_model::LogScaledRadius; use super::*; #[test] fn test_grid() { let mut grid = Grid::new( BoundingBox2D::new((0., 0.).into(), (10., 10.).into()).unwrap(), LogScaledRadius::new(2., 1.).unwrap(), ); grid.insert(CircleOfPoints::new_with_one_point( Circle::new(1., 1., 1.), TimeInterval::default(), Default::default(), )); grid.insert(CircleOfPoints::new_with_one_point( Circle::new(2., 1., 1.), TimeInterval::default(), Default::default(), )); grid.insert(CircleOfPoints::new_with_one_point( Circle::new(6., 6., 1.), TimeInterval::default(), Default::default(), )); assert_eq!( grid.drain().collect::<Vec<_>>(), vec![ CircleOfPoints::new( Circle::new(1.5, 1., 2.693_147_180_559_945_4), 2, TimeInterval::default(), Default::default(), ) .unwrap(), CircleOfPoints::new_with_one_point( Circle::new(6., 6., 1.), TimeInterval::default(), Default::default(), ), ] ); } }
use geoengine_datatypes::primitives::{AxisAlignedRectangle, BoundingBox2D, Coordinate2D}; use super::{ circle_of_points::CircleOfPoints, circle_radius_model::CircleRadiusModel, hash_map::SeparateChainingHashMap, }; #[derive(Clone, Debug)] pub struct Grid<C: CircleRadiusModel> { offset: Coordinate2D, cell_width: f64, number_of_horizontal_cells: usize, cells: SeparateChainingHashMap<u16, CircleOfPoints>, radius_model: C, } impl<C: CircleRadiusModel> Grid<C> { pub fn new(bbox: BoundingBox2D, radius_model: C) -> Self { let cell_width = (2. * radius_model.min_radius() + radius_model.delta()) / std::f64::consts::SQRT_2; let map_width = bbox.size_x(); let map_height = bbox.size_y(); let mut number_of_horizontal_cells = (map_width / cell_width).ceil() as usize; let number_of_vertical_cells = (map_height / cell_width).ceil() as usize; if (number_of_horizontal_cells * number_of_vertical_cells) > 256 * 256 { number_of_horizontal_cells = number_of_horizontal_cells.max(256); } let offset_x = (bbox.lower_left().x / cell_width).floor() * cell_width; let offset_y = (bbox.lower_left().y / cell_width).floor() * cell_width; Self { offset: Coordinate2D { x: offset_x, y: offset_y, }, cell_width, number_of_horizontal_cells, cells: SeparateChainingHashMap::new(), radius_model, } } pub fn insert(&mut self, circle_of_points: CircleOfPoints) { let grid_x = ((circle_of_points.circle.x() - self.offset.x) / self.cell_width) as usize; let grid_y = ((circle_of_points.circle.y() - self.offset.x) / self.cell_width) as usize; let grid_pos = grid_y * self.number_of_horizontal_cells + grid_x; match self.cells.entry(grid_pos as u16) { super::hash_map::ValueRef::Vacant(entry_pos) => { self.cells.insert_unchecked(entry_pos, circle_of_points); } super::hash_map::ValueRef::Occupied(matched_circle_of_points) => { matched_circle_of_points.merge(&circle_of_points, &self.radius_model); } } } pub fn drain(self) -> impl Iterator<Item = CircleOfPoints> { self.cells.into_iter() } pub fn radius_model(&self) -> &C 
{ &self.radius_model } } #[cfg(test)] mod tests { use geoengine_datatypes::primitives::{Circle, TimeInterval}; use crate::processing::circle_merging_quadtree::circle_radius_model::LogScaledRadius; use super::*; #[test]
}
fn test_grid() { let mut grid = Grid::new( BoundingBox2D::new((0., 0.).into(), (10., 10.).into()).unwrap(), LogScaledRadius::new(2., 1.).unwrap(), ); grid.insert(CircleOfPoints::new_with_one_point( Circle::new(1., 1., 1.), TimeInterval::default(), Default::default(), )); grid.insert(CircleOfPoints::new_with_one_point( Circle::new(2., 1., 1.), TimeInterval::default(), Default::default(), )); grid.insert(CircleOfPoints::new_with_one_point( Circle::new(6., 6., 1.), TimeInterval::default(), Default::default(), )); assert_eq!( grid.drain().collect::<Vec<_>>(), vec![ CircleOfPoints::new( Circle::new(1.5, 1., 2.693_147_180_559_945_4), 2, TimeInterval::default(), Default::default(), ) .unwrap(), CircleOfPoints::new_with_one_point( Circle::new(6., 6., 1.), TimeInterval::default(), Default::default(), ), ] ); }
function_block-full_function
[ { "content": "pub fn fn_stream() -> impl Stream<Item = usize> {\n\n let mut counter: usize = 2;\n\n\n\n stream::poll_fn(move |_| -> Poll<Option<usize>> {\n\n if counter == 0 {\n\n return Poll::Ready(None);\n\n }\n\n counter -= 1;\n\n Poll::Ready(Some(counter))\n\n ...
Rust
daemon/src/rest_api/mod.rs
peterschwarz/grid
1b3859f74faa777fe7b72edb6ae4e7d80ba5c753
pub mod error; mod routes; use std::sync::mpsc; use std::thread; use crate::database::ConnectionPool; pub use crate::rest_api::error::RestApiServerError; use crate::rest_api::routes::DbExecutor; use crate::rest_api::routes::{ fetch_agent, fetch_grid_schema, fetch_organization, fetch_product, fetch_record, fetch_record_property, get_batch_statuses, list_agents, list_grid_schemas, list_organizations, list_products, list_records, submit_batches, }; use crate::submitter::BatchSubmitter; use actix::{Addr, SyncArbiter}; use actix_web::{web, App, HttpServer, Result}; use futures::Future; const SYNC_ARBITER_THREAD_COUNT: usize = 2; #[derive(Clone)] pub struct AppState { batch_submitter: Box<dyn BatchSubmitter + 'static>, database_connection: Addr<DbExecutor>, } impl AppState { pub fn new( batch_submitter: Box<dyn BatchSubmitter + 'static>, connection_pool: ConnectionPool, ) -> Self { let database_connection = SyncArbiter::start(SYNC_ARBITER_THREAD_COUNT, move || { DbExecutor::new(connection_pool.clone()) }); AppState { batch_submitter, database_connection, } } } pub struct RestApiShutdownHandle { do_shutdown: Box<dyn Fn() -> Result<(), RestApiServerError> + Send>, } impl RestApiShutdownHandle { pub fn shutdown(&self) -> Result<(), RestApiServerError> { (*self.do_shutdown)() } } pub fn run( bind_url: &str, database_connection: ConnectionPool, batch_submitter: Box<dyn BatchSubmitter + 'static>, ) -> Result< ( RestApiShutdownHandle, thread::JoinHandle<Result<(), RestApiServerError>>, ), RestApiServerError, > { let bind_url = bind_url.to_owned(); let (tx, rx) = mpsc::channel(); let join_handle = thread::Builder::new() .name("GridRestApi".into()) .spawn(move || { let sys = actix::System::new("Grid-Rest-API"); let state = AppState::new(batch_submitter, database_connection); let addr = HttpServer::new(move || { App::new() .data(state.clone()) .service(web::resource("batches").route(web::post().to_async(submit_batches))) .service( web::resource("/batch_statuses") 
.route(web::get().to_async(get_batch_statuses)), ) .service( web::scope("/agent") .service(web::resource("").route(web::get().to_async(list_agents))) .service( web::resource("/{public_key}") .route(web::get().to_async(fetch_agent)), ), ) .service( web::scope("/organization") .service( web::resource("").route(web::get().to_async(list_organizations)), ) .service( web::resource("/{id}") .route(web::get().to_async(fetch_organization)), ), ) .service( web::scope("/product") .service(web::resource("").route(web::get().to_async(list_products))) .service( web::resource("/{id}").route(web::get().to_async(fetch_product)), ), ) .service( web::scope("/schema") .service( web::resource("").route(web::get().to_async(list_grid_schemas)), ) .service( web::resource("/{name}") .route(web::get().to_async(fetch_grid_schema)), ), ) .service( web::scope("/record") .service(web::resource("").route(web::get().to_async(list_records))) .service( web::scope("/{record_id}") .service( web::resource("").route(web::get().to_async(fetch_record)), ) .service( web::resource("/property/{property_name}") .route(web::get().to_async(fetch_record_property)), ), ), ) }) .bind(bind_url)? .disable_signals() .system_exit() .start(); tx.send(addr).map_err(|err| { RestApiServerError::StartUpError(format!("Unable to send Server Addr: {}", err)) })?; sys.run()?; info!("Rest API terminating"); Ok(()) })?; let addr = rx.recv().map_err(|err| { RestApiServerError::StartUpError(format!("Unable to receive Server Addr: {}", err)) })?; let do_shutdown = Box::new(move || { debug!("Shutting down Rest API"); if let Err(err) = addr.stop(true).wait() { error!("Failed to shutdown rest api cleanly: {:?}", err); } debug!("Graceful signal sent to Rest API"); Ok(()) }); Ok((RestApiShutdownHandle { do_shutdown }, join_handle)) }
pub mod error; mod routes; use std::sync::mpsc; use std::thread; use crate::database::ConnectionPool; pub use crate::rest_api::error::RestApiServerError; use crate::rest_api::routes::DbExecutor; use crate::rest_api::routes::{ fetch_agent, fetch_grid_schema, fetch_organization, fetch_product, fetch_record, fetch_record_property, get_batch_statuses, list_agents, list_grid_schemas, list_organizations, list_products, list_records, submit_batches, }; use crate::submitter::BatchSubmitter; use actix::{Addr, SyncArbiter}; use actix_web::{web, App, HttpServer, Result}; use futures::Future; const SYNC_ARBITER_THREAD_COUNT: usize = 2; #[derive(Clone)] pub struct AppState { batch_submitter: Box<dyn BatchSubmitter + 'static>, database_connection: Addr<DbExecutor>, } impl AppState { pub fn new( batch_submitter: Box<dyn BatchSubmitter + 'static>, connection_pool: ConnectionPool, ) -> Self {
AppState { batch_submitter, database_connection, } } } pub struct RestApiShutdownHandle { do_shutdown: Box<dyn Fn() -> Result<(), RestApiServerError> + Send>, } impl RestApiShutdownHandle { pub fn shutdown(&self) -> Result<(), RestApiServerError> { (*self.do_shutdown)() } } pub fn run( bind_url: &str, database_connection: ConnectionPool, batch_submitter: Box<dyn BatchSubmitter + 'static>, ) -> Result< ( RestApiShutdownHandle, thread::JoinHandle<Result<(), RestApiServerError>>, ), RestApiServerError, > { let bind_url = bind_url.to_owned(); let (tx, rx) = mpsc::channel(); let join_handle = thread::Builder::new() .name("GridRestApi".into()) .spawn(move || { let sys = actix::System::new("Grid-Rest-API"); let state = AppState::new(batch_submitter, database_connection); let addr = HttpServer::new(move || { App::new() .data(state.clone()) .service(web::resource("batches").route(web::post().to_async(submit_batches))) .service( web::resource("/batch_statuses") .route(web::get().to_async(get_batch_statuses)), ) .service( web::scope("/agent") .service(web::resource("").route(web::get().to_async(list_agents))) .service( web::resource("/{public_key}") .route(web::get().to_async(fetch_agent)), ), ) .service( web::scope("/organization") .service( web::resource("").route(web::get().to_async(list_organizations)), ) .service( web::resource("/{id}") .route(web::get().to_async(fetch_organization)), ), ) .service( web::scope("/product") .service(web::resource("").route(web::get().to_async(list_products))) .service( web::resource("/{id}").route(web::get().to_async(fetch_product)), ), ) .service( web::scope("/schema") .service( web::resource("").route(web::get().to_async(list_grid_schemas)), ) .service( web::resource("/{name}") .route(web::get().to_async(fetch_grid_schema)), ), ) .service( web::scope("/record") .service(web::resource("").route(web::get().to_async(list_records))) .service( web::scope("/{record_id}") .service( web::resource("").route(web::get().to_async(fetch_record)), ) 
.service( web::resource("/property/{property_name}") .route(web::get().to_async(fetch_record_property)), ), ), ) }) .bind(bind_url)? .disable_signals() .system_exit() .start(); tx.send(addr).map_err(|err| { RestApiServerError::StartUpError(format!("Unable to send Server Addr: {}", err)) })?; sys.run()?; info!("Rest API terminating"); Ok(()) })?; let addr = rx.recv().map_err(|err| { RestApiServerError::StartUpError(format!("Unable to receive Server Addr: {}", err)) })?; let do_shutdown = Box::new(move || { debug!("Shutting down Rest API"); if let Err(err) = addr.stop(true).wait() { error!("Failed to shutdown rest api cleanly: {:?}", err); } debug!("Graceful signal sent to Rest API"); Ok(()) }); Ok((RestApiShutdownHandle { do_shutdown }, join_handle)) }
let database_connection = SyncArbiter::start(SYNC_ARBITER_THREAD_COUNT, move || { DbExecutor::new(connection_pool.clone()) });
assignment_statement
[ { "content": "pub fn create_connection_pool(database_url: &str) -> Result<ConnectionPool, DatabaseError> {\n\n let connection_manager = ConnectionManager::<PgConnection>::new(database_url);\n\n Ok(ConnectionPool {\n\n pool: Pool::builder()\n\n .build(connection_manager)\n\n .m...
Rust
metrics/src/recorder.rs
kevyang/rpc-perf
30cad3701837cab25c156d7d76e8df10e10d23da
use crate::*; use datastructures::HistogramConfig; use datastructures::RwWrapper; use std::collections::HashSet; use std::sync::Arc; use datastructures::Wrapper; use evmap::{ReadHandle, WriteHandle}; use std::collections::HashMap; #[derive(Clone)] pub struct Recorder { data_read: ReadHandle<String, Arc<Channel>>, data_write: Wrapper<WriteHandle<String, Arc<Channel>>>, labels: RwWrapper<HashSet<String>>, } impl Recorder { pub fn new() -> Self { let (read, write) = evmap::new(); Self { data_read: read, data_write: Wrapper::new(write), labels: RwWrapper::new(HashSet::new()), } } pub fn record(&self, channel: String, measurement: Measurement) { self.data_read .get_and(&channel, |channel| (*channel)[0].record(measurement)); } pub fn counter(&self, channel: String) -> usize { self.data_read .get_and(&channel, |channel| (*channel)[0].counter()) .unwrap_or(0) } pub fn percentile(&self, channel: String, percentile: f64) -> Option<usize> { self.data_read .get_and(&channel, |channel| (*channel)[0].percentile(percentile)) .unwrap_or(None) } pub fn add_channel( &self, name: String, source: Source, histogram_config: Option<HistogramConfig>, ) { debug!("add channel: {} source: {:?}", name, source); let channel = Channel::new(name.clone(), source, histogram_config); if self .data_read .get_and(&name, |channel| channel.len()) .unwrap_or(0) == 0 { unsafe { (*self.data_write.get()).insert(name.clone(), Arc::new(channel)); (*self.data_write.get()).refresh(); (*self.labels.lock()).insert(name); } } } pub fn delete_channel(&self, name: String) { debug!("delete channel: {}", name); unsafe { (*self.data_write.get()).empty(name.clone()); (*self.data_write.get()).refresh(); (*self.labels.lock()).remove(&name); } } pub fn readings(&self) -> Vec<Reading> { let mut result = Vec::new(); unsafe { for label in &*self.labels.get() { let readings = self .data_read .get_and(label, |channel| (*channel)[0].readings()); if let Some(readings) = readings { result.extend(readings); } } } result } pub fn 
hash_map(&self) -> HashMap<String, HashMap<Output, usize>> { let mut result = HashMap::new(); unsafe { for label in &*self.labels.get() { let readings = self .data_read .get_and(label, |channel| (*channel)[0].hash_map()); if let Some(readings) = readings { result.insert(label.to_owned(), readings); } } } result } #[cfg(feature = "waterfall")] pub fn save_files(&self) { unsafe { for label in &*self.labels.get() { let readings = self .data_read .get_and(label, |channel| (*channel)[0].save_files()); } } } pub fn add_output(&self, name: String, output: Output) { self.data_read .get_and(&name, |channel| (*channel)[0].add_output(output)); } pub fn delete_output(&self, name: String, output: Output) { self.data_read .get_and(&name, |channel| (*channel)[0].delete_output(output)); } pub fn latch(&self) { unsafe { for label in &*self.labels.get() { self.data_read .get_and(label, |channel| (*channel)[0].latch()); } } } pub fn clear(&self) { unsafe { for label in &*self.labels.get() { self.data_read .get_and(label, |channel| (*channel)[0].clear()); } } } } impl Default for Recorder { fn default() -> Self { Self::new() } }
use crate::*; use datastructures::HistogramConfig; use datastructures::RwWrapper; use std::collections::HashSet; use std::sync::Arc; use datastructures::Wrapper; use evmap::{ReadHandle, WriteHandle}; use std::collections::HashMap; #[derive(Clone)] pub struct Recorder { data_read: ReadHandle<String, Arc<Channel>>, data_write: Wrapper<WriteHandle<String, Arc<Channel>>>, labels: RwWrapper<HashSet<String>>, } impl Recorder { pub fn new() -> Self { let (read, write) = evmap::new(); Self { data_read: read, data_write: Wrapper::new(write), labels: RwWrapper::new(HashSet::new()), } } pub fn record(&self, channel: String, measurement: Measurement) { self.data_read .get_and(&channel, |channel| (*channel)[0].record(measurement)); } pub fn counter(&self, channel: String) -> usize { self.data_read .get_and(&channel, |channel| (*channel)[0].counter()) .unwrap_or(0) } pub fn percentile(&self, channel: String, percentile: f64) -> Option<usize> { self.data_read .get_and(&channel, |channel| (*channel)[0].percentile(pe
source: {:?}", name, source); let channel = Channel::new(name.clone(), source, histogram_config); if self .data_read .get_and(&name, |channel| channel.len()) .unwrap_or(0) == 0 { unsafe { (*self.data_write.get()).insert(name.clone(), Arc::new(channel)); (*self.data_write.get()).refresh(); (*self.labels.lock()).insert(name); } } } pub fn delete_channel(&self, name: String) { debug!("delete channel: {}", name); unsafe { (*self.data_write.get()).empty(name.clone()); (*self.data_write.get()).refresh(); (*self.labels.lock()).remove(&name); } } pub fn readings(&self) -> Vec<Reading> { let mut result = Vec::new(); unsafe { for label in &*self.labels.get() { let readings = self .data_read .get_and(label, |channel| (*channel)[0].readings()); if let Some(readings) = readings { result.extend(readings); } } } result } pub fn hash_map(&self) -> HashMap<String, HashMap<Output, usize>> { let mut result = HashMap::new(); unsafe { for label in &*self.labels.get() { let readings = self .data_read .get_and(label, |channel| (*channel)[0].hash_map()); if let Some(readings) = readings { result.insert(label.to_owned(), readings); } } } result } #[cfg(feature = "waterfall")] pub fn save_files(&self) { unsafe { for label in &*self.labels.get() { let readings = self .data_read .get_and(label, |channel| (*channel)[0].save_files()); } } } pub fn add_output(&self, name: String, output: Output) { self.data_read .get_and(&name, |channel| (*channel)[0].add_output(output)); } pub fn delete_output(&self, name: String, output: Output) { self.data_read .get_and(&name, |channel| (*channel)[0].delete_output(output)); } pub fn latch(&self) { unsafe { for label in &*self.labels.get() { self.data_read .get_and(label, |channel| (*channel)[0].latch()); } } } pub fn clear(&self) { unsafe { for label in &*self.labels.get() { self.data_read .get_and(label, |channel| (*channel)[0].clear()); } } } } impl Default for Recorder { fn default() -> Self { Self::new() } }
rcentile)) .unwrap_or(None) } pub fn add_channel( &self, name: String, source: Source, histogram_config: Option<HistogramConfig>, ) { debug!("add channel: {}
random
[ { "content": "pub fn runner(runtime: f64, source: Source, measurement_type: MeasurementType, label: String) {\n\n for single_channel in [true, false].iter() {\n\n for i in [1, 2, 4, 8, 16, 32, 64].iter() {\n\n timed_run(\n\n *i,\n\n runtime,\n\n ...
Rust
tests/serde_tests.rs
wrobstory/brickline
16b7281242744398cfaaaae87dda3b5c3c76ba4e
extern crate brickline; use std::convert::TryFrom; use brickline::wanted::{ Color, Condition, Item, ItemID, ItemType, MaxPrice, MinQty, Notify, QtyFilled, Remarks, WantedList, }; mod common; #[cfg(test)] mod tests { use super::*; #[test] fn test_xml_to_wanted_list() { let bricklink_wanted_list: WantedList = common::resource_name_to_wanted_list("bricklink_example.xml"); let item_1 = Item { item_type: ItemType::Part, item_id: ItemID(String::from("3622")), color: Some(Color(11)), max_price: None, min_qty: None, qty_filled: Some(QtyFilled(4)), condition: None, remarks: None, notify: None, wanted_show: None, wanted_list_id: None, }; let item_2 = Item { item_type: ItemType::Part, item_id: ItemID(String::from("3039")), color: None, max_price: None, min_qty: None, qty_filled: None, condition: None, remarks: None, notify: None, wanted_show: None, wanted_list_id: None, }; let item_3 = Item { item_type: ItemType::Part, item_id: ItemID(String::from("3001")), color: Some(Color(5)), max_price: Some(MaxPrice(1.00)), min_qty: Some(MinQty(100)), qty_filled: None, condition: Some(Condition::New), remarks: Some(Remarks(String::from("for MOC AB154A"))), notify: Some(Notify::N), wanted_show: None, wanted_list_id: None, }; let items = vec![item_1, item_2, item_3]; let expected_wanted_list = WantedList { items: items }; assert_eq!(bricklink_wanted_list, expected_wanted_list); } #[test] fn test_wanted_list_to_string_1() { let item_1 = Item::build_test_item( ItemType::Part, ItemID(String::from("3622")), Some(Color(11)), Some(MinQty(4)), ); let items = vec![item_1]; let wanted_list = WantedList { items: items }; let stringified = String::try_from(wanted_list).unwrap(); let expected = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\ <INVENTORY>\ <ITEM>\ <ITEMTYPE>P</ITEMTYPE>\ <ITEMID>3622</ITEMID>\ <COLOR>11</COLOR>\ <MINQTY>4</MINQTY>\ </ITEM>\ </INVENTORY>\ "; assert_eq!(String::from(expected), stringified); } #[test] fn test_wanted_list_to_string_2() { let item_1 = Item::build_test_item( 
ItemType::Part, ItemID(String::from("3622")), Some(Color(11)), Some(MinQty(4)), ); let item_2 = Item::build_test_item( ItemType::Part, ItemID(String::from("3623")), Some(Color(11)), Some(MinQty(4)), ); let item_3 = Item::build_test_item( ItemType::Part, ItemID(String::from("3624")), Some(Color(11)), Some(MinQty(4)), ); let items = vec![item_1, item_2, item_3]; let wanted_list = WantedList { items: items }; let stringified = String::try_from(wanted_list).unwrap(); let expected = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\ <INVENTORY>\ <ITEM>\ <ITEMTYPE>P</ITEMTYPE>\ <ITEMID>3622</ITEMID>\ <COLOR>11</COLOR>\ <MINQTY>4</MINQTY>\ </ITEM>\ <ITEM>\ <ITEMTYPE>P</ITEMTYPE>\ <ITEMID>3623</ITEMID>\ <COLOR>11</COLOR>\ <MINQTY>4</MINQTY>\ </ITEM>\ <ITEM>\ <ITEMTYPE>P</ITEMTYPE>\ <ITEMID>3624</ITEMID>\ <COLOR>11</COLOR>\ <MINQTY>4</MINQTY>\ </ITEM>\ </INVENTORY>\ "; assert_eq!(String::from(expected), stringified); } #[test] fn test_roundtrips() { for resource_name in vec![ "bricklink_example.xml", "test_wanted_list_1.xml", "test_wanted_list_2.xml", "test_wanted_list_3.xml", ] .iter() { let wanted_list = common::resource_name_to_wanted_list(resource_name); let stringified = String::try_from(wanted_list).unwrap(); let expected_string = common::resource_name_to_string(resource_name); assert_eq!(expected_string, stringified); } } }
extern crate brickline; use std::convert::TryFrom; use brickline::wanted::{ Color, Condition, Item, ItemID, ItemType, MaxPrice, MinQty, Notify, QtyFilled, Remarks, WantedList, }; mod common; #[cfg(test)] mod tests { use super::*; #[test]
#[test] fn test_wanted_list_to_string_1() { let item_1 = Item::build_test_item( ItemType::Part, ItemID(String::from("3622")), Some(Color(11)), Some(MinQty(4)), ); let items = vec![item_1]; let wanted_list = WantedList { items: items }; let stringified = String::try_from(wanted_list).unwrap(); let expected = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\ <INVENTORY>\ <ITEM>\ <ITEMTYPE>P</ITEMTYPE>\ <ITEMID>3622</ITEMID>\ <COLOR>11</COLOR>\ <MINQTY>4</MINQTY>\ </ITEM>\ </INVENTORY>\ "; assert_eq!(String::from(expected), stringified); } #[test] fn test_wanted_list_to_string_2() { let item_1 = Item::build_test_item( ItemType::Part, ItemID(String::from("3622")), Some(Color(11)), Some(MinQty(4)), ); let item_2 = Item::build_test_item( ItemType::Part, ItemID(String::from("3623")), Some(Color(11)), Some(MinQty(4)), ); let item_3 = Item::build_test_item( ItemType::Part, ItemID(String::from("3624")), Some(Color(11)), Some(MinQty(4)), ); let items = vec![item_1, item_2, item_3]; let wanted_list = WantedList { items: items }; let stringified = String::try_from(wanted_list).unwrap(); let expected = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\ <INVENTORY>\ <ITEM>\ <ITEMTYPE>P</ITEMTYPE>\ <ITEMID>3622</ITEMID>\ <COLOR>11</COLOR>\ <MINQTY>4</MINQTY>\ </ITEM>\ <ITEM>\ <ITEMTYPE>P</ITEMTYPE>\ <ITEMID>3623</ITEMID>\ <COLOR>11</COLOR>\ <MINQTY>4</MINQTY>\ </ITEM>\ <ITEM>\ <ITEMTYPE>P</ITEMTYPE>\ <ITEMID>3624</ITEMID>\ <COLOR>11</COLOR>\ <MINQTY>4</MINQTY>\ </ITEM>\ </INVENTORY>\ "; assert_eq!(String::from(expected), stringified); } #[test] fn test_roundtrips() { for resource_name in vec![ "bricklink_example.xml", "test_wanted_list_1.xml", "test_wanted_list_2.xml", "test_wanted_list_3.xml", ] .iter() { let wanted_list = common::resource_name_to_wanted_list(resource_name); let stringified = String::try_from(wanted_list).unwrap(); let expected_string = common::resource_name_to_string(resource_name); assert_eq!(expected_string, stringified); } } }
fn test_xml_to_wanted_list() { let bricklink_wanted_list: WantedList = common::resource_name_to_wanted_list("bricklink_example.xml"); let item_1 = Item { item_type: ItemType::Part, item_id: ItemID(String::from("3622")), color: Some(Color(11)), max_price: None, min_qty: None, qty_filled: Some(QtyFilled(4)), condition: None, remarks: None, notify: None, wanted_show: None, wanted_list_id: None, }; let item_2 = Item { item_type: ItemType::Part, item_id: ItemID(String::from("3039")), color: None, max_price: None, min_qty: None, qty_filled: None, condition: None, remarks: None, notify: None, wanted_show: None, wanted_list_id: None, }; let item_3 = Item { item_type: ItemType::Part, item_id: ItemID(String::from("3001")), color: Some(Color(5)), max_price: Some(MaxPrice(1.00)), min_qty: Some(MinQty(100)), qty_filled: None, condition: Some(Condition::New), remarks: Some(Remarks(String::from("for MOC AB154A"))), notify: Some(Notify::N), wanted_show: None, wanted_list_id: None, }; let items = vec![item_1, item_2, item_3]; let expected_wanted_list = WantedList { items: items }; assert_eq!(bricklink_wanted_list, expected_wanted_list); }
function_block-full_function
[ { "content": "use brickline::wanted::{SerdeWantedList, WantedList};\n\nuse brickline::xml_to_string;\n\n\n\nuse quick_xml::de::from_str;\n\n\n\nuse std::fs::File;\n\nuse std::io::Read;\n\nuse std::path::PathBuf;\n\n\n", "file_path": "tests/common/mod.rs", "rank": 0, "score": 64840.782517102314 }, ...
Rust
pingr/src/main.rs
13ABEL/internship-application-systems
1865456b46d621637147b194322dfc5d5791a126
mod main_clap; extern crate pnet; extern crate regex; use pnet::packet::icmp::{IcmpCode, IcmpTypes, MutableIcmpPacket}; use pnet::packet::icmpv6::{Icmpv6Types, MutableIcmpv6Packet}; use pnet::packet::ip::IpNextHeaderProtocols::{Icmp, Icmpv6}; use pnet::packet::ipv4::MutableIpv4Packet; use pnet::packet::ipv6::MutableIpv6Packet; use pnet::packet::ip::IpNextHeaderProtocols; use pnet::packet::MutablePacket; use pnet::transport::{icmp_packet_iter, transport_channel, TransportChannelType}; use pnet::util::checksum; use dns_lookup::lookup_host; use regex::Regex; use signal_hook::{register, SIGINT}; use std::error::Error; use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; use std::time::Instant; use std::{process, thread, time}; const IPV4_HEADER_LEN: usize = 20; const IPV4_HEADER_WORD_LEN: u8 = 5; const IPV6_HEADER_LEN: usize = 40; const ICMP_HEADER_LEN: usize = 8; const ICMP_PAYLOAD_LEN_DEFAULT: u16 = 56; const ICMP_CHECKSUM_POS: usize = 1; const ICMP_CODE: u8 = 0; const DEFAULT_BUFF_SIZE: usize = 4096; const DEFAULT_SLEEP_TIME: u64 = 1000; const DEFAULT_TIMEOUT: usize = 1; const DEFAULT_TTL: u8 = 64; const MAX_IPV4_PACKET_LEN: usize = 1044; const MAX_ICMP_PACKET_LEN: usize = MAX_IPV4_PACKET_LEN - IPV4_HEADER_LEN; const MAX_TTL: u64 = 255; const MAX_TIMEOUT: usize = 20; enum SupportedPacketType<'a> { V4(MutableIpv4Packet<'a>), V6(MutableIpv6Packet<'a>), } static mut SENT: usize = 0; static mut RECEIVED: usize = 0; fn main() { let arg_matches = main_clap::clap_desc(); let arg_ping_dest = match arg_matches.value_of(main_clap::ARG_ADDRESS) { Some(input) => String::from(input), None => panic!("Please supply the address to ping"), }; let ttl = match arg_matches.value_of(main_clap::ARG_TTL) { Some(input) => { let full_ttl = input.parse::<u64>().expect("the ttl must be an integer"); match full_ttl { 1..=MAX_TTL => full_ttl as u8, _ => panic!("the ttl is 1 to {}", MAX_TTL), } } None => DEFAULT_TTL, }; let icmp_packet_len: usize = match arg_matches.value_of(main_clap::ARG_PACKET_SIZE) { 
Some(input) => { let full_payload_len = input .parse::<usize>() .expect("the packet size must be an integer"); match full_payload_len { 8..=MAX_ICMP_PACKET_LEN => full_payload_len, _ => panic!( "the icmp packet length must be between {} and {} bytes", ICMP_HEADER_LEN, MAX_ICMP_PACKET_LEN ), } } None => ICMP_PAYLOAD_LEN_DEFAULT as usize, }; let timeout_length = match arg_matches.value_of(main_clap::ARG_TIMEOUT) { Some(input) => { let full_timeout_len = input .parse::<usize>() .expect("the timeout must be an integer"); match full_timeout_len { 1..=MAX_TIMEOUT => full_timeout_len, _ => panic!( "the timeout must be between {} and {} seconds", 1, MAX_TIMEOUT ), } } None => DEFAULT_TIMEOUT, }; unsafe { register(SIGINT, || finish()).unwrap(); } let address = resolve_ip_address(&arg_ping_dest).unwrap(); let (ip_packet_size, protocol) = match address { IpAddr::V4(_) => { let size = IPV4_HEADER_LEN + icmp_packet_len; let protocol = Icmp; (size, protocol) } IpAddr::V6(_) => { let size = IPV6_HEADER_LEN + icmp_packet_len; let protocol = Icmpv6; (size, protocol) } }; let duration = time::Duration::from_millis(DEFAULT_SLEEP_TIME); let channel_type = TransportChannelType::Layer3(protocol); let (mut sender, mut receiver) = match transport_channel(DEFAULT_BUFF_SIZE, channel_type) { Ok((sender, receiver)) => (sender, receiver), Err(e) => panic!("Error initializing the channel {}", e), }; let mut receiver_iter = icmp_packet_iter(&mut receiver); println!( "PINGER: {}({}) with {} bytes of data", arg_ping_dest, address, icmp_packet_len ); loop { let mut ip_packet_buf = vec![0u8; ip_packet_size]; let mut icmp_packet_buf = vec![0u8; icmp_packet_len]; let packet = create_packet( address, ttl, &mut ip_packet_buf, &mut icmp_packet_buf, icmp_packet_len, ); let time_sent = Instant::now(); let send_result = match packet { SupportedPacketType::V4(packet) => sender.send_to(packet, address), SupportedPacketType::V6(packet) => sender.send_to(packet, address), }; match send_result { Ok(_) => { 
unsafe { SENT += 1 } } Err(e) => panic!("Error sending packet {}", e), }; match receiver_iter.next_with_timeout(time::Duration::from_secs(timeout_length as u64)) { Ok(Some((_, ip_addr))) => { println!( "{} bytes from {}: ttl={} time={} ms", icmp_packet_len, ip_addr, ttl, (time_sent.elapsed().as_micros() as f64) / 1000.0 ); unsafe { RECEIVED += 1 } } Ok(None) => { println!( "packet timed out: time={} ms", time_sent.elapsed().as_millis() ); } Err(e) => println!("Error receiving packet {}", e), } thread::sleep(duration); } } /* parse input as IP address */ fn resolve_ip_address(input: &String) -> Result<IpAddr, Box<dyn Error>> { let reg_ipv4 = Regex::new(r#"^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$"#)?; let reg_ipv6 = Regex::new("^(?:[a-fA-F0-9]{1,4}:){7}[a-fA-F0-9]{1,4}$")?; let reg_hostname = Regex::new( r#"^(?:[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?\.){0,2}[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$"#, )?; let ip_addr: IpAddr; if reg_hostname.is_match(input) { let lookup_results = lookup_host(input)?; match lookup_results.len() { 2 => ip_addr = lookup_results[1], 1 => ip_addr = lookup_results[0], _ => panic!("host name lookup returned with not results"), } } else if reg_ipv4.is_match(input) || reg_ipv6.is_match(input) { ip_addr = input.parse()?; } else { panic!("Please enter a valid domain or IP address"); } return Ok(ip_addr); } /* creates a packet for a given address based on address spec ie. 
Ipv4Packet for Ipv4Addr and Ipv6Packet for Ipv6Addr */ fn create_packet<'a>( address: IpAddr, ttl: u8, ip_packet_buf: &'a mut [u8], icmp_packet_buf: &'a mut [u8], icmp_packet_len: usize, ) -> SupportedPacketType<'a> { return match address { IpAddr::V4(ip_addr) => SupportedPacketType::V4(create_ipv4_packet( ip_addr, ttl, ip_packet_buf, icmp_packet_buf, icmp_packet_len, )), IpAddr::V6(ip_addr) => SupportedPacketType::V6(create_ipv6_packet( ip_addr, ttl, ip_packet_buf, icmp_packet_buf, icmp_packet_len, )), }; } /* I referenced this: https://codereview.stackexchange.com/questions/208875/traceroute-implementation-in-rust which helped me understand I had to wrap my ICMP packet within a IP[v4/v6] packet */ fn create_ipv4_packet<'a>( dest: Ipv4Addr, ttl: u8, ip_packet_buf: &'a mut [u8], icmp_packet_buf: &'a mut [u8], icmp_packet_len: usize, ) -> MutableIpv4Packet<'a> { let mut ipv4_packet = MutableIpv4Packet::new(ip_packet_buf).expect("unable to create ipv4 packet"); ipv4_packet.set_version(4); ipv4_packet.set_header_length(IPV4_HEADER_WORD_LEN); ipv4_packet.set_total_length((IPV4_HEADER_LEN + icmp_packet_len) as u16); ipv4_packet.set_ttl(ttl); ipv4_packet.set_next_level_protocol(IpNextHeaderProtocols::Icmp); ipv4_packet.set_destination(dest); let mut icmp_packet = MutableIcmpPacket::new(icmp_packet_buf).expect("unable to create icmp packet"); icmp_packet.set_icmp_type(IcmpTypes::EchoRequest); icmp_packet.set_icmp_code(IcmpCode::new(ICMP_CODE)); let checksum = checksum(&icmp_packet.packet_mut(), ICMP_CHECKSUM_POS); icmp_packet.set_checksum(checksum); ipv4_packet.set_payload(icmp_packet.packet_mut()); return ipv4_packet; } fn create_ipv6_packet<'a>( dest: Ipv6Addr, ttl: u8, ip_packet_buf: &'a mut [u8], icmp_packet_buf: &'a mut [u8], icmp_packet_len: usize, ) -> MutableIpv6Packet<'a> { let mut ipv6_packet = MutableIpv6Packet::new(ip_packet_buf).expect("invalid packet buffer size"); ipv6_packet.set_version(6); ipv6_packet.set_destination(dest); ipv6_packet.set_hop_limit(ttl); 
let mut icmp_packet = MutableIcmpv6Packet::new(icmp_packet_buf).unwrap(); let checksum = checksum(&icmp_packet.packet_mut(), ICMP_CHECKSUM_POS); icmp_packet.set_checksum(checksum); icmp_packet.set_icmpv6_type(Icmpv6Types::EchoRequest); ipv6_packet.set_payload_length((ICMP_HEADER_LEN + icmp_packet_len) as u16); ipv6_packet.set_payload(icmp_packet.packet_mut()); return ipv6_packet; } unsafe fn finish() { println!("\n--- ping statistics ---"); let packet_loss = (SENT - RECEIVED) / (SENT + RECEIVED) * 100; println!( "{} packets transmitted, {} received, {}% packet loss", SENT, RECEIVED, packet_loss, ); process::exit(0); }
mod main_clap; extern crate pnet; extern crate regex; use pnet::packet::icmp::{IcmpCode, IcmpTypes, MutableIcmpPacket}; use pnet::packet::icmpv6::{Icmpv6Types, MutableIcmpv6Packet}; use pnet::packet::ip::IpNextHeaderProtocols::{Icmp, Icmpv6}; use pnet::packet::ipv4::MutableIpv4Packet; use pnet::packet::ipv6::MutableIpv6Packet; use pnet::packet::ip::IpNextHeaderProtocols; use pnet::packet::MutablePacket; use pnet::transport::{icmp_packet_iter, transport_channel, TransportChannelType}; use pnet::util::checksum; use dns_lookup::lookup_host; use regex::Regex; use signal_hook::{register, SIGINT}; use std::error::Error; use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; use std::time::Instant; use std::{process, thread, time}; const IPV4_HEADER_LEN: usize = 20; const IPV4_HEADER_WORD_LEN: u8 = 5; const IPV6_HEADER_LEN: usize = 40; const ICMP_HEADER_LEN: usize = 8; const ICMP_PAYLOAD_LEN_DEFAULT: u16 = 56; const ICMP_CHECKSUM_POS: usize = 1; const ICMP_CODE: u8 = 0; const DEFAULT_BUFF_SIZE: usize = 4096; const DEFAULT_SLEEP_TIME: u64 = 1000; const DEFAULT_TIMEOUT: usize = 1; const DEFAULT_TTL: u8 = 64; const MAX_IPV4_PACKET_LEN: usize = 1044; const MAX_ICMP_PACKET_LEN: usize = MAX_IPV4_PACKET_LEN - IPV4_HEADER_LEN; const MAX_TTL: u64 = 255; const MAX_TIMEOUT: usize = 20; enum SupportedPacketType<'a> { V4(MutableIpv4Packet<'a>), V6(MutableIpv6Packet<'a>), } static mut SENT: usize = 0; static mut RECEIVED: usize = 0; fn main() { let arg_matches = main_clap::clap_desc(); let arg_ping_dest = match arg_matches.value_of(main_clap::ARG_ADDRESS) { Some(input) => String::from(input), None => panic!("Please supply the address to ping"), }; let ttl = match arg_matches.value_of(main_clap::ARG_TTL) { Some(input) => { let full_ttl = input.parse::<u64>().expect("the ttl must be an integer"); match full_ttl { 1..=MAX_TTL => full_ttl as u8, _ => panic!("the ttl is 1 to {}", MAX_TTL), } } None => DEFAULT_TTL, }; let icmp_packet_len: usize = match arg_matches.value_of(main_clap::ARG_PACKET_SIZE) { 
Some(input) => { let full_payload_len = input .parse::<usize>() .expect("the packet size must be an integer"); match full_payload_len { 8..=MAX_ICMP_PACKET_LEN => full_payload_len, _ => panic!( "the icmp packet length must be between {} and {} bytes", ICMP_HEADER_LEN, MAX_ICMP_PACKET_LEN ), } } None => ICMP_PAYLOAD_LEN_DEFAULT as usize, }; let timeout_length = match arg_matches.value_of(main_clap::ARG_TIMEOUT) { Some(input) => { let full_timeout_len = input .parse::<usize>() .expect("the timeout must be an integer"); match full_timeout_len { 1..=MAX_TIMEOUT => full_timeout_len, _ => panic!( "the timeout must be between {} and {} seconds", 1, MAX_TIMEOUT ), } } None => DEFAULT_TIMEOUT, }; unsafe { register(SIGINT, || finish()).unwrap(); } let address = resolve_ip_address(&arg_ping_dest).unwrap(); let (ip_packet_size, protocol) = match address { IpAddr::V4(_) => { let size = IPV4_HEADER_LEN + icmp_packet_len; let protocol = Icmp; (size, protocol) } IpAddr::V6(_) => { let size = IPV6_HEADER_LEN + icmp_packet_len; let protocol = Icmpv6; (size, protocol) } }; let duration = time::Duration::from_millis(DEFAULT_SLEEP_TIME); let channel_type = TransportChannelType::Layer3(protocol); let (mut sender, mut receiver) = match transport_channel(DEFAULT_BUFF_SIZE, channel_type) { Ok((sender, receiver)) => (sender, receiver), Err(e) => panic!("Error initializing the channel {}", e), }; let mut receiver_iter = icmp_packet_iter(&mut receiver); println!( "PINGER: {}({}) with {} bytes of data", arg_ping_dest, address, icmp_packet_len ); loop { let mut ip_packet_buf = vec![0u8; ip_packet_size]; let mut icmp_packet_buf = vec![0u8; icmp_packet_len]; let packet = create_packet( address, ttl, &mut ip_packet_buf, &mut icmp_packet_buf, icmp_packet_len, ); let time_sent = Instant::now(); let send_result = match packet { SupportedPacketType::V4(packet) => sender.send_to(packet, address), SupportedPacketType::V6(packet) => sender.send_to(packet, address), }; match send_result { Ok(_) => { 
unsafe { SENT += 1 } } Err(e) => panic!("Error sending packet {}", e), }; match receiver_iter.next_with_timeout(time::Duration::from_secs(timeout_length as u64)) { Ok(Some((_, ip_addr))) => { println!( "{} bytes from {}: ttl={} time={} ms", icmp_packet_len, ip_addr, ttl, (time_sent.elapsed().as_micros() as f64) / 1000.0 ); unsafe { RECEIVED += 1 } } Ok(None) => { println!( "packet timed out: time={} ms", time_sent.elapsed().as_millis() ); } Err(e) => println!("Error receiving packet {}", e), } thread::sleep(duration); } } /* parse input as IP address */ fn resolve_ip_address(input: &String) -> Result<IpAddr, Box<dyn Error>> { let reg_ipv4 = Regex::new(r#"^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$"#)?; let reg_ipv6 = Regex::new("^(?:[a-fA-F0-9]{1,4}:){7}[a-fA-F0-9]{1,4}$")?; let reg_hostname = Regex::new( r#"^(?:[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?\.){0,2}[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$"#, )?; let ip_addr: IpAddr; if reg_hostname.is_match(input) { let lookup_results = lookup_host(input)?; match lookup_results.len() { 2 => ip_addr = lookup_results[1], 1 => ip_addr = lookup_results[0], _ => panic!("host name lookup returned with not results"), } } else if reg_ipv4.is_match(input) || reg_ipv6.is_match(input) { ip_addr = input.parse()?; } else { panic!("Please enter a valid domain or IP address"); } return Ok(ip_addr); } /* creates a packet for a given address based on address spec ie. Ipv4Packet for Ipv4Addr and Ipv6Packet for Ipv6Addr */ fn create_packet<'a>( address: IpAddr, ttl: u8, ip_packet_buf: &'a mut [u8], icmp_packet_buf: &'a mut [u8], icmp_packet_len: usize, ) -> SupportedPacketType<'a> { return match address { IpAddr::V4(ip_addr) => SupportedPacketType::V4(create_ipv4_packet( ip_addr, ttl, ip_packet_buf, icmp_packet_buf, icmp_packet_len, )), IpAddr::V6(ip_addr) => SupportedPacketType::V6(create_ipv6_packet( ip_addr, ttl, ip_packet_buf, icmp_packet_buf, icmp_packet_len, )), }; } /* I referenced this: https://codereview.stackexchange.com/que
ER_WORD_LEN); ipv4_packet.set_total_length((IPV4_HEADER_LEN + icmp_packet_len) as u16); ipv4_packet.set_ttl(ttl); ipv4_packet.set_next_level_protocol(IpNextHeaderProtocols::Icmp); ipv4_packet.set_destination(dest); let mut icmp_packet = MutableIcmpPacket::new(icmp_packet_buf).expect("unable to create icmp packet"); icmp_packet.set_icmp_type(IcmpTypes::EchoRequest); icmp_packet.set_icmp_code(IcmpCode::new(ICMP_CODE)); let checksum = checksum(&icmp_packet.packet_mut(), ICMP_CHECKSUM_POS); icmp_packet.set_checksum(checksum); ipv4_packet.set_payload(icmp_packet.packet_mut()); return ipv4_packet; } fn create_ipv6_packet<'a>( dest: Ipv6Addr, ttl: u8, ip_packet_buf: &'a mut [u8], icmp_packet_buf: &'a mut [u8], icmp_packet_len: usize, ) -> MutableIpv6Packet<'a> { let mut ipv6_packet = MutableIpv6Packet::new(ip_packet_buf).expect("invalid packet buffer size"); ipv6_packet.set_version(6); ipv6_packet.set_destination(dest); ipv6_packet.set_hop_limit(ttl); let mut icmp_packet = MutableIcmpv6Packet::new(icmp_packet_buf).unwrap(); let checksum = checksum(&icmp_packet.packet_mut(), ICMP_CHECKSUM_POS); icmp_packet.set_checksum(checksum); icmp_packet.set_icmpv6_type(Icmpv6Types::EchoRequest); ipv6_packet.set_payload_length((ICMP_HEADER_LEN + icmp_packet_len) as u16); ipv6_packet.set_payload(icmp_packet.packet_mut()); return ipv6_packet; } unsafe fn finish() { println!("\n--- ping statistics ---"); let packet_loss = (SENT - RECEIVED) / (SENT + RECEIVED) * 100; println!( "{} packets transmitted, {} received, {}% packet loss", SENT, RECEIVED, packet_loss, ); process::exit(0); }
stions/208875/traceroute-implementation-in-rust which helped me understand I had to wrap my ICMP packet within a IP[v4/v6] packet */ fn create_ipv4_packet<'a>( dest: Ipv4Addr, ttl: u8, ip_packet_buf: &'a mut [u8], icmp_packet_buf: &'a mut [u8], icmp_packet_len: usize, ) -> MutableIpv4Packet<'a> { let mut ipv4_packet = MutableIpv4Packet::new(ip_packet_buf).expect("unable to create ipv4 packet"); ipv4_packet.set_version(4); ipv4_packet.set_header_length(IPV4_HEAD
random
[ { "content": "pub fn clap_desc() -> ArgMatches<'static> {\n\n return App::new(\"pingr\")\n\n .version(\"0.1.0\")\n\n .author(\"Richard Wei <therichardwei@gmail.com>\")\n\n .arg(\n\n Arg::with_name(ARG_ADDRESS)\n\n .index(1)\n\n .takes_value(true)\...
Rust
hive-core/src/lua/http/request.rs
hackerer1c/hive
a98ab9a97836f208646df252175283067a398b7b
use super::body::LuaBody; use super::header_map::LuaHeaderMap; use super::uri::LuaUri; use crate::path::Params; use hyper::header::{HeaderName, HeaderValue}; use hyper::http::request::Parts; use hyper::{Body, HeaderMap, Method, Request}; use mlua::{ExternalError, ExternalResult, FromLua, Lua, Table, UserData}; use std::cell::RefCell; use std::rc::Rc; pub struct LuaRequest { pub(crate) method: Method, pub(crate) uri: hyper::Uri, pub(crate) headers: Rc<RefCell<HeaderMap>>, pub(crate) body: Option<LuaBody>, params: Option<Params>, } impl LuaRequest { #[rustfmt::skip] pub fn new(req: Request<Body>, params: Params) -> Self { let (Parts { method, uri, headers, .. }, body) = req.into_parts(); let headers = Rc::new(RefCell::new(headers)); let body = Some(body.into()); let params = Some(params); Self { method, uri, headers, body, params } } } impl Default for LuaRequest { fn default() -> Self { Self { method: Method::GET, uri: Default::default(), headers: Default::default(), body: Some(LuaBody::Empty), params: None, } } } impl UserData for LuaRequest { fn add_fields<'lua, F: mlua::UserDataFields<'lua, Self>>(fields: &mut F) { fields.add_field_function_get("params", |lua, this| { this .get_named_user_value::<_, Table>("params") .or_else(|_err| { let mut this_ref = this.borrow_mut::<Self>()?; let params = this_ref .params .take() .map(|x| { let iter = x .into_iter() .map(|(k, v)| (k.into_string(), v.into_string())); lua.create_table_from(iter) }) .unwrap_or_else(|| lua.create_table())?; this.set_named_user_value("params", params.clone())?; Ok(params) }) }); fields.add_field_method_get("method", |lua, this| lua.pack(this.method.as_str())); fields.add_field_method_get("uri", |_lua, this| Ok(LuaUri(this.uri.clone()))); fields.add_field_function_get("body", |lua, this| { let mut this_ = this.borrow_mut::<Self>()?; let body = this_.body.take(); if let Some(body) = body { let x = lua.pack(body)?; this.set_named_user_value("body", x.clone())?; Ok(x) } else { 
this.get_named_user_value("body") } }); fields.add_field_method_get("headers", |_lua, this| { Ok(LuaHeaderMap(this.headers.clone())) }); } } impl<'lua> FromLua<'lua> for LuaRequest { fn from_lua(lua_value: mlua::Value<'lua>, _lua: &'lua Lua) -> mlua::Result<Self> { match lua_value { mlua::Value::String(uri) => Ok(Self { uri: hyper::Uri::try_from(uri.as_bytes()).to_lua_err()?, ..Default::default() }), mlua::Value::Table(table) => { let method = table .raw_get::<_, Option<mlua::String>>("method")? .map(|x| Method::from_bytes(x.as_bytes())) .transpose() .to_lua_err()? .unwrap_or(Method::GET); let uri: hyper::Uri = table .raw_get::<_, mlua::String>("uri")? .as_bytes() .try_into() .to_lua_err()?; let headers_table: Option<Table> = table.raw_get("headers")?; let mut headers = HeaderMap::new(); if let Some(headers_table) = headers_table { for entry in headers_table.pairs::<mlua::String, mlua::Value>() { let (k, v) = entry?; let k = HeaderName::from_bytes(k.as_bytes()).to_lua_err()?; match v { mlua::Value::String(v) => { headers.append(k, HeaderValue::from_bytes(v.as_bytes()).to_lua_err()?); } mlua::Value::Table(vs) => { for v in vs.sequence_values::<mlua::String>() { let v = v?; headers.append(&k, HeaderValue::from_bytes(v.as_bytes()).to_lua_err()?); } } _ => return Err("expected string or table".to_lua_err()), } } } Ok(Self { method, uri, headers: Rc::new(RefCell::new(headers)), body: Some(table.raw_get("body")?), ..Default::default() }) } mlua::Value::UserData(x) => { let mut u = x.take::<Self>()?; if u.body.is_none() { let t = x.get_named_user_value::<_, LuaBody>("body")?; u.body = Some(t); } Ok(u) } _ => Err("expected string or table".to_lua_err()), } } } impl From<LuaRequest> for Request<Body> { fn from(x: LuaRequest) -> Self { let headers = Rc::try_unwrap(x.headers) .map(RefCell::into_inner) .unwrap_or_else(|x| x.borrow().clone()); let mut builder = Request::builder().method(x.method).uri(x.uri); *builder.headers_mut().unwrap() = headers; 
builder.body(x.body.unwrap().into()).unwrap() } }
use super::body::LuaBody; use super::header_map::LuaHeaderMap; use super::uri::LuaUri; use crate::path::Params; use hyper::header::{HeaderName, HeaderValue}; use hyper::http::request::Parts; use hyper::{Body, HeaderMap, Method, Request}; use mlua::{ExternalError, ExternalResult, FromLua, Lua, Table, UserData}; use std::cell::RefCell; use std::rc::Rc; pub struct LuaRequest { pub(crate) method: Method, pub(crate) uri: hyper::Uri, pub(crate) headers: Rc<RefCell<HeaderMap>>, pub(crate) body: Option<LuaBody>, params: Option<Params>, } impl LuaRequest { #[rustfmt::skip] pub fn new(req: Request<Body>, params: Params) -> Self { let (Parts { method, uri, headers, .. }, body) = req.into_parts(); let headers = Rc::new(RefCell::new(headers)); let body = Some(body.into()); let params = Some(params); Self { method, uri, headers, body, params } } } impl Default for LuaRequest { fn default() -> Self { Self { method: Method::GET, uri: Default::default(), headers: Default::default(), body: Some(LuaBody::Empty), params: None, } } } impl UserData for LuaRequest { fn add_fields<'lua, F: mlua::UserDataFields<'lua, Self>>(fields: &mut F) { fields.add_field_function_get("params", |lua, this| { this .get_named_user_value::<_, Table>("params") .or_else(|_err| { let mut this_ref = this.borrow_mut::<Self>()?; let params = this_ref .params .take() .map(|x| { let iter = x .into_iter() .map(|(k, v)| (k.into_string(), v.into_string())); lua.create_table_from(iter) }) .unwrap_or_else(|| lua.create_table())?; this.set_named_user_value("params", params.clone())?; Ok(params) }) }); fields.add_field_method_get("method", |lua, this| lua.pack(this.method.as_str())); fields.add_field_method_get("uri", |_lua, this| Ok(LuaUri(this.uri.clone()))); fields.add_field_function_get("body", |lua, this| { let mut this_ = this.borrow_mut::<Self>()?; let body = this_.body.take(); if let Some(body) = body { let x = lua.pack(body)?; this.set_named_user_value("body", x.clone())?; Ok(x) } else { 
this.get_named_user_value("body") } }); fields.add_field_method_get("headers", |_lua, this| { Ok(LuaHeaderMap(this.headers.clone())) }); } } impl<'lua> FromLua<'lua> for LuaRequest { fn from_lua(lua_value: mlua::Value<'lua>, _lua: &'lua Lua) -> mlua::Result<Self> { match lua_value { mlua::Value::String(uri) => Ok(Self { uri: hyper::Uri::try_from(uri.as_bytes()).to_lua_err()?, ..Default::default() }), mlua::Value::Table(table) => { let method = table .raw_get::<_, Option<mlua::String>>("method")? .map(|x| Method::from_bytes(x.as_bytes())) .transpose() .to_lua_err()? .unwrap_or(Method::GET); let uri: hyper::Uri = table .raw_get::<_, mlua::String>("uri")? .as_bytes() .
} impl From<LuaRequest> for Request<Body> { fn from(x: LuaRequest) -> Self { let headers = Rc::try_unwrap(x.headers) .map(RefCell::into_inner) .unwrap_or_else(|x| x.borrow().clone()); let mut builder = Request::builder().method(x.method).uri(x.uri); *builder.headers_mut().unwrap() = headers; builder.body(x.body.unwrap().into()).unwrap() } }
try_into() .to_lua_err()?; let headers_table: Option<Table> = table.raw_get("headers")?; let mut headers = HeaderMap::new(); if let Some(headers_table) = headers_table { for entry in headers_table.pairs::<mlua::String, mlua::Value>() { let (k, v) = entry?; let k = HeaderName::from_bytes(k.as_bytes()).to_lua_err()?; match v { mlua::Value::String(v) => { headers.append(k, HeaderValue::from_bytes(v.as_bytes()).to_lua_err()?); } mlua::Value::Table(vs) => { for v in vs.sequence_values::<mlua::String>() { let v = v?; headers.append(&k, HeaderValue::from_bytes(v.as_bytes()).to_lua_err()?); } } _ => return Err("expected string or table".to_lua_err()), } } } Ok(Self { method, uri, headers: Rc::new(RefCell::new(headers)), body: Some(table.raw_get("body")?), ..Default::default() }) } mlua::Value::UserData(x) => { let mut u = x.take::<Self>()?; if u.body.is_none() { let t = x.get_named_user_value::<_, LuaBody>("body")?; u.body = Some(t); } Ok(u) } _ => Err("expected string or table".to_lua_err()), } }
function_block-function_prefix_line
[ { "content": "pub fn create_fn_create_uri(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|_lua, s: mlua::String| {\n\n Ok(LuaUri(hyper::Uri::try_from(s.as_bytes()).to_lua_err()?))\n\n })\n\n}\n", "file_path": "hive-core/src/lua/http/uri.rs", "rank": 0, "score": 196827.6443182151...
Rust
src/main.rs
wikrsh/raytracing_in_one_weekend_rust
d752ab4cdbc67d951553797792bbd93e51aa6b39
use rand::prelude::random; use raytracing_in_one_weekend::camera::Camera; use raytracing_in_one_weekend::geometry::{Hittable, HittableList, Ray, Sphere}; use raytracing_in_one_weekend::material::{Dielectric, Lambertian, Material, Metal}; use raytracing_in_one_weekend::utils::color::{write_color, Color}; use raytracing_in_one_weekend::utils::vec3::Vec3; use std::io::{self, BufWriter}; use std::rc::Rc; fn ray_color<T: Hittable>(r: &Ray, world: &T, depth: i32) -> Color { if depth <= 0 { return Color::new(0.0, 0.0, 0.0); } if let Some(rec) = world.hit(r, 0.001, f64::INFINITY) { return if let Some((attenuation, scattered)) = rec.mat.as_ref().scatter(r, &rec) { attenuation * ray_color(&scattered, world, depth - 1) } else { Color::new(0.0, 0.0, 0.0) }; } let unit_direction = r.direction().unit(); let t = 0.5 * (unit_direction.y() + 1.0); (1.0 - t) * Color::new(1.0, 1.0, 1.0) + t * Color::new(0.5, 0.7, 1.0) } fn random_scene() -> HittableList { let mut world = HittableList::new(); let ground_material: Rc<Box<dyn Material>> = Rc::new(Box::new(Lambertian::new(Color::new(0.5, 0.5, 0.5)))); world.add(Box::new(Sphere::new( Vec3::new(0.0, -1000.0, 0.0), 1000.0, &ground_material, ))); for a in -11..11 { for b in -11..11 { let choose_mat: f64 = random(); let center = Vec3::new( (a as f64) + 0.9 * random::<f64>(), 0.2, (b as f64) + 0.9 * random::<f64>(), ); let sphere_material: Rc<Box<dyn Material>> = if choose_mat < 0.8 { let albedo = Color::new_random(0.0, 1.0) * Color::new_random(0.0, 1.0); Rc::new(Box::new(Lambertian::new(albedo))) } else if choose_mat < 0.95 { let albedo = Color::new_random(0.5, 1.0); let fuzz = 0.5 * random::<f64>(); Rc::new(Box::new(Metal::new(albedo, fuzz))) } else { Rc::new(Box::new(Dielectric::new(1.5))) }; world.add(Box::new(Sphere::new(center, 0.2, &sphere_material))); } } let material1: Rc<Box<dyn Material>> = Rc::new(Box::new(Dielectric::new(1.5))); world.add(Box::new(Sphere::new( Vec3::new(0.0, 1.0, 0.0), 1.0, &material1, ))); let material2: 
Rc<Box<dyn Material>> = Rc::new(Box::new(Lambertian::new(Color::new(0.4, 0.2, 0.1)))); world.add(Box::new(Sphere::new( Vec3::new(-4.0, 1.0, 0.0), 1.0, &material2, ))); let material3: Rc<Box<dyn Material>> = Rc::new(Box::new(Metal::new(Color::new(0.7, 0.6, 0.5), 0.0))); world.add(Box::new(Sphere::new( Vec3::new(4.0, 1.0, 0.0), 1.0, &material3, ))); world } fn main() -> io::Result<()> { let aspect_ratio = 3.0 / 2.0; let image_width: usize = 1200; let image_height: usize = (image_width as f64 / aspect_ratio) as usize; let samples_per_pixel = 500; let max_depth = 50; let world = random_scene(); let lookfrom = Vec3::new(13.0, 2.0, 3.0); let lookat = Vec3::new(0.0, 0.0, 0.0); let dist_to_focus = 10.0; let aperture = 0.1; let camera = Camera::new( lookfrom, lookat, Vec3::new(0.0, 1.0, 0.0), 20.0, aspect_ratio, aperture, dist_to_focus, ); println!("P3"); println!("{} {}", image_width, image_height); println!("255"); let mut writer = BufWriter::new(io::stdout()); for h in (0..image_height).rev() { eprintln!("Scan lines remaining: {}", h); for w in 0..image_width { let mut pixel_color = Color::new(0.0, 0.0, 0.0); for _ in 0..samples_per_pixel { let u = (w as f64 + random::<f64>()) / (image_width - 1) as f64; let v = (h as f64 + random::<f64>()) / (image_height - 1) as f64; let r = camera.get_ray(u, v); pixel_color += ray_color(&r, &world, max_depth); } write_color(&mut writer, &pixel_color, samples_per_pixel)?; } } eprintln!("Done."); Ok(()) }
use rand::prelude::random; use raytracing_in_one_weekend::camera::Camera; use raytracing_in_one_weekend::geometry::{Hittable, HittableList, Ray, Sphere}; use raytracing_in_one_weekend::material::{Dielectric, Lambertian, Material, Metal}; use raytracing_in_one_weekend::utils::color::{write_color, Color}; use raytracing_in_one_weekend::utils::vec3::Vec3; use std::io::{self, BufWriter}; use std::rc::Rc; fn ray_color<T: Hittable>(r: &Ray, world: &T, depth: i32) -> Color { if depth <= 0 { return Color::new(0.0, 0.0, 0.0); } if let Some(rec) = world.hit(r, 0.001, f64::INFINITY) { return if let Some((attenuation, scattered)) = rec.mat.as_ref().scatter(r, &rec) { attenuation * ray_color(&scattered, world, depth - 1) } else { Color::new(0.0, 0.0, 0.0) }; } let unit_direction = r.direction().unit(); let t = 0.5 * (unit_direction.y() + 1.0); (1.0 - t) * Color::new(1.0, 1.0, 1.0) + t * Color::new(0.5, 0.7, 1.0) } fn random_scene() -> HittableList { let mut world = HittableList::new(); let ground_material: Rc<Box<dyn Material>> = Rc::new(Box::new(Lambertian::new(Color::new(0.5, 0.5, 0.5)))); world.add(Box::new(Sphere::new( Vec3::new(0.0, -1000.0, 0.0), 1000.0, &ground_material, ))); for a in -11..11 { for b in -11..11 { let choose_mat: f64 = random(); let center = Vec3::new( (a as f64) + 0.9 * random::<f64>(), 0.2, (b as f64) + 0.9 * random::<f64>(), ); let sphere_material: Rc<Box<dyn Material>> = if choose_mat < 0.8 { let albedo = Color::new_random(0.0, 1.0) * Color::new_random(0.0, 1.0); Rc::new(Box::new(Lambertian::new(albedo))) } else if choose_mat < 0.95 { let albedo = Color::new_random(0.5, 1.0); let fuzz = 0.5 * random::<f64>(); Rc::new(Box::new(Metal::new(albedo, fuzz))) } else { Rc::new(Box::new(Dielectric::new(1.5))) };
fn main() -> io::Result<()> { let aspect_ratio = 3.0 / 2.0; let image_width: usize = 1200; let image_height: usize = (image_width as f64 / aspect_ratio) as usize; let samples_per_pixel = 500; let max_depth = 50; let world = random_scene(); let lookfrom = Vec3::new(13.0, 2.0, 3.0); let lookat = Vec3::new(0.0, 0.0, 0.0); let dist_to_focus = 10.0; let aperture = 0.1; let camera = Camera::new( lookfrom, lookat, Vec3::new(0.0, 1.0, 0.0), 20.0, aspect_ratio, aperture, dist_to_focus, ); println!("P3"); println!("{} {}", image_width, image_height); println!("255"); let mut writer = BufWriter::new(io::stdout()); for h in (0..image_height).rev() { eprintln!("Scan lines remaining: {}", h); for w in 0..image_width { let mut pixel_color = Color::new(0.0, 0.0, 0.0); for _ in 0..samples_per_pixel { let u = (w as f64 + random::<f64>()) / (image_width - 1) as f64; let v = (h as f64 + random::<f64>()) / (image_height - 1) as f64; let r = camera.get_ray(u, v); pixel_color += ray_color(&r, &world, max_depth); } write_color(&mut writer, &pixel_color, samples_per_pixel)?; } } eprintln!("Done."); Ok(()) }
world.add(Box::new(Sphere::new(center, 0.2, &sphere_material))); } } let material1: Rc<Box<dyn Material>> = Rc::new(Box::new(Dielectric::new(1.5))); world.add(Box::new(Sphere::new( Vec3::new(0.0, 1.0, 0.0), 1.0, &material1, ))); let material2: Rc<Box<dyn Material>> = Rc::new(Box::new(Lambertian::new(Color::new(0.4, 0.2, 0.1)))); world.add(Box::new(Sphere::new( Vec3::new(-4.0, 1.0, 0.0), 1.0, &material2, ))); let material3: Rc<Box<dyn Material>> = Rc::new(Box::new(Metal::new(Color::new(0.7, 0.6, 0.5), 0.0))); world.add(Box::new(Sphere::new( Vec3::new(4.0, 1.0, 0.0), 1.0, &material3, ))); world }
function_block-function_prefix_line
[ { "content": "pub fn write_color<T: Write>(\n\n writer: &mut T,\n\n pixel_color: &Color,\n\n samples_per_pixels: i32,\n\n) -> io::Result<()> {\n\n // Divide the color by the number of samples and gamma-correct for gamma=2.0\n\n let scale = 1.0 / samples_per_pixels as f64;\n\n let r = (pixel_co...
Rust
src/client/protocol/keyboard.rs
lummax/wayland-client-rs
5f41fe43d8a287d0b107cc10a2cc5045d2a537b6
#![allow(unused_imports)] use std::{ptr, mem}; use std::ffi::{CStr, CString}; use std::os::unix::io::RawFd; use libc::{c_void, c_int, uint32_t}; use ffi; use client::protocol::{FromPrimitive, GetInterface}; use client::base::Proxy as BaseProxy; use client::base::{FromRawPtr, AsRawPtr, EventQueue}; #[link(name="wayland-client")] extern { static wl_keyboard_interface: ffi::wayland::WLInterface; } #[repr(C)] #[derive(Debug)] pub enum KeyboardKeymapFormat { NoKeymap = 0, XkbV1 = 1, } impl FromPrimitive for KeyboardKeymapFormat { fn from_u32(num: u32) -> Option<Self> { return match num { 0 => Some(KeyboardKeymapFormat::NoKeymap), 1 => Some(KeyboardKeymapFormat::XkbV1), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } pub trait KeyboardKeymapFormatSet { fn has_no_keymap(&self) -> bool; fn has_xkb_v1(&self) -> bool; } impl KeyboardKeymapFormatSet for u32 { fn has_no_keymap(&self) -> bool { return self & (KeyboardKeymapFormat::NoKeymap as u32) != 0; } fn has_xkb_v1(&self) -> bool { return self & (KeyboardKeymapFormat::XkbV1 as u32) != 0; } } impl KeyboardKeymapFormatSet for i32 { fn has_no_keymap(&self) -> bool { return self & (KeyboardKeymapFormat::NoKeymap as i32) != 0; } fn has_xkb_v1(&self) -> bool { return self & (KeyboardKeymapFormat::XkbV1 as i32) != 0; } } #[repr(C)] #[derive(Debug)] pub enum KeyboardKeyState { Released = 0, Pressed = 1, } impl FromPrimitive for KeyboardKeyState { fn from_u32(num: u32) -> Option<Self> { return match num { 0 => Some(KeyboardKeyState::Released), 1 => Some(KeyboardKeyState::Pressed), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } pub trait KeyboardKeyStateSet { fn has_released(&self) -> bool; fn has_pressed(&self) -> bool; } impl KeyboardKeyStateSet for u32 { fn has_released(&self) -> bool { return self & (KeyboardKeyState::Released as u32) != 0; } fn has_pressed(&self) -> bool { return self & (KeyboardKeyState::Pressed as u32) != 0; } } impl 
KeyboardKeyStateSet for i32 { fn has_released(&self) -> bool { return self & (KeyboardKeyState::Released as i32) != 0; } fn has_pressed(&self) -> bool { return self & (KeyboardKeyState::Pressed as i32) != 0; } } #[repr(C)] enum KeyboardEvent { Keymap = 0, Enter = 1, Leave = 2, Key = 3, Modifiers = 4, RepeatInfo = 5, } impl FromPrimitive for KeyboardEvent { fn from_u32(num: u32) -> Option<Self> { return match num { 0 => Some(KeyboardEvent::Keymap), 1 => Some(KeyboardEvent::Enter), 2 => Some(KeyboardEvent::Leave), 3 => Some(KeyboardEvent::Key), 4 => Some(KeyboardEvent::Modifiers), 5 => Some(KeyboardEvent::RepeatInfo), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } #[repr(C)] enum KeyboardRequest { Release = 0, _Dummy, } impl FromPrimitive for KeyboardRequest { fn from_u32(num: u32) -> Option<Self> { return match num { 0 => Some(KeyboardRequest::Release), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } #[derive(Debug)] pub struct Keyboard { proxy: BaseProxy, } impl Keyboard { pub fn release(mut self) { let proxy = self.as_mut_ptr() as *mut ffi::wayland::WLProxy; unsafe { ffi::wayland::wl_proxy_marshal( proxy, KeyboardRequest::Release as u32 ); } } pub fn get_id(&mut self) -> u32 { return self.proxy.get_id(); } pub fn get_class(&mut self) -> String { return self.proxy.get_class(); } pub fn set_queue(&mut self, queue: Option<&mut EventQueue>) { self.proxy.set_queue(queue); } } impl FromRawPtr<ffi::wayland::WLProxy> for Keyboard { fn from_mut_ptr(ptr: *mut ffi::wayland::WLProxy) -> Result<Self, &'static str> { return match FromRawPtr::from_mut_ptr(ptr) { Ok(proxy) => Ok(Keyboard { proxy: proxy, }), Err(str) => Err(str), } } } impl AsRawPtr<ffi::wayland::WLProxy> for Keyboard { fn as_mut_ptr(&mut self) -> *mut ffi::wayland::WLProxy { return self.proxy.as_mut_ptr(); } } impl GetInterface for Keyboard { fn get_interface() -> *const ffi::wayland::WLInterface { return 
&wl_keyboard_interface as *const ffi::wayland::WLInterface; } } #[allow(unused_variables)] extern fn keyboard_event_dispatcher<T: KeyboardEventHandler>( user_data: *mut c_void, _target: *mut c_void, opcode: uint32_t, _message: *const ffi::wayland::WLMessage, arguments: *mut ffi::wayland::WLArgument) -> c_int { let object = user_data as *mut T; return match KeyboardEvent::from_u32(opcode) { Some(event) => { match event { KeyboardEvent::Keymap => { let format = unsafe { *(*arguments.offset(0)).uint() }; let fd = unsafe { *(*arguments.offset(1)).file_descriptor() }; let size = unsafe { *(*arguments.offset(2)).uint() }; unsafe { (*object).on_keymap(format, fd, size); } }, KeyboardEvent::Enter => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let surface = unsafe { *(*arguments.offset(1)).object() }; let keys = unsafe { *(*arguments.offset(2)).array() }; unsafe { (*object).on_enter(serial, surface, keys); } }, KeyboardEvent::Leave => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let surface = unsafe { *(*arguments.offset(1)).object() }; unsafe { (*object).on_leave(serial, surface); } }, KeyboardEvent::Key => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let time = unsafe { *(*arguments.offset(1)).uint() }; let key = unsafe { *(*arguments.offset(2)).uint() }; let state = unsafe { *(*arguments.offset(3)).uint() }; unsafe { (*object).on_key(serial, time, key, state); } }, KeyboardEvent::Modifiers => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let mods_depressed = unsafe { *(*arguments.offset(1)).uint() }; let mods_latched = unsafe { *(*arguments.offset(2)).uint() }; let mods_locked = unsafe { *(*arguments.offset(3)).uint() }; let group = unsafe { *(*arguments.offset(4)).uint() }; unsafe { (*object).on_modifiers(serial, mods_depressed, mods_latched, mods_locked, group); } }, KeyboardEvent::RepeatInfo => { let rate = unsafe { *(*arguments.offset(0)).int() }; let delay = unsafe { *(*arguments.offset(1)).int() }; unsafe { 
(*object).on_repeat_info(rate, delay); } }, } 0 }, _ => -1, } } pub trait KeyboardEventHandler: Sized { fn connect_dispatcher(&mut self) { unsafe { ffi::wayland::wl_proxy_add_dispatcher( self.get_keyboard().as_mut_ptr(), keyboard_event_dispatcher::<Self>, self as *mut Self as *mut c_void, ptr::null_mut()); } } fn get_keyboard(&mut self) -> &mut Keyboard; #[allow(unused_variables)] fn on_keymap(&mut self, format: u32, fd: RawFd, size: u32) {} #[allow(unused_variables)] fn on_enter(&mut self, serial: u32, surface: *mut ffi::wayland::WLObject, keys: *mut ffi::wayland::WLArray) {} #[allow(unused_variables)] fn on_leave(&mut self, serial: u32, surface: *mut ffi::wayland::WLObject) {} #[allow(unused_variables)] fn on_key(&mut self, serial: u32, time: u32, key: u32, state: u32) {} #[allow(unused_variables)] fn on_modifiers(&mut self, serial: u32, mods_depressed: u32, mods_latched: u32, mods_locked: u32, group: u32) {} #[allow(unused_variables)] fn on_repeat_info(&mut self, rate: i32, delay: i32) {} }
#![allow(unused_imports)] use std::{ptr, mem}; use std::ffi::{CStr, CString}; use std::os::unix::io::RawFd; use libc::{c_void, c_int, uint32_t}; use ffi; use client::protocol::{FromPrimitive, GetInterface}; use client::base::Proxy as BaseProxy; use client::base::{FromRawPtr, AsRawPtr, EventQueue}; #[link(name="wayland-client")] extern { static wl_keyboard_interface: ffi::wayland::WLInterface; } #[repr(C)] #[derive(Debug)] pub enum KeyboardKeymapFormat { NoKeymap = 0, XkbV1 = 1, } impl FromPrimitive for KeyboardKeymapFormat { fn from_u32(num: u32) -> Option<Self> { return match num { 0 => Some(KeyboardKeymapFormat::NoKeymap), 1 => Some(KeyboardKeymapFormat::XkbV1), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } pub trait KeyboardKeymapFormatSet { fn has_no_keymap(&self) -> bool; fn has_xkb_v1(&self) -> bool; } impl KeyboardKeymapFormatSet for u32 { fn has_no_keymap(&self) -> bool { return self & (KeyboardKeymapFormat::NoKeymap as u32) != 0; } fn has_xkb_v1(&self) -> bool { return self & (KeyboardKeymapFormat::XkbV1 as u32) != 0; } } impl KeyboardKeymapFormatSet for i32 { fn has_no_keymap(&self) -> bool { return self & (KeyboardKeymapFormat::NoKeymap as i32) != 0; } fn has_xkb_v1(&self) -> bool { return self & (KeyboardKeymapFormat::XkbV1 as i32) != 0; } } #[repr(C)] #[derive(Debug)] pub enum KeyboardKeyState { Released = 0, Pressed = 1, } impl FromPrimitive for KeyboardKeyState { fn from_u32(num: u32) -> Option<Self> { return match num { 0 => Some(KeyboardKeyState::Released), 1 => Some(KeyboardKeyState::Pressed), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } pub trait KeyboardKeyStateSet { fn has_released(&self) -> bool; fn has_pressed(&self) -> bool; } impl KeyboardKeyStateSet for u32 { fn has_released(&self) -> bool { return self & (KeyboardKeyState::Released as u32) != 0; } fn has_pressed(&self) -> bool { return self & (KeyboardKeyState::Pressed as u32) != 0; } } impl 
KeyboardKeyStateSet for i32 { fn has_released(&self) -> bool { return self & (KeyboardKeyState::Released as i32) != 0; } fn has_pressed(&self) -> bool { return self & (KeyboardKeyState::Pressed as i32) != 0; } } #[repr(C)] enum KeyboardEvent { Keymap = 0, Enter = 1, Leave = 2, Key = 3, Modifiers = 4, RepeatInfo = 5, } impl FromPrimitive for KeyboardEvent { fn from_u32(num: u32) -> Option<Self> {
4 => Some(KeyboardEvent::Modifiers), 5 => Some(KeyboardEvent::RepeatInfo), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } #[repr(C)] enum KeyboardRequest { Release = 0, _Dummy, } impl FromPrimitive for KeyboardRequest { fn from_u32(num: u32) -> Option<Self> { return match num { 0 => Some(KeyboardRequest::Release), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } #[derive(Debug)] pub struct Keyboard { proxy: BaseProxy, } impl Keyboard { pub fn release(mut self) { let proxy = self.as_mut_ptr() as *mut ffi::wayland::WLProxy; unsafe { ffi::wayland::wl_proxy_marshal( proxy, KeyboardRequest::Release as u32 ); } } pub fn get_id(&mut self) -> u32 { return self.proxy.get_id(); } pub fn get_class(&mut self) -> String { return self.proxy.get_class(); } pub fn set_queue(&mut self, queue: Option<&mut EventQueue>) { self.proxy.set_queue(queue); } } impl FromRawPtr<ffi::wayland::WLProxy> for Keyboard { fn from_mut_ptr(ptr: *mut ffi::wayland::WLProxy) -> Result<Self, &'static str> { return match FromRawPtr::from_mut_ptr(ptr) { Ok(proxy) => Ok(Keyboard { proxy: proxy, }), Err(str) => Err(str), } } } impl AsRawPtr<ffi::wayland::WLProxy> for Keyboard { fn as_mut_ptr(&mut self) -> *mut ffi::wayland::WLProxy { return self.proxy.as_mut_ptr(); } } impl GetInterface for Keyboard { fn get_interface() -> *const ffi::wayland::WLInterface { return &wl_keyboard_interface as *const ffi::wayland::WLInterface; } } #[allow(unused_variables)] extern fn keyboard_event_dispatcher<T: KeyboardEventHandler>( user_data: *mut c_void, _target: *mut c_void, opcode: uint32_t, _message: *const ffi::wayland::WLMessage, arguments: *mut ffi::wayland::WLArgument) -> c_int { let object = user_data as *mut T; return match KeyboardEvent::from_u32(opcode) { Some(event) => { match event { KeyboardEvent::Keymap => { let format = unsafe { *(*arguments.offset(0)).uint() }; let fd = unsafe { *(*arguments.offset(1)).file_descriptor() 
}; let size = unsafe { *(*arguments.offset(2)).uint() }; unsafe { (*object).on_keymap(format, fd, size); } }, KeyboardEvent::Enter => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let surface = unsafe { *(*arguments.offset(1)).object() }; let keys = unsafe { *(*arguments.offset(2)).array() }; unsafe { (*object).on_enter(serial, surface, keys); } }, KeyboardEvent::Leave => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let surface = unsafe { *(*arguments.offset(1)).object() }; unsafe { (*object).on_leave(serial, surface); } }, KeyboardEvent::Key => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let time = unsafe { *(*arguments.offset(1)).uint() }; let key = unsafe { *(*arguments.offset(2)).uint() }; let state = unsafe { *(*arguments.offset(3)).uint() }; unsafe { (*object).on_key(serial, time, key, state); } }, KeyboardEvent::Modifiers => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let mods_depressed = unsafe { *(*arguments.offset(1)).uint() }; let mods_latched = unsafe { *(*arguments.offset(2)).uint() }; let mods_locked = unsafe { *(*arguments.offset(3)).uint() }; let group = unsafe { *(*arguments.offset(4)).uint() }; unsafe { (*object).on_modifiers(serial, mods_depressed, mods_latched, mods_locked, group); } }, KeyboardEvent::RepeatInfo => { let rate = unsafe { *(*arguments.offset(0)).int() }; let delay = unsafe { *(*arguments.offset(1)).int() }; unsafe { (*object).on_repeat_info(rate, delay); } }, } 0 }, _ => -1, } } pub trait KeyboardEventHandler: Sized { fn connect_dispatcher(&mut self) { unsafe { ffi::wayland::wl_proxy_add_dispatcher( self.get_keyboard().as_mut_ptr(), keyboard_event_dispatcher::<Self>, self as *mut Self as *mut c_void, ptr::null_mut()); } } fn get_keyboard(&mut self) -> &mut Keyboard; #[allow(unused_variables)] fn on_keymap(&mut self, format: u32, fd: RawFd, size: u32) {} #[allow(unused_variables)] fn on_enter(&mut self, serial: u32, surface: *mut ffi::wayland::WLObject, keys: *mut 
ffi::wayland::WLArray) {} #[allow(unused_variables)] fn on_leave(&mut self, serial: u32, surface: *mut ffi::wayland::WLObject) {} #[allow(unused_variables)] fn on_key(&mut self, serial: u32, time: u32, key: u32, state: u32) {} #[allow(unused_variables)] fn on_modifiers(&mut self, serial: u32, mods_depressed: u32, mods_latched: u32, mods_locked: u32, group: u32) {} #[allow(unused_variables)] fn on_repeat_info(&mut self, rate: i32, delay: i32) {} }
return match num { 0 => Some(KeyboardEvent::Keymap), 1 => Some(KeyboardEvent::Enter), 2 => Some(KeyboardEvent::Leave), 3 => Some(KeyboardEvent::Key),
function_block-random_span
[ { "content": "pub trait AsRawPtr<T> {\n\n fn as_mut_ptr(&mut self) -> *mut T;\n\n}\n", "file_path": "src/client/base/mod.rs", "rank": 2, "score": 128661.67143358607 }, { "content": "pub trait FromPrimitive {\n\n fn from_u32(num: u32) -> Option<Self>;\n\n fn from_i32(num: i32) -> Opt...
Rust
src/lib.rs
norse-rs/norse-billow
cfef6391371ee44137a977a44b564d5920fd2a06
/*! Allocator for SoA data layout. `billow` allows to define a [`BlockLayout`](struct.BlockLayout.html) which encodes a SoA data layout. This layout can be used to subdivide user allocated memory blocks in a tight and aligned fashion. ## Struct of Arrays Struct of Arrays (SoA) describes a deinterleaved memory layout of struct fields. Each array has the same number of elements. This layout is usually better suited for SIMD operations, ```ignore +-----+-----+-----+----- | A | A | A | ... +-----+-----+-----+----- +-------+-------+-------+----- | B | B | B | ... +-------+-------+-------+----- +---+---+---+----- | C | C | C | ... +---+---+---+----- ``` ## Examples Allocating an aligned memory block from the system allocator and define a layout for the following struct in SoA layout: ```rust type Transform = [[f32; 4]; 4]; type Velocity = [f32; 3]; struct Block<'a> { transforms: &'a mut [Transform], velocity: &'a mut [Velocity], } ``` ```rust # use norse_billow as billow; # use std::alloc::{self, Layout, LayoutErr}; # use std::ptr::NonNull; # type Transform = [[f32; 4]; 4]; # type Velocity = [f32; 3]; # fn main() -> Result<(), LayoutErr> { const NUM_ELEMENTS: usize = 128; // Define SoA layout. let mut layout = billow::BlockLayout::build(); let transform_id = layout.add::<Transform>(); let velocity_id = layout.add::<Velocity>(); let block_layout = layout.finish(); // Allocate memory block for holding the elements. let layout = block_layout.layout(); let size = layout.size() * NUM_ELEMENTS; let memory = unsafe { alloc::alloc(Layout::from_size_align(size, layout.align())?) }; let block = block_layout.apply(NonNull::new(memory).unwrap(), layout.size() * 128); assert_eq!(block.len(), NUM_ELEMENTS); // Get struct fields. 
let transforms = unsafe { block.as_slice::<Transform>(transform_id) }; let velocities = unsafe { block.as_slice::<Velocity>(velocity_id) }; assert_eq!(transforms.len(), velocities.len()); # Ok(()) # } ``` */ use indexmap::IndexMap; use std::alloc::Layout; use std::ops::Range; use std::ptr::NonNull; use std::slice; pub type LayoutSlot = usize; pub struct LayoutBuilder { layouts: Vec<(LayoutSlot, Layout)>, max_alignment: usize, element_size: usize, } impl LayoutBuilder { pub fn add<T>(&mut self) -> LayoutSlot { let layout = Layout::new::<T>(); self.max_alignment = self.max_alignment.max(layout.align()); self.element_size += layout.size(); let slot = self.layouts.len(); self.layouts.push((slot, layout)); slot } pub fn finish(mut self) -> BlockLayout { self.layouts .sort_by(|(slot_a, layout_a), (slot_b, layout_b)| { layout_a .align() .cmp(&layout_b.align()) .reverse() .then(slot_a.cmp(slot_b)) }); &self.layouts; let slot_map = self .layouts .iter() .enumerate() .map(|(i, (slot, _))| (*slot, i)) .collect(); &slot_map; let sub_layouts = self.layouts.into_iter().map(|(_, layout)| layout).collect(); let layout = Layout::from_size_align(self.element_size, self.max_alignment).unwrap(); BlockLayout { slot_map, layout, sub_layouts, } } } pub struct BlockLayout { slot_map: IndexMap<LayoutSlot, usize>, layout: Layout, sub_layouts: Vec<Layout>, } impl BlockLayout { pub fn build() -> LayoutBuilder { LayoutBuilder { layouts: Vec::new(), max_alignment: 1, element_size: 0, } } pub fn layout(&self) -> Layout { self.layout } pub fn apply(&self, data: NonNull<u8>, size: usize) -> Block { if self.sub_layouts.is_empty() { return Block { range: 0..0, len: 0, slices: Vec::new(), }; } assert_eq!(self.layout.align() & (self.layout.align() - 1), 0); let ptr = data.as_ptr(); let start = (ptr as usize + self.layout.align() - 1) & !(self.layout.align() - 1); let end = (ptr as usize + size) & !(self.layout.align() - 1); let initial_offset = start - ptr as usize; let size_aligned = end - start; let 
len = if self.layout.size() == 0 { !0 } else { size_aligned / self.layout.size() }; let mut offset = 0; let mut offsets = Vec::with_capacity(self.sub_layouts.len()); for layout in &self.sub_layouts { assert_eq!(offset % layout.align(), 0); offsets.push(offset); offset += layout.size() * len; } let mut slices = Vec::with_capacity(self.sub_layouts.len()); for slot in self.slot_map.values() { let offset = offsets[*slot]; slices.push(NonNull::new(unsafe { (start as *mut u8).offset(offset as _) }).unwrap()); } Block { range: initial_offset..initial_offset + size_aligned, len, slices, } } } pub struct Block { range: Range<usize>, len: usize, slices: Vec<NonNull<u8>>, } impl Block { pub fn range(&self) -> Range<usize> { self.range.clone() } pub fn len(&self) -> usize { self.len } pub unsafe fn as_raw<T>(&self, slot: LayoutSlot) -> (*mut T, usize) { let slice = &self.slices[slot]; (slice.cast::<T>().as_ptr(), self.len) } pub unsafe fn as_slice<T: Copy>(&self, slot: LayoutSlot) -> &mut [T] { let slice = &self.slices[slot]; slice::from_raw_parts_mut(slice.cast::<T>().as_ptr(), self.len) } } #[cfg(test)] mod test { use super::*; #[test] fn empty() { let layout = BlockLayout::build().finish(); let mut block = [0; 32]; layout.apply(NonNull::new(block.as_mut_ptr()).unwrap(), 32); } #[test] fn single_zst() { struct Foo; let (layout, foo) = { let mut layout = BlockLayout::build(); let foo = layout.add::<Foo>(); (layout.finish(), foo) }; let mut data = [0; 32]; let block = layout.apply(NonNull::new(data.as_mut_ptr()).unwrap(), 32); unsafe { block.as_raw::<Foo>(foo); } } #[test] fn ordering() { #[derive(Copy, Clone)] struct Small { _a: u8, _b: u8, _c: u8, } #[derive(Copy, Clone)] struct Large { _a: f32, _b: [u64; 8], } let (layout, small, large) = { let mut layout = BlockLayout::build(); let small = layout.add::<Small>(); let large = layout.add::<Large>(); (layout.finish(), small, large) }; let mut data = [0; 512]; let block = layout.apply(NonNull::new(data.as_mut_ptr()).unwrap(), 
512); let small_layout = Layout::new::<Small>(); let large_layout = Layout::new::<Large>(); assert_eq!( layout.layout().align(), small_layout.align().max(large_layout.align()) ); assert_eq!( layout.layout().size(), small_layout.size() + large_layout.size() ); unsafe { block.as_slice::<Small>(small); block.as_slice::<Large>(large); } } }
/*! Allocator for SoA data layout. `billow` allows to define a [`BlockLayout`](struct.BlockLayout.html) which encodes a SoA data layout. This layout can be used to subdivide user allocated memory blocks in a tight and aligned fashion. ## Struct of Arrays Struct of Arrays (SoA) describes a deinterleaved memory layout of struct fields. Each array has the same number of elements. This layout is usually better suited for SIMD operations, ```ignore +-----+-----+-----+----- | A | A | A | ... +-----+-----+-----+----- +-------+-------+-------+----- | B | B | B | ... +-------+-------+-------+----- +---+---+---+----- | C | C | C | ... +---+---+---+----- ``` ## Examples Allocating an aligned memory block from the system allocator and define a layout for the following struct in SoA layout: ```rust type Transform = [[f32; 4]; 4]; type Velocity = [f32; 3]; struct Block<'a> { transforms: &'a mut [Transform], velocity: &'a mut [Velocity], } ``` ```rust # use norse_billow as billow; # use std::alloc::{self, Layout, LayoutErr}; # use std::ptr::NonNull; # type Transform = [[f32; 4]; 4]; # type Velocity = [f32; 3]; # fn main() -> Result<(), LayoutErr> { const NUM_ELEMENTS: usize = 128; // Define SoA layout. let mut layout = billow::BlockLayout::build(); let transform_id = layout.add::<Transform>(); let velocity_id = layout.add::<Velocity>(); let block_layout = layout.finish(); // Allocate memory block for holding the elements. let layout = block_layout.layout(); let size = layout.size() * NUM_ELEMENTS; let memory = unsafe { alloc::alloc(Layout::from_size_align(size, layout.align())?) }; let block = block_layout.apply(NonNull::new(memory).unwrap(), layout.size() * 128); assert_eq!(block.len(), NUM_ELEMENTS); // Get struct fields. 
let transforms = unsafe { block.as_slice::<Transform>(transform_id) }; let velocities = unsafe { block.as_slice::<Velocity>(velocity_id) }; assert_eq!(transforms.len(), velocities.len()); # Ok(()) # } ``` */ use indexmap::IndexMap; use std::alloc::Layout; use std::ops::Range; use std::ptr::NonNull; use std::slice; pub type LayoutSlot = usize; pub struct LayoutBuilder { layouts: Vec<(LayoutSlot, Layout)>, max_alignment: usize, element_size: usize, } impl LayoutBuilder { pub fn add<T>(&mut self) -> LayoutSlot { let layout = Layout::new::<T>(); self.max_alignment = self.max_alignment.max(layout.align()); self.element_size += layout.size(); let slot = self.layouts.len(); self.layouts.push((slot, layout)); slot } pub fn finish(mut self) -> BlockLayout { self.layouts .sort_by(|(slot_a, layout_a), (slot_b, layout_b)| { layout_a .align() .cmp(&layout_b.align()) .reverse() .then(slot_a.cmp(slot_b)) }); &self.layouts; let slot_map = self .layouts .iter() .enumerate() .map(|(i, (slot, _))| (*slot, i)) .collect(); &slot_map; let sub_layouts = self.layouts.into_iter().map(|(_, layout)| layout).collect(); let layout = Layout::from_size_align(self.element_size, self.max_alignment).unwrap(); BlockLayout { slot_map, layout, sub_layouts, } } } pub struct BlockLayout { slot_map: IndexMap<LayoutSlot, usize>, layout: Layout, sub_layouts: Vec<Layout>, } impl BlockLayout { pub fn build() -> LayoutBuilder { LayoutBuilder { layouts: Vec::new(), max_alignment: 1, element_size: 0, } } pub fn layout(&self) -> Layout { self.layout } pub fn apply(&self, data: NonNull<u8>, size: usize) -> Block { if self.sub_layouts.is_empty() { return Block { range: 0..0, len: 0, slices: Vec::new(), }; } assert_eq!(self.layout.align() & (self.layout.align() - 1), 0); let ptr = data.as_ptr(); let start = (ptr as usize + self.layout.align() - 1) & !(self.layout.align() - 1); let end = (ptr as usize + size) & !(self.layout.align() - 1); let initial_offset = start - ptr as usize; let size_aligned = end - start; let 
len = if self.layout.size() == 0 { !0 } else { size_aligned / self.layout.size() }; let mut offset = 0; let mut offsets = Vec::with_capacity(self.sub_layouts.le
} pub struct Block { range: Range<usize>, len: usize, slices: Vec<NonNull<u8>>, } impl Block { pub fn range(&self) -> Range<usize> { self.range.clone() } pub fn len(&self) -> usize { self.len } pub unsafe fn as_raw<T>(&self, slot: LayoutSlot) -> (*mut T, usize) { let slice = &self.slices[slot]; (slice.cast::<T>().as_ptr(), self.len) } pub unsafe fn as_slice<T: Copy>(&self, slot: LayoutSlot) -> &mut [T] { let slice = &self.slices[slot]; slice::from_raw_parts_mut(slice.cast::<T>().as_ptr(), self.len) } } #[cfg(test)] mod test { use super::*; #[test] fn empty() { let layout = BlockLayout::build().finish(); let mut block = [0; 32]; layout.apply(NonNull::new(block.as_mut_ptr()).unwrap(), 32); } #[test] fn single_zst() { struct Foo; let (layout, foo) = { let mut layout = BlockLayout::build(); let foo = layout.add::<Foo>(); (layout.finish(), foo) }; let mut data = [0; 32]; let block = layout.apply(NonNull::new(data.as_mut_ptr()).unwrap(), 32); unsafe { block.as_raw::<Foo>(foo); } } #[test] fn ordering() { #[derive(Copy, Clone)] struct Small { _a: u8, _b: u8, _c: u8, } #[derive(Copy, Clone)] struct Large { _a: f32, _b: [u64; 8], } let (layout, small, large) = { let mut layout = BlockLayout::build(); let small = layout.add::<Small>(); let large = layout.add::<Large>(); (layout.finish(), small, large) }; let mut data = [0; 512]; let block = layout.apply(NonNull::new(data.as_mut_ptr()).unwrap(), 512); let small_layout = Layout::new::<Small>(); let large_layout = Layout::new::<Large>(); assert_eq!( layout.layout().align(), small_layout.align().max(large_layout.align()) ); assert_eq!( layout.layout().size(), small_layout.size() + large_layout.size() ); unsafe { block.as_slice::<Small>(small); block.as_slice::<Large>(large); } } }
n()); for layout in &self.sub_layouts { assert_eq!(offset % layout.align(), 0); offsets.push(offset); offset += layout.size() * len; } let mut slices = Vec::with_capacity(self.sub_layouts.len()); for slot in self.slot_map.values() { let offset = offsets[*slot]; slices.push(NonNull::new(unsafe { (start as *mut u8).offset(offset as _) }).unwrap()); } Block { range: initial_offset..initial_offset + size_aligned, len, slices, } }
function_block-function_prefixed
[ { "content": "\n\n<h1 align=\"center\">billow</h1>\n\n<p align=\"center\">\n\n <a href=\"https://github.com/norse-rs\">\n\n <img src=\"https://img.shields.io/badge/project-norse-9cf.svg?style=flat-square\" alt=\"NORSE\">\n\n </a>\n\n <a href=\"LICENSE-MIT\">\n\n <img src=\"https://img.shield...
Rust
noria-server/dataflow/src/state/mk_key.rs
JustusAdam/noria
093fed9d7fec410a3f1876870ab39455d8056b78
use prelude::*; pub(super) trait MakeKey<A> { fn from_row(key: &[usize], row: &[A]) -> Self; fn from_key(key: &[A]) -> Self; } impl<A: Clone> MakeKey<A> for (A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 2); (row[key[0]].clone(), row[key[1]].clone()) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 2); (key[0].clone(), key[1].clone()) } } impl<A: Clone> MakeKey<A> for (A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 3); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), ) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 3); (key[0].clone(), key[1].clone(), key[2].clone()) } } impl<A: Clone> MakeKey<A> for (A, A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 4); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), row[key[3]].clone(), ) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 4); ( key[0].clone(), key[1].clone(), key[2].clone(), key[3].clone(), ) } } impl<A: Clone> MakeKey<A> for (A, A, A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 5); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), row[key[3]].clone(), row[key[4]].clone(), ) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 5); ( key[0].clone(), key[1].clone(), key[2].clone(), key[3].clone(), key[4].clone(), ) } } impl<A: Clone> MakeKey<A> for (A, A, A, A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 6); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), row[key[3]].clone(), row[key[4]].clone(), row[key[5]].clone(), ) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 6); ( key[0].clone(), key[1].clone(), 
key[2].clone(), key[3].clone(), key[4].clone(), key[5].clone(), ) } } #[inline(always)] pub fn key_type_from_row<'a>(key: &[usize], row: &'a[DataType]) -> KeyType<'a> { match key.len() { 1 => KeyType::Single(&row[0]), 2 => KeyType::Double(MakeKey::from_row(key, row)), 3 => KeyType::Tri(MakeKey::from_row(key, row)), 4 => KeyType::Quad(MakeKey::from_row(key, row)), 5 => KeyType::Quin(MakeKey::from_row(key, row)), 6 => KeyType::Sex(MakeKey::from_row(key, row)), s => panic!("No state key implemented for keys of size {}", s), } } #[inline(always)] pub fn key_type_from_key<'a>(key: &'a [DataType]) -> KeyType<'a> { match key.len() { 1 => KeyType::Single(&key[0]), 2 => KeyType::Double(MakeKey::from_key(key)), 3 => KeyType::Tri(MakeKey::from_key(key)), 4 => KeyType::Quad(MakeKey::from_key(key)), 5 => KeyType::Quin(MakeKey::from_key(key)), 6 => KeyType::Sex(MakeKey::from_key(key)), s => panic!("No state key implemented for keys of size {}", s), } }
use prelude::*; pub(super) trait MakeKey<A> { fn from_row(key: &[usize], row: &[A]) -> Self; fn from_key(key: &[A]) -> Self; } impl<A: Clone> MakeKey<A> for (A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 2); (row[key[0]].clone(), row[key[1]].clone()) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 2); (key[0].clone(), key[1].clone()) } } impl<A: Clone> MakeKey<A> for (A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 3); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), ) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 3); (key[0].clone(), key[1].clone(), key[2].clone()) } } impl<A: Clone> MakeKey<A> for (A, A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 4); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), row[key[3]].clone(), ) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 4); ( key[0].clone(), key[1].clone(), key[2].clone(), key[3].clone(), ) } } impl<A: Clone> MakeKey<A> for (A, A, A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 5); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), row[key[3]].clone(), row[key[4]].clone(), ) } #[inline(always)]
} impl<A: Clone> MakeKey<A> for (A, A, A, A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 6); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), row[key[3]].clone(), row[key[4]].clone(), row[key[5]].clone(), ) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 6); ( key[0].clone(), key[1].clone(), key[2].clone(), key[3].clone(), key[4].clone(), key[5].clone(), ) } } #[inline(always)] pub fn key_type_from_row<'a>(key: &[usize], row: &'a[DataType]) -> KeyType<'a> { match key.len() { 1 => KeyType::Single(&row[0]), 2 => KeyType::Double(MakeKey::from_row(key, row)), 3 => KeyType::Tri(MakeKey::from_row(key, row)), 4 => KeyType::Quad(MakeKey::from_row(key, row)), 5 => KeyType::Quin(MakeKey::from_row(key, row)), 6 => KeyType::Sex(MakeKey::from_row(key, row)), s => panic!("No state key implemented for keys of size {}", s), } } #[inline(always)] pub fn key_type_from_key<'a>(key: &'a [DataType]) -> KeyType<'a> { match key.len() { 1 => KeyType::Single(&key[0]), 2 => KeyType::Double(MakeKey::from_key(key)), 3 => KeyType::Tri(MakeKey::from_key(key)), 4 => KeyType::Quad(MakeKey::from_key(key)), 5 => KeyType::Quin(MakeKey::from_key(key)), 6 => KeyType::Sex(MakeKey::from_key(key)), s => panic!("No state key implemented for keys of size {}", s), } }
fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 5); ( key[0].clone(), key[1].clone(), key[2].clone(), key[3].clone(), key[4].clone(), ) }
function_block-full_function
[ { "content": "/// Trait for implementing operations that collapse a group of records into a single record.\n\n///\n\n/// Implementors of this trait can be used as nodes in a `flow::FlowGraph` by wrapping them in a\n\n/// `GroupedOperator`.\n\n///\n\n/// At a high level, the operator is expected to work in the f...
Rust
src/config.rs
pckilgore/syngesture
fe30480f7567e802713c0f85c15f6b60a72e4d9e
use crate::events::*; use serde::Deserialize; use std::collections::BTreeMap; use std::ffi::OsStr; use std::path::{Path, PathBuf}; const PREFIX: Option<&'static str> = option_env!("PREFIX"); pub(crate) type Device = String; pub(crate) type GestureMap = BTreeMap<Gesture, Action>; type BoxedError = Box<dyn std::error::Error + Send + Sync>; type Result<T> = std::result::Result<T, BoxedError>; pub(crate) struct Configuration { pub devices: BTreeMap<Device, GestureMap>, } impl Configuration { pub fn new() -> Self { Self { devices: Default::default(), } } } #[derive(Deserialize)] #[serde(rename_all = "lowercase")] pub(crate) enum Action { #[serde(skip)] None, Execute(String), } impl Default for Action { fn default() -> Self { Action::None } } pub(crate) fn load() -> Configuration { let mut config = Configuration::new(); let prefix = PathBuf::from(PREFIX.unwrap_or("/usr/local")); let global_config = prefix.join("etc/syngestures.toml"); if global_config.exists() { try_load_config_file(&mut config, &global_config); } let global_config_dir = prefix.join("etc/syngestures.d"); try_load_config_dir(&mut config, &global_config_dir); load_user_config(&mut config); if config.devices.is_empty() { eprintln!("No configuration found!"); eprintln!("Searched for configuration files in the following locations:"); eprintln!("* {}/etc/syngestures.toml", global_config_dir.display()); eprintln!("* {}/etc/syngestures.d/*.toml", global_config_dir.display()); eprintln!("* $XDG_HOME/syngestures.toml"); eprintln!("* $XDG_HOME/syngestures.d/*.toml"); eprintln!("* $HOME/.config/syngestures.toml"); eprintln!("* $HOME/.config/syngestures.d/*.toml"); } config } fn try_load_config_file(config: &mut Configuration, path: &Path) { if let Err(e) = load_config_file(config, &path) { eprintln!( "Error loading configuration file at {}: {}", path.display(), e ); } } fn try_load_config_dir(config: &mut Configuration, dir: &Path) { if let Err(e) = load_config_dir(config, &dir) { eprintln!( "Error reading from 
configuration directory {}: {}", dir.display(), e ); } } fn load_user_config(mut config: &mut Configuration) { use std::env::VarError; let config_home = match std::env::var("XDG_CONFIG_HOME") { Ok(xdg_config_home) => PathBuf::from(xdg_config_home), Err(VarError::NotPresent) => match get_user_config_dir() { Ok(dir) => PathBuf::from(dir), Err(e) => { eprintln!("{}", e); return; } }, Err(VarError::NotUnicode(_)) => { eprintln!("Invalid XDG_CONFIG_HOME"); return; } }; let user_config_file = config_home.join("syngestures.toml"); if user_config_file.exists() { try_load_config_file(&mut config, &user_config_file); } let user_config_dir = config_home.join("syngestures.d"); try_load_config_dir(&mut config, &user_config_dir); } fn get_user_config_dir() -> Result<PathBuf> { #[allow(deprecated)] let home = std::env::home_dir(); if home.is_none() || home.as_ref().unwrap() == &PathBuf::new() { return Err("Could not determine user home directory!".into()); } let config_home = home.unwrap().join(".config/"); Ok(config_home) } fn load_config_dir(mut config: &mut Configuration, dir: &Path) -> Result<()> { use std::fs::DirEntry; if !dir.exists() || !dir.is_dir() { return Ok(()); } let toml = OsStr::new("toml"); for item in dir.read_dir()? 
{ let item = match item { Ok(item) => item, Err(e) => { eprintln!( "Error reading file from configuration directory {}: {}", dir.display(), e ); continue; } }; let mut process_item = |item: &DirEntry| -> Result<()> { if item.file_type()?.is_dir() { return Ok(()); } let item = item.path(); if item.extension() != Some(toml) { return Ok(()); } try_load_config_file(&mut config, &item); Ok(()) }; if let Err(e) = process_item(&item) { eprintln!("Error loading {}: {}", item.path().to_string_lossy(), e); } } Ok(()) } fn load_config_file(config: &mut Configuration, path: &Path) -> Result<()> { #[derive(Deserialize)] struct ConfigGestureAndAction { #[serde(flatten)] pub gesture: Gesture, #[serde(flatten)] pub action: Action, } #[derive(Deserialize)] struct ConfigDeviceGestures { pub device: Device, pub gestures: Vec<ConfigGestureAndAction>, } #[derive(Deserialize)] struct ConfigFile { #[serde(alias = "device")] pub devices: Vec<ConfigDeviceGestures>, } let bytes = std::fs::read(path)?; let config_file: ConfigFile = toml::from_slice(&bytes)?; for device_config in config_file.devices { let device = device_config.device; let device_gestures = config.devices.entry(device).or_default(); for gesture_action in device_config.gestures { device_gestures.insert(gesture_action.gesture, gesture_action.action); } } Ok(()) }
use crate::events::*; use serde::Deserialize; use std::collections::BTreeMap; use std::ffi::OsStr; use std::path::{Path, PathBuf}; const PREFIX: Option<&'static str> = option_env!("PREFIX"); pub(crate) type Device = String; pub(crate) type GestureMap = BTreeMap<Gesture, Action>; type BoxedError = Box<dyn std::error::Error + Send + Sync>; type Result<T> = std::result::Result<T, BoxedError>; pub(crate) struct Configuration { pub devices: BTreeMap<Device, GestureMap>, } impl Configuration { pub fn new() -> Self { Self { devices: Default::default(), } } } #[derive(Deserialize)] #[serde(rename_all = "lowercase")] pub(crate) enum Action { #[serde(skip)] None, Execute(String), } impl Default for Action { fn default() -> Self { Action::None } } pub(crate) fn load() -> Configuration { let mut config = Configuration::new(); let prefix = PathBuf::from(PREFIX.unwrap_or("/usr/local")); let global_config = prefix.join("etc/syngestures.toml"); if global_config.exists() { try_load_config_file(&mut config, &global_config); } let global_config_dir = prefix.join("etc/syngestures.d"); try_load_config_dir(&mut config, &global_config_dir); load_user_config(&mut config); if config.devices.is_empty() { eprintln!("No configuration found!"); eprintln!("Searched for configuration files in the following locations:"); eprintln!("* {}/etc/syngestures.toml", global_config_dir.display()); eprintln!("* {}/etc/syngestures.d/*.toml", global_config_dir.display()); eprintln!("* $XDG_HOME/syngestures.toml"); eprintln!("* $XDG_HOME/syngestures.d/*.toml"); eprintln!("* $HOME/.config/syngestures.toml"); eprintln!("* $HOME/.config/syngestures.d/*.toml"); } config } fn try_load_config_file(config: &mut Configuration, pat
fn try_load_config_dir(config: &mut Configuration, dir: &Path) { if let Err(e) = load_config_dir(config, &dir) { eprintln!( "Error reading from configuration directory {}: {}", dir.display(), e ); } } fn load_user_config(mut config: &mut Configuration) { use std::env::VarError; let config_home = match std::env::var("XDG_CONFIG_HOME") { Ok(xdg_config_home) => PathBuf::from(xdg_config_home), Err(VarError::NotPresent) => match get_user_config_dir() { Ok(dir) => PathBuf::from(dir), Err(e) => { eprintln!("{}", e); return; } }, Err(VarError::NotUnicode(_)) => { eprintln!("Invalid XDG_CONFIG_HOME"); return; } }; let user_config_file = config_home.join("syngestures.toml"); if user_config_file.exists() { try_load_config_file(&mut config, &user_config_file); } let user_config_dir = config_home.join("syngestures.d"); try_load_config_dir(&mut config, &user_config_dir); } fn get_user_config_dir() -> Result<PathBuf> { #[allow(deprecated)] let home = std::env::home_dir(); if home.is_none() || home.as_ref().unwrap() == &PathBuf::new() { return Err("Could not determine user home directory!".into()); } let config_home = home.unwrap().join(".config/"); Ok(config_home) } fn load_config_dir(mut config: &mut Configuration, dir: &Path) -> Result<()> { use std::fs::DirEntry; if !dir.exists() || !dir.is_dir() { return Ok(()); } let toml = OsStr::new("toml"); for item in dir.read_dir()? 
{ let item = match item { Ok(item) => item, Err(e) => { eprintln!( "Error reading file from configuration directory {}: {}", dir.display(), e ); continue; } }; let mut process_item = |item: &DirEntry| -> Result<()> { if item.file_type()?.is_dir() { return Ok(()); } let item = item.path(); if item.extension() != Some(toml) { return Ok(()); } try_load_config_file(&mut config, &item); Ok(()) }; if let Err(e) = process_item(&item) { eprintln!("Error loading {}: {}", item.path().to_string_lossy(), e); } } Ok(()) } fn load_config_file(config: &mut Configuration, path: &Path) -> Result<()> { #[derive(Deserialize)] struct ConfigGestureAndAction { #[serde(flatten)] pub gesture: Gesture, #[serde(flatten)] pub action: Action, } #[derive(Deserialize)] struct ConfigDeviceGestures { pub device: Device, pub gestures: Vec<ConfigGestureAndAction>, } #[derive(Deserialize)] struct ConfigFile { #[serde(alias = "device")] pub devices: Vec<ConfigDeviceGestures>, } let bytes = std::fs::read(path)?; let config_file: ConfigFile = toml::from_slice(&bytes)?; for device_config in config_file.devices { let device = device_config.device; let device_gestures = config.devices.entry(device).or_default(); for gesture_action in device_config.gestures { device_gestures.insert(gesture_action.gesture, gesture_action.action); } } Ok(()) }
h: &Path) { if let Err(e) = load_config_file(config, &path) { eprintln!( "Error loading configuration file at {}: {}", path.display(), e ); } }
function_block-function_prefixed
[ { "content": "fn which(target: &str) -> Option<String> {\n\n let mut cmd = Command::new(\"which\");\n\n cmd.stdout(Stdio::piped());\n\n cmd.stderr(Stdio::null());\n\n cmd.args(&[target]);\n\n let output = match cmd.output() {\n\n Err(_) => {\n\n warn!(\"Failed to find/execute `w...
Rust
src/monadio.rs
TeaEntityLab/fpRust
5381203f823c3b0d0080d7070022379bbd525c02
/*! In this module there're implementations & tests of `MonadIO`. It's inspired by `Rx` & `MonadIO` in `Haskell` */ use std::sync::{Arc, Mutex}; #[cfg(feature = "for_futures")] use super::common::shared_thread_pool; #[cfg(feature = "for_futures")] use crate::futures::task::SpawnExt; #[cfg(feature = "for_futures")] use std::error::Error; use super::handler::Handler; use super::sync::CountDownLatch; use super::common::{RawFunc, Subscription, SubscriptionFunc}; /** `MonadIO` implements basic `Rx`/`MonadIO` APIs. The `observe` and `subscribe` actions could be sync/async, and `observe` & `subscribe` could be on other `thread`s (by setting up `observe_on` and `subscribe_on`). # Arguments * `Y` - The generic type of data # Remarks It's inspired by `Rx` & `MonadIO` in `Haskell` , and easily run it on sync/async scenaios. `` */ #[derive(Clone)] pub struct MonadIO<Y> { effect: Arc<Mutex<dyn FnMut() -> Y + Send + Sync + 'static>>, ob_handler: Option<Arc<Mutex<dyn Handler>>>, sub_handler: Option<Arc<Mutex<dyn Handler>>>, } pub fn of<Z: 'static + Send + Sync + Clone>(r: Z) -> impl FnMut() -> Z + Send + Sync + 'static { let _r = Box::new(r); move || *_r.clone() } impl<Y: 'static + Send + Sync + Clone> From<Y> for MonadIO<Y> { fn from(r: Y) -> Self { MonadIO::just(r) } } impl<Y: 'static + Send + Sync + Clone> MonadIO<Y> { pub fn just(r: Y) -> MonadIO<Y> { MonadIO::new(of(r)) } #[cfg(feature = "for_futures")] pub async fn to_future(&self) -> Result<Arc<Y>, Box<dyn Error>> { let mio = self.clone(); let future = { shared_thread_pool() .inner .lock() .unwrap() .spawn_with_handle(async move { mio.eval() })? 
}; let result = future.await; Ok(result) } } impl<Y: 'static + Send + Sync> MonadIO<Y> { pub fn new(effect: impl FnMut() -> Y + Send + Sync + 'static) -> MonadIO<Y> { MonadIO::new_with_handlers(effect, None, None) } pub fn new_with_handlers( effect: impl FnMut() -> Y + Send + Sync + 'static, ob: Option<Arc<Mutex<dyn Handler + 'static>>>, sub: Option<Arc<Mutex<dyn Handler + 'static>>>, ) -> MonadIO<Y> { MonadIO { effect: Arc::new(Mutex::new(effect)), ob_handler: ob, sub_handler: sub, } } pub fn observe_on(&mut self, h: Option<Arc<Mutex<dyn Handler + 'static>>>) { self.ob_handler = h; } pub fn subscribe_on(&mut self, h: Option<Arc<Mutex<dyn Handler + 'static>>>) { self.sub_handler = h; } pub fn map<Z: 'static + Send + Sync + Clone>( &self, func: impl FnMut(Y) -> Z + Send + Sync + 'static, ) -> MonadIO<Z> { let _func = Arc::new(Mutex::new(func)); let mut _effect = self.effect.clone(); MonadIO::new_with_handlers( move || (_func.lock().unwrap())((_effect.lock().unwrap())()), self.ob_handler.clone(), self.sub_handler.clone(), ) } pub fn fmap<Z: 'static + Send + Sync + Clone>( &self, func: impl FnMut(Y) -> MonadIO<Z> + Send + Sync + 'static, ) -> MonadIO<Z> { let mut _func = Arc::new(Mutex::new(func)); self.map(move |y: Y| ((_func.lock().unwrap())(y).effect.lock().unwrap())()) } pub fn subscribe(&self, s: Arc<impl Subscription<Y>>) { let mut _effect = self.effect.clone(); match &self.ob_handler { Some(ob_handler) => { let mut sub_handler_thread = Arc::new(self.sub_handler.clone()); ob_handler.lock().unwrap().post(RawFunc::new(move || { match Arc::make_mut(&mut sub_handler_thread) { Some(ref mut sub_handler) => { let effect = _effect.clone(); let s = s.clone(); sub_handler.lock().unwrap().post(RawFunc::new(move || { let result = { Arc::new(effect.lock().unwrap()()) }; s.on_next(result); })); } None => { s.on_next(Arc::new(_effect.lock().unwrap()())); } } })); } None => { s.on_next(Arc::new(_effect.lock().unwrap()())); } } } pub fn subscribe_fn(&self, func: impl 
FnMut(Arc<Y>) + Send + Sync + 'static) { self.subscribe(Arc::new(SubscriptionFunc::new(func))) } pub fn eval(&self) -> Arc<Y> { let latch = CountDownLatch::new(1); let latch_thread = latch.clone(); let result = Arc::new(Mutex::new(None::<Arc<Y>>)); let result_thread = result.clone(); self.subscribe_fn(move |y| { result_thread.lock().unwrap().replace(y); latch_thread.countdown(); }); latch.wait(); let result = result.lock().as_mut().unwrap().to_owned(); result.unwrap() } } #[cfg(feature = "for_futures")] #[futures_test::test] async fn test_monadio_async() { assert_eq!(Arc::new(3), MonadIO::just(3).eval()); assert_eq!( Arc::new(3), MonadIO::just(3).to_future().await.ok().unwrap() ); assert_eq!( Arc::new(6), MonadIO::just(3) .map(|i| i * 2) .to_future() .await .ok() .unwrap() ); } #[test] fn test_monadio_new() { use super::common::SubscriptionFunc; use super::handler::HandlerThread; use std::sync::Arc; use std::{thread, time}; use super::sync::CountDownLatch; let monadio_simple = MonadIO::just(3); { assert_eq!(3, (monadio_simple.effect.lock().unwrap())()); } let monadio_simple_map = monadio_simple.map(|x| x * 3); monadio_simple_map.subscribe_fn(move |x| { println!("monadio_simple_map {:?}", x); assert_eq!(9, *Arc::make_mut(&mut x.clone())); }); let mut _subscription = Arc::new(SubscriptionFunc::new(move |x: Arc<u16>| { println!("monadio_sync {:?}", x); assert_eq!(36, *Arc::make_mut(&mut x.clone())); })); let subscription = _subscription.clone(); let monadio_sync = MonadIO::just(1) .fmap(|x| MonadIO::new(move || x * 4)) .map(|x| x * 3) .map(|x| x * 3); monadio_sync.subscribe(subscription); let mut _handler_observe_on = HandlerThread::new_with_mutex(); let mut _handler_subscribe_on = HandlerThread::new_with_mutex(); let monadio_async = MonadIO::new_with_handlers( || { println!("In string"); String::from("ok") }, Some(_handler_observe_on.clone()), Some(_handler_subscribe_on.clone()), ); let latch = CountDownLatch::new(1); let latch2 = latch.clone(); 
thread::sleep(time::Duration::from_millis(1)); let subscription = Arc::new(SubscriptionFunc::new(move |x: Arc<String>| { println!("monadio_async {:?}", x); latch2.countdown(); })); monadio_async.subscribe(subscription); monadio_async.subscribe(Arc::new(SubscriptionFunc::new(move |x: Arc<String>| { println!("monadio_async sub2 {:?}", x); }))); { let mut handler_observe_on = _handler_observe_on.lock().unwrap(); let mut handler_subscribe_on = _handler_subscribe_on.lock().unwrap(); println!("hh2"); handler_observe_on.start(); handler_subscribe_on.start(); println!("hh2 running"); handler_observe_on.post(RawFunc::new(move || {})); handler_observe_on.post(RawFunc::new(move || {})); handler_observe_on.post(RawFunc::new(move || {})); handler_observe_on.post(RawFunc::new(move || {})); handler_observe_on.post(RawFunc::new(move || {})); } thread::sleep(time::Duration::from_millis(1)); latch.clone().wait(); }
/*! In this module there're implementations & tests of `MonadIO`. It's inspired by `Rx` & `MonadIO` in `Haskell` */ use std::sync::{Arc, Mutex}; #[cfg(feature = "for_futures")] use super::common::shared_thread_pool; #[cfg(feature = "for_futures")] use crate::futures::task::SpawnExt; #[cfg(feature = "for_futures")] use std::error::Error; use super::handler::Handler; use super::sync::CountDownLatch; use super::common::{RawFunc, Subscription, SubscriptionFunc}; /** `MonadIO` implements basic `Rx`/`MonadIO` APIs. The `observe` and `subscribe` actions could be sync/async, and `observe` & `subscribe` could be on other `thread`s (by setting up `observe_on` and `subscribe_on`). # Arguments * `Y` - The generic type of data # Remarks It's inspired by `Rx` & `MonadIO` in `Haskell` , and easily run it on sync/async scenaios. `` */ #[derive(Clone)] pub struct MonadIO<Y> { effect: Arc<Mutex<dyn FnMut() -> Y + Send + Sync + 'static>>, ob_handler: Option<Arc<Mutex<dyn Handler>>>, sub_handler: Option<Arc<Mutex<dyn Handler>>>, } pub fn of<Z: 'static + Send + Sync + Clone>(r: Z) -> impl FnMut() -> Z + Send + Sync + 'static { let _r = Box::new(r); move || *_r.clone() } impl<Y: 'static + Send + Sync + Clone> From<Y> for MonadIO<Y> { fn from(r: Y) -> Self { MonadIO::just(r) } } impl<Y: 'static + Send + Sync + Clone> MonadIO<Y> { pub fn just(r: Y) -> MonadIO<Y> { MonadIO::new(of(r)) } #[cfg(feature = "for_futures")] pub async fn to_future(&self) -> Result<Arc<Y>, Box<dyn Error>> { let mio = self.clone(); let future = { shared_thread_pool() .inner .lock() .unwrap() .spawn_with_handle(async move { mio.eval() })? 
}; let result = future.await; Ok(result) } } impl<Y: 'static + Send + Sync> MonadIO<Y> { pub fn new(effect: impl FnMut() -> Y + Send + Sync + 'static) -> MonadIO<Y> { MonadIO::new_with_handlers(effect, None, None) } pub fn new_with_handlers( effect: impl FnMut() -> Y + Send + Sync + 'static, ob: Option<Arc<Mutex<dyn Handler + 'static>>>, sub: Option<Arc<Mutex<dyn Handler + 'static>>>, ) -> MonadIO<Y> { MonadIO { effect: Arc::new(Mutex::new(effect)), ob_handler: ob, sub_handler: sub, } } pub fn observe_on(&mut self, h: Option<Arc<Mutex<dyn Handler + 'static>>>) { self.ob_handler = h; } pub fn subscribe_on(&mut self, h: Option<Arc<Mutex<dyn Handler + 'static>>>) { self.sub_handler = h; }
pub fn fmap<Z: 'static + Send + Sync + Clone>( &self, func: impl FnMut(Y) -> MonadIO<Z> + Send + Sync + 'static, ) -> MonadIO<Z> { let mut _func = Arc::new(Mutex::new(func)); self.map(move |y: Y| ((_func.lock().unwrap())(y).effect.lock().unwrap())()) } pub fn subscribe(&self, s: Arc<impl Subscription<Y>>) { let mut _effect = self.effect.clone(); match &self.ob_handler { Some(ob_handler) => { let mut sub_handler_thread = Arc::new(self.sub_handler.clone()); ob_handler.lock().unwrap().post(RawFunc::new(move || { match Arc::make_mut(&mut sub_handler_thread) { Some(ref mut sub_handler) => { let effect = _effect.clone(); let s = s.clone(); sub_handler.lock().unwrap().post(RawFunc::new(move || { let result = { Arc::new(effect.lock().unwrap()()) }; s.on_next(result); })); } None => { s.on_next(Arc::new(_effect.lock().unwrap()())); } } })); } None => { s.on_next(Arc::new(_effect.lock().unwrap()())); } } } pub fn subscribe_fn(&self, func: impl FnMut(Arc<Y>) + Send + Sync + 'static) { self.subscribe(Arc::new(SubscriptionFunc::new(func))) } pub fn eval(&self) -> Arc<Y> { let latch = CountDownLatch::new(1); let latch_thread = latch.clone(); let result = Arc::new(Mutex::new(None::<Arc<Y>>)); let result_thread = result.clone(); self.subscribe_fn(move |y| { result_thread.lock().unwrap().replace(y); latch_thread.countdown(); }); latch.wait(); let result = result.lock().as_mut().unwrap().to_owned(); result.unwrap() } } #[cfg(feature = "for_futures")] #[futures_test::test] async fn test_monadio_async() { assert_eq!(Arc::new(3), MonadIO::just(3).eval()); assert_eq!( Arc::new(3), MonadIO::just(3).to_future().await.ok().unwrap() ); assert_eq!( Arc::new(6), MonadIO::just(3) .map(|i| i * 2) .to_future() .await .ok() .unwrap() ); } #[test] fn test_monadio_new() { use super::common::SubscriptionFunc; use super::handler::HandlerThread; use std::sync::Arc; use std::{thread, time}; use super::sync::CountDownLatch; let monadio_simple = MonadIO::just(3); { assert_eq!(3, 
(monadio_simple.effect.lock().unwrap())()); } let monadio_simple_map = monadio_simple.map(|x| x * 3); monadio_simple_map.subscribe_fn(move |x| { println!("monadio_simple_map {:?}", x); assert_eq!(9, *Arc::make_mut(&mut x.clone())); }); let mut _subscription = Arc::new(SubscriptionFunc::new(move |x: Arc<u16>| { println!("monadio_sync {:?}", x); assert_eq!(36, *Arc::make_mut(&mut x.clone())); })); let subscription = _subscription.clone(); let monadio_sync = MonadIO::just(1) .fmap(|x| MonadIO::new(move || x * 4)) .map(|x| x * 3) .map(|x| x * 3); monadio_sync.subscribe(subscription); let mut _handler_observe_on = HandlerThread::new_with_mutex(); let mut _handler_subscribe_on = HandlerThread::new_with_mutex(); let monadio_async = MonadIO::new_with_handlers( || { println!("In string"); String::from("ok") }, Some(_handler_observe_on.clone()), Some(_handler_subscribe_on.clone()), ); let latch = CountDownLatch::new(1); let latch2 = latch.clone(); thread::sleep(time::Duration::from_millis(1)); let subscription = Arc::new(SubscriptionFunc::new(move |x: Arc<String>| { println!("monadio_async {:?}", x); latch2.countdown(); })); monadio_async.subscribe(subscription); monadio_async.subscribe(Arc::new(SubscriptionFunc::new(move |x: Arc<String>| { println!("monadio_async sub2 {:?}", x); }))); { let mut handler_observe_on = _handler_observe_on.lock().unwrap(); let mut handler_subscribe_on = _handler_subscribe_on.lock().unwrap(); println!("hh2"); handler_observe_on.start(); handler_subscribe_on.start(); println!("hh2 running"); handler_observe_on.post(RawFunc::new(move || {})); handler_observe_on.post(RawFunc::new(move || {})); handler_observe_on.post(RawFunc::new(move || {})); handler_observe_on.post(RawFunc::new(move || {})); handler_observe_on.post(RawFunc::new(move || {})); } thread::sleep(time::Duration::from_millis(1)); latch.clone().wait(); }
pub fn map<Z: 'static + Send + Sync + Clone>( &self, func: impl FnMut(Y) -> Z + Send + Sync + 'static, ) -> MonadIO<Z> { let _func = Arc::new(Mutex::new(func)); let mut _effect = self.effect.clone(); MonadIO::new_with_handlers( move || (_func.lock().unwrap())((_effect.lock().unwrap())()), self.ob_handler.clone(), self.sub_handler.clone(), ) }
function_block-full_function
[ { "content": "pub trait Handler: Send + Sync + 'static {\n\n /**\n\n Did this `Handler` start?\n\n Return `true` when it did started (no matter it has stopped or not)\n\n\n\n */\n\n fn is_started(&mut self) -> bool;\n\n\n\n /**\n\n Is this `Handler` alive?\n\n Return `true` when it has s...
Rust
xv7-kernel/src/memory/buddy.rs
imtsuki/xv7
edab461a6a7ceab2236e24ca726598107d346467
use crate::config::*; use crate::pretty::Pretty; use bitvec::prelude::*; use boot::PhysAddr; use core::mem; use core::ptr; use lazy_static::lazy_static; use spin::Mutex; pub use x86_64::structures::paging::{FrameAllocator, FrameDeallocator}; use x86_64::structures::paging::{PageSize, PhysFrame, Size4KiB}; pub struct BuddyFrameAllocator<'zone> { zone: &'zone mut BuddyZone, frames: &'zone mut [BuddyFrame; MAX_FRAMES_SUPPORTED], } const MAX_ORDER: u8 = 11; macro_rules! align_to_upper { ($val: expr, $align:expr) => { (($val + $align - 1) / $align * $align) }; } macro_rules! align_to_lower { ($val: expr, $align:expr) => { ($val / $align * $align) }; } macro_rules! get_buddy { ($frame_index: expr, $order:expr) => { if ($frame_index) & (1 << ($order + 1) - 1) == 0 { $frame_index + (1 << $order) } else { $frame_index - (1 << $order) } }; } #[repr(u8)] #[derive(Copy, Clone, PartialEq)] enum BuddyFrameStatus { UNCHECKED = 0, USED = 1, NOTUSED = 2, } #[derive(Copy, Clone)] struct BuddyFrame { next: *mut BuddyFrame, use_status: BuddyFrameStatus, order: u8, } #[derive(Copy, Clone)] struct BuddyFreeArea { head: *mut BuddyFrame, length: usize, } #[derive(Copy, Clone)] struct BuddyZone { head: *mut BuddyFrame, free_area: [BuddyFreeArea; MAX_ORDER as usize], } impl BuddyFreeArea { unsafe fn drop_frame(&mut self, frame: *mut BuddyFrame) -> *mut BuddyFrame { let mut next = self.head; let mut pre = ptr::null_mut(); while !next.is_null() && next != frame { pre = next; next = (*next).next; } if next.is_null() { return ptr::null_mut(); } self.length -= 1; let next = (*frame).next; if !pre.is_null() { (*pre).next = next; } else { self.head = next; } (*frame).next = ptr::null_mut(); (*frame).use_status = BuddyFrameStatus::NOTUSED; return frame; } unsafe fn push_frame(&mut self, frame: *mut BuddyFrame) { self.length += 1; (*frame).next = self.head; self.head = frame; (*frame).use_status = BuddyFrameStatus::NOTUSED; } unsafe fn pop_frame(&mut self) -> *mut BuddyFrame { if self.length == 0 { 
return ptr::null_mut(); } self.length -= 1; let head = self.head; self.head = (*head).next; (*head).next = ptr::null_mut(); (*head).use_status = BuddyFrameStatus::USED; return head; } } impl BuddyZone { fn count_free_mem(&self) -> usize { let mut mem_count = 0usize; for i in 0..MAX_ORDER { mem_count += self.free_area[i as usize].length * (1 << i); } return mem_count; } } unsafe impl<'zone> Send for BuddyFrameAllocator<'zone> {} impl<'zone> BuddyFrameAllocator<'zone> { fn new( zone: &'zone mut BuddyZone, frames: &'zone mut [BuddyFrame; MAX_FRAMES_SUPPORTED], ) -> Self { Self { zone: zone, frames: frames, } } fn index_of_frame(&mut self, frame: *mut BuddyFrame) -> usize { (frame as usize - (&mut self.frames[0] as *mut BuddyFrame) as usize) / mem::size_of::<BuddyFrame>() } pub fn install_memory_region(&mut self, phys_start: PhysAddr, page_count: usize) { assert!(phys_start.is_aligned(Size4KiB::SIZE)); let frame_start = (phys_start.as_u64() / Size4KiB::SIZE) as usize; unsafe { self.free_frame_range(frame_start, frame_start + page_count); } } unsafe fn free_frame_range(&mut self, index_l: usize, index_r: usize) { self.free_frame_range_top_down(index_l, index_r, MAX_ORDER - 1) } unsafe fn free_frame_range_top_down(&mut self, index_l: usize, index_r: usize, order: u8) { if index_l >= index_r || order >= MAX_ORDER { return; } let block_size: usize = 1 << order; let align_index_l: usize = align_to_upper!(index_l, block_size); let align_index_r: usize = align_to_lower!(index_r, block_size); if align_index_l <= align_index_r { self.free_frame_range_top_down(index_l, align_index_l, order.wrapping_sub(1)); for frame_index in (align_index_l..align_index_r).step_by(1 << order) { self.free_frame_specific_order(frame_index, order); } self.free_frame_range_top_down(align_index_r, index_r, order.wrapping_sub(1)); } else { self.free_frame_range_top_down(index_l, index_r, order.wrapping_sub(1)); } } unsafe fn free_frame_specific_order(&mut self, mut frame_index: usize, mut order: u8) { 
if order >= MAX_ORDER { return; } if self.frames[frame_index].use_status == BuddyFrameStatus::NOTUSED { println!( "BuddyFrameAllocator: free twice on frame({}) detected", frame_index ); return; } while order < MAX_ORDER { if order == MAX_ORDER - 1 { break; } let area = &mut self.zone.free_area[order as usize]; let buddy_index = get_buddy!(frame_index, order); let buddy_frame = area.drop_frame(&mut self.frames[buddy_index]); if !buddy_frame.is_null() { frame_index = if frame_index < buddy_index { frame_index } else { buddy_index }; order += 1; } else { break; } } assert_eq!( frame_index, align_to_lower!(frame_index, (1 << order) as usize), "frame_index {} cannot match order {}", frame_index, order ); self.frames[frame_index].order = order; self.zone.free_area[order as usize].push_frame(&mut self.frames[frame_index]); } unsafe fn alloc_frame_specific_order(&mut self, order: u8) -> *mut BuddyFrame { let mut upper_order = order; while upper_order < MAX_ORDER && self.zone.free_area[upper_order as usize].length <= 0 { upper_order += 1; } if upper_order >= MAX_ORDER { return ptr::null_mut(); } let large_frame = self.zone.free_area[upper_order as usize].pop_frame(); while upper_order > order { let offset = (1 << (upper_order - 1)) + self.index_of_frame(large_frame); self.frames[offset].order = upper_order - 1; self.zone.free_area[(upper_order - 1) as usize] .push_frame(&mut self.frames[offset] as *mut BuddyFrame); upper_order -= 1; } (*large_frame).use_status = BuddyFrameStatus::USED; (*large_frame).order = order; return large_frame; } pub unsafe fn check_bugs(&mut self) { for i in 0..MAX_ORDER { let area = self.zone.free_area[i as usize]; let mut j = 0; let mut cur = area.head; while !cur.is_null() { let next = (*cur).next; let offset = self.index_of_frame(cur); assert_eq!( offset, align_to_lower!(offset, (1 << i) as usize), "area({})'s frame at index({}) has offset({}), cannot match order", i, j, offset ); j += 1; cur = next; } assert_eq!( j, area.length, "area({})'s 
length was not equals to it's link length", i ); } } #[allow(unused)] pub unsafe fn print_statistics(&mut self) { self.check_bugs(); let free_mem_count = self.zone.count_free_mem(); println!( "BuddyFrameAllocator: {} frames available, which is {} of memory", free_mem_count, (free_mem_count * Size4KiB::SIZE as usize).pretty(), ); print!("default zone:\t"); for i in 0..MAX_ORDER { print!("{:>8}", self.zone.free_area[i as usize].length); } println!(); } } unsafe impl<'zone> FrameAllocator<Size4KiB> for BuddyFrameAllocator<'zone> { fn allocate_frame(&mut self) -> Option<PhysFrame<Size4KiB>> { let frame = unsafe { self.alloc_frame_specific_order(0) }; if !frame.is_null() { Some(PhysFrame::containing_address(PhysAddr::new( self.index_of_frame(frame) as u64 * Size4KiB::SIZE, ))) } else { None } } } impl<'zone> FrameDeallocator<Size4KiB> for BuddyFrameAllocator<'zone> { unsafe fn deallocate_frame(&mut self, frame: PhysFrame<Size4KiB>) { let index = frame.start_address().as_u64() / Size4KiB::SIZE; self.free_frame_specific_order(index as usize, 0); } } lazy_static! { pub static ref FRAME_ALLOCATOR: Mutex<BuddyFrameAllocator<'static>> = { unsafe { static mut FRAMES: [BuddyFrame; MAX_FRAMES_SUPPORTED] = [BuddyFrame { next: ptr::null_mut(), use_status: BuddyFrameStatus::UNCHECKED, order: 0, }; MAX_FRAMES_SUPPORTED]; static mut DEFAULT_ZONE: BuddyZone = BuddyZone { head: ptr::null_mut(), free_area: [BuddyFreeArea { head: ptr::null_mut(), length: 0, }; MAX_ORDER as usize], }; DEFAULT_ZONE.head = &mut FRAMES[0]; Mutex::new(BuddyFrameAllocator::new(&mut DEFAULT_ZONE, &mut FRAMES)) } }; }
use crate::config::*; use crate::pretty::Pretty; use bitvec::prelude::*; use boot::PhysAddr; use core::mem; use core::ptr; use lazy_static::lazy_static; use spin::Mutex; pub use x86_64::structures::paging::{FrameAllocator, FrameDeallocator}; use x86_64::structures::paging::{PageSize, PhysFrame, Size4KiB}; pub struct BuddyFrameAllocator<'zone> { zone: &'zone mut BuddyZone, frames: &'zone mut [BuddyFrame; MAX_FRAMES_SUPPORTED], } const MAX_ORDER: u8 = 11; macro_rules! align_to_upper { ($val: expr, $align:expr) => { (($val + $align - 1) / $align * $align) }; } macro_rules! align_to_lower { ($val: expr, $align:expr) => { ($val / $align * $align) }; } macro_rules! get_buddy { ($frame_index: expr, $order:expr) => { if ($frame_index) & (1 << ($order + 1) - 1) == 0 { $frame_index + (1 << $order) } else { $frame_index - (1 << $order) } }; } #[repr(u8)] #[derive(Copy, Clone, PartialEq)] enum BuddyFrameStatus { UNCHECKED = 0, USED = 1, NOTUSED = 2, } #[derive(Copy, Clone)] struct BuddyFrame { next: *mut BuddyFrame, use_status: BuddyFrameStatus, order: u8, } #[derive(Copy, Clone)] struct BuddyFreeArea { head: *mut BuddyFrame, length: usize, } #[derive(Copy, Clone)] struct BuddyZone { head: *mut BuddyFrame, free_area: [BuddyFreeArea; MAX_ORDER as usize], } impl BuddyFreeArea { unsafe fn drop_frame(&mut self, frame: *mut BuddyFrame) -> *mut BuddyFrame { let mut next = self.head; let mut pre = ptr::null_mut(); while !next.is_null() && next != frame { pre = next; next = (*next).next; } if next.is_null() { return ptr::null_mut(); } self.length -= 1; let next = (*frame).next; if !pre.is_null() { (*pre).next = next; } else { self.head = next; } (*frame).next = ptr::null_mut(); (*frame).use_status = BuddyFrameStatus::NOTUSED; return frame; } unsafe fn push_frame(&mut self, frame: *mut BuddyFrame) { self.length += 1; (*frame).next = self.head; self.head = frame; (*frame).use_status = BuddyFrameStatus::NOTUSED; } unsafe fn pop_frame(&mut self) -> *mut BuddyFrame { if self.length == 0 { 
return ptr::null_mut(); } self.length -= 1; let head = self.head; self.head = (*head).next; (*head).next = ptr::null_mut(); (*head).use_status = BuddyFrameStatus::USED; return head; } } impl BuddyZone { fn count_free_mem(&self) -> usize { let mut mem_count = 0usize; for i in 0..MAX_ORDER { mem_count += self.free_area[i as usize].length * (1 << i); } return mem_count; } } unsafe impl<'zone> Send for BuddyFrameAllocator<'zone> {} impl<'zone> BuddyFrameAllocator<'zone> { fn new( zone: &'zone mut BuddyZone, frames: &'zone mut [BuddyFrame; MAX_FRAMES_SUPPORTED], ) -> Self { Self { zone: zone, frames: frames, } } fn index_of_frame(&mut self, frame: *mut BuddyFrame) -> usize { (frame as usize - (&mut self.frames[0] as *mut BuddyFrame) as usize) / mem::size_of::<BuddyFrame>() } pub fn install_memory_region(&mut self, phys_start: PhysAddr, page_count: usize) { assert!(phys_start.is_aligned(Size4KiB::SIZE)); let frame_start = (phys_start.as_u64() / Size4KiB::SIZE) as usize; unsafe { self.free_frame_range(frame_start, frame_start + page_count); } } unsafe fn free_frame_range(&mut self, index_l: usize, index_r: usize) { self.free_frame_range_top_down(index_l, index_r, MAX_ORDER - 1) } unsafe fn free_frame_range_top_down(&mut self, index_l: usize, index_r: usize, order: u8) { if index_l >= index_r || order >= MAX_ORDER { return; } let block_size: usize = 1 << order; let align_index_l: usize = align_to_upper!(index_l, block_size); let align_index_r: usize = align_to_lower!(index_r, block_size);
} unsafe fn free_frame_specific_order(&mut self, mut frame_index: usize, mut order: u8) { if order >= MAX_ORDER { return; } if self.frames[frame_index].use_status == BuddyFrameStatus::NOTUSED { println!( "BuddyFrameAllocator: free twice on frame({}) detected", frame_index ); return; } while order < MAX_ORDER { if order == MAX_ORDER - 1 { break; } let area = &mut self.zone.free_area[order as usize]; let buddy_index = get_buddy!(frame_index, order); let buddy_frame = area.drop_frame(&mut self.frames[buddy_index]); if !buddy_frame.is_null() { frame_index = if frame_index < buddy_index { frame_index } else { buddy_index }; order += 1; } else { break; } } assert_eq!( frame_index, align_to_lower!(frame_index, (1 << order) as usize), "frame_index {} cannot match order {}", frame_index, order ); self.frames[frame_index].order = order; self.zone.free_area[order as usize].push_frame(&mut self.frames[frame_index]); } unsafe fn alloc_frame_specific_order(&mut self, order: u8) -> *mut BuddyFrame { let mut upper_order = order; while upper_order < MAX_ORDER && self.zone.free_area[upper_order as usize].length <= 0 { upper_order += 1; } if upper_order >= MAX_ORDER { return ptr::null_mut(); } let large_frame = self.zone.free_area[upper_order as usize].pop_frame(); while upper_order > order { let offset = (1 << (upper_order - 1)) + self.index_of_frame(large_frame); self.frames[offset].order = upper_order - 1; self.zone.free_area[(upper_order - 1) as usize] .push_frame(&mut self.frames[offset] as *mut BuddyFrame); upper_order -= 1; } (*large_frame).use_status = BuddyFrameStatus::USED; (*large_frame).order = order; return large_frame; } pub unsafe fn check_bugs(&mut self) { for i in 0..MAX_ORDER { let area = self.zone.free_area[i as usize]; let mut j = 0; let mut cur = area.head; while !cur.is_null() { let next = (*cur).next; let offset = self.index_of_frame(cur); assert_eq!( offset, align_to_lower!(offset, (1 << i) as usize), "area({})'s frame at index({}) has offset({}), cannot match 
order", i, j, offset ); j += 1; cur = next; } assert_eq!( j, area.length, "area({})'s length was not equals to it's link length", i ); } } #[allow(unused)] pub unsafe fn print_statistics(&mut self) { self.check_bugs(); let free_mem_count = self.zone.count_free_mem(); println!( "BuddyFrameAllocator: {} frames available, which is {} of memory", free_mem_count, (free_mem_count * Size4KiB::SIZE as usize).pretty(), ); print!("default zone:\t"); for i in 0..MAX_ORDER { print!("{:>8}", self.zone.free_area[i as usize].length); } println!(); } } unsafe impl<'zone> FrameAllocator<Size4KiB> for BuddyFrameAllocator<'zone> { fn allocate_frame(&mut self) -> Option<PhysFrame<Size4KiB>> { let frame = unsafe { self.alloc_frame_specific_order(0) }; if !frame.is_null() { Some(PhysFrame::containing_address(PhysAddr::new( self.index_of_frame(frame) as u64 * Size4KiB::SIZE, ))) } else { None } } } impl<'zone> FrameDeallocator<Size4KiB> for BuddyFrameAllocator<'zone> { unsafe fn deallocate_frame(&mut self, frame: PhysFrame<Size4KiB>) { let index = frame.start_address().as_u64() / Size4KiB::SIZE; self.free_frame_specific_order(index as usize, 0); } } lazy_static! { pub static ref FRAME_ALLOCATOR: Mutex<BuddyFrameAllocator<'static>> = { unsafe { static mut FRAMES: [BuddyFrame; MAX_FRAMES_SUPPORTED] = [BuddyFrame { next: ptr::null_mut(), use_status: BuddyFrameStatus::UNCHECKED, order: 0, }; MAX_FRAMES_SUPPORTED]; static mut DEFAULT_ZONE: BuddyZone = BuddyZone { head: ptr::null_mut(), free_area: [BuddyFreeArea { head: ptr::null_mut(), length: 0, }; MAX_ORDER as usize], }; DEFAULT_ZONE.head = &mut FRAMES[0]; Mutex::new(BuddyFrameAllocator::new(&mut DEFAULT_ZONE, &mut FRAMES)) } }; }
if align_index_l <= align_index_r { self.free_frame_range_top_down(index_l, align_index_l, order.wrapping_sub(1)); for frame_index in (align_index_l..align_index_r).step_by(1 << order) { self.free_frame_specific_order(frame_index, order); } self.free_frame_range_top_down(align_index_r, index_r, order.wrapping_sub(1)); } else { self.free_frame_range_top_down(index_l, index_r, order.wrapping_sub(1)); }
if_condition
[ { "content": "pub fn read(fd: usize, buf: &mut [u8]) -> Result<usize> {\n\n unsafe { syscall3(SYS_READ, fd, buf.as_mut_ptr() as usize, buf.len()) }\n\n}\n\n\n", "file_path": "xv7-usyscall/src/syscall.rs", "rank": 0, "score": 186518.74317084477 }, { "content": "pub fn read(fd: usize, buf: ...
Rust
src/sysctrl/sysctrl_lpdsp32_debug_cfg.rs
ldicocco/rsl10-pac
007871e940fe30f83de1da0f15fd25b052d1f340
#[doc = "Reader of register SYSCTRL_LPDSP32_DEBUG_CFG"] pub type R = crate::R<u32, super::SYSCTRL_LPDSP32_DEBUG_CFG>; #[doc = "Writer for register SYSCTRL_LPDSP32_DEBUG_CFG"] pub type W = crate::W<u32, super::SYSCTRL_LPDSP32_DEBUG_CFG>; #[doc = "Register SYSCTRL_LPDSP32_DEBUG_CFG `reset()`'s with value 0"] impl crate::ResetValue for super::SYSCTRL_LPDSP32_DEBUG_CFG { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "LPDSP32 exit powerdown mode configuration when halted\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A { #[doc = "0: LPDSP32 exit powerdown when halted disabled"] LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED = 0, #[doc = "1: LPDSP32 exit powerdown when halted enabled"] LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED = 1, } impl From<LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A> for bool { #[inline(always)] fn from(variant: LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED`"] pub type LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R = crate::R<bool, LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A>; impl LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A { match self.bits { false => { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED } true => { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED } } } #[doc = "Checks if the value of the field is `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED`"] #[inline(always)] pub fn is_lpdsp32_exit_powerdown_when_halted_disabled(&self) -> bool { *self == LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED } #[doc = "Checks if the value of the field is `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED`"] #[inline(always)] pub fn is_lpdsp32_exit_powerdown_when_halted_enabled(&self) -> bool { 
*self == LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED } } #[doc = "Write proxy for field `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED`"] pub struct LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W<'a> { w: &'a mut W, } impl<'a> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "LPDSP32 exit powerdown when halted disabled"] #[inline(always)] pub fn lpdsp32_exit_powerdown_when_halted_disabled(self) -> &'a mut W { self.variant( LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED, ) } #[doc = "LPDSP32 exit powerdown when halted enabled"] #[inline(always)] pub fn lpdsp32_exit_powerdown_when_halted_enabled(self) -> &'a mut W { self.variant( LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED, ) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "LPDSP32 debug port enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum LPDSP32_DEBUG_ENABLE_A { #[doc = "0: LPDSP32 debug port disabled"] LPDSP32_DEBUG_DISABLED = 0, #[doc = "1: LPDSP32 debug port enabled"] LPDSP32_DEBUG_ENABLED = 1, } impl From<LPDSP32_DEBUG_ENABLE_A> for bool { #[inline(always)] fn from(variant: LPDSP32_DEBUG_ENABLE_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `LPDSP32_DEBUG_ENABLE`"] pub type LPDSP32_DEBUG_ENABLE_R = crate::R<bool, LPDSP32_DEBUG_ENABLE_A>; impl LPDSP32_DEBUG_ENABLE_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> 
LPDSP32_DEBUG_ENABLE_A { match self.bits { false => LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_DISABLED, true => LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_ENABLED, } } #[doc = "Checks if the value of the field is `LPDSP32_DEBUG_DISABLED`"] #[inline(always)] pub fn is_lpdsp32_debug_disabled(&self) -> bool { *self == LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_DISABLED } #[doc = "Checks if the value of the field is `LPDSP32_DEBUG_ENABLED`"] #[inline(always)] pub fn is_lpdsp32_debug_enabled(&self) -> bool { *self == LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_ENABLED } } #[doc = "Write proxy for field `LPDSP32_DEBUG_ENABLE`"] pub struct LPDSP32_DEBUG_ENABLE_W<'a> { w: &'a mut W, } impl<'a> LPDSP32_DEBUG_ENABLE_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: LPDSP32_DEBUG_ENABLE_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "LPDSP32 debug port disabled"] #[inline(always)] pub fn lpdsp32_debug_disabled(self) -> &'a mut W { self.variant(LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_DISABLED) } #[doc = "LPDSP32 debug port enabled"] #[inline(always)] pub fn lpdsp32_debug_enabled(self) -> &'a mut W { self.variant(LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_ENABLED) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } impl R { #[doc = "Bit 1 - LPDSP32 exit powerdown mode configuration when halted"] #[inline(always)] pub fn lpdsp32_exit_powerdown_when_halted(&self) -> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 0 - LPDSP32 debug port enable"] #[inline(always)] pub fn lpdsp32_debug_enable(&self) -> LPDSP32_DEBUG_ENABLE_R { 
LPDSP32_DEBUG_ENABLE_R::new((self.bits & 0x01) != 0) } } impl W { #[doc = "Bit 1 - LPDSP32 exit powerdown mode configuration when halted"] #[inline(always)] pub fn lpdsp32_exit_powerdown_when_halted(&mut self) -> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W { w: self } } #[doc = "Bit 0 - LPDSP32 debug port enable"] #[inline(always)] pub fn lpdsp32_debug_enable(&mut self) -> LPDSP32_DEBUG_ENABLE_W { LPDSP32_DEBUG_ENABLE_W { w: self } } }
#[doc = "Reader of register SYSCTRL_LPDSP32_DEBUG_CFG"] pub type R = crate::R<u32, super::SYSCTRL_LPDSP32_DEBUG_CFG>; #[doc = "Writer for register SYSCTRL_LPDSP32_DEBUG_CFG"] pub type W = crate::W<u32, super::SYSCTRL_LPDSP32_DEBUG_CFG>; #[doc = "Register SYSCTRL_LPDSP32_DEBUG_CFG `reset()`'s with value 0"] impl crate::ResetValue for super::SYSCTRL_LPDSP32_DEBUG_CFG { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "LPDSP32 exit powerdown mode configuration when halted\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A { #[doc = "0: LPDSP32 exit powerdown when halted disabled"] LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED = 0, #[doc = "1: LPDSP32 exit powerdown when halted enabled"] LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED = 1, } impl From<LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A> for bool { #[inline(always)] fn from(variant: LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED`"] pub type LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R = crate::R<bool, LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A>; impl LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A { match self.bits { false => { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED } true => { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED } } } #[doc = "Checks if the value of the field is `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED`"] #[inline(always)] pub fn is_lpdsp32_exit_powerdown_when_halted_disabled(&self) -> bool { *self == LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED } #[doc = "Checks if the value of the field is `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED`"] #[inline(always)] pub fn is_lpdsp32_exit_powerdown_when_halted_enabled(&self) -> bool { 
*self == LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED } } #[doc = "Write proxy for field `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED`"] pub struct LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W<'a> { w: &'a mut W, } impl<'a> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "LPDSP32 exit powerdown when halted disabled"] #[inline(always)] pub fn lpdsp32_exit_powerdown_when_halted_disabled(self) -> &'a mut W { self.variant( LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED, ) } #[doc = "LPDSP32 exit powerdown when halted enabled"] #[inline(always)] pub fn lpdsp32_exit_powerdown_when_halted_enabled(self) -> &'a mut W { self.variant( LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED, ) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "LPDSP32 debug port enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum LPDSP32_DEBUG_ENABLE_A { #[doc = "0: LPDSP32 debug port disabled"] LPDSP32_DEBUG_DISABLED = 0, #[doc = "1: LPDSP32 debug port enabled"] LPDSP32_DEBUG_ENABLED = 1, } impl From<LPDSP32_DEBUG_ENABLE_A> for bool { #[inline(always)] fn from(variant: LPDSP32_DEBUG_ENABLE_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `LPDSP32_DEBUG_ENABLE`"] pub type LPDSP32_DEBUG_ENABLE_R = crate::R<bool, LPDSP32_DEBUG_ENABLE_A>; impl LPDSP32_DEBUG_ENABLE_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> 
LPDSP32_DEBUG_ENABLE_A {
} #[doc = "Checks if the value of the field is `LPDSP32_DEBUG_DISABLED`"] #[inline(always)] pub fn is_lpdsp32_debug_disabled(&self) -> bool { *self == LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_DISABLED } #[doc = "Checks if the value of the field is `LPDSP32_DEBUG_ENABLED`"] #[inline(always)] pub fn is_lpdsp32_debug_enabled(&self) -> bool { *self == LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_ENABLED } } #[doc = "Write proxy for field `LPDSP32_DEBUG_ENABLE`"] pub struct LPDSP32_DEBUG_ENABLE_W<'a> { w: &'a mut W, } impl<'a> LPDSP32_DEBUG_ENABLE_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: LPDSP32_DEBUG_ENABLE_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "LPDSP32 debug port disabled"] #[inline(always)] pub fn lpdsp32_debug_disabled(self) -> &'a mut W { self.variant(LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_DISABLED) } #[doc = "LPDSP32 debug port enabled"] #[inline(always)] pub fn lpdsp32_debug_enabled(self) -> &'a mut W { self.variant(LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_ENABLED) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } impl R { #[doc = "Bit 1 - LPDSP32 exit powerdown mode configuration when halted"] #[inline(always)] pub fn lpdsp32_exit_powerdown_when_halted(&self) -> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 0 - LPDSP32 debug port enable"] #[inline(always)] pub fn lpdsp32_debug_enable(&self) -> LPDSP32_DEBUG_ENABLE_R { LPDSP32_DEBUG_ENABLE_R::new((self.bits & 0x01) != 0) } } impl W { #[doc = "Bit 1 - LPDSP32 exit powerdown mode configuration when halted"] #[inline(always)] pub fn 
lpdsp32_exit_powerdown_when_halted(&mut self) -> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W { w: self } } #[doc = "Bit 0 - LPDSP32 debug port enable"] #[inline(always)] pub fn lpdsp32_debug_enable(&mut self) -> LPDSP32_DEBUG_ENABLE_W { LPDSP32_DEBUG_ENABLE_W { w: self } } }
match self.bits { false => LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_DISABLED, true => LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_ENABLED, }
if_condition
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Res...
Rust
src/component/splits/tests/mod.rs
ash2x3zb9cy/livesplit-core
9c5e9c5877f905a518461e3a0586d58d4f840fcc
use super::{ ColumnSettings, ColumnStartWith, ColumnUpdateTrigger, ColumnUpdateWith, Component, Settings, State, }; use crate::{Run, Segment, TimeSpan, Timer, TimingMethod}; pub mod column; #[test] fn zero_visual_split_count_always_shows_all_splits() { let mut run = Run::new(); for _ in 0..32 { run.push_segment(Segment::new("")); } let timer = Timer::new(run).unwrap(); let layout_settings = Default::default(); let mut component = Component::with_settings(Settings { visual_split_count: 0, ..Default::default() }); let mut state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); component.scroll_down(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); component.scroll_down(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); component.scroll_up(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); } #[test] fn one_visual_split() { let mut run = Run::new(); run.push_segment(Segment::new("A")); run.push_segment(Segment::new("B")); run.push_segment(Segment::new("C")); let mut timer = Timer::new(run).unwrap(); let layout_settings = Default::default(); let mut component = Component::with_settings(Settings { always_show_last_split: false, split_preview_count: 0, visual_split_count: 1, ..Default::default() }); let mut state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "A"); assert_eq!(state.splits.len(), 1); timer.start(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "A"); assert_eq!(state.splits.len(), 1); timer.split(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "B"); assert_eq!(state.splits.len(), 1); timer.split(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "C"); assert_eq!(state.splits.len(), 
1); timer.split(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "C"); assert_eq!(state.splits.len(), 1); } #[test] fn negative_segment_times() { let mut run = Run::new(); run.push_segment(Segment::new("")); let mut timer = Timer::new(run).unwrap(); let layout_settings = Default::default(); let mut component = Component::with_settings(Settings { columns: vec![ColumnSettings { start_with: ColumnStartWith::Empty, update_with: ColumnUpdateWith::SegmentTime, update_trigger: ColumnUpdateTrigger::OnStartingSegment, ..Default::default() }], ..Default::default() }); timer.start(); timer.set_current_timing_method(TimingMethod::GameTime); timer.initialize_game_time(); timer.pause_game_time(); timer.set_game_time(TimeSpan::from_seconds(-1.0)); let state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].columns[0].value, "−0:01"); } #[test] fn unique_split_indices() { let mut run = Run::new(); run.push_segment(Segment::new("")); run.push_segment(Segment::new("")); run.push_segment(Segment::new("")); let timer = Timer::new(run).unwrap(); let mut component = Component::with_settings(Settings { visual_split_count: 20, fill_with_blank_space: true, ..Default::default() }); let state = component.state(&timer.snapshot(), &Default::default()); let mut indices = state .splits .into_iter() .map(|s| s.index) .collect::<Vec<_>>(); indices.sort_unstable(); assert!(indices.windows(2).all(|pair| pair[0] != pair[1])); }
use super::{ ColumnSettings, ColumnStartWith, ColumnUpdateTrigger, ColumnUpdateWith, Component, Settings, State, }; use crate::{Run, Segment, TimeSpan, Timer, TimingMethod}; pub mod column; #[test] fn zero_visual_split_count_always_shows_all_splits() { let mut run = Run::new(); for _ in 0..32 { run.push_segment(Segment::new("")); } let timer = Timer::new(run).unwrap(); let layout_settings = Default::default(); let mut component = Component::with_settings(Settings { visual_split_count: 0, ..Default::default() }); let mut state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); component.scroll_down(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); component.scroll_down(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); component.scroll_up(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); } #[test] fn one_visual_split() { let mut run = Run::new(); run.push_segment(Segment::new("A")); run.push_segment(Segment::new("B")); run.push_segment(Segment::new("C")); let mut timer = Timer::new(run).unwrap(); let layout_settings = Default::default(); let mut component = Component::with_settings(Settings { always_show_last_split: false, split_preview_count: 0, visual_split_count: 1, ..Default::default() }); let mut state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "A"); assert_eq!(state.splits.len(), 1); timer.start(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "A"); assert_eq!(state.splits.len(), 1); timer.split(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "B"); assert_eq!(state.splits.len(), 1); timer.split(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "C"); assert_eq!(state.splits.len(), 
1); timer.split(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "C"); assert_eq!(state.splits.len(), 1); } #[test] fn negative_segment_times() { let mut run = Run::new(); run.push_segment(Segment::new("")); let mut timer = Timer::new(run).unwrap(); let layout_settings = Default::default(); let mut component = Component::with_settings(Settings {
#[test] fn unique_split_indices() { let mut run = Run::new(); run.push_segment(Segment::new("")); run.push_segment(Segment::new("")); run.push_segment(Segment::new("")); let timer = Timer::new(run).unwrap(); let mut component = Component::with_settings(Settings { visual_split_count: 20, fill_with_blank_space: true, ..Default::default() }); let state = component.state(&timer.snapshot(), &Default::default()); let mut indices = state .splits .into_iter() .map(|s| s.index) .collect::<Vec<_>>(); indices.sort_unstable(); assert!(indices.windows(2).all(|pair| pair[0] != pair[1])); }
columns: vec![ColumnSettings { start_with: ColumnStartWith::Empty, update_with: ColumnUpdateWith::SegmentTime, update_trigger: ColumnUpdateTrigger::OnStartingSegment, ..Default::default() }], ..Default::default() }); timer.start(); timer.set_current_timing_method(TimingMethod::GameTime); timer.initialize_game_time(); timer.pause_game_time(); timer.set_game_time(TimeSpan::from_seconds(-1.0)); let state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].columns[0].value, "−0:01"); }
function_block-function_prefix_line
[ { "content": "pub fn start_run(timer: &mut Timer) {\n\n timer.set_current_timing_method(TimingMethod::GameTime);\n\n timer.start();\n\n timer.initialize_game_time();\n\n timer.pause_game_time();\n\n timer.set_game_time(TimeSpan::zero());\n\n}\n\n\n", "file_path": "src/tests_helper.rs", "r...
Rust
src/many-macros/src/lib.rs
hansl/many-rs
6400eb6e2b30c09ee13a28d20ef16b81a2a7fe05
use inflections::Inflect; use proc_macro2::{Ident, Span, TokenStream}; use quote::{quote, quote_spanned}; use serde::Deserialize; use serde_tokenstream::from_tokenstream; use syn::spanned::Spanned; use syn::PathArguments::AngleBracketed; use syn::{ AngleBracketedGenericArguments, FnArg, GenericArgument, PatType, ReturnType, Signature, TraitItem, Type, TypePath, }; #[derive(Deserialize)] struct ManyModuleAttributes { pub id: Option<u32>, pub name: Option<String>, pub namespace: Option<String>, pub many_crate: Option<String>, } #[derive(Debug)] struct Endpoint { pub name: String, pub func: Ident, pub span: Span, pub is_async: bool, pub is_mut: bool, pub has_sender: bool, pub arg_type: Option<Box<Type>>, #[allow(unused)] pub ret_type: Box<Type>, } impl Endpoint { pub fn new(signature: &Signature) -> Result<Self, (String, Span)> { let func = signature.ident.clone(); let name = func.to_string(); let is_async = signature.asyncness.is_some(); let mut has_sender = false; let arg_type: Option<Box<Type>>; let mut ret_type: Option<Box<Type>> = None; let mut inputs = signature.inputs.iter(); let receiver = inputs.next().ok_or_else(|| { ( "Must have at least 1 argument".to_string(), signature.span(), ) })?; let is_mut = if let FnArg::Receiver(r) = receiver { r.mutability.is_some() } else { return Err(( "Function in trait must have a receiver".to_string(), receiver.span(), )); }; let maybe_identity = inputs.next(); let maybe_argument = inputs.next(); match (maybe_identity, maybe_argument) { (_id, Some(FnArg::Typed(PatType { ty, .. }))) => { has_sender = true; arg_type = Some(ty.clone()); } (Some(FnArg::Typed(PatType { ty, .. })), None) => { arg_type = Some(ty.clone()); } (None, None) => { arg_type = None; } (_, _) => { return Err(("Must have 2 or 3 arguments".to_string(), signature.span())); } } if let ReturnType::Type(_, ty) = &signature.output { if let Type::Path(TypePath { path: syn::Path { segments, .. }, .. 
}) = ty.as_ref() { if segments[0].ident == "Result" || segments .iter() .map(|x| x.ident.to_string()) .collect::<Vec<String>>() .join("::") == "std::result::Result" { if let AngleBracketed(AngleBracketedGenericArguments { ref args, .. }) = segments[0].arguments { ret_type = Some( args.iter() .find_map(|x| match x { GenericArgument::Type(t) => Some(Box::new(t.clone())), _ => None, }) .unwrap(), ); } } } } if ret_type.is_none() { return Err(( "Must have a result return type.".to_string(), signature.output.span(), )); } Ok(Self { name, func, span: signature.span(), is_async, is_mut, has_sender, arg_type, ret_type: ret_type.unwrap(), }) } } #[allow(clippy::too_many_lines)] fn many_module_impl(attr: &TokenStream, item: TokenStream) -> Result<TokenStream, syn::Error> { let attrs: ManyModuleAttributes = from_tokenstream(attr)?; let many = Ident::new( attrs.many_crate.as_ref().map_or("many", String::as_str), attr.span(), ); let namespace = attrs.namespace; let span = item.span(); let tr: syn::ItemTrait = syn::parse2(item) .map_err(|_| syn::Error::new(span, "`many_module` only applies to traits.".to_string()))?; let struct_name = attrs.name.clone().unwrap_or_else(|| tr.ident.to_string()); let struct_ident = Ident::new( struct_name.as_str(), attrs .name .as_ref() .map_or_else(|| attr.span(), |_| tr.ident.span()), ); let mut trait_ = tr.clone(); if attrs.name.is_none() { trait_.ident = Ident::new(&format!("{}Backend", struct_name), tr.ident.span()); } let trait_ident = trait_.ident.clone(); let vis = trait_.vis.clone(); let attr_id = attrs.id.iter(); let attr_name = inflections::Inflect::to_constant_case(format!("{}Attribute", struct_name).as_str()); let attr_ident = Ident::new(&attr_name, attr.span()); let info_name = format!("{}Info", struct_name); let info_ident = Ident::new(&info_name, attr.span()); let endpoints: Result<Vec<_>, (String, Span)> = trait_ .items .iter() .filter_map(|item| match item { TraitItem::Method(m) => Some(m), _ => None, }) .map(|item| 
Endpoint::new(&item.sig)) .collect(); let endpoints = endpoints.map_err(|(msg, span)| syn::Error::new(span, msg))?; let ns = namespace.clone(); let endpoint_strings: Vec<String> = endpoints .iter() .map(move |e| { let name = e.name.as_str().to_camel_case(); match ns { Some(ref namespace) => format!("{}.{}", namespace, name), None => name, } }) .collect(); let ns = namespace.clone(); let validate_endpoint_pat = endpoints.iter().map(|e| { let span = e.span; let name = e.name.as_str().to_camel_case(); let ep = match ns { Some(ref namespace) => format!("{}.{}", namespace, name), None => name, }; if let Some(ty) = &e.arg_type { quote_spanned! { span => #ep => { minicbor::decode::<'_, #ty>(data) .map_err(|e| ManyError::deserialization_error(e.to_string()))?; } } } else { quote! { #ep => {} } } }); let validate = quote! { fn validate(&self, message: & #many ::message::RequestMessage) -> Result<(), #many ::ManyError> { let method = message.method.as_str(); let data = message.data.as_slice(); match method { #(#validate_endpoint_pat)* _ => return Err( #many ::ManyError::invalid_method_name(method.to_string())), }; Ok(()) } }; let ns = namespace; let execute_endpoint_pat = endpoints.iter().map(|e| { let span = e.span; let name = e.name.as_str().to_camel_case(); let ep = match ns { Some(ref namespace) => format!("{}.{}", namespace, name), None => name, }; let ep_ident = &e.func; let backend_decl = if e.is_mut { quote! { let mut backend = self.backend.lock().unwrap(); } } else { quote! { let backend = self.backend.lock().unwrap(); } }; let call = match (e.has_sender, e.arg_type.is_some(), e.is_async) { (false, true, false) => quote_spanned! { span => encode( backend . #ep_ident ( decode( data )? ) ) }, (false, true, true) => quote_spanned! { span => encode( backend . #ep_ident ( decode( data )? ).await ) }, (true, true, false) => quote_spanned! { span => encode( backend . #ep_ident ( &message.from.unwrap_or_default(), decode( data )? ) ) }, (true, true, true) => quote_spanned! 
{ span => encode( backend . #ep_ident ( &message.from.unwrap_or_default(), decode( data )? ).await ) }, (false, false, false) => quote_spanned! { span => encode( backend . #ep_ident ( ) ) }, (false, false, true) => quote_spanned! { span => encode( backend . #ep_ident ( ).await ) }, (true, false, false) => quote_spanned! { span => encode( backend . #ep_ident ( &message.from.unwrap_or_default() ) ) }, (true, false, true) => quote_spanned! { span => encode( backend . #ep_ident ( &message.from.unwrap_or_default() ).await ) }, }; quote_spanned! { span => #ep => { #backend_decl #call } } }); let execute = quote! { async fn execute( &self, message: #many ::message::RequestMessage, ) -> Result< #many ::message::ResponseMessage, #many ::ManyError> { use #many ::ManyError; fn decode<'a, T: minicbor::Decode<'a>>(data: &'a [u8]) -> Result<T, ManyError> { minicbor::decode(data).map_err(|e| ManyError::deserialization_error(e.to_string())) } fn encode<T: minicbor::Encode>(result: Result<T, ManyError>) -> Result<Vec<u8>, ManyError> { minicbor::to_vec(result?).map_err(|e| ManyError::serialization_error(e.to_string())) } let data = message.data.as_slice(); let result = match message.method.as_str() { #( #execute_endpoint_pat )* _ => Err(ManyError::internal_server_error()), }?; Ok( #many ::message::ResponseMessage::from_request( &message, &message.to, Ok(result), )) } }; let attribute = if attrs.id.is_some() { quote! { Some(#attr_ident) } } else { quote! { None } }; Ok(quote! 
{ #( #vis const #attr_ident: #many ::protocol::Attribute = #many ::protocol::Attribute::id(#attr_id); )* #vis struct #info_ident; impl std::ops::Deref for #info_ident { type Target = #many ::server::module::ManyModuleInfo; fn deref(&self) -> & #many ::server::module::ManyModuleInfo { use #many ::server::module::ManyModuleInfo; static ONCE: std::sync::Once = std::sync::Once::new(); static mut VALUE: *mut ManyModuleInfo = 0 as *mut ManyModuleInfo; unsafe { ONCE.call_once(|| VALUE = Box::into_raw(Box::new(ManyModuleInfo { name: #struct_name .to_string(), attribute: #attribute, endpoints: vec![ #( #endpoint_strings .to_string() ),* ], }))); &*VALUE } } } #[async_trait::async_trait] #trait_ #vis struct #struct_ident<T: #trait_ident> { backend: std::sync::Arc<std::sync::Mutex<T>> } impl<T: #trait_ident> std::fmt::Debug for #struct_ident<T> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct(#struct_name).finish() } } impl<T: #trait_ident> #struct_ident<T> { pub fn new(backend: std::sync::Arc<std::sync::Mutex<T>>) -> Self { Self { backend } } } #[async_trait::async_trait] impl<T: #trait_ident> #many ::ManyModule for #struct_ident<T> { fn info(&self) -> & #many ::server::module::ManyModuleInfo { & #info_ident } #validate #execute } }) } #[proc_macro_attribute] pub fn many_module( attr: proc_macro::TokenStream, item: proc_macro::TokenStream, ) -> proc_macro::TokenStream { many_module_impl(&attr.into(), item.into()) .unwrap_or_else(|e| e.to_compile_error()) .into() }
use inflections::Inflect; use proc_macro2::{Ident, Span, TokenStream}; use quote::{quote, quote_spanned}; use serde::Deserialize; use serde_tokenstream::from_tokenstream; use syn::spanned::Spanned; use syn::PathArguments::AngleBracketed; use syn::{ AngleBracketedGenericArguments, FnArg, GenericArgument, PatType, ReturnType, Signature, TraitItem, Type, TypePath, }; #[derive(Deserialize)] struct ManyModuleAttributes { pub id: Option<u32>, pub name: Option<String>, pub namespace: Option<String>, pub many_crate: Option<String>, } #[derive(Debug)] struct Endpoint { pub name: String, pub func: Ident, pub span: Span, pub is_async: bool, pub is_mut: bool, pub has_sender: bool, pub arg_type: Option<Box<Type>>, #[allow(unused)] pub ret_type: Box<Type>, } impl Endpoint { pub fn new(signature: &Signature) -> Result<Self, (String, Span)> { let func = signature.ident.clone(); let name = func.to_string(); let is_async = signature.asyncness.is_some(); let mut has_sender = false; let arg_type: Option<Box<Type>>; let mut ret_type: Option<Box<Type>> = None; let mut inputs = signature.inputs.iter(); let receiver = inputs.next().ok_or_else(|| { ( "Must have at least 1 argument".to_string(), signature.span(), ) })?; let is_mut = if let FnArg::Receiver(r) = receiver { r.mutability.is_some() } else { return Err(( "Function in trait must have a receiver".to_string(), receiver.span(), )); }; let maybe_identity = inputs.next(); let maybe_argument = inputs.next(); match (maybe_identity, maybe_argument) { (_id, Some(FnArg::Typed(PatType { ty, .. }))) => { has_sender = true; arg_type = Some(ty.clone()); } (Some(FnArg::Typed(PatType { ty, .. })), None) => { arg_type = Some(ty.clone()); } (None, None) => { arg_type = None; } (_, _) => { return Err(("Must have 2 or 3 arguments".to_string(), signature.span())); } } if let ReturnType::Type(_, ty) = &signature.output { if let Type::Path(TypePath { path: syn::Path { segments, .. }, .. }) = ty.as_ref() { if segments[0].ident == "Result" || segments
= from_tokenstream(attr)?; let many = Ident::new( attrs.many_crate.as_ref().map_or("many", String::as_str), attr.span(), ); let namespace = attrs.namespace; let span = item.span(); let tr: syn::ItemTrait = syn::parse2(item) .map_err(|_| syn::Error::new(span, "`many_module` only applies to traits.".to_string()))?; let struct_name = attrs.name.clone().unwrap_or_else(|| tr.ident.to_string()); let struct_ident = Ident::new( struct_name.as_str(), attrs .name .as_ref() .map_or_else(|| attr.span(), |_| tr.ident.span()), ); let mut trait_ = tr.clone(); if attrs.name.is_none() { trait_.ident = Ident::new(&format!("{}Backend", struct_name), tr.ident.span()); } let trait_ident = trait_.ident.clone(); let vis = trait_.vis.clone(); let attr_id = attrs.id.iter(); let attr_name = inflections::Inflect::to_constant_case(format!("{}Attribute", struct_name).as_str()); let attr_ident = Ident::new(&attr_name, attr.span()); let info_name = format!("{}Info", struct_name); let info_ident = Ident::new(&info_name, attr.span()); let endpoints: Result<Vec<_>, (String, Span)> = trait_ .items .iter() .filter_map(|item| match item { TraitItem::Method(m) => Some(m), _ => None, }) .map(|item| Endpoint::new(&item.sig)) .collect(); let endpoints = endpoints.map_err(|(msg, span)| syn::Error::new(span, msg))?; let ns = namespace.clone(); let endpoint_strings: Vec<String> = endpoints .iter() .map(move |e| { let name = e.name.as_str().to_camel_case(); match ns { Some(ref namespace) => format!("{}.{}", namespace, name), None => name, } }) .collect(); let ns = namespace.clone(); let validate_endpoint_pat = endpoints.iter().map(|e| { let span = e.span; let name = e.name.as_str().to_camel_case(); let ep = match ns { Some(ref namespace) => format!("{}.{}", namespace, name), None => name, }; if let Some(ty) = &e.arg_type { quote_spanned! { span => #ep => { minicbor::decode::<'_, #ty>(data) .map_err(|e| ManyError::deserialization_error(e.to_string()))?; } } } else { quote! 
{ #ep => {} } } }); let validate = quote! { fn validate(&self, message: & #many ::message::RequestMessage) -> Result<(), #many ::ManyError> { let method = message.method.as_str(); let data = message.data.as_slice(); match method { #(#validate_endpoint_pat)* _ => return Err( #many ::ManyError::invalid_method_name(method.to_string())), }; Ok(()) } }; let ns = namespace; let execute_endpoint_pat = endpoints.iter().map(|e| { let span = e.span; let name = e.name.as_str().to_camel_case(); let ep = match ns { Some(ref namespace) => format!("{}.{}", namespace, name), None => name, }; let ep_ident = &e.func; let backend_decl = if e.is_mut { quote! { let mut backend = self.backend.lock().unwrap(); } } else { quote! { let backend = self.backend.lock().unwrap(); } }; let call = match (e.has_sender, e.arg_type.is_some(), e.is_async) { (false, true, false) => quote_spanned! { span => encode( backend . #ep_ident ( decode( data )? ) ) }, (false, true, true) => quote_spanned! { span => encode( backend . #ep_ident ( decode( data )? ).await ) }, (true, true, false) => quote_spanned! { span => encode( backend . #ep_ident ( &message.from.unwrap_or_default(), decode( data )? ) ) }, (true, true, true) => quote_spanned! { span => encode( backend . #ep_ident ( &message.from.unwrap_or_default(), decode( data )? ).await ) }, (false, false, false) => quote_spanned! { span => encode( backend . #ep_ident ( ) ) }, (false, false, true) => quote_spanned! { span => encode( backend . #ep_ident ( ).await ) }, (true, false, false) => quote_spanned! { span => encode( backend . #ep_ident ( &message.from.unwrap_or_default() ) ) }, (true, false, true) => quote_spanned! { span => encode( backend . #ep_ident ( &message.from.unwrap_or_default() ).await ) }, }; quote_spanned! { span => #ep => { #backend_decl #call } } }); let execute = quote! 
{ async fn execute( &self, message: #many ::message::RequestMessage, ) -> Result< #many ::message::ResponseMessage, #many ::ManyError> { use #many ::ManyError; fn decode<'a, T: minicbor::Decode<'a>>(data: &'a [u8]) -> Result<T, ManyError> { minicbor::decode(data).map_err(|e| ManyError::deserialization_error(e.to_string())) } fn encode<T: minicbor::Encode>(result: Result<T, ManyError>) -> Result<Vec<u8>, ManyError> { minicbor::to_vec(result?).map_err(|e| ManyError::serialization_error(e.to_string())) } let data = message.data.as_slice(); let result = match message.method.as_str() { #( #execute_endpoint_pat )* _ => Err(ManyError::internal_server_error()), }?; Ok( #many ::message::ResponseMessage::from_request( &message, &message.to, Ok(result), )) } }; let attribute = if attrs.id.is_some() { quote! { Some(#attr_ident) } } else { quote! { None } }; Ok(quote! { #( #vis const #attr_ident: #many ::protocol::Attribute = #many ::protocol::Attribute::id(#attr_id); )* #vis struct #info_ident; impl std::ops::Deref for #info_ident { type Target = #many ::server::module::ManyModuleInfo; fn deref(&self) -> & #many ::server::module::ManyModuleInfo { use #many ::server::module::ManyModuleInfo; static ONCE: std::sync::Once = std::sync::Once::new(); static mut VALUE: *mut ManyModuleInfo = 0 as *mut ManyModuleInfo; unsafe { ONCE.call_once(|| VALUE = Box::into_raw(Box::new(ManyModuleInfo { name: #struct_name .to_string(), attribute: #attribute, endpoints: vec![ #( #endpoint_strings .to_string() ),* ], }))); &*VALUE } } } #[async_trait::async_trait] #trait_ #vis struct #struct_ident<T: #trait_ident> { backend: std::sync::Arc<std::sync::Mutex<T>> } impl<T: #trait_ident> std::fmt::Debug for #struct_ident<T> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct(#struct_name).finish() } } impl<T: #trait_ident> #struct_ident<T> { pub fn new(backend: std::sync::Arc<std::sync::Mutex<T>>) -> Self { Self { backend } } } #[async_trait::async_trait] impl<T: 
#trait_ident> #many ::ManyModule for #struct_ident<T> { fn info(&self) -> & #many ::server::module::ManyModuleInfo { & #info_ident } #validate #execute } }) } #[proc_macro_attribute] pub fn many_module( attr: proc_macro::TokenStream, item: proc_macro::TokenStream, ) -> proc_macro::TokenStream { many_module_impl(&attr.into(), item.into()) .unwrap_or_else(|e| e.to_compile_error()) .into() }
.iter() .map(|x| x.ident.to_string()) .collect::<Vec<String>>() .join("::") == "std::result::Result" { if let AngleBracketed(AngleBracketedGenericArguments { ref args, .. }) = segments[0].arguments { ret_type = Some( args.iter() .find_map(|x| match x { GenericArgument::Type(t) => Some(Box::new(t.clone())), _ => None, }) .unwrap(), ); } } } } if ret_type.is_none() { return Err(( "Must have a result return type.".to_string(), signature.output.span(), )); } Ok(Self { name, func, span: signature.span(), is_async, is_mut, has_sender, arg_type, ret_type: ret_type.unwrap(), }) } } #[allow(clippy::too_many_lines)] fn many_module_impl(attr: &TokenStream, item: TokenStream) -> Result<TokenStream, syn::Error> { let attrs: ManyModuleAttributes
random
[ { "content": "// TODO: Change the error type\n\npub fn public_key(key: &CoseKey) -> Result<CoseKey, String> {\n\n let params = BTreeMap::from_iter(key.params.clone().into_iter());\n\n match key.alg {\n\n Some(Algorithm::Assigned(coset::iana::Algorithm::EdDSA)) => {\n\n let x = params.get...
Rust
src/types/record.rs
jmackie/fit
8103bfd7435e992d88165a1dd8c55947ab73d7c7
use bits::Bits; use byteorder::{ BigEndian, ByteOrder, LittleEndian, ReadBytesExt, }; use error::{ Error, Result, }; use profile; use std::{ collections::HashMap, convert::TryFrom, }; pub struct Record { pub header: Header, pub content: Message, } impl Record { pub(crate) fn decode<R: ReadBytesExt>( r: &mut R, local_mesgs: &HashMap<u8, Definition>, ) -> Result<Self> { let header = Header::decode(r).map_err(Error::decoding("header"))?; let content = match header { Header::Definition { .. } => { Message::Definition( Definition::decode(r) .map_err(Error::decoding("definition message"))?, ) }, Header::Data { local_mesg_num, } => { let definition = local_mesgs .get(&local_mesg_num) .ok_or(Error::missing_definition(local_mesg_num))?; match definition.arch { Architecture::LittleEndian => { Message::Data( Data::decode::<R, LittleEndian>(r, definition) .map_err(Error::decoding("data message"))?, ) }, Architecture::BigEndian => { Message::Data( Data::decode::<R, BigEndian>(r, definition) .map_err(Error::decoding("data message"))?, ) }, } }, Header::CompressedTimestamp { .. } => Message::CompressedTimestamp, }; Ok(Record { header, content, }) } } pub enum Header { Definition { local_mesg_num: u8, }, Data { local_mesg_num: u8, }, CompressedTimestamp { local_mesg_num: u8, time_offset: u8, }, } impl Header { fn decode<R: ReadBytesExt>(r: &mut R) -> Result<Self> { let byte = r.read_u8().map_err(Error::reading("byte"))?; if byte.bit_not_set(7) { if byte.bit_is_set(6) { Ok(Header::Definition { local_mesg_num: byte.bit_range(0, 3) }) } else { Ok(Header::Data { local_mesg_num: byte.bit_range(0, 3) }) } } else { Ok(Header::CompressedTimestamp { local_mesg_num: byte.bit_range(5, 6), time_offset: byte.bit_range(0, 4), }) } } pub fn local_mesg_num(&self) -> u8 { match self { Header::Definition { local_mesg_num, } => *local_mesg_num, Header::Data { local_mesg_num, } => *local_mesg_num, Header::CompressedTimestamp { local_mesg_num, .. 
} => *local_mesg_num, } } } pub enum Message { Definition(Definition), Data(Data), CompressedTimestamp, } #[derive(Debug, Clone)] pub struct Definition { arch: Architecture, global_mesg_num: u16, nfields: u8, field_defs: Vec<FieldDefinition>, } impl Definition { pub(super) fn decode<R: ReadBytesExt>(r: &mut R) -> Result<Self> { r.read_u8().map_err(Error::reading("reserved byte"))?; let arch = r .read_u8() .map_err(Error::reading("architecture byte")) .and_then(Architecture::try_from)?; let global_mesg_num = match arch { Architecture::LittleEndian => { r.read_u16::<LittleEndian>() .map_err(Error::reading("global message number"))? }, Architecture::BigEndian => { r.read_u16::<BigEndian>() .map_err(Error::reading("global message number"))? }, }; let nfields = r.read_u8().map_err(Error::reading("number of fields"))?; let mut field_defs = Vec::with_capacity(nfields as usize); for i in 0..nfields { let field_def = FieldDefinition::decode(r) .map_err(Error::reading(format!("field definition #{}", i)))?; field_defs.push(field_def); } Ok(Definition { arch, global_mesg_num, nfields, field_defs, }) } } #[derive(Debug, Clone)] pub struct FieldDefinition { num: u8, size: u8, _base_type_num: u8, } impl FieldDefinition { pub(super) fn decode<R: ReadBytesExt>(r: &mut R) -> Result<Self> { let num = r.read_u8().map_err(Error::reading("number"))?; let size = r.read_u8().map_err(Error::reading("size"))?; let _base_type_num = r.read_u8().map_err(Error::reading("base type"))?; Ok(FieldDefinition { num, size, _base_type_num, }) } } pub struct Data(pub Vec<profile::messages::Message>); impl Data { pub(super) fn decode<R: ReadBytesExt, T: ByteOrder>( r: &mut R, definition: &Definition, ) -> Result<Self> { let mut mesgs = Vec::with_capacity(definition.field_defs.len()); for field_def in definition.field_defs.iter() { let mut buffer = vec![0; field_def.size as usize]; r.read(&mut buffer).map_err(Error::reading("buffer"))?; let mesg = profile::messages::Message::decode::<T>( &buffer, 
definition.global_mesg_num, field_def.num, )?; mesgs.push(mesg); } Ok(Data(mesgs)) } } #[derive(Debug, Clone)] enum Architecture { LittleEndian = 0, BigEndian = 1, } impl TryFrom<u8> for Architecture { type Error = Error; fn try_from(n: u8) -> Result<Architecture> { match n { 0 => Ok(Architecture::LittleEndian), 1 => Ok(Architecture::BigEndian), _ => Err(Error::unknown_architecture(n)), } } }
use bits::Bits; use byteorder::{ BigEndian, ByteOrder, LittleEndian, ReadBytesExt, }; use error::{ Error, Result, }; use profile; use std::{ collections::HashMap, convert::TryFrom, }; pub struct Record { pub header: Header, pub content: Message, } impl Record { pub(crate) fn decode<R: ReadBytesExt>( r: &mut R, local_mesgs: &HashMap<u8, Definition>, ) -> Result<Self> { let header = Header::decode(r).map_err(Error::decoding("header"))?; let content = match header { Header::Definition { .. } => { Message::Definition( Definition::decode(r) .map_err(Error::decoding("definition message"))?, ) }, Header::Data { local_mesg_num, } => { let definition = local_mesgs .get(&local_mesg_num) .ok_or(Error::missing_definition(local_mesg_num))?; match definition.arch { Architecture::LittleEndian => { Message::Data( Data::decode::<R, LittleEndian>(r, definition) .map_err(Error::decoding("data message"))?, ) }, Architecture::BigEndian => { Message::Data( Data::decode::<R, BigEndian>(r, definition) .map_err(Error::decoding("data message"))?, ) }, } }, Header::CompressedTimestamp { .. } => Message::CompressedTimestamp, }; Ok(Record { header, content, }) } } pub enum Header { Definition { local_mesg_num: u8, }, Data { local_mesg_num: u8, }, CompressedTimestamp { local_mesg_num: u8, time_offset: u8, }, } impl Header { fn decode<R: ReadBytesExt>(r: &mut R) -> Result<Self> { let byte = r.read_u8().map_err(Error::reading("byte"))?; if byte.bit_not_set(7) { if byte.bit_is_set(6) { Ok(Header::Definition { local_mesg_num: byte.bit_range(0, 3) }) } else { Ok(Header::Data { local_mesg_num: byte.bit_range(0, 3) }) } } else { Ok(Header::CompressedTimestamp { local_mesg_num: byte.bit_range(5, 6), time_offset: byte.bit_range(0, 4), }) } } pub fn local_mesg_num(&self) -> u8 { match self { Header::Definition { local_mesg_num, } => *local_mesg_num, Header::Data { local_mesg_num, } => *local_mesg_num, Header::CompressedTimestamp { local_mesg_num, .. 
} => *local_mesg_num, } } } pub enum Message { Definition(Definition), Data(Data), CompressedTimestamp, } #[derive(Debug, Clone)] pub struct Definition { arch: Architecture, global_mesg_num: u16, nfields: u8, field_defs: Vec<FieldDefinition>, } impl Definition { pub(super) fn decode<R: ReadBytesExt>(r: &mut R) -> Result<Self> { r.read_u8().map_err(Error::reading("reserved byte"))?; let arch = r .read_u8() .map_err(Error::reading("architecture byte")) .and_then(Architecture::try_from)?; let global_mesg_num = match arch { Architecture::LittleEndian => { r.read_u16::<LittleEndian>() .map_err(Error::reading("global message number"))? },
} #[derive(Debug, Clone)] pub struct FieldDefinition { num: u8, size: u8, _base_type_num: u8, } impl FieldDefinition { pub(super) fn decode<R: ReadBytesExt>(r: &mut R) -> Result<Self> { let num = r.read_u8().map_err(Error::reading("number"))?; let size = r.read_u8().map_err(Error::reading("size"))?; let _base_type_num = r.read_u8().map_err(Error::reading("base type"))?; Ok(FieldDefinition { num, size, _base_type_num, }) } } pub struct Data(pub Vec<profile::messages::Message>); impl Data { pub(super) fn decode<R: ReadBytesExt, T: ByteOrder>( r: &mut R, definition: &Definition, ) -> Result<Self> { let mut mesgs = Vec::with_capacity(definition.field_defs.len()); for field_def in definition.field_defs.iter() { let mut buffer = vec![0; field_def.size as usize]; r.read(&mut buffer).map_err(Error::reading("buffer"))?; let mesg = profile::messages::Message::decode::<T>( &buffer, definition.global_mesg_num, field_def.num, )?; mesgs.push(mesg); } Ok(Data(mesgs)) } } #[derive(Debug, Clone)] enum Architecture { LittleEndian = 0, BigEndian = 1, } impl TryFrom<u8> for Architecture { type Error = Error; fn try_from(n: u8) -> Result<Architecture> { match n { 0 => Ok(Architecture::LittleEndian), 1 => Ok(Architecture::BigEndian), _ => Err(Error::unknown_architecture(n)), } } }
Architecture::BigEndian => { r.read_u16::<BigEndian>() .map_err(Error::reading("global message number"))? }, }; let nfields = r.read_u8().map_err(Error::reading("number of fields"))?; let mut field_defs = Vec::with_capacity(nfields as usize); for i in 0..nfields { let field_def = FieldDefinition::decode(r) .map_err(Error::reading(format!("field definition #{}", i)))?; field_defs.push(field_def); } Ok(Definition { arch, global_mesg_num, nfields, field_defs, }) }
function_block-function_prefix_line
[ { "content": "/// Attempt to open the types worksheet.\n\npub fn open_sheet<R>(workbook: &mut R) -> Result<Sheet>\n\nwhere\n\n R: calamine::Reader,\n\n{\n\n workbook\n\n .worksheet_range(WORKSHEET_NAME)\n\n .ok_or(Error::missing_sheet(WORKSHEET_NAME))?\n\n .map_err(Error::bad_sheet(WO...
Rust
src/matcher.rs
SpectralOps/service-policy-kit
a1d1b8eab9981b21b87349c5232b4c893a723933
use crate::data::{Cause, HeaderList, Response, Violation}; use fancy_regex::Regex; use std::collections::HashMap; pub struct RegexMatcher { pub kind: String, } impl RegexMatcher { pub fn new(kind: &str) -> Self { Self { kind: kind.to_string(), } } fn match_field( &self, name: &str, wire_field: &Option<String>, recorded_field: &Option<String>, ) -> Option<Violation> { if let Some(recorded_value) = recorded_field { if wire_field.is_none() { return Some(Violation { kind: self.kind.clone(), cause: Cause::WireMissing, subject: name.to_string(), on: Some(name.to_string()), wire: None, recorded: recorded_value.to_string(), }); } let match_re = Regex::new(recorded_value).unwrap(); if !match_re.is_match(wire_field.as_ref().unwrap()).unwrap() { return Some(Violation { kind: self.kind.clone(), cause: Cause::Mismatch, subject: name.to_string(), on: Some(name.to_string()), wire: wire_field.clone(), recorded: recorded_value.to_string(), }); } } None } fn match_headers( &self, wire_headers: &Option<HashMap<String, HeaderList>>, recorded_headers: &Option<HashMap<String, HeaderList>>, ) -> Option<Violation> { if let Some(recorded_headers) = recorded_headers { if wire_headers.is_none() { return Some(Violation { kind: self.kind.clone(), cause: Cause::WireMissing, subject: "headers".to_string(), on: Some("all headers".to_string()), wire: None, recorded: format!("{:?}", recorded_headers), }); } let wire_headers = wire_headers.as_ref().unwrap(); let matches_headers = recorded_headers.iter().find(|(k, vs)| { let k = k.to_lowercase(); if !wire_headers.contains_key(k.as_str()) { return true; } let wire_header_values = &wire_headers[k.as_str()]; !vs.iter().any(|v| { let v_re = Regex::new(v.as_str()).unwrap(); wire_header_values .iter() .any(|wv| v_re.is_match(wv).unwrap()) }) }); if let Some(matches_headers) = matches_headers { let (key, _) = matches_headers; let key = key.to_lowercase(); return Some(Violation { kind: self.kind.clone(), cause: Cause::Mismatch, subject: 
"headers".to_string(), on: Some(key.to_string()), wire: Some(format!( "{:?}", wire_headers.get(key.as_str()).unwrap_or(&vec![]) )), recorded: format!("{:?}", matches_headers.1), }); } } None } fn match_vars( &self, wire_vars: &Option<HashMap<String, String>>, recorded_vars: &Option<HashMap<String, String>>, ) -> Option<Violation> { if let Some(recorded_vars) = recorded_vars { if wire_vars.is_none() { return Some(Violation { kind: self.kind.clone(), cause: Cause::WireMissing, subject: "vars".to_string(), on: Some("all vars".to_string()), wire: None, recorded: format!("{:?}", recorded_vars), }); } let wire_vars = wire_vars.as_ref().unwrap(); let badly_matched_vars = recorded_vars.iter().find(|(k, v)| { let k = k.to_lowercase(); if !wire_vars.contains_key(k.as_str()) { return true; } let wire_var = &wire_vars[k.as_str()]; let v_re = Regex::new(v.as_str()).unwrap(); !v_re.is_match(wire_var).unwrap() }); if let Some(badly_matched_vars) = badly_matched_vars { let (key, _) = badly_matched_vars; let key = key.to_lowercase(); return Some(Violation { kind: self.kind.clone(), cause: Cause::Mismatch, subject: "vars".to_string(), on: Some(key.to_string()), wire: Some(format!( "{:?}", wire_vars.get(key.as_str()).unwrap_or(&"".to_string()) )), recorded: format!("{:?}", badly_matched_vars.1), }); } } None } pub fn is_match( &self, wire_response: &Response, recorded_response: Option<&Response>, ) -> Vec<Violation> { if let Some(recorded_response) = recorded_response { vec![ self.match_field("body", &wire_response.body, &recorded_response.body), self.match_field( "status_code", &wire_response.status_code, &recorded_response.status_code, ), self.match_headers(&wire_response.headers, &recorded_response.headers), self.match_vars(&wire_response.vars, &recorded_response.vars), ] .into_iter() .flatten() .collect::<Vec<_>>() } else { vec![Violation { kind: self.kind.clone(), cause: Cause::RecordedMissing, subject: "response".to_string(), on: None, wire: None, recorded: format!("{:?}", 
wire_response), }] } } }
use crate::data::{Cause, HeaderList, Response, Violation}; use fancy_regex::Regex; use std::collections::HashMap; pub struct RegexMatcher { pub kind: String, } impl RegexMatcher { pub fn new(kind: &str) -> Self { Self { kind: kind.to_string(), } } fn match_field( &self, name: &str, wire_field: &Option<String>, recorded_field: &Option<String>, ) -> Option<Violation> { if let Some(recorded_value) = recorded_field { if wire_field.is_none() { return Some(Violation { kind: self.kind.clone(), cause: Cause::WireMissing, subject: name.to_string(), on: Some(name.to_string()), wire: None, recorded: recorded_value.to_string(), }); } let match_re = Regex::new(recorded_value).unwrap(); if !match_re.is_match(wire_field.as_ref().unwrap()).unwrap() { return Some(Violation { kind: self.kind.clone(), cause: Cause::Mismatch, subject: name.to_string(), on: Some(name.to_string()), wire: wire_field.clone(), recorded: recorded_value.to_string(), }); } } None }
fn match_vars( &self, wire_vars: &Option<HashMap<String, String>>, recorded_vars: &Option<HashMap<String, String>>, ) -> Option<Violation> { if let Some(recorded_vars) = recorded_vars { if wire_vars.is_none() { return Some(Violation { kind: self.kind.clone(), cause: Cause::WireMissing, subject: "vars".to_string(), on: Some("all vars".to_string()), wire: None, recorded: format!("{:?}", recorded_vars), }); } let wire_vars = wire_vars.as_ref().unwrap(); let badly_matched_vars = recorded_vars.iter().find(|(k, v)| { let k = k.to_lowercase(); if !wire_vars.contains_key(k.as_str()) { return true; } let wire_var = &wire_vars[k.as_str()]; let v_re = Regex::new(v.as_str()).unwrap(); !v_re.is_match(wire_var).unwrap() }); if let Some(badly_matched_vars) = badly_matched_vars { let (key, _) = badly_matched_vars; let key = key.to_lowercase(); return Some(Violation { kind: self.kind.clone(), cause: Cause::Mismatch, subject: "vars".to_string(), on: Some(key.to_string()), wire: Some(format!( "{:?}", wire_vars.get(key.as_str()).unwrap_or(&"".to_string()) )), recorded: format!("{:?}", badly_matched_vars.1), }); } } None } pub fn is_match( &self, wire_response: &Response, recorded_response: Option<&Response>, ) -> Vec<Violation> { if let Some(recorded_response) = recorded_response { vec![ self.match_field("body", &wire_response.body, &recorded_response.body), self.match_field( "status_code", &wire_response.status_code, &recorded_response.status_code, ), self.match_headers(&wire_response.headers, &recorded_response.headers), self.match_vars(&wire_response.vars, &recorded_response.vars), ] .into_iter() .flatten() .collect::<Vec<_>>() } else { vec![Violation { kind: self.kind.clone(), cause: Cause::RecordedMissing, subject: "response".to_string(), on: None, wire: None, recorded: format!("{:?}", wire_response), }] } } }
fn match_headers( &self, wire_headers: &Option<HashMap<String, HeaderList>>, recorded_headers: &Option<HashMap<String, HeaderList>>, ) -> Option<Violation> { if let Some(recorded_headers) = recorded_headers { if wire_headers.is_none() { return Some(Violation { kind: self.kind.clone(), cause: Cause::WireMissing, subject: "headers".to_string(), on: Some("all headers".to_string()), wire: None, recorded: format!("{:?}", recorded_headers), }); } let wire_headers = wire_headers.as_ref().unwrap(); let matches_headers = recorded_headers.iter().find(|(k, vs)| { let k = k.to_lowercase(); if !wire_headers.contains_key(k.as_str()) { return true; } let wire_header_values = &wire_headers[k.as_str()]; !vs.iter().any(|v| { let v_re = Regex::new(v.as_str()).unwrap(); wire_header_values .iter() .any(|wv| v_re.is_match(wv).unwrap()) }) }); if let Some(matches_headers) = matches_headers { let (key, _) = matches_headers; let key = key.to_lowercase(); return Some(Violation { kind: self.kind.clone(), cause: Cause::Mismatch, subject: "headers".to_string(), on: Some(key.to_string()), wire: Some(format!( "{:?}", wire_headers.get(key.as_str()).unwrap_or(&vec![]) )), recorded: format!("{:?}", matches_headers.1), }); } } None }
function_block-full_function
[ { "content": "pub fn diff_text(expected: &str, actual: &str) -> (String, String, String) {\n\n let expected = format!(\"{:?}\", expected);\n\n let expected = &expected[1..expected.len() - 1];\n\n\n\n let actual = format!(\"{:?}\", actual);\n\n let actual = &actual[1..actual.len() - 1];\n\n\n\n le...
Rust
crates/newport_editor/src/editor.rs
PyroFlareX/newport
ca4b09e98b31d1eefed8a8a545087be8fe28913f
use crate::{ engine, graphics, math, asset, gpu, os, Context, RawInput, DrawState, View, DARK, Layout, Panel, Style, Sizing, ColorStyle, LayoutStyle, Shape, TextStyle, }; use engine::{ Module, Engine, EngineBuilder, InputEvent }; use graphics::{ Graphics, Texture, Pipeline }; use math::{ Rect }; use asset::{ AssetRef, AssetManager }; use os::window::WindowStyle; use std::sync::{ Mutex, MutexGuard }; struct EditorAssets { _close_button: AssetRef<Texture>, present_pipeline: AssetRef<Pipeline>, } impl EditorAssets { fn new() -> Self { let asset_manager = Engine::as_ref().module::<AssetManager>().unwrap(); Self{ _close_button: asset_manager.find("{ce163885-9cd7-4103-b865-3e41df21ba13}").unwrap(), present_pipeline: asset_manager.find("{62b4ffa0-9510-4818-a6f2-7645ec304d8e}").unwrap() } } } #[allow(dead_code)] struct EditorInner { gui: Context, input: Option<RawInput>, draw_state: DrawState, assets: EditorAssets, view: View, } pub struct Editor(Mutex<EditorInner>); impl Editor { pub fn set_view(&self, view: View) { let mut editor = self.lock(); editor.view = view; } fn lock(&self) -> MutexGuard<EditorInner> { self.0.lock().unwrap() } fn do_frame(&self, dt: f32) { let engine = Engine::as_ref(); let graphics = engine.module::<Graphics>().unwrap(); let device = graphics.device(); let dpi = engine.dpi(); let backbuffer = device.acquire_backbuffer(); let mut editor = self.lock(); let EditorInner { gui, input, draw_state, view, assets, } = &mut *editor; let canvas = { let mut input = input.take().unwrap_or_default(); input.viewport = (0.0, 0.0, backbuffer.width() as f32, backbuffer.height() as f32).into(); input.dt = dt; input.dpi = dpi; gui.begin_frame(input); let mut layout_style: LayoutStyle = gui.style().get(); layout_style.padding = (12.0, 8.0, 12.0, 8.0).into(); layout_style.margin = Rect::default(); gui.style().push(layout_style); let mut color: ColorStyle = gui.style().get(); color.inactive_background = DARK.bg; color.unhovered_background = DARK.bg; 
gui.style().push(color); let text_style: TextStyle = gui.style().get(); let height = text_style.label_height() + layout_style.padding.min.y + layout_style.padding.max.y; Panel::top("menu_bar", height).build(gui, |builder| { let space = builder.available_rect(); builder.button("File").clicked(); builder.button("Edit").clicked(); builder.button("View").clicked(); builder.button("Run").clicked(); builder.button("Help").clicked(); let bounds = builder.layout.push_size(builder.layout.space_left()); builder.layout(Layout::right_to_left(bounds), |builder| { let mut color: ColorStyle = builder.style().get(); color.hovered_background = DARK.red0; color.hovered_foreground = DARK.fg; color.focused_background = DARK.red0; color.focused_foreground = DARK.fg; builder.scoped_style(color, |builder| { if builder.button("Close").clicked() { engine.shutdown(); } }); if builder.button("Max").clicked() { engine.maximize(); } if builder.button("Min").clicked() { engine.minimize(); } let drag = builder.layout.available_rect(); let drag = Rect::from_pos_size(drag.pos() * builder.input().dpi, drag.size() * builder.input().dpi); engine.set_custom_drag(drag); builder.layout(Layout::left_to_right(space), |builder| { let mut layout_style: LayoutStyle = builder.style().get(); layout_style.width_sizing = Sizing::Fill; layout_style.height_sizing = Sizing::Fill; builder.scoped_style(layout_style, |builder| builder.label(format!("{} - Newport Editor", Engine::as_ref().name()))); }); }); }); gui.style().pop::<ColorStyle>(); let bounds = gui.take_canvas(); let mut builder = gui.builder("view", Layout::up_to_down(bounds)); let mut color: ColorStyle = builder.style().get(); builder.painter.push_shape(Shape::solid_rect(bounds, color.inactive_background, 0.0)); color.inactive_background = DARK.bg; builder.scoped_style(color, |builder| { let bounds = Rect::from_min_max(bounds.min, bounds.max); builder.layout(Layout::up_to_down(bounds), |builder| { view.build(builder); }); }); builder.finish(); 
gui.end_frame() }; device.update_bindless(); let present_pipeline = assets.present_pipeline.read(); let mut gfx = device.create_graphics_context().unwrap(); gfx.begin(); { let imgui = draw_state.record(canvas, &mut gfx, gui).unwrap(); gfx.begin_render_pass(&graphics.backbuffer_render_pass(), &[&backbuffer]); gfx.bind_pipeline(&present_pipeline.gpu); struct Import { _texture: u32, } let import_buffer = device.create_buffer( gpu::BufferUsage::CONSTANTS, gpu::MemoryType::HostVisible, std::mem::size_of::<Import>() ).unwrap(); import_buffer.copy_to(&[Import{ _texture: imgui.bindless().unwrap(), }]); gfx.bind_constant_buffer(&import_buffer); gfx.draw(3, 0); gfx.end_render_pass(); gfx.resource_barrier_texture(&backbuffer, gpu::Layout::ColorAttachment, gpu::Layout::Present); } gfx.end(); let receipt = device.submit_graphics(vec![gfx], &[]); device.display(&[receipt]); device.wait_for_idle(); } } impl Module for Editor { fn new() -> Self { Self(Mutex::new(EditorInner{ gui: Context::new(), input: None, draw_state: DrawState::new(), assets: EditorAssets::new(), view: View::new("main", 1.0), })) } fn depends_on(builder: EngineBuilder) -> EngineBuilder { builder .module::<Graphics>() .module::<AssetManager>() .register(WindowStyle::CustomTitleBar{ border: 5.0, drag: Default::default(), }) .process_input(|engine: &Engine, _window: &os::window::Window, event: &InputEvent| { let mut editor = engine.module::<Editor>().unwrap().lock(); if editor.input.is_none() { editor.input = Some(RawInput::default()); } editor.input.as_mut().unwrap().events.push_back(event.clone()); }) .tick(|engine: &Engine, dt: f32| { let editor = engine.module::<Editor>().unwrap(); if engine.window().is_minimized() { return; } editor.do_frame(dt); }) } }
use crate::{ engine, graphics, math, asset, gpu, os, Context, RawInput, DrawState, View, DARK, Layout, Panel, Style, Sizing, ColorStyle, LayoutStyle, Shape, TextStyle, }; use engine::{ Module, Engine, EngineBuilder, InputEvent }; use graphics::{ Graphics, Texture, Pipeline }; use math::{ Rect }; use asset::{ AssetRef, AssetManager }; use os::window::WindowStyle; use std::sync::{ Mutex, MutexGuard }; struct EditorAssets { _close_button: AssetRef<Texture>, present_pipeline: AssetRef<Pipeline>, } impl EditorAssets { fn new() -> Self { let asset_manager = Engine::as_ref().module::<AssetManager>().unwrap(); Self{ _close_button: asset_manager.find("{ce163885-9cd7-4103-b865-3e41df21ba13}").unwrap(), present_pipeline: asset_manager.find("{62b4ffa0-9510-4818-a6f2-7645ec304d8e}").unwrap() } } } #[allow(dead_code)] struct EditorInner { gui: Context, input: Option<RawInput>, draw_state: DrawState, assets: EditorAssets, view: View, } pub struct Editor(Mutex<EditorInner>); impl Editor { pub fn set_view(&self, view: View) { let mut editor = self.lock(); editor.view = view; } fn lock(&self) -> MutexGuard<EditorInner> { self.0.lock().unwrap() } fn do_frame(&self, dt: f32) { let engine = Engine::as_ref(); let graphics = engine.module::<Graphics>().unwrap(); let device = graphics.device(); let dpi = engine.dpi(); let backbuffer = device.acquire_backbuffer(); let mut editor = self.lock(); let EditorInner { gui, input, draw_state, view, assets, } = &mut *editor; let canvas = { let mut input = input.take().unwrap_or_default(); input.viewport = (0.0, 0.0, backbuffer.width() as f32, backbuffer.height() as f32).into(); input.dt = dt; input.dpi = dpi; gui.begin_frame(input); let mut layout_style: LayoutStyle = gui.style().get(); layout_style.padding = (12.0, 8.0, 12.0, 8.0).into(); layout_style.margin = Rect::default(); gui.style().push(layout_style); let mut color: ColorStyle = gui.style().get(); color.inactive_background = DARK.bg; color.unhovered_background = DARK.bg; 
gui.style().push(color); let text_style: TextStyle = gui.style().get(); let height = text_style.label_height() + layout_style.padding.min.y + layout_style.padding.max.y; Panel::top("menu_bar", height).build(gui, |builder| { let space = builder.available_rect(); builder.button("File").clicked(); builder.button("Edit").clicked(); builder.button("View").clicked(); builder.button("Run").clicked(); builder.button("Help").clicked(); let bounds = builder.layout.push_size(builder.layout.space_left()); builder.layout(Layout::right_to_left(bounds), |builder| { let mut color: ColorStyle = builder.style().get(); color.hovered_background = DARK.red0; color.hovered_foreground = DARK.fg; color.focused_background = DARK.red0; color.focused_foreground = DARK.fg; builder.scoped_style(color, |builder| { if builder.button("Close").clicked() { engine.shutdown(); } }); if builder
ct(); let drag = Rect::from_pos_size(drag.pos() * builder.input().dpi, drag.size() * builder.input().dpi); engine.set_custom_drag(drag); builder.layout(Layout::left_to_right(space), |builder| { let mut layout_style: LayoutStyle = builder.style().get(); layout_style.width_sizing = Sizing::Fill; layout_style.height_sizing = Sizing::Fill; builder.scoped_style(layout_style, |builder| builder.label(format!("{} - Newport Editor", Engine::as_ref().name()))); }); }); }); gui.style().pop::<ColorStyle>(); let bounds = gui.take_canvas(); let mut builder = gui.builder("view", Layout::up_to_down(bounds)); let mut color: ColorStyle = builder.style().get(); builder.painter.push_shape(Shape::solid_rect(bounds, color.inactive_background, 0.0)); color.inactive_background = DARK.bg; builder.scoped_style(color, |builder| { let bounds = Rect::from_min_max(bounds.min, bounds.max); builder.layout(Layout::up_to_down(bounds), |builder| { view.build(builder); }); }); builder.finish(); gui.end_frame() }; device.update_bindless(); let present_pipeline = assets.present_pipeline.read(); let mut gfx = device.create_graphics_context().unwrap(); gfx.begin(); { let imgui = draw_state.record(canvas, &mut gfx, gui).unwrap(); gfx.begin_render_pass(&graphics.backbuffer_render_pass(), &[&backbuffer]); gfx.bind_pipeline(&present_pipeline.gpu); struct Import { _texture: u32, } let import_buffer = device.create_buffer( gpu::BufferUsage::CONSTANTS, gpu::MemoryType::HostVisible, std::mem::size_of::<Import>() ).unwrap(); import_buffer.copy_to(&[Import{ _texture: imgui.bindless().unwrap(), }]); gfx.bind_constant_buffer(&import_buffer); gfx.draw(3, 0); gfx.end_render_pass(); gfx.resource_barrier_texture(&backbuffer, gpu::Layout::ColorAttachment, gpu::Layout::Present); } gfx.end(); let receipt = device.submit_graphics(vec![gfx], &[]); device.display(&[receipt]); device.wait_for_idle(); } } impl Module for Editor { fn new() -> Self { Self(Mutex::new(EditorInner{ gui: Context::new(), input: None, draw_state: 
DrawState::new(), assets: EditorAssets::new(), view: View::new("main", 1.0), })) } fn depends_on(builder: EngineBuilder) -> EngineBuilder { builder .module::<Graphics>() .module::<AssetManager>() .register(WindowStyle::CustomTitleBar{ border: 5.0, drag: Default::default(), }) .process_input(|engine: &Engine, _window: &os::window::Window, event: &InputEvent| { let mut editor = engine.module::<Editor>().unwrap().lock(); if editor.input.is_none() { editor.input = Some(RawInput::default()); } editor.input.as_mut().unwrap().events.push_back(event.clone()); }) .tick(|engine: &Engine, dt: f32| { let editor = engine.module::<Editor>().unwrap(); if engine.window().is_minimized() { return; } editor.do_frame(dt); }) } }
.button("Max").clicked() { engine.maximize(); } if builder.button("Min").clicked() { engine.minimize(); } let drag = builder.layout.available_re
random
[ { "content": "pub fn button_control(id: Id, bounds: Rect, builder: &mut Builder) -> ButtonResponse {\n\n let mut response = ButtonResponse::None;\n\n let is_over = builder.input().mouse_is_over(bounds);\n\n if is_over {\n\n if !builder.is_hovered(id) {\n\n response = ButtonResponse::H...
Rust
common/lib/esp32-c3-dkc02-bsc/src/led.rs
SuGlider/espressif-trainings
cdfb39b7fea35c0aa78e177169d55e4a1cef3379
use std::ptr::{null, null_mut}; use esp_idf_sys::{ c_types::c_void, esp, rmt_config, rmt_config_t, rmt_config_t__bindgen_ty_1, rmt_driver_install, rmt_get_counter_clock, rmt_item32_t, rmt_item32_t__bindgen_ty_1, rmt_item32_t__bindgen_ty_1__bindgen_ty_1, rmt_mode_t_RMT_MODE_TX, rmt_translator_init, rmt_tx_config_t, rmt_wait_tx_done, rmt_write_sample, size_t, u_int8_t, }; pub use rgb::RGB8; const WS2812_T0H_NS: u32 = 350; const WS2812_T0L_NS: u32 = 1000; const WS2812_T1H_NS: u32 = 1000; const WS2812_T1L_NS: u32 = 350; #[derive(Debug, Default, Clone, Copy)] struct Ws2812Config { t0h_ticks: u32, t0l_ticks: u32, t1h_ticks: u32, t1l_ticks: u32, } const FREERTOS_HZ: u32 = 1000; static mut WS_CONFIG: Option<Ws2812Config> = None; unsafe extern "C" fn ws2812_to_rmt( src: *const c_void, dest: *mut rmt_item32_t, src_size: size_t, wanted_num: size_t, translated_size: *mut size_t, item_num: *mut size_t, ) { if src == null() || dest == null_mut() { *translated_size = 0; *item_num = 0; return; } let config = WS_CONFIG.unwrap(); let mut bit0: rmt_item32_t__bindgen_ty_1__bindgen_ty_1 = Default::default(); bit0.set_duration0(config.t0h_ticks); bit0.set_level0(1); bit0.set_duration1(config.t0l_ticks); bit0.set_level1(0); let bit0 = rmt_item32_t { __bindgen_anon_1: rmt_item32_t__bindgen_ty_1 { __bindgen_anon_1: bit0, }, }; let mut bit1: rmt_item32_t__bindgen_ty_1__bindgen_ty_1 = Default::default(); bit1.set_duration0(config.t1h_ticks); bit1.set_level0(1); bit1.set_duration1(config.t1l_ticks); bit1.set_level1(0); let bit1 = rmt_item32_t { __bindgen_anon_1: rmt_item32_t__bindgen_ty_1 { __bindgen_anon_1: bit1, }, }; let mut size: size_t = 0; let mut num = 0; let mut psrc = src as *const u_int8_t; let mut pdest: *mut rmt_item32_t = dest as _; while size < src_size && num < wanted_num { for i in 0..8 { if *psrc & (1 << (7 - i)) != 0 { *pdest = bit1; } else { *pdest = bit0; } num += 1; pdest = pdest.add(1); } size += 1; psrc = psrc.add(1); } *translated_size = size; *item_num = num; } pub 
struct WS2812RMT { config: rmt_config_t, } impl WS2812RMT { pub fn new() -> anyhow::Result<Self> { let rmt_tx_config = rmt_tx_config_t { carrier_freq_hz: 38000, carrier_level: 1, idle_level: 0, carrier_duty_percent: 33, loop_count: 1, carrier_en: false, loop_en: false, idle_output_en: true, }; let config = rmt_config_t { rmt_mode: rmt_mode_t_RMT_MODE_TX, channel: 0, gpio_num: 8, clk_div: 2, mem_block_num: 1, flags: 0, __bindgen_anon_1: rmt_config_t__bindgen_ty_1 { tx_config: rmt_tx_config, }, }; unsafe { esp!(rmt_config(&config))?; esp!(rmt_driver_install(config.channel, 0, 0))?; let mut rmt_clock = 0u32; esp!(rmt_get_counter_clock(config.channel, &mut rmt_clock))?; let ratio = rmt_clock as f64 / 1e9; WS_CONFIG = Some(Ws2812Config { t0h_ticks: (ratio * WS2812_T0H_NS as f64) as _, t0l_ticks: (ratio * WS2812_T0L_NS as f64) as _, t1h_ticks: (ratio * WS2812_T1H_NS as f64) as _, t1l_ticks: (ratio * WS2812_T1L_NS as f64) as _, }); esp!(rmt_translator_init(config.channel, Some(ws2812_to_rmt)))?; } Ok(Self { config }) } pub fn set_pixel(&mut self, color: RGB8) -> anyhow::Result<()> { let timeout_ms = 1; unsafe { esp!(rmt_write_sample( self.config.channel, &[color.g, color.r, color.b] as *const u8, 3, true, ))?; esp!(rmt_wait_tx_done( self.config.channel, (timeout_ms as u32 * FREERTOS_HZ) / 1000, ))?; } Ok(()) } }
use std::ptr::{null, null_mut}; use esp_idf_sys::{ c_types::c_void, esp, rmt_config, rmt_config_t, rmt_config_t__bindgen_ty_1, rmt_driver_install, rmt_get_counter_clock, rmt_item32_t, rmt_item32_t__bindgen_ty_1, rmt_item32_t__bindgen_ty_1__bindgen_ty_1, rmt_mode_t_RMT_MODE_TX, rmt_translator_init, rmt_tx_config_t, rmt_wait_tx_done, rmt_write_sample, size_t, u_int8_t, }; pub use rgb::RGB8; const WS2812_T0H_NS: u32 = 350; const WS2812_T0L_NS: u32 = 1000; const WS2812_T1H_NS: u32 = 1000; const WS2812_T1L_NS: u32 = 350; #[derive(Debug, Default, Clone, Copy)] struct Ws2812Config { t0h_ticks: u32, t0l_ticks: u32, t1h_ticks: u32, t1l_ticks: u32, } const FREERTOS_HZ: u32 = 1000; static mut WS_CONFIG: Option<Ws2812Config> = None; unsafe extern "C" fn ws2812_to_rmt( src: *const c_void, dest: *mut rmt_item32_t, src_size: size_t, wanted_num: size_t, translated_size: *mut size_t, item_num: *mut size_t, ) { if src == null() || dest == null_mut() { *translated_size = 0; *item_num = 0; return; } let config = WS_CONFIG.unwrap(); let mut bit0: rmt_item32_t__bindgen_ty_1__bindgen_ty_1 = Default::default(); bit0.set_duration0(config.t0h_ticks); bit0.set_level0(1); bit0.set_duration1(config.t0l_ticks); bit0.set_level1(0); let bit0 = rmt_item32_t { __bindgen_anon_1: rmt_item32_t__bindgen_ty_1 { __bindgen_anon_1: bit0, }, }; let mut bit1: rmt_item32_t__bindgen_ty_1__bindgen_ty_1 = Default::default(); bit1.set_duration0(config.t1h_ticks); bit1.set_level0(1); bit1.set_duration1(config.t1l_ticks); bit1.set_level1(0); let bit1 = rmt_item32_t { __bindgen_anon_1: rmt_item32_t__bindgen_ty_1 { __bindgen_anon_1: bit1, }, }; let mut size: size_t = 0; let mut num = 0; let mut psrc = src as *const u_int8_t; let mut pdest: *mut rmt_item32_t = dest as _; while size < src_size && num < wanted_num { for i in 0..8 { if *psrc & (1 << (7 - i)) != 0 { *pdest = bit1; } else { *pdest = bit0; } num += 1; pdest = pdest.add(1); } size += 1; psrc = psrc.add(1); } *translated_size = size; *item_num = num; } pub 
struct WS2812RMT { config: rmt_config_t, } impl WS2812RMT { pub fn new() -> anyhow::Result<Self> { let rmt_tx_config = rmt_tx_config_t { carrier_freq_hz: 38000, carrier_level: 1, idle_level: 0, carrier_duty_percent: 33, loop_count: 1, carrier_en: false, loop_en: false,
pub fn set_pixel(&mut self, color: RGB8) -> anyhow::Result<()> { let timeout_ms = 1; unsafe { esp!(rmt_write_sample( self.config.channel, &[color.g, color.r, color.b] as *const u8, 3, true, ))?; esp!(rmt_wait_tx_done( self.config.channel, (timeout_ms as u32 * FREERTOS_HZ) / 1000, ))?; } Ok(()) } }
idle_output_en: true, }; let config = rmt_config_t { rmt_mode: rmt_mode_t_RMT_MODE_TX, channel: 0, gpio_num: 8, clk_div: 2, mem_block_num: 1, flags: 0, __bindgen_anon_1: rmt_config_t__bindgen_ty_1 { tx_config: rmt_tx_config, }, }; unsafe { esp!(rmt_config(&config))?; esp!(rmt_driver_install(config.channel, 0, 0))?; let mut rmt_clock = 0u32; esp!(rmt_get_counter_clock(config.channel, &mut rmt_clock))?; let ratio = rmt_clock as f64 / 1e9; WS_CONFIG = Some(Ws2812Config { t0h_ticks: (ratio * WS2812_T0H_NS as f64) as _, t0l_ticks: (ratio * WS2812_T0L_NS as f64) as _, t1h_ticks: (ratio * WS2812_T1H_NS as f64) as _, t1l_ticks: (ratio * WS2812_T1L_NS as f64) as _, }); esp!(rmt_translator_init(config.channel, Some(ws2812_to_rmt)))?; } Ok(Self { config }) }
function_block-function_prefix_line
[ { "content": "fn process_message(message: EspMqttMessage, led: &mut WS2812RMT) {\n\n match message.details() {\n\n Complete(token) => {\n\n info!(\"{}\", message.topic(token));\n\n let message_data: &[u8] = &message.data();\n\n if let Ok(ColorData::BoardLed(color)) = C...