text
stringlengths
8
4.13M
use super::{Expression, JsonValue, name::NameExpression, visitor::ExpressionVisitor};

/// AST node for a single `name: value` pair inside an object expression.
#[derive(Debug)]
pub struct PropertyAssignmentExpression {
    pub name: Box<dyn Expression>,
    pub value: Box<dyn Expression>,
}

impl PropertyAssignmentExpression {
    /// Builds a property assignment from its name and value sub-expressions.
    pub fn new(
        name: Box<dyn Expression>,
        value: Box<dyn Expression>,
    ) -> PropertyAssignmentExpression {
        PropertyAssignmentExpression { name, value }
    }
}

impl Expression for PropertyAssignmentExpression {
    /// Dispatches this node to the visitor's property-assignment hook.
    fn accept(&mut self, visitor: &mut dyn ExpressionVisitor) {
        visitor.visit_property_assignment(self);
    }
}
/// A 2-D point in Cartesian coordinates.
///
/// `Default` yields the origin, so `Point::new()` and `Point::default()`
/// are interchangeable (fixes clippy's `new_without_default`).
#[derive(Copy, Clone, Debug, Default)]
pub struct Point {
    pub x: f64,
    pub y: f64,
}

impl Point {
    /// Creates a point at the origin `(0.0, 0.0)`.
    pub fn new() -> Self {
        Self::default()
    }
}
// https://rustcc.gitbooks.io/rustprimer/content/std/process.html
use std::{env::args, process::Command};

/// Runs `grep -n -r <pattern> <path>` and prints its stdout.
///
/// The first CLI argument is the pattern (default `"main"`), the second the
/// search path (default `"./"`).
fn main() {
    let mut args = args();
    // Skip argv[0] (the program name). Plain `next()` instead of
    // `next().unwrap()` — no panic even with an empty argv.
    args.next();
    // `unwrap_or_else` avoids allocating the default String when the
    // argument is present (clippy: or_fun_call).
    let pattern = args.next().unwrap_or_else(|| "main".to_string());
    let path = args.next().unwrap_or_else(|| "./".to_string());
    let output = Command::new("grep")
        .arg("-n")
        .arg("-r")
        .arg(&pattern)
        .arg(&path)
        .output()
        .unwrap_or_else(|e| panic!("panic error:{}", e));
    // Lossy conversion: grep may emit non-UTF-8 bytes from binary files.
    let st = String::from_utf8_lossy(&output.stdout);
    println!("output:");
    for line in st.split('\n') {
        println!("{}", line);
    }
}
/* NOTE:
 * push_str -> Add string
 * push -> Add character
 */

/// Translates a word into pig latin.
///
/// Words starting with a vowel keep their letters and get `"-hay"` appended;
/// otherwise the first letter is moved behind a `'-'` and `"ay"` is appended,
/// e.g. `"apple" -> "apple-hay"`, `"first" -> "irst-fay"`.
/// An empty string translates to an empty string.
fn translate_string(curr_str: &mut String) -> String {
    let mut new_str = String::new();
    // Only the first letter decides the translation. The original looped over
    // all chars with an unconditional `break` after the first iteration
    // (clippy: never_loop); inspecting `chars().next()` says what's meant.
    if let Some(letter) = curr_str.chars().next() {
        match letter {
            'a' | 'e' | 'i' | 'o' | 'u' => {
                // Vowel: keep the word intact and append "-hay".
                new_str.push_str(curr_str);
                new_str.push_str("-hay");
            }
            _ => {
                // Consonant: copy the word, drop its first letter, then
                // append "-<letter>ay".
                new_str.push_str(curr_str);
                new_str.remove(0);
                new_str.push('-');
                new_str.push(letter);
                new_str.push_str("ay");
            }
        }
    }
    new_str
}

fn main() {
    // Test strings
    let mut str_1 = String::from("apple");
    let mut str_2 = String::from("first");
    println!("{}", translate_string(&mut str_1));
    println!("{}", translate_string(&mut str_2));
}
use crate::message::Message;
use byteorder::{LittleEndian, ReadBytesExt};
use cirrus_peer::{
    errors::{message::ErrorKind::IoError, Result, ResultExt},
    MessagePacket,
};
use std::io;

/// Decodes a little-endian `u64` nonce from a raw message payload.
/// Shared by the `ping` and `pong` decoders, which are otherwise identical.
fn decode_nonce(payload: &[u8]) -> Result<u64> {
    io::Cursor::new(payload)
        .read_u64::<LittleEndian>()
        .chain_err(|| IoError)
}

/// A `ping` keep-alive message carrying a nonce.
#[derive(Clone, Debug)]
pub struct PingMessage {
    pub nonce: u64,
}

impl Message for PingMessage {
    fn command() -> &'static [u8] {
        b"ping"
    }

    /// Serializes the nonce as an 8-byte little-endian payload.
    fn packet(&self) -> MessagePacket {
        MessagePacket::from_payload(Self::command(), self.nonce.to_le_bytes().to_vec())
    }

    fn from_payload(payload: &[u8]) -> Result<Self> {
        decode_nonce(payload).map(|nonce| PingMessage { nonce })
    }
}

/// The `pong` reply to a `ping`, echoing its nonce.
#[derive(Clone, Debug)]
pub struct PongMessage {
    pub nonce: u64,
}

impl Message for PongMessage {
    fn command() -> &'static [u8] {
        b"pong"
    }

    /// Serializes the nonce as an 8-byte little-endian payload.
    fn packet(&self) -> MessagePacket {
        MessagePacket::from_payload(Self::command(), self.nonce.to_le_bytes().to_vec())
    }

    fn from_payload(payload: &[u8]) -> Result<Self> {
        decode_nonce(payload).map(|nonce| PongMessage { nonce })
    }
}
use anyhow::{anyhow, Result};
use mockall::*;
use proger_backend::{Server, StorageCmd, StorageDriver};
use proger_core::protocol::model::StepPageModel;
use proger_core::{protocol::request::CreateStepPage, API_URL_V1_CREATE_STEP_PAGE};
use reqwest::blocking::Client;
use std::thread;
use std::time::Duration;
use tokio::runtime::Runtime;
use url::Url;
use chrono::Utc;

// Generates `MockDynamoDbDriver`, a mockall double implementing both the
// `StorageDriver` backend trait and `Clone` (the server clones its driver).
mock! {
    pub DynamoDbDriver {}
    trait StorageDriver {
        fn connect(&self) -> Result<()>;
        fn execute(&self, rt: &mut Runtime, cmd: StorageCmd) -> Result<StepPageModel>;
    }
    trait Clone {
        fn clone(&self) -> Self;
    }
}

/// Spawns a test server backed by the given mock storage driver on a
/// background thread and polls it (up to 5 times, 10 ms apart) until it
/// answers HTTP requests. Returns the server's base URL, or an error if the
/// server never became reachable.
///
/// NOTE(review): the port is hard-coded, so tests using this helper cannot
/// run in parallel — confirm whether the commented-out `get_next_port`
/// logic should be restored.
pub fn create_testserver(storage: MockDynamoDbDriver) -> Result<Url> {
    // Set the test configuration
    let host = "localhost:8081".to_string();
    // url.set_port(Some(get_next_port()))
    //     .map_err(|_| format_err!("Unable to set server port"))?;
    // Start the server
    let host_clone = host.clone();
    thread::spawn(move || Server::new(host_clone, storage).unwrap().start().unwrap());
    // Wait until the server is up
    let url = Url::parse(&format!("http://{}", &host))?;
    for _ in 0..5 {
        let check = Client::new().get(url.as_str()).send();
        println!("check result {:?}", check);
        if let Ok(res) = check {
            if res.status().is_success() {
                return Ok(url);
            }
        }
        thread::sleep(Duration::from_millis(10));
    }
    // Return the server url
    Err(anyhow!("failed to start server"))
}

#[test]
fn test_server_starting() {
    // Startup only needs `connect` and `clone`; no storage commands run.
    let mut storage_mock = MockDynamoDbDriver::new();
    storage_mock.expect_connect().returning(|| Ok(()));
    storage_mock
        .expect_clone()
        .returning(|| MockDynamoDbDriver::new());
    let url = create_testserver(storage_mock).unwrap();
    println!("server successfully started on {:?}", url);
}

#[test]
fn test_server_new_page() {
    // `connect` succeeds, and every cloned driver answers `execute` with a
    // canned `StepPageModel`.
    let mut mock1 = MockDynamoDbDriver::new();
    mock1.expect_connect().returning(|| Ok(()));
    mock1.expect_clone().returning(|| {
        let mut mock2 = MockDynamoDbDriver::new();
        mock2.expect_execute().returning(|_, _| {
            Ok(StepPageModel {
                link: "LINK".to_string(),
                secret: "HASHED_SECRET".to_string(),
                steps: 0,
                completed: 0,
                progress: vec![],
                created: Utc::now(),
                updated: Utc::now(),
            })
        });
        mock2
    });
    let mut url = create_testserver(mock1).unwrap();
    url.set_path(API_URL_V1_CREATE_STEP_PAGE);
    println!("accessing {:?}", url);
    // When
    let request = CreateStepPage { steps: 10, };
    let res = Client::new()
        .post(url.as_str())
        .json(&request)
        .send()
        .unwrap();
    // Then
    println!("result: {:?}", res);
    assert_eq!(res.status().as_u16(), 200);
}
extern crate easy_toml_config;
use self::easy_toml_config::*;
use std::path::{PathBuf};
use std::fs::File;
use std::env::home_dir;
use log::LevelFilter;

// Name of the bot's main configuration file inside the config directory.
static CONFIG_FILE: &'static str = "default.toml";

lazy_static! {
    // Lazily-loaded global configuration, read from
    // `~/.config/BEST-Bot/default.toml` on first access.
    pub static ref CONFIG: Config = {
        set_config_dir(String::from(".config/BEST-Bot"));
        read_config(CONFIG_FILE, config_template())
    };
}

/// Reads the config file, creating it from the `config` template when
/// missing (via `init`), and parses it as TOML.
///
/// NOTE(review): the `read_to_string` result is ignored — a read error would
/// silently yield an empty string and then surface as a parse error instead.
fn read_config(config_file: &str, config: Config) -> Config {
    use std::io::Read;
    let mut config_file = init(config_file, &config).unwrap();
    let mut data = String::new();
    config_file.read_to_string(&mut data);
    error_handler(toml::from_str(&data))
}

/// Main struct for config
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct Config {
    // Where plugin libraries live; `None` falls back to `<config dir>/libs`.
    plugin_path: Option<String>,
    // Where plugins keep their own config; `None` falls back to
    // `<config dir>/plugins`.
    plugin_config_path: Option<String>,
    pub slack: Slack,
    log: Option<Log>,
}

/// Struct for handling Slack keys
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct Slack {
    /// The token is meant to be from a Slack Bot, but can also be from a normal user.
    /// Look into Legacy Tokens on api.slack.com
    pub api_token: String,
    /// The token has to be from a normal user with admin privileges.
    /// Look into Legacy Tokens on api.slack.com to figure out how to generate the token.
    pub admin_api_token: String,
    /// The token is from the app Incoming WebHooks.
    pub incoming_webhooks_token: Option<String>,
    /// The token is from the app Outgoing WebHooks.
    pub outgoing_webhooks_token: Option<String>,
}

/// Logging preferences; every field optional so the file may omit them.
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct Log {
    level: Option<String>,
    to_file: Option<bool>,
    to_terminal: Option<bool>,
    log_path: Option<String>,
}

// NOTE(review): `std::env::home_dir` is deprecated (misbehaves on Windows);
// consider the `dirs` crate when touching this code.
impl Config {
    /// Get the path for the plugins
    pub fn plugin_path(&self) -> PathBuf {
        let mut plugin_path = home_dir().unwrap();
        if self.plugin_path.is_none() {
            // Default: <home>/<config dir>/libs
            plugin_path.push(get_config_dir().unwrap());
            plugin_path.push("libs");
            return plugin_path;
        } else {
            plugin_path.push(self.plugin_path.clone().unwrap());
            return plugin_path;
        }
    }

    /// Get the path where the plugins should store their config files
    pub fn plugin_config_path(&self) -> PathBuf {
        let mut plugin_config_path = home_dir().unwrap();
        if self.plugin_config_path.is_none() {
            // Default: <home>/<config dir>/plugins
            plugin_config_path.push(get_config_dir().unwrap());
            plugin_config_path.push("plugins");
            return plugin_config_path;
        } else {
            plugin_config_path.push(self.plugin_config_path.clone().unwrap());
            return plugin_config_path;
        }
    }

    /// Returns the log settings, falling back to the template's defaults
    /// when the config file has no `[log]` section.
    pub fn log(&self) -> Log {
        if self.log.is_none() {
            return config_template().log.expect("the variable 'log' was not declared in struct 'Config' in the function 'config_template', fix it");
        } else {
            return self.log.clone().unwrap();
        }
    }
}

impl Log {
    /// Parses the configured level name (case-insensitive); unknown or
    /// missing values default to `Info`.
    pub fn level(&self) -> LevelFilter {
        match self.level.as_ref() {
            Some(level) => match level.to_uppercase().as_ref() {
                "OFF" => LevelFilter::Off,
                "ERROR" => LevelFilter::Error,
                "WARN" => LevelFilter::Warn,
                "INFO" => LevelFilter::Info,
                "DEBUG" => LevelFilter::Debug,
                "TRACE" => LevelFilter::Trace,
                _ => LevelFilter::Info,
            }
            None => LevelFilter::Info,
        }
    }

    /// Whether to log to a file; defaults to false.
    pub fn to_file(&self) -> bool {
        self.to_file.unwrap_or(false)
    }

    /// Whether to log to the terminal; defaults to true.
    pub fn to_terminal(&self) -> bool {
        self.to_terminal.unwrap_or(true)
    }

    /// Directory for log output; defaults to `<home>/<config dir>/log`.
    pub fn path(&self) -> PathBuf {
        let mut log_path = home_dir().unwrap();
        if self.log_path.is_none() {
            log_path.push(get_config_dir().unwrap());
            log_path.push("log");
            return log_path;
        } else {
            log_path.push(self.log_path.clone().unwrap());
            return log_path;
        }
    }
}

/// Writes the current settings back to the config file on disk.
impl WriteConfig for Config {
    fn write(&self) {
        use std::io::Write;
        let path_config_file = path_config_file(CONFIG_FILE).unwrap();
        let mut config_file = File::create(&path_config_file).expect(&format!("Failed at creating a template config file '{}'", &path_config_file.to_str().unwrap()));
        let toml = toml::to_string(self).unwrap();
        config_file.write_all(toml.as_bytes()).expect(&format!("Failed to create a config file"));
        // Logged as an error so the operator notices the placeholder tokens.
        error!("Edit the config file '{}'", &path_config_file.to_str().unwrap());
    }
}

/// Create an example/default configuration
fn config_template() -> Config {
    Config {
        plugin_path: Some(String::from(format!("{}/libs", get_config_dir().unwrap()))),
        plugin_config_path: Some(String::from(format!("{}/plugins", get_config_dir().unwrap()))),
        slack: Slack {
            api_token: "zzzz-xxxxxxxxxxxx-yyyyyyyyyyyyyyyyyyyyyyyy".to_string(),
            admin_api_token: "zzzz-xxxxxxxxxxx-yyyyyyyyyyy-aaaaaaaaaaaa-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb".to_string(),
            incoming_webhooks_token: None,
            outgoing_webhooks_token: None,
        },
        log: Some(Log {
            level: Some(String::from("info")),
            to_file: Some(false),
            to_terminal: Some(true),
            log_path: None,
        }),
    }
}
use std::{
    io,
    mem::MaybeUninit,
    os::{
        fd::{AsRawFd, RawFd},
        unix::net::UnixStream,
    },
    sync::OnceLock,
};

use crate::{cutils::cerr, log::dev_error};

use super::{
    handler::{SignalHandler, SignalHandlerBehavior},
    info::SignalInfo,
    signal_name, SignalNumber,
};

// Global singleton backing `SignalStream`; set exactly once by `init`.
static STREAM: OnceLock<SignalStream> = OnceLock::new();

/// Signal-handler callback: forwards the raw signal info into the stream's
/// sending socket, where `SignalStream::recv` can pick it up.
///
/// NOTE(review): this runs in signal-handler context; `send` with
/// `MSG_DONTWAIT` never blocks, and a full socket buffer simply drops the
/// notification (the return value is deliberately ignored).
pub(super) unsafe fn send_siginfo(
    _signal: SignalNumber,
    info: *const SignalInfo,
    _context: *const libc::c_void,
) {
    if let Some(tx) = STREAM.get().map(|stream| stream.tx.as_raw_fd()) {
        // SAFETY: `info` points to the signal info supplied to the handler;
        // exactly `SignalInfo::SIZE` bytes of it are sent.
        unsafe { libc::send(tx, info.cast(), SignalInfo::SIZE, libc::MSG_DONTWAIT) };
    }
}

/// A type able to receive signal information from any [`super::SignalHandler`] with the
/// [`super::SignalHandlerBehavior::Stream`] behavior.
///
/// This is a singleton type. Meaning that there will be only one value of this type during the
/// execution of a program.
pub(crate) struct SignalStream {
    // Receiving end, read by `recv`.
    rx: UnixStream,
    // Sending end, written by the signal handler via `send_siginfo`.
    tx: UnixStream,
}

impl SignalStream {
    /// Create a new [`SignalStream`].
    ///
    /// # Panics
    ///
    /// If this function has been called before.
    #[track_caller]
    pub(crate) fn init() -> io::Result<&'static Self> {
        let (rx, tx) = UnixStream::pair().map_err(|err| {
            dev_error!("cannot create socket pair for `SignalStream`: {err}");
            err
        })?;

        if STREAM.set(Self { rx, tx }).is_err() {
            panic!("`SignalStream` has already been initialized");
        };

        Ok(STREAM.get().unwrap())
    }

    /// Receives the information related to the arrival of a signal.
    pub(crate) fn recv(&self) -> io::Result<SignalInfo> {
        let mut info = MaybeUninit::<SignalInfo>::uninit();
        let fd = self.rx.as_raw_fd();
        let bytes = cerr(unsafe { libc::recv(fd, info.as_mut_ptr().cast(), SignalInfo::SIZE, 0) })?;

        // A short read would leave `info` partially written; reject it.
        if bytes as usize != SignalInfo::SIZE {
            return Err(io::Error::new(
                io::ErrorKind::UnexpectedEof,
                "Not enough bytes when receiving `siginfo_t`",
            ));
        }
        // SAFETY: we can assume `info` is initialized because `recv` wrote enough bytes to fill
        // the value and `siginfo_t` is POD.
        Ok(unsafe { info.assume_init() })
    }
}

/// Registers a [`SignalHandlerBehavior::Stream`] handler for each signal in
/// `signals`, failing fast on the first registration error.
#[track_caller]
pub(crate) fn register_handlers<const N: usize>(
    signals: [SignalNumber; N],
) -> io::Result<[SignalHandler; N]> {
    // Pair each signal with an uninitialized slot for its handler.
    let mut handlers = signals.map(|signal| (signal, MaybeUninit::uninit()));

    for (signal, handler) in &mut handlers {
        *handler = SignalHandler::register(*signal, SignalHandlerBehavior::Stream)
            .map(MaybeUninit::new)
            .map_err(|err| {
                let name = signal_name(*signal);
                dev_error!("cannot setup handler for {name}: {err}");
                err
            })?;
    }

    // SAFETY: the loop above either initialized every slot or returned early
    // with an error, so each `handler` is initialized here.
    Ok(handlers.map(|(_, handler)| unsafe { handler.assume_init() }))
}

impl AsRawFd for SignalStream {
    // Exposes the reading end so callers can poll for pending signal info.
    fn as_raw_fd(&self) -> RawFd {
        self.rx.as_raw_fd()
    }
}
//! Module containing errors for database account problems use thiserror::Error; #[derive(Debug, Error, PartialEq)] /// Errors relating to user lookup. pub enum AccountError { #[error("The username does not exist.")] UserDoesNotExist, #[error("This email address is already in use.")] DuplicateAccount, #[error("The given ID is not valid hex for an internal ID.")] InvalidID, #[error("The given email verification code is not valid or has expired.")] InvalidEmailVerification, #[error("An unknown error occurred. See logs for more details.")] UnknownError, }
//! Contains utilites for working with virtual (TAP) network interfaces. use priv_prelude::*; use sys; use iface::build::{IfaceBuilder, build}; /// This object can be used to set the configuration options for a `EtherIface` before creating the /// `EtherIface` /// using `build`. #[derive(Debug)] pub struct EtherIfaceBuilder { builder: IfaceBuilder, } impl Default for EtherIfaceBuilder { fn default() -> EtherIfaceBuilder { EtherIfaceBuilder { builder: IfaceBuilder { name: String::from("netsim"), address: ipv4!("0.0.0.0"), netmask: ipv4!("0.0.0.0"), routes: Vec::new(), }, } } } impl EtherIfaceBuilder { /// Start building a new `EtherIface` with the default configuration options. pub fn new() -> EtherIfaceBuilder { Default::default() } /// Set the interface name. pub fn name<S: Into<String>>(mut self, name: S) -> Self { self.builder.name = name.into(); self } /// Set the interface address. pub fn address(mut self, address: Ipv4Addr) -> Self { self.builder.address = address; self } /// Set the interface netmask. pub fn netmask(mut self, netmask: Ipv4Addr) -> Self { self.builder.netmask = netmask; self } /// Add a route to the set of routes that will be created and directed through this interface. pub fn route(mut self, route: RouteV4) -> Self { self.builder.routes.push(route); self } /// Consume this `EtherIfaceBuilder` and build a `UnboundEtherIface`. This creates the TAP device but does not /// bind it to a tokio event loop. This is useful if the event loop lives in a different thread /// to where you need to create the device. You can send a `UnboundEtherIface` to another thread then /// `bind` it to create your `EtherIface`. pub fn build_unbound(self) -> Result<UnboundEtherIface, IfaceBuildError> { let fd = build(self.builder, true)?; trace!("creating TAP"); Ok(UnboundEtherIface { fd }) } /// Consume this `EtherIfaceBuilder` and build the TAP interface. The returned `EtherIface` object can be /// used to read/write ethernet frames from this interface. 
`handle` is a handle to a tokio /// event loop which will be used for reading/writing. pub fn build(self, handle: &Handle) -> Result<EtherIface, IfaceBuildError> { Ok(self.build_unbound()?.bind(handle)) } } /// Represents a TAP device which has been built but not bound to a tokio event loop. #[derive(Debug)] pub struct UnboundEtherIface { fd: AsyncFd, } impl UnboundEtherIface { /// Bind the tap device to the event loop, creating a `EtherIface` which you can read/write ethernet /// frames with. pub fn bind(self, handle: &Handle) -> EtherIface { let UnboundEtherIface { fd } = self; let fd = unwrap!(PollEvented::new(fd, handle)); EtherIface { fd } } } /// A handle to a virtual (TAP) network interface. Can be used to read/write ethernet frames /// directly to the device. pub struct EtherIface { fd: PollEvented<AsyncFd>, } impl Stream for EtherIface { type Item = EtherFrame; type Error = io::Error; fn poll(&mut self) -> io::Result<Async<Option<EtherFrame>>> { if let Async::NotReady = self.fd.poll_read() { return Ok(Async::NotReady); } let mut buffer: [u8; sys::ETH_FRAME_LEN as usize] = unsafe { mem::uninitialized() }; match self.fd.read(&mut buffer[..]) { Ok(0) => Ok(Async::Ready(None)), Ok(n) => { /* 'out: for i in 0.. 
{ println!(""); for j in 0..4 { let pos = i * 4 + j; if pos < n { print!("{:02x}", buffer[pos]); } else { break 'out; } } } println!(""); */ let bytes = Bytes::from(&buffer[..n]); let frame = EtherFrame::from_bytes(bytes); info!("TAP sending frame: {:?}", frame); Ok(Async::Ready(Some(frame))) }, Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => { self.fd.need_read(); Ok(Async::NotReady) }, Err(e) => Err(e), } } } impl Sink for EtherIface { type SinkItem = EtherFrame; type SinkError = io::Error; fn start_send(&mut self, item: EtherFrame) -> io::Result<AsyncSink<EtherFrame>> { info!("TAP received frame: {:?}", item); if let Async::NotReady = self.fd.poll_write() { return Ok(AsyncSink::NotReady(item)); } /* trace!("frame as bytes:"); for chunk in item.as_bytes().chunks(8) { let mut s = String::new(); for b in chunk { use std::fmt::Write; write!(&mut s, " {:02x}", b).unwrap(); } trace!(" {}", s); } */ match self.fd.write(item.as_bytes()) { Ok(n) => assert_eq!(n, item.as_bytes().len()), Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => { self.fd.need_write(); return Ok(AsyncSink::NotReady(item)); } Err(e) => return Err(e), } trace!("sent: {:?}", item); Ok(AsyncSink::Ready) } fn poll_complete(&mut self) -> io::Result<Async<()>> { Ok(Async::Ready(())) } } #[cfg(test)] mod test { use priv_prelude::*; use spawn; use capabilities; #[test] fn build_tap_name_contains_nul() { run_test(1, || { let tap_builder = { EtherIfaceBuilder::new() .address(Ipv4Addr::random_global()) .name("hello\0") }; let res = tap_builder.build_unbound(); match res { Err(IfaceBuildError::NameContainsNul) => (), x => panic!("unexpected result: {:?}", x), } }) } #[test] fn build_tap_duplicate_name() { run_test(3, || { let spawn_complete = spawn::new_namespace(|| { let tap_builder = { EtherIfaceBuilder::new() .address(Ipv4Addr::random_global()) .name("hello") }; trace!("build_tap_duplicate_name: building first interface"); let _tap = unwrap!(tap_builder.build_unbound()); let tap_builder = { 
EtherIfaceBuilder::new() .address(Ipv4Addr::random_global()) .name("hello") }; trace!("build_tap_duplicate_name: building second interface"); match tap_builder.build_unbound() { Err(IfaceBuildError::InterfaceAlreadyExists) => (), res => panic!("unexpected result: {:?}", res), } trace!("build_tap_duplicate_name: done"); }); let mut core = unwrap!(Core::new()); unwrap!(core.run(spawn_complete)) }); } #[test] fn build_tap_permission_denied() { run_test(3, || { let spawn_complete = spawn::new_namespace(|| { unwrap!(unwrap!(capabilities::Capabilities::new()).apply()); let tap_builder = EtherIfaceBuilder::new(); match tap_builder.build_unbound() { Err(IfaceBuildError::CreateIfacePermissionDenied) => (), res => panic!("unexpected result: {:?}", res), } }); let mut core = unwrap!(Core::new()); unwrap!(core.run(spawn_complete)) }) } }
use std::{env, io};

use actix_files as fs;
use actix_web::{
    error, middleware, web, App, HttpResponse, HttpServer, Result,
};
use log::{error};
use env_logger;
use tera::{compile_templates}; 
use serde_derive::{Deserialize};

use rust_birkana::document_from_string;

/// Form payload for the `/generate` endpoint.
#[derive(Deserialize)]
pub struct FormData {
    text: String,
}

/// GET `/` — renders the index page from the Tera template.
fn index(tmpl: web::Data<tera::Tera>) -> Result<HttpResponse> {
    let body = tmpl.render("index.html.tera", &tera::Context::new())
        .map_err(|err| {
            error!("error rendering index template: {}", err);
            error::ErrorInternalServerError("Template error")
        })?;
    Ok(HttpResponse::Ok()
        .content_type("text/html")
        .body(body))
}

/// POST `/generate` — hex-encodes the submitted text byte-by-byte and renders
/// the result as a birkana SVG document.
///
/// NOTE(review): `{:x}` omits the leading zero for bytes < 0x10, so the hex
/// string is not fixed-width — confirm whether `{:02x}` is intended here.
fn generate(params: web::Form<FormData>) -> Result<HttpResponse> {
    let hex_string: String = params.text.bytes().map(|x| format!("{:x}", x)).collect();
    let document = document_from_string(hex_string);
    Ok(HttpResponse::Ok()
        .content_type("image/svg+xml")
        .body(document.to_string()))
}

/// Starts the HTTP server on 127.0.0.1:8080 with request logging, Tera
/// templates and static-file serving under `/static`.
fn main() -> io::Result<()> {
    env::set_var("RUST_LOG", "error,actix_web=debug");
    env_logger::init();
    let sys = actix_rt::System::new("basic-example");

    HttpServer::new(|| {
        // Templates are compiled once per worker thread at startup.
        let tera = compile_templates!("templates/**/*");
        App::new()
            .data(tera)
            .wrap(middleware::Logger::default())
            .service(
                web::resource("/").route(web::get().to(index)),
            )
            .service(web::resource("/generate").route(web::post().to(generate)))
            .service(fs::Files::new("/static", "static"))
    })
    .keep_alive(65)
    .bind("127.0.0.1:8080")?
    .start();

    sys.run()
}
use std::process::Command;
use std::net::{IpAddr, Ipv4Addr};
use regex::Regex;

// LINUX EXAMPLE IFCONFIG
//
// eth0: flags=4163<UP,BROADCAST,RUNNING,MULTICAST>  mtu 1500
//       inet 172.17.0.2  netmask 255.255.0.0  broadcast 0.0.0.0gf
//       inet6 fe80::42:acff:fe11:2  prefixlen 64  scopeid 0x20<link>
//       ether 02:42:ac:11:00:02  txqueuelen 0  (Ethernet)
//       RX packets 20775  bytes 151512533 (144.4 MiB)
//       RX errors 0  dropped 0  overruns 0  frame 0
//       TX packets 21587  bytes 2799736 (2.6 MiB)
//       TX errors 0  dropped 0 overruns 0  carrier 0  collisions 0
//
// lo: flags=73<UP,LOOPBACK,RUNNING>  mtu 65536
//     inet 127.0.0.1  netmask 255.0.0.0
//     inet6 ::1  prefixlen 128  scopeid 0x10<host>
//     loop  txqueuelen 1000  (Local Loopback)
//     RX packets 11001  bytes 570940 (557.5 KiB)
//     RX errors 0  dropped 0  overruns 0  frame 0
//     TX packets 11001  bytes 570940 (557.5 KiB)
//     TX errors 0  dropped 0 overruns 0  carrier 0  collisions 0

/// Returns the first non-loopback IPv4 address parsed from `ifconfig` output.
///
/// NOTE(review): panics if `ifconfig` cannot be spawned or prints non-UTF-8
/// bytes.
#[cfg(target_family = "unix")]
pub fn get_local_ip() -> Option<IpAddr> {
    let output = Command::new("ifconfig")
        .output()
        .expect("failed to execute `ifconfig`");
    let stdout = String::from_utf8(output.stdout).unwrap();
    // Capture group 2 is the dotted-quad after `inet` / `inet addr:`.
    let regex = Regex::new(r#"(?m)^.*inet (addr:)?(([0-9]*\.){3}[0-9]*).*$"#).unwrap();
    find_ip_by_regex(regex, stdout)
}

// WINDOWS EXAMPLE IPCONFIG
//
// Windows IP Configuration
// Ethernet adapter Ethernet:
//    Media State . . . . . . . . . . . : Media disconnected
//    Connection-specific DNS Suffix  . : home
//
// Wireless LAN adapter Local Area Connection* 2:
//    Media State . . . . . . . . . . . : Media disconnected
//    Connection-specific DNS Suffix  . :
//
// Wireless LAN adapter Local Area Connection* 3:
//    Media State . . . . . . . . . . . : Media disconnected
//    Connection-specific DNS Suffix  . :
//
// Ethernet adapter Ethernet 2:
//    Media State . . . . . . . . . . . : Media disconnected
//    Connection-specific DNS Suffix  . :
//
// Wireless LAN adapter Wi-Fi:
//    Connection-specific DNS Suffix  . : home
//    Link-local IPv6 Address . . . . . : fe80::9091:50fe:cf97:9af6%11
//    IPv4 Address. . . . . . . . . . . : 192.168.1.47
//    Subnet Mask . . . . . . . . . . . : 255.255.255.0
//    Default Gateway . . . . . . . . . : 192.168.1.1

/// Returns the first non-loopback IPv4 address parsed from `ipconfig` output.
#[cfg(target_family = "windows")]
pub fn get_local_ip() -> Option<IpAddr> {
    let output = Command::new("ipconfig")
        .output()
        .expect("failed to execute `ipconfig`");
    let stdout = String::from_utf8(output.stdout).unwrap();
    let regex = Regex::new(r#"(?m)^.*IPv4 Address. . . . . . . . . . . : (Addr:)?(([0-9]*\.){3}[0-9]*).*$"#).unwrap();
    find_ip_by_regex(regex, stdout)
}

/// Scans `content` with `regex` and returns the first capture-group-2 match
/// that parses as a non-loopback IPv4 address.
///
/// NOTE(review): `cap.at(2)` is the regex 0.1 API (renamed `get` in 0.2+);
/// this pins the crate to an old version.
fn find_ip_by_regex(regex: Regex, content: String) -> Option<IpAddr> {
    regex.captures_iter(&content)
        .filter_map(|cap| cap.at(2))
        .filter_map(|host| host.parse::<Ipv4Addr>().ok())
        .filter(|ip_addr| !ip_addr.is_loopback())
        .map(|ip_addr| IpAddr::V4(ip_addr))
        .next()
}

#[test]
fn test_local_ip() {
    let local_ip = get_local_ip();
    // Write your known local ip here to test it.
    // NOTE(review): this assertion is machine-specific and will fail anywhere
    // else — consider asserting only `local_ip.is_some()`.
    assert_eq!("192.168.1.124".parse::<IpAddr>().unwrap(), local_ip.unwrap());
}
/// Baseball-game scorekeeping (LeetCode 682).
///
/// `"C"` cancels the previous score, `"D"` doubles it, `"+"` records the sum
/// of the previous two, and any other token is parsed as an integer score.
/// Returns the total of all scores left on the record.
///
/// Panics (like the original) if `"D"`/`"+"` appear without enough prior
/// scores or a token is not a valid integer.
fn cal_points(operations: Vec<String>) -> i32 {
    let mut record: Vec<i32> = Vec::new();
    for op in operations {
        match op.as_ref() {
            "C" => {
                record.pop();
            }
            "D" => {
                let last = record[record.len() - 1];
                record.push(last * 2);
            }
            "+" => {
                let n = record.len();
                record.push(record[n - 1] + record[n - 2]);
            }
            num => record.push(num.parse::<i32>().unwrap()),
        }
    }
    record.iter().sum()
}

fn main() {
    // "5", "2" -> [5, 2]; "C" -> [5]; "D" -> [5, 10]; "+" -> [5, 10, 15] = 30
    assert_eq!(
        cal_points(vec![
            "5".to_string(),
            "2".to_string(),
            "C".to_string(),
            "D".to_string(),
            "+".to_string()
        ]),
        30
    );
}
// NOTE(review): this is svd2rust-style generated register-access code —
// prefer regenerating from the SVD over hand-editing.
#[doc = "Reader of register CLK_FLL_CONFIG4"]
pub type R = crate::R<u32, super::CLK_FLL_CONFIG4>;
#[doc = "Writer for register CLK_FLL_CONFIG4"]
pub type W = crate::W<u32, super::CLK_FLL_CONFIG4>;
#[doc = "Register CLK_FLL_CONFIG4 `reset()`'s with value 0xff"]
impl crate::ResetValue for super::CLK_FLL_CONFIG4 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0xff
    }
}
#[doc = "Reader of field `CCO_LIMIT`"]
pub type CCO_LIMIT_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CCO_LIMIT`"]
pub struct CCO_LIMIT_W<'a> {
    w: &'a mut W,
}
impl<'a> CCO_LIMIT_W<'a> {
    // CCO_LIMIT occupies bits 0:7 (mask 0xff, no shift).
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff);
        self.w
    }
}
#[doc = "Frequency range of CCO\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CCO_RANGE_A {
    #[doc = "0: Target frequency is in range \\[48, 64) MHz"]
    RANGE0,
    #[doc = "1: Target frequency is in range \\[64, 85) MHz"]
    RANGE1,
    #[doc = "2: Target frequency is in range \\[85, 113) MHz"]
    RANGE2,
    #[doc = "3: Target frequency is in range \\[113, 150) MHz"]
    RANGE3,
    #[doc = "4: Target frequency is in range \\[150, 200\\] MHz"]
    RANGE4,
}
impl From<CCO_RANGE_A> for u8 {
    #[inline(always)]
    fn from(variant: CCO_RANGE_A) -> Self {
        match variant {
            CCO_RANGE_A::RANGE0 => 0,
            CCO_RANGE_A::RANGE1 => 1,
            CCO_RANGE_A::RANGE2 => 2,
            CCO_RANGE_A::RANGE3 => 3,
            CCO_RANGE_A::RANGE4 => 4,
        }
    }
}
#[doc = "Reader of field `CCO_RANGE`"]
pub type CCO_RANGE_R = crate::R<u8, CCO_RANGE_A>;
impl CCO_RANGE_R {
    // Raw values 5..=7 have no enum variant and surface as `Res(i)`.
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u8, CCO_RANGE_A> {
        use crate::Variant::*;
        match self.bits {
            0 => Val(CCO_RANGE_A::RANGE0),
            1 => Val(CCO_RANGE_A::RANGE1),
            2 => Val(CCO_RANGE_A::RANGE2),
            3 => Val(CCO_RANGE_A::RANGE3),
            4 => Val(CCO_RANGE_A::RANGE4),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `RANGE0`"]
    #[inline(always)]
    pub fn is_range0(&self) -> bool {
        *self == CCO_RANGE_A::RANGE0
    }
    #[doc = "Checks if the value of the field is `RANGE1`"]
    #[inline(always)]
    pub fn is_range1(&self) -> bool {
        *self == CCO_RANGE_A::RANGE1
    }
    #[doc = "Checks if the value of the field is `RANGE2`"]
    #[inline(always)]
    pub fn is_range2(&self) -> bool {
        *self == CCO_RANGE_A::RANGE2
    }
    #[doc = "Checks if the value of the field is `RANGE3`"]
    #[inline(always)]
    pub fn is_range3(&self) -> bool {
        *self == CCO_RANGE_A::RANGE3
    }
    #[doc = "Checks if the value of the field is `RANGE4`"]
    #[inline(always)]
    pub fn is_range4(&self) -> bool {
        *self == CCO_RANGE_A::RANGE4
    }
}
#[doc = "Write proxy for field `CCO_RANGE`"]
pub struct CCO_RANGE_W<'a> {
    w: &'a mut W,
}
impl<'a> CCO_RANGE_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: CCO_RANGE_A) -> &'a mut W {
        // Safe because every enum variant maps to a value within the
        // 3-bit field mask below.
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "Target frequency is in range \\[48, 64) MHz"]
    #[inline(always)]
    pub fn range0(self) -> &'a mut W {
        self.variant(CCO_RANGE_A::RANGE0)
    }
    #[doc = "Target frequency is in range \\[64, 85) MHz"]
    #[inline(always)]
    pub fn range1(self) -> &'a mut W {
        self.variant(CCO_RANGE_A::RANGE1)
    }
    #[doc = "Target frequency is in range \\[85, 113) MHz"]
    #[inline(always)]
    pub fn range2(self) -> &'a mut W {
        self.variant(CCO_RANGE_A::RANGE2)
    }
    #[doc = "Target frequency is in range \\[113, 150) MHz"]
    #[inline(always)]
    pub fn range3(self) -> &'a mut W {
        self.variant(CCO_RANGE_A::RANGE3)
    }
    #[doc = "Target frequency is in range \\[150, 200\\] MHz"]
    #[inline(always)]
    pub fn range4(self) -> &'a mut W {
        self.variant(CCO_RANGE_A::RANGE4)
    }
    // CCO_RANGE occupies bits 8:10 (3-bit mask shifted left by 8).
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x07 << 8)) | (((value as u32) & 0x07) << 8);
        self.w
    }
}
#[doc = "Reader of field `CCO_FREQ`"]
pub type CCO_FREQ_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `CCO_FREQ`"]
pub struct CCO_FREQ_W<'a> {
    w: &'a mut W,
}
impl<'a> CCO_FREQ_W<'a> {
    // CCO_FREQ occupies bits 16:24 (9-bit mask shifted left by 16).
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01ff << 16)) | (((value as u32) & 0x01ff) << 16);
        self.w
    }
}
#[doc = "Reader of field `CCO_HW_UPDATE_DIS`"]
pub type CCO_HW_UPDATE_DIS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CCO_HW_UPDATE_DIS`"]
pub struct CCO_HW_UPDATE_DIS_W<'a> {
    w: &'a mut W,
}
impl<'a> CCO_HW_UPDATE_DIS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    // CCO_HW_UPDATE_DIS is bit 30.
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);
        self.w
    }
}
#[doc = "Reader of field `CCO_ENABLE`"]
pub type CCO_ENABLE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CCO_ENABLE`"]
pub struct CCO_ENABLE_W<'a> {
    w: &'a mut W,
}
impl<'a> CCO_ENABLE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    // CCO_ENABLE is bit 31.
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:7 - Maximum CCO offset allowed (used to prevent FLL dynamics from selecting an CCO frequency that the logic cannot support)"]
    #[inline(always)]
    pub fn cco_limit(&self) -> CCO_LIMIT_R {
        CCO_LIMIT_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 8:10 - Frequency range of CCO"]
    #[inline(always)]
    pub fn cco_range(&self) -> CCO_RANGE_R {
        CCO_RANGE_R::new(((self.bits >> 8) & 0x07) as u8)
    }
    #[doc = "Bits 16:24 - CCO frequency code. This is updated by HW when the FLL is enabled. It can be manually updated to use the CCO in an open loop configuration. The meaning of each frequency code depends on the range."]
    #[inline(always)]
    pub fn cco_freq(&self) -> CCO_FREQ_R {
        CCO_FREQ_R::new(((self.bits >> 16) & 0x01ff) as u16)
    }
    #[doc = "Bit 30 - Disable CCO frequency update by FLL hardware 0: Hardware update of CCO settings is allowed. Use this setting for normal FLL operation. 1: Hardware update of CCO settings is disabled. Use this setting for open-loop FLL operation."]
    #[inline(always)]
    pub fn cco_hw_update_dis(&self) -> CCO_HW_UPDATE_DIS_R {
        CCO_HW_UPDATE_DIS_R::new(((self.bits >> 30) & 0x01) != 0)
    }
    #[doc = "Bit 31 - Enable the CCO. It is required to enable the CCO before using the FLL. 0: Block is powered off 1: Block is powered on"]
    #[inline(always)]
    pub fn cco_enable(&self) -> CCO_ENABLE_R {
        CCO_ENABLE_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bits 0:7 - Maximum CCO offset allowed (used to prevent FLL dynamics from selecting an CCO frequency that the logic cannot support)"]
    #[inline(always)]
    pub fn cco_limit(&mut self) -> CCO_LIMIT_W {
        CCO_LIMIT_W { w: self }
    }
    #[doc = "Bits 8:10 - Frequency range of CCO"]
    #[inline(always)]
    pub fn cco_range(&mut self) -> CCO_RANGE_W {
        CCO_RANGE_W { w: self }
    }
    #[doc = "Bits 16:24 - CCO frequency code. This is updated by HW when the FLL is enabled. It can be manually updated to use the CCO in an open loop configuration. The meaning of each frequency code depends on the range."]
    #[inline(always)]
    pub fn cco_freq(&mut self) -> CCO_FREQ_W {
        CCO_FREQ_W { w: self }
    }
    #[doc = "Bit 30 - Disable CCO frequency update by FLL hardware 0: Hardware update of CCO settings is allowed. Use this setting for normal FLL operation. 1: Hardware update of CCO settings is disabled. Use this setting for open-loop FLL operation."]
    #[inline(always)]
    pub fn cco_hw_update_dis(&mut self) -> CCO_HW_UPDATE_DIS_W {
        CCO_HW_UPDATE_DIS_W { w: self }
    }
    #[doc = "Bit 31 - Enable the CCO. It is required to enable the CCO before using the FLL. 0: Block is powered off 1: Block is powered on"]
    #[inline(always)]
    pub fn cco_enable(&mut self) -> CCO_ENABLE_W {
        CCO_ENABLE_W { w: self }
    }
}
use crate::{
    dispose::Dispose,
    hex::{
        pointer::HexPointer,
        render::renderer::HexRenderer,
        shape::cubic_range::{CubicRangeShape, Range},
    },
    world::RhombusViewerWorld,
};
use amethyst::{
    ecs::prelude::*,
    prelude::*,
    renderer::{debug_drawing::DebugLinesComponent, palette::Srgba},
};
use rand::{thread_rng, RngCore};
use rhombus_core::hex::{
    coordinates::{axial::AxialVector, cubic::CubicVector, direction::HexagonalDirection},
    field_of_view::FieldOfView,
    storage::hash::RectHashStorage,
};
use std::{collections::HashSet, sync::Arc};

/// State of a single hex cell in the generated map.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum HexState {
    /// Walkable cell.
    Open,
    /// Wall that the cellular automaton may open again.
    Wall,
    /// Wall that the automaton never opens (used for the shape border).
    HardWall,
}

/// Per-hex simulation data stored alongside the renderer's per-hex data.
pub struct HexData {
    state: HexState,
    // Wall-neighbor count accumulated by the last automaton counting pass.
    automaton_count: u8,
}

impl Dispose for HexData {
    fn dispose(&mut self, _data: &mut StateData<'_, GameData<'_, '_>>) {}
}

/// Field-of-view display mode attached to the pointer.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum FovState {
    Partial,
    Full,
}

/// Relative movement commands for the pointer.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum MoveMode {
    StraightAhead,
    StrafeLeftAhead,
    StrafeLeftBack,
    StrafeRightAhead,
    StrafeRightBack,
    StraightBack,
}

/// A hexagonal world bounded by `shape`, carved by a cellular automaton at a
/// coarse "big cell" resolution (`cell_radius`) and drawn through renderer `R`.
pub struct World<R: HexRenderer> {
    shape: CubicRangeShape,
    cell_radius: usize,
    // Entity holding the debug lines that outline the shape, once created.
    limits_entity: Option<Entity>,
    hexes: RectHashStorage<(HexData, R::Hex)>,
    renderer: R,
    // Set whenever hex state changes; consumed by `update_renderer_world`.
    renderer_dirty: bool,
    pointer: Option<(HexPointer, FovState)>,
}

impl<R: HexRenderer> World<R> {
    /// Creates an empty world with a default shape and no pointer.
    pub fn new(renderer: R) -> Self {
        Self {
            shape: CubicRangeShape::default(),
            cell_radius: 1,
            limits_entity: None,
            hexes: RectHashStorage::new(),
            renderer,
            renderer_dirty: false,
            pointer: None,
        }
    }

    /// Replaces the bounding shape and regenerates the whole world.
    pub fn set_shape_and_reset_world(
        &mut self,
        shape: CubicRangeShape,
        cell_radius_ratio_den: usize,
        wall_ratio: f32,
        data: &mut StateData<'_, GameData<'_, '_>>,
    ) {
        self.shape = shape;
        self.reset_world(cell_radius_ratio_den, wall_ratio, data);
    }

    /// Calls `f` on the center of every big cell, ring by ring outward from
    /// `center`, stopping once an entire ring reports `false` (no cell asked
    /// to keep going).
    fn for_each_big_cell<F>(center: AxialVector, cell_radius: usize, mut f: F)
    where
        F: FnMut(AxialVector) -> bool,
    {
        let mut r = 0;
        loop {
            let mut end = true;
            for pos in center.big_ring_iter(cell_radius, r) {
                let keep_going = f(pos);
                if keep_going {
                    end = false;
                }
            }
            if end {
                break;
            }
            r += 1;
        }
    }

    /// Clears and regenerates the world: redraws the shape outline, picks a
    /// new `cell_radius`, then paves the shape with big cells whose state is
    /// random (`Wall` with probability `wall_ratio`) inside the shape and
    /// `HardWall` on its border.
    pub fn reset_world(
        &mut self,
        cell_radius_ratio_den: usize,
        wall_ratio: f32,
        data: &mut StateData<'_, GameData<'_, '_>>,
    ) {
        let world = (*data.world.read_resource::<Arc<RhombusViewerWorld>>()).clone();
        self.clear(data, &world);
        if let Some(entity) = self.limits_entity {
            // Reuse the existing debug-lines entity.
            let mut debug_lines_storage = data.world.write_storage::<DebugLinesComponent>();
            let debug_lines = debug_lines_storage.get_mut(entity).expect("Debug lines");
            debug_lines.clear();
            self.add_limit_lines(debug_lines, &world);
        } else {
            let mut debug_lines = DebugLinesComponent::with_capacity(6);
            self.add_limit_lines(&mut debug_lines, &world);
            self.limits_entity = Some(data.world.create_entity().with(debug_lines).build());
        }
        self.cell_radius = Self::compute_cell_radius(&self.shape, cell_radius_ratio_den);
        let mut rng = thread_rng();
        // Shrink each cubic range by one big cell so the border becomes HardWall.
        let internal_ranges: [Range; 3] = [
            (
                self.shape.range_x().start() + 1 + self.cell_radius as isize,
                self.shape.range_x().end() - 1 - self.cell_radius as isize,
            )
                .into(),
            (
                self.shape.range_y().start() + 1 + self.cell_radius as isize,
                self.shape.range_y().end() - 1 - self.cell_radius as isize,
            )
                .into(),
            (
                self.shape.range_z().start() + 1 + self.cell_radius as isize,
                self.shape.range_z().end() - 1 - self.cell_radius as isize,
            )
                .into(),
        ];
        Self::for_each_big_cell(self.shape.center(), self.cell_radius, |pos| -> bool {
            // Skip big cells that do not intersect the shape at all.
            if !pos
                .ring_iter(self.cell_radius)
                .any(|v| self.shape.contains_position(v))
            {
                return false;
            }
            let cubic = CubicVector::from(pos);
            let state = if internal_ranges[0].contains(cubic.x())
                && internal_ranges[1].contains(cubic.y())
                && internal_ranges[2].contains(cubic.z())
            {
                // Uniform draw in [0, 1) from the low 16 bits of the RNG.
                if ((rng.next_u32() & 0xffff) as f32 / 0x1_0000 as f32) < wall_ratio {
                    HexState::Wall
                } else {
                    HexState::Open
                }
            } else {
                HexState::HardWall
            };
            self.hexes.insert(
                pos,
                (
                    HexData {
                        state,
                        automaton_count: 0,
                    },
                    self.renderer.new_hex(state != HexState::Open, true),
                ),
            );
            // Fill the rest of the big cell with the same state.
            for s in 1..=self.cell_radius {
                for sub_pos in pos.ring_iter(s) {
                    if self.shape.contains_position(sub_pos) {
                        self.hexes.insert(
                            sub_pos,
                            (
                                HexData {
                                    state,
                                    automaton_count: 0,
                                },
                                self.renderer.new_hex(state != HexState::Open, true),
                            ),
                        );
                    }
                }
            }
            true
        });
        self.renderer_dirty = true;
    }

    /// Applies `resize` to the shape (step 1) and regenerates the world when
    /// the resize succeeded. Returns whether a resize happened.
    pub fn try_resize_shape(
        &mut self,
        resize: fn(&mut CubicRangeShape, usize) -> bool,
        cell_radius_ratio_den: usize,
        wall_ratio: f32,
        data: &mut StateData<'_, GameData<'_, '_>>,
    ) -> bool {
        if resize(&mut self.shape, 1) {
            self.reset_world(cell_radius_ratio_den, wall_ratio, data);
            true
        } else {
            false
        }
    }

    /// Big-cell radius = (median of the three range extents) / denominator.
    fn compute_cell_radius(shape: &CubicRangeShape, cell_radius_ratio_den: usize) -> usize {
        let mut deltas = [
            shape.range_x().end() - shape.range_x().start(),
            shape.range_y().end() - shape.range_y().start(),
            shape.range_z().end() - shape.range_z().start(),
        ];
        deltas.sort();
        deltas[1] as usize / cell_radius_ratio_den
    }

    /// Deletes the pointer, all hexes, renderer resources and the outline entity.
    pub fn clear(
        &mut self,
        data: &mut StateData<'_, GameData<'_, '_>>,
        world: &RhombusViewerWorld,
    ) {
        self.delete_pointer(data, world);
        self.renderer.clear(data);
        self.hexes.dispose(data);
        if let Some(entity) = self.limits_entity.take() {
            data.world.delete_entity(entity).expect("delete entity");
        }
    }

    /// Removes the pointer and its entities, if any.
    fn delete_pointer(
        &mut self,
        data: &mut StateData<'_, GameData<'_, '_>>,
        world: &RhombusViewerWorld,
    ) {
        if let Some((mut pointer, _)) = self.pointer.take() {
            pointer.delete_entities(data, world);
        }
    }

    /// Draws the hexagonal outline of the shape as six debug lines.
    fn add_limit_lines(&self, debug_lines: &mut DebugLinesComponent, world: &RhombusViewerWorld) {
        let translations = self
            .shape
            .vertices()
            .iter()
            .map(|v| world.axial_translation((*v, 2.0).into()))
            .collect::<Vec<[f32; 3]>>();
        let color = Srgba::new(0.2, 0.2, 0.2, 1.0);
        debug_lines.add_line(translations[0].into(), translations[1].into(), color);
        debug_lines.add_line(translations[1].into(), translations[2].into(), color);
        debug_lines.add_line(translations[2].into(), translations[3].into(), color);
        debug_lines.add_line(translations[3].into(), translations[4].into(), color);
        debug_lines.add_line(translations[4].into(), translations[5].into(), color);
        debug_lines.add_line(translations[5].into(), translations[0].into(), color);
    }

    /// Phase-1 automaton, counting pass at big-cell resolution: resets every
    /// big cell's counter, then adds 1 to each neighbor of every wall cell.
    pub fn cellular_automaton_phase1_step1(&mut self) {
        Self::for_each_big_cell(self.shape.center(), self.cell_radius, |pos| -> bool {
            if let Some((
                HexData {
                    automaton_count, ..
                },
                _,
            )) = self.hexes.get_mut(pos)
            {
                *automaton_count = 0;
                true
            } else {
                false
            }
        });
        Self::for_each_big_cell(self.shape.center(), self.cell_radius, |pos| -> bool {
            if let Some((
                HexData {
                    state: hex_state, ..
                },
                _,
            )) = self.hexes.get(pos)
            {
                let is_wall = match hex_state {
                    HexState::Wall | HexState::HardWall => true,
                    HexState::Open => false,
                };
                if is_wall {
                    for neighbor in pos.big_ring_iter(self.cell_radius, 1) {
                        if let Some((hex_data, _)) = self.hexes.get_mut(neighbor) {
                            hex_data.automaton_count += 1;
                        }
                    }
                }
                true
            } else {
                false
            }
        });
    }

    /// Phase-2 automaton, counting pass at single-hex resolution.
    pub fn cellular_automaton_phase2_step1(&mut self) {
        for (hex_data, _) in self.hexes.hexes_mut() {
            hex_data.automaton_count = 0;
        }
        // Collect positions first: the neighbor updates need `get_mut`.
        let positions = self.hexes.positions().collect::<Vec<_>>();
        for pos in positions {
            let hex_state = self.hexes.get(pos).unwrap().0.state;
            let is_wall = match hex_state {
                HexState::Wall | HexState::HardWall => true,
                HexState::Open => false,
            };
            if is_wall {
                for neighbor in pos.ring_iter(1) {
                    if let Some((hex_data, _)) = self.hexes.get_mut(neighbor) {
                        hex_data.automaton_count += 1;
                    }
                }
            }
        }
    }

    /// Applies the automaton transition to one hex. Returns `true` when the
    /// hex did not change ("frozen"). `HardWall` never changes.
    fn cellular_automaton_step2_internal<RaiseF, RemainF>(
        hex_data: &mut HexData,
        raise_wall_test: RaiseF,
        remain_wall_test: RemainF,
    ) -> bool
    where
        RaiseF: Fn(u8) -> bool,
        RemainF: Fn(u8) -> bool,
    {
        let mut frozen = true;
        match hex_data.state {
            HexState::Wall => {
                if !remain_wall_test(hex_data.automaton_count) {
                    hex_data.state = HexState::Open;
                    frozen = false;
                }
            }
            HexState::Open => {
                if raise_wall_test(hex_data.automaton_count) {
                    hex_data.state = HexState::Wall;
                    frozen = false;
                }
            }
            HexState::HardWall => {}
        }
        frozen
    }

    /// Phase-1 automaton, transition pass at big-cell resolution.
    /// Returns `true` when no cell changed.
    pub fn cellular_automaton_phase1_step2<RaiseF, RemainF>(
        &mut self,
        raise_wall_test: RaiseF,
        remain_wall_test: RemainF,
    ) -> bool
    where
        RaiseF: Fn(u8) -> bool,
        RemainF: Fn(u8) -> bool,
    {
        let mut frozen = true;
        Self::for_each_big_cell(self.shape.center(), self.cell_radius, |pos| -> bool {
            if let Some((hex_data, _)) = self.hexes.get_mut(pos) {
                if !Self::cellular_automaton_step2_internal(
                    hex_data,
                    &raise_wall_test,
                    &remain_wall_test,
                ) {
                    frozen = false;
                }
                true
            } else {
                false
            }
        });
        if !frozen {
            self.renderer_dirty = true;
        }
        frozen
    }

    /// Phase-2 automaton, transition pass over every hex.
    /// Returns `true` when no hex changed.
    pub fn cellular_automaton_phase2_step2<RaiseF, RemainF>(
        &mut self,
        raise_wall_test: RaiseF,
        remain_wall_test: RemainF,
    ) -> bool
    where
        RaiseF: Fn(u8) -> bool,
        RemainF: Fn(u8) -> bool,
    {
        let mut frozen = true;
        for (hex_data, _) in self.hexes.hexes_mut() {
            if !Self::cellular_automaton_step2_internal(
                hex_data,
                &raise_wall_test,
                &remain_wall_test,
            ) {
                frozen = false;
            }
        }
        if !frozen {
            self.renderer_dirty = true;
        }
        frozen
    }

    /// Propagates each big cell's state to all hexes of that cell, dropping
    /// big-cell centers that fall outside the shape.
    pub fn expand(&mut self, data: &mut StateData<'_, GameData<'_, '_>>) {
        // `cell_radius` is unsigned; the original `<= 0` was just `== 0`.
        if self.cell_radius == 0 {
            return;
        }
        Self::for_each_big_cell(self.shape.center(), self.cell_radius, |pos| -> bool {
            if let Some((
                HexData {
                    state: hex_state, ..
                },
                _,
            )) = self.hexes.get(pos)
            {
                let hex_state = *hex_state;
                if !self.shape.contains_position(pos) {
                    // Was `.map(..)` used for its side effect only; make the
                    // intent explicit and stop discarding the `Option`.
                    if let Some(mut hex) = self.hexes.remove(pos) {
                        hex.dispose(data);
                    }
                }
                for s in 1..=self.cell_radius {
                    for sub_pos in pos.ring_iter(s) {
                        if self.shape.contains_position(sub_pos) {
                            let hex = self
                                .hexes
                                .get_mut(sub_pos)
                                .expect("The entire shape is paved with tiles");
                            hex.0 = HexData {
                                state: hex_state,
                                automaton_count: 0,
                            };
                        }
                    }
                }
                true
            } else {
                false
            }
        });
        self.renderer_dirty = true;
    }

    /// Returns the open hex closest to the shape center (scanning rings
    /// outward), or `None` when no open hex exists.
    fn find_open_hex(&self) -> Option<AxialVector> {
        let mut r = 0;
        loop {
            let mut end = true;
            for pos in self.shape.center().ring_iter(r) {
                let hex_data = self.hexes.get(pos).map(|hex| &hex.0);
                match hex_data {
                    Some(HexData {
                        state: HexState::Open,
                        ..
                    }) => return Some(pos),
                    Some(..) => end = false,
                    None => (),
                }
            }
            if end {
                return None;
            }
            r += 1;
        }
    }

    /// (Re)creates the pointer on the open hex nearest the center.
    pub fn create_pointer(
        &mut self,
        fov_state: FovState,
        data: &mut StateData<'_, GameData<'_, '_>>,
    ) {
        let world = (*data.world.read_resource::<Arc<RhombusViewerWorld>>()).clone();
        self.delete_pointer(data, &world);
        if let Some(hex) = self.find_open_hex() {
            let mut pointer = HexPointer::new_with_level_height(1.0);
            pointer.set_position(hex, 0, data, &world);
            pointer.create_entities(data, &world);
            self.pointer = Some((pointer, fov_state));
            self.renderer_dirty = true;
        }
    }

    /// Rotates the pointer one step counter-... (delegates to `HexPointer`).
    pub fn increment_direction(&mut self, data: &StateData<'_, GameData<'_, '_>>) {
        if let Some((pointer, _)) = &mut self.pointer {
            let world = (*data.world.read_resource::<Arc<RhombusViewerWorld>>()).clone();
            pointer.increment_direction(data, &world);
        }
    }

    /// Rotates the pointer one step the other way (delegates to `HexPointer`).
    pub fn decrement_direction(&mut self, data: &StateData<'_, GameData<'_, '_>>) {
        if let Some((pointer, _)) = &mut self.pointer {
            let world = (*data.world.read_resource::<Arc<RhombusViewerWorld>>()).clone();
            pointer.decrement_direction(data, &world);
        }
    }

    /// Moves the pointer one hex in the direction implied by `mode`, but only
    /// onto an `Open` hex.
    pub fn next_position(&mut self, mode: MoveMode, data: &mut StateData<'_, GameData<'_, '_>>) {
        if let Some((pointer, _)) = &mut self.pointer {
            // Directions are modulo 6 around the current facing.
            let direction = match mode {
                MoveMode::StraightAhead => pointer.direction(),
                MoveMode::StrafeLeftAhead => (pointer.direction() + 5) % 6,
                MoveMode::StrafeLeftBack => (pointer.direction() + 4) % 6,
                MoveMode::StrafeRightAhead => (pointer.direction() + 1) % 6,
                MoveMode::StrafeRightBack => (pointer.direction() + 2) % 6,
                MoveMode::StraightBack => (pointer.direction() + 3) % 6,
            };
            let next = pointer.position().neighbor(direction);
            if let Some(HexData {
                state: HexState::Open,
                ..
            }) = self.hexes.get(next).map(|hex| &hex.0)
            {
                let world = (*data.world.read_resource::<Arc<RhombusViewerWorld>>()).clone();
                pointer.set_position(next, 0, data, &world);
                self.renderer_dirty = true;
            }
        }
    }

    /// Switches between partial and full field-of-view display.
    pub fn change_field_of_view(&mut self, fov_state: FovState) {
        if let Some((_, pointer_fov_state)) = &mut self.pointer {
            *pointer_fov_state = fov_state;
            self.renderer_dirty = true;
        }
    }

    /// Pushes pending state changes to the renderer. When a pointer exists,
    /// computes its field of view (walls are obstacles) and passes the set of
    /// visible positions to the renderer.
    pub fn update_renderer_world(
        &mut self,
        force: bool,
        data: &mut StateData<'_, GameData<'_, '_>>,
    ) {
        if !self.renderer_dirty {
            return;
        }
        let (visible_positions, visible_only) = if let Some((pointer, fov_state)) = &self.pointer {
            let mut visible_positions = HashSet::new();
            visible_positions.insert(pointer.position());
            let mut fov = FieldOfView::default();
            fov.start(pointer.position());
            let is_obstacle = |pos| {
                let hex_data = self.hexes.get(pos).map(|hex| &hex.0);
                match hex_data {
                    Some(HexData {
                        state: HexState::Open,
                        ..
                    }) => false,
                    Some(HexData {
                        state: HexState::Wall,
                        ..
                    })
                    | Some(HexData {
                        state: HexState::HardWall,
                        ..
                    }) => true,
                    None => false,
                }
            };
            // Grow the FoV radius until a whole ring adds nothing new.
            loop {
                let prev_len = visible_positions.len();
                for pos in fov.iter() {
                    let key = pointer.position() + pos;
                    if self.hexes.contains_position(key) {
                        let inserted = visible_positions.insert(key);
                        debug_assert!(inserted);
                    }
                }
                if visible_positions.len() == prev_len {
                    break;
                }
                fov.next_radius(&is_obstacle);
            }
            (
                Some(visible_positions),
                match fov_state {
                    FovState::Partial => false,
                    FovState::Full => true,
                },
            )
        } else {
            (None, false)
        };
        let world = (*data.world.read_resource::<Arc<RhombusViewerWorld>>()).clone();
        self.renderer.update_world(
            &mut self.hexes,
            |_, hex| hex.0.state != HexState::Open,
            |pos, _| {
                visible_positions
                    .as_ref()
                    .map_or(true, |vp| vp.contains(&pos))
            },
            |hex| &mut hex.1,
            visible_only,
            force,
            data,
            &world,
        );
        self.renderer_dirty = false;
    }
}
use crate::args::FileParser; use std::path::{Path}; use std::fs::{OpenOptions}; use std::io::{Read}; use std::fmt::Display; use std::str::{FromStr, Lines}; use plotters::prelude::{BitMapBackend, WHITE, ChartBuilder, IntoFont, LineSeries, RED, PathElement, BLACK, PointSeries, EmptyElement}; use plotters::drawing::IntoDrawingArea; use plotters::style::Color; use plotters::element::Circle; use crate::estimate_price::estimate_price; pub struct DatasetArg; impl FileParser<'_> for DatasetArg { const NAMES: &'static [&'static str] = &["-d", "--dataset"]; const DESCRIPTION: &'static str = "The Learning Dataset, csv formatted with column headers"; } #[derive(Copy, Clone)] pub struct DatasetEntry { pub km: f64, pub price: f64, } #[derive(Clone)] pub struct Dataset { pub entries: Vec<DatasetEntry> } impl Dataset { fn parse_header(headers: &str, separator: Option<&str>, dataset_file: impl Display) -> Result<(usize, usize), ()> { let headers = headers.split(separator.unwrap_or(",")).enumerate().fold((None, None), |(km, price), (idx, header)| { let mut known = false; let km = if header.eq_ignore_ascii_case("km") { known = true; if let Some(km) = km { println!("Error: Duplicate column #{} in dataset {}: \"km\" column is already defined at index {}", idx, dataset_file, km); Some(km) } else { Some(idx) } } else { km }; let price = if header.eq_ignore_ascii_case("price") { known = true; if let Some(price) = price { println!("Error: Duplicate column #{} in dataset {}: \"price\" column is already defined at index {}", idx, dataset_file, price); Some(price) } else { Some(idx) } } else { price }; if !known { println!("Warning: Unknown column \"{}\" #{} in dataset {}", idx, header, dataset_file); } (km, price) }); match headers { (Some(km), Some(price)) => Ok((km, price)), (Some(_), None) => Err(println!("Error: Column \"price\" is missing in dataset {}", dataset_file)), (None, Some(_)) => Err(println!("Error: Column \"km\" is missing in dataset {}", dataset_file)), (None, None) => 
Err(println!("Error: Columns \"km\" and \"price\" are missing in dataset {}", dataset_file)), } } fn parse_lines(lines: Lines, (km, price): (usize, usize), separator: Option<&str>, dataset_file: impl Display) -> Result<Dataset, ()> { let entries = lines.enumerate().filter_map(|(idx, line)| { let idx = idx + 1; if line.is_empty() { return None; } let mut columns = line.split(separator.unwrap_or(",")); let (km, price) = if km < price { ( columns.nth(km).and_then(|str| { f64::from_str(str).map_err(|err| println!("Error: Row {} in dataset {} had bad km value in column {}: {}", idx, dataset_file, km, err)).ok() }).or_else(|| { println!("Error: Row {} in dataset {} is missing km value in column {}", idx, dataset_file, km); None })?, columns.nth(price - km - 1).and_then(|str| { f64::from_str(str).map_err(|err| println!("Error: Row {} in dataset {} had bad price value in column {}: {}", idx, dataset_file, price, err)).ok() }).or_else(|| { println!("Error: Row {} in dataset {} is missing price value in column {}", idx, dataset_file, price); None })? ) } else { ( columns.nth(price).and_then(|str| { f64::from_str(str).map_err(|err| println!("Error: Row {} in dataset {} had bad price value in column {}: {}", idx, dataset_file, price, err)).ok() }).or_else(|| { println!("Error: Row {} in dataset {} is missing price value in column {}", idx, dataset_file, price); None })?, columns.nth(km - price - 1).and_then(|str| { f64::from_str(str).map_err(|err| println!("Error: Row {} in dataset {} had bad km value in column {}: {}", idx, dataset_file, km, err)).ok() }).or_else(|| { println!("Error: Row {} in dataset {} is missing km value in column {}", idx, dataset_file, km); None })? 
) }; Some(DatasetEntry { km, price, }) }).collect(); Ok(Dataset { entries }) } pub fn read_from(path: Option<&Path>, separator: Option<&str>) -> Result<Dataset, ()> { let path = path.unwrap_or_else(|| Path::new("./data.csv")); match OpenOptions::new().read(true).open(path) { Ok(mut file) => { let mut string = String::new(); file.read_to_string(&mut string).map_err(|err| println!("Error: Could not read dataset file {}: {}", path.display(), err))?; drop(file); let mut lines = string.lines(); let headers = lines.next().ok_or_else(|| println!("Error: Dataset file {} is empty", path.display()))?; let headers = Self::parse_header(headers, separator, path.display())?; Self::parse_lines(lines, headers, separator, path.display()) } Err(err) => { Err(println!("Error: Could not open dataset file {} with read permission: {}", path.display(), err)) } } } fn gen_box(&self) -> ((f64, f64), (f64, f64)) { self.entries.iter().fold(((f64::MAX, f64::MIN), (f64::MAX, f64::MIN)), |a, b| ((a.0.0.min(b.km), a.0.1.max(b.km)), (a.1.0.min(b.price), a.1.1.max(b.price)))) } pub fn normalize(mut self) -> Self { let ((kmmin, kmmax), (prmin, prmax)) = self.gen_box(); self.entries.iter_mut().for_each(|it| { it.km = (it.km - kmmin) / (kmmax - kmmin); it.price = (it.price - prmin) / (prmax - prmin); }); self } pub fn denormalize_theta(&self, theta: (f64, f64)) -> (f64, f64) { let ((kmmin, kmmax), (prmin, prmax)) = self.gen_box(); let theta1 = theta.1 / (kmmax - kmmin) * (prmax - prmin); (theta.0 * (prmax - prmin) + prmin - kmmin * theta1, theta1) } pub fn draw_to_file_with_theta(&self, file: &str, theta: (f64, f64)) -> Result<(), Box<dyn std::error::Error>> { let ((kmmin, kmmax), (prmin, prmax)) = self.gen_box(); let root = BitMapBackend::new(file, (1000, 1000)).into_drawing_area(); root.fill(&WHITE)?; let mut chart = ChartBuilder::on(&root) .caption("Linear Regression", ("sans-serif", 50).into_font()) .margin(5) .x_label_area_size(50) .y_label_area_size(50) .build_cartesian_2d(kmmin..kmmax, 
prmin..prmax)?; chart.configure_mesh().draw()?; chart .draw_series(PointSeries::of_element( self.entries.iter().map(|it| (it.km, it.price)), 2, &RED, &|c, s, st| { return EmptyElement::at(c) // We want to construct a composed element on-the-fly + Circle::new((0,0),s,st.filled()) // At this point, the new pixel coordinate is established } ))? .label("data") .legend(|(x, y)| PathElement::new(vec![(x, y), (x + 20, y)], &RED)); chart.draw_series(LineSeries::new( vec![(kmmin, estimate_price(kmmin, theta)), (kmmax, estimate_price(kmmax, theta))], &RED, ))?; chart .configure_series_labels() .background_style(&WHITE.mix(0.8)) .border_style(&BLACK) .draw()?; Ok(()) } }
//! Pragmatic crates aims to solve real world VRP variations allowing users to specify their problems //! via simple **pragmatic** json format. #![warn(missing_docs)] #[cfg(test)] #[path = "../tests/helpers/mod.rs"] #[macro_use] mod helpers; #[cfg(test)] #[path = "../tests/generator/mod.rs"] mod generator; #[cfg(test)] #[path = "../tests/features/mod.rs"] mod features; #[cfg(test)] #[path = "../tests/discovery/mod.rs"] pub mod discovery; pub use vrp_core as core; mod constraints; mod extensions; mod utils; pub mod checker; pub mod format; pub mod validation; use crate::format::problem::Problem; use crate::format::{CoordIndex, Location}; use chrono::{DateTime, ParseError, SecondsFormat, TimeZone, Utc}; /// Get lists of problem. pub fn get_unique_locations(problem: &Problem) -> Vec<Location> { CoordIndex::new(&problem).unique() } fn format_time(time: f64) -> String { Utc.timestamp(time as i64, 0).to_rfc3339_opts(SecondsFormat::Secs, true) } fn parse_time(time: &str) -> f64 { parse_time_safe(time).unwrap() } fn parse_time_safe(time: &str) -> Result<f64, ParseError> { DateTime::parse_from_rfc3339(time).map(|time| time.timestamp() as f64) }
// NOTE(review): this file matches the svd2rust register-access pattern and is
// presumably auto-generated from an SVD description — confirm before hand-editing,
// as regeneration would discard manual changes.
#[doc = "Register `SDMMC_CLKCR` reader"]
pub type R = crate::R<SDMMC_CLKCR_SPEC>;
#[doc = "Register `SDMMC_CLKCR` writer"]
pub type W = crate::W<SDMMC_CLKCR_SPEC>;
#[doc = "Field `CLKDIV` reader - CLKDIV"]
pub type CLKDIV_R = crate::FieldReader<u16>;
#[doc = "Field `CLKDIV` writer - CLKDIV"]
pub type CLKDIV_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 10, O, u16>;
#[doc = "Field `PWRSAV` reader - PWRSAV"]
pub type PWRSAV_R = crate::BitReader;
#[doc = "Field `PWRSAV` writer - PWRSAV"]
pub type PWRSAV_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `WIDBUS` reader - WIDBUS"]
pub type WIDBUS_R = crate::FieldReader;
#[doc = "Field `WIDBUS` writer - WIDBUS"]
pub type WIDBUS_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `NEGEDGE` reader - NEGEDGE"]
pub type NEGEDGE_R = crate::BitReader;
#[doc = "Field `NEGEDGE` writer - NEGEDGE"]
pub type NEGEDGE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `HWFC_EN` reader - HWFC_EN"]
pub type HWFC_EN_R = crate::BitReader;
#[doc = "Field `HWFC_EN` writer - HWFC_EN"]
pub type HWFC_EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DDR` reader - DDR"]
pub type DDR_R = crate::BitReader;
#[doc = "Field `DDR` writer - DDR"]
pub type DDR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BUSSPEED` reader - BUSSPEED"]
pub type BUSSPEED_R = crate::BitReader;
#[doc = "Field `BUSSPEED` writer - BUSSPEED"]
pub type BUSSPEED_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SELCLKRX` reader - SELCLKRX"]
pub type SELCLKRX_R = crate::FieldReader;
#[doc = "Field `SELCLKRX` writer - SELCLKRX"]
pub type SELCLKRX_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
// Read accessors: each extracts its field from the 32-bit register value.
impl R {
    #[doc = "Bits 0:9 - CLKDIV"]
    #[inline(always)]
    pub fn clkdiv(&self) -> CLKDIV_R {
        CLKDIV_R::new((self.bits & 0x03ff) as u16)
    }
    #[doc = "Bit 12 - PWRSAV"]
    #[inline(always)]
    pub fn pwrsav(&self) -> PWRSAV_R {
        PWRSAV_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bits 14:15 - WIDBUS"]
    #[inline(always)]
    pub fn widbus(&self) -> WIDBUS_R {
        WIDBUS_R::new(((self.bits >> 14) & 3) as u8)
    }
    #[doc = "Bit 16 - NEGEDGE"]
    #[inline(always)]
    pub fn negedge(&self) -> NEGEDGE_R {
        NEGEDGE_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - HWFC_EN"]
    #[inline(always)]
    pub fn hwfc_en(&self) -> HWFC_EN_R {
        HWFC_EN_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - DDR"]
    #[inline(always)]
    pub fn ddr(&self) -> DDR_R {
        DDR_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - BUSSPEED"]
    #[inline(always)]
    pub fn busspeed(&self) -> BUSSPEED_R {
        BUSSPEED_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bits 20:21 - SELCLKRX"]
    #[inline(always)]
    pub fn selclkrx(&self) -> SELCLKRX_R {
        SELCLKRX_R::new(((self.bits >> 20) & 3) as u8)
    }
}
// Write accessors: each returns a field-writer positioned at its bit offset.
impl W {
    #[doc = "Bits 0:9 - CLKDIV"]
    #[inline(always)]
    #[must_use]
    pub fn clkdiv(&mut self) -> CLKDIV_W<SDMMC_CLKCR_SPEC, 0> {
        CLKDIV_W::new(self)
    }
    #[doc = "Bit 12 - PWRSAV"]
    #[inline(always)]
    #[must_use]
    pub fn pwrsav(&mut self) -> PWRSAV_W<SDMMC_CLKCR_SPEC, 12> {
        PWRSAV_W::new(self)
    }
    #[doc = "Bits 14:15 - WIDBUS"]
    #[inline(always)]
    #[must_use]
    pub fn widbus(&mut self) -> WIDBUS_W<SDMMC_CLKCR_SPEC, 14> {
        WIDBUS_W::new(self)
    }
    #[doc = "Bit 16 - NEGEDGE"]
    #[inline(always)]
    #[must_use]
    pub fn negedge(&mut self) -> NEGEDGE_W<SDMMC_CLKCR_SPEC, 16> {
        NEGEDGE_W::new(self)
    }
    #[doc = "Bit 17 - HWFC_EN"]
    #[inline(always)]
    #[must_use]
    pub fn hwfc_en(&mut self) -> HWFC_EN_W<SDMMC_CLKCR_SPEC, 17> {
        HWFC_EN_W::new(self)
    }
    #[doc = "Bit 18 - DDR"]
    #[inline(always)]
    #[must_use]
    pub fn ddr(&mut self) -> DDR_W<SDMMC_CLKCR_SPEC, 18> {
        DDR_W::new(self)
    }
    #[doc = "Bit 19 - BUSSPEED"]
    #[inline(always)]
    #[must_use]
    pub fn busspeed(&mut self) -> BUSSPEED_W<SDMMC_CLKCR_SPEC, 19> {
        BUSSPEED_W::new(self)
    }
    #[doc = "Bits 20:21 - SELCLKRX"]
    #[inline(always)]
    #[must_use]
    pub fn selclkrx(&mut self) -> SELCLKRX_W<SDMMC_CLKCR_SPEC, 20> {
        SELCLKRX_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "The SDMMC_CLKCR register controls the SDMMC_CK output clock, the sdmmc_rx_ck receive clock, and the bus width.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sdmmc_clkcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`sdmmc_clkcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SDMMC_CLKCR_SPEC;
impl crate::RegisterSpec for SDMMC_CLKCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`sdmmc_clkcr::R`](R) reader structure"]
impl crate::Readable for SDMMC_CLKCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`sdmmc_clkcr::W`](W) writer structure"]
impl crate::Writable for SDMMC_CLKCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SDMMC_CLKCR to value 0"]
impl crate::Resettable for SDMMC_CLKCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
mod data; use crate::data::{Definitions, BANNER, DEFS}; use anyhow::anyhow; use anyhow::Result as AnyResult; use clap::crate_version; use clap::{App, Arg, ArgMatches}; use console::style; use service_policy_kit::data::Context; use service_policy_kit::runner::{RunOptions, SequenceRunner}; use std::process::exit; fn main() { env_logger::init(); let app = App::new("keyscope") .version(env!("VERGEN_GIT_SEMVER")) .version(crate_version!()) .about("Key validation and rotation toolkit") .arg( Arg::new("dry_run") .short('d') .long("dry-run") .value_name("EXAMPLE_KEY") .about("Dry run with examples given in EXAMPLE_KEY") .takes_value(true), ) .arg( Arg::new("reporter") .short('r') .long("reporter") .value_name("REPORTER") .takes_value(true) .possible_values(&["console"]) .about("Reporter to use (default: 'console')"), ) .arg( Arg::new("definitions") .short('f') .long("definitions") .value_name("DEFINITIONS_FILE") .takes_value(true) .about("Path to custom definitions file"), ) .arg( Arg::new("no_banner") .long("no-banner") .about("Don't show the banner") .takes_value(false), ) .arg( Arg::new("verbose") .long("verbose") .about("Show details about interactions") .takes_value(false), ) .arg( Arg::new("vars_format") .long("vars-format") .value_name("VARS_FORMAT") .about("Specify vars replacement format, with 'var' e.g. 
--vars-format \"<<var>>\"") .takes_value(true), ) .arg( Arg::new("flip") .long("flip") .about("Flip the meaning of success") .takes_value(false), ) .subcommand( App::new("validate") .about("Validate keys") .arg( Arg::new("provider") .index(1) .value_name("PROVIDER") .about("Specify the provider to validate for") .takes_value(true), ) .arg( Arg::new("list") .long("list") .about("Show provider list") .takes_value(false), ) .arg( Arg::new("requirements") .long("requirements") .about("Show provider requirements (params)") .takes_value(false), ) .arg( Arg::new("params") .long("params") .short('p') .value_name("PROVIDER") .about("Specify the provider to validate for") .multiple_values(true) .takes_value(true), ) .arg( Arg::new("csv_in") .long("--csv-in") .value_name("FILE") .about("Read providers and params via CSV") .takes_value(true), ), ); let v = app.render_version(); let matches = app.to_owned().get_matches(); if !matches.is_present("no_banner") { println!( "{}\n {}", style(BANNER).magenta(), style(v).dim() ); } if let Some(validate_matches) = matches.subcommand_matches("validate") { match validate_command(&matches, validate_matches) { Ok(ok) => { exit(if ok { 0 } else { 1 }); } Err(err) => { eprintln!("error: {}", err.to_string()); } } } } fn validate_command(matches: &ArgMatches, cmd_matches: &ArgMatches) -> AnyResult<bool> { let defs: Definitions = match matches.value_of("definitions") { Some(defs_file) => serde_yaml::from_reader(std::fs::File::open(defs_file)?)?, None => serde_yaml::from_str(DEFS).unwrap(), }; let opts = RunOptions::build( matches.value_of("dry_run").map(|s| s.to_string()), matches.is_present("flip"), matches.value_of("reporter").map(|s| s.to_string()), matches.is_present("verbose"), ); if cmd_matches.is_present("list") { let mut vec = defs.providers.iter().collect::<Vec<_>>(); vec.sort_by(|(p1, _), (p2, _)| p1.cmp(p2)); vec.iter().for_each(|(provider, action)| { if let Some(validation) = action.validation.as_ref() { println!( "{}\nkeyscope 
validate {} -p {}\n", style(validation.request.get_id()).magenta(), style(provider).yellow(), style( validation .request .params .as_ref() .map(|p| p .iter() .map(|p| p.name.clone()) .collect::<Vec<_>>() .join(" ")) .unwrap_or_default() ) .blue() ) } }); println!( "Total {} providers available.", vec.iter() .filter(|(_, action)| action.validation.is_some()) .count(), ); Ok(true) } else if cmd_matches.is_present("csv_in") { let file = cmd_matches .value_of("csv_in") .map(|s| s.to_string()) .ok_or_else(|| anyhow!("missing csv file"))?; let mut rdr = csv::Reader::from_path(file)?; let mut interactions = vec![]; let mut context = Context::new(); for res in rdr.records() { let record = res?; let provider = record.iter().next().unwrap(); // push all keys as: provider_0, provider_1 .. record.iter().skip(1).enumerate().for_each(|(i, v)| { context .vars_bag .insert(format!("{}_{}", provider, i + 1), v.to_string()); }); let interaction = defs.validation_for(&context, provider)?; interactions.push(interaction); } let runner = SequenceRunner::from_opts(&opts); let resp = runner.run(&mut context, &interactions); Ok(resp.ok) } else if cmd_matches.is_present("requirements") { let provider = cmd_matches .value_of("provider") .map(|s| s.to_string()) .ok_or_else(|| anyhow!("missing provider parameter"))?; let params = defs.requirements_for(&provider)?; if let Some(params) = params { println!("provider {} requires:", provider); params.iter().for_each(|p| { println!(" - param: {}\n desc: {}", p.name, p.desc); }); println!( "\nTo use from the CLI, run:\n\n\tkeyscope validate {} -p PARAM1 PARAM2 ...", provider ); } else { println!("provider {} has no requirements.", provider); } Ok(true) } else { let provider = cmd_matches .value_of("provider") .map(|s| s.to_string()) .ok_or_else(|| anyhow!("missing provider parameter"))?; let params = cmd_matches .values_of("params") .map(|vs| { vs.enumerate() .map(|(i, s)| (format!("{}_{}", provider, i + 1), s.to_string())) .collect::<Vec<_>>() }) 
.ok_or_else(|| anyhow!("missing params. please run with the --requirements flag"))?; let mut context = Context::new(); params.into_iter().for_each(|(k, v)| { context.vars_bag.insert(k, v); }); let interaction = defs.validation_for(&context, &provider)?; let runner = SequenceRunner::from_opts(&opts); let resp = runner.run(&mut context, &vec![interaction]); Ok(resp.ok) } }
use crate::enums::{Align, CallbackTrigger, Color, Damage, Event, Font, FrameType, LabelType};
use crate::image::Image;
use crate::prelude::*;
use crate::utils::FlString;
use fltk_sys::browser::*;
use std::{
    ffi::{CStr, CString},
    mem,
    os::raw,
};

/**
    Creates a normal browser.
    Example usage:
    ```rust,no_run
    use fltk::{prelude::*, *};
    let mut b = browser::Browser::new(10, 10, 900 - 20, 300 - 20, "");
    let widths = &[50, 50, 50, 70, 70, 40, 40, 70, 70, 50];
    b.set_column_widths(widths);
    b.set_column_char('\t');
    b.add("USER\tPID\t%CPU\t%MEM\tVSZ\tRSS\tTTY\tSTAT\tSTART\tTIME\tCOMMAND");
    b.add("root\t2888\t0.0\t0.0\t1352\t0\ttty3\tSW\tAug15\t0:00\t@b@f/sbin/mingetty tty3");
    b.add("erco\t2889\t0.0\t13.0\t221352\t0\ttty3\tR\tAug15\t1:34\t@b@f/usr/local/bin/render a35 0004");
    b.add("uucp\t2892\t0.0\t0.0\t1352\t0\tttyS0\tSW\tAug15\t0:00\t@b@f/sbin/agetty -h 19200 ttyS0 vt100");
    b.add("root\t13115\t0.0\t0.0\t1352\t0\ttty2\tSW\tAug30\t0:00\t@b@f/sbin/mingetty tty2");
    b.add(
        "root\t13464\t0.0\t0.0\t1352\t0\ttty1\tSW\tAug30\t0:00\t@b@f/sbin/mingetty tty1 --noclear",
    );
    ```
*/
#[derive(WidgetBase, WidgetExt, BrowserExt, Debug)]
pub struct Browser {
    // Raw pointer to the underlying FLTK widget (owned by the C++ side).
    inner: *mut Fl_Browser,
    // Tracks widget deletion so `was_deleted()` checks stay valid.
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}

/// Defines the browser type
#[repr(i32)]
#[derive(WidgetType, Debug, Copy, Clone, PartialEq)]
pub enum BrowserType {
    /// Normal browser
    Normal = 0,
    /// Enable select
    Select = 1,
    /// Enable holding
    Hold = 2,
    /// Multi selection
    Multi = 3,
}

/// Defines the type of Scrollbar associated with the browser
#[repr(u8)]
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum BrowserScrollbar {
    /// Never show bars
    None = 0,
    /// Show horizontal bar
    Horizontal = 1,
    /// Show vertical bar
    Vertical = 2,
    /// Show both horizontal and vertical bars
    Both = 3,
    /// Always show bars
    AlwaysOn = 4,
    /// Show horizontal bar always
    HorizontalAlways = 5,
    /// Show vertical bar always
    VerticalAlways = 6,
    /// Always show both horizontal and vertical bars
    BothAlways = 7,
}

/// Creates a select browser
#[derive(WidgetBase, WidgetExt, BrowserExt, Debug)]
pub struct SelectBrowser {
    inner: *mut Fl_Select_Browser,
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}

/// Creates a multi-browser
#[derive(WidgetBase, WidgetExt, BrowserExt, Debug)]
pub struct MultiBrowser {
    inner: *mut Fl_Multi_Browser,
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}

/// Creates a hold browser
#[derive(WidgetBase, WidgetExt, BrowserExt, Debug)]
pub struct HoldBrowser {
    inner: *mut Fl_Hold_Browser,
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}

/// Creates a file browser
#[derive(WidgetBase, WidgetExt, BrowserExt, Debug)]
pub struct FileBrowser {
    inner: *mut Fl_File_Browser,
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}

/// File types for the `FileBrowser`
#[repr(i32)]
#[derive(Copy, Clone, Debug)]
pub enum FileType {
    /// Show files
    Files = 0,
    /// Show dirs
    Dirs,
}

impl FileBrowser {
    /// Gets the icon size
    pub fn icon_size(&self) -> u32 {
        assert!(!self.was_deleted());
        unsafe { Fl_File_Browser_iconsize(self.inner) }
    }

    /// Sets the icon size
    pub fn set_icon_size(&mut self, s: u32) {
        assert!(!self.was_deleted());
        unsafe { Fl_File_Browser_set_iconsize(self.inner, s) }
    }

    /// Sets the filter for the `FileBrowser`.
    /// The following syntax is used for the pattern:
    /// `*` matches any sequence of 0 or more characters.
    /// `?` matches any single character.
    /// `[set]` matches any character in the set. The set can contain any single characters, or a-z to represent a range.
    /// To match `]` or `-`, they must be the first characters. To match `^` or `!`, they must not be the first characters.
    /// `[^set]` or `[!set]` matches any character not in the set.
    /// `{X|Y|Z}` or `{X,Y,Z}` matches any one of the subexpressions literally.
    /// `\x` quotes the character `x` so it has no special meaning.
    /// `x` all other characters must be matched exactly.
    pub fn set_filter(&mut self, pattern: &'static str) {
        assert!(!self.was_deleted());
        let pattern = CString::safe_new(pattern);
        unsafe {
            // This is deleted on the C++ side
            Fl_File_Browser_set_filter(self.inner, pattern.into_raw())
        }
    }

    /// Gets the filter for the `FileBrowser`
    pub fn filter(&self) -> Option<String> {
        assert!(!self.was_deleted());
        unsafe {
            let ptr = Fl_File_Browser_filter(self.inner);
            if ptr.is_null() {
                None
            } else {
                Some(
                    CStr::from_ptr(ptr as *mut raw::c_char)
                        .to_string_lossy()
                        .to_string(),
                )
            }
        }
    }

    /// Gets the `FileType` of the `FileBrowser`
    pub fn filetype(&self) -> FileType {
        assert!(!self.was_deleted());
        // NOTE(review): transmute assumes the C side only ever returns 0 or 1
        // (the two `FileType` discriminants) — confirm against FLTK.
        unsafe { mem::transmute(Fl_File_Browser_filetype(self.inner)) }
    }

    /// Sets the `FileType` of the `FileBrowser`
    pub fn set_filetype(&mut self, t: FileType) {
        assert!(!self.was_deleted());
        unsafe { Fl_File_Browser_set_filetype(self.inner, t as i32) }
    }
}

/// Creates a `CheckBrowser` widget
#[derive(WidgetBase, WidgetExt, Debug)]
pub struct CheckBrowser {
    inner: *mut Fl_Check_Browser,
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}

impl CheckBrowser {
    /// Add an item, returns the number of current items
    pub fn add(&mut self, s: &str, checked: bool) -> i32 {
        assert!(!self.was_deleted());
        let s = CString::safe_new(s);
        unsafe { Fl_Check_Browser_add(self.inner, s.as_ptr(), checked as i32) }
    }

    /// Remove item at index, returns the number of current items
    pub fn remove(&mut self, item: usize) -> i32 {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_remove(self.inner, item as i32) }
    }

    /// Clear the browser
    pub fn clear(&mut self) {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_clear(self.inner) }
    }

    /// Return the number of items
    pub fn nitems(&self) -> usize {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_nitems(self.inner) as usize }
    }

    /// Get the number of checked items
    pub fn nchecked(&self) -> usize {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_nchecked(self.inner) as usize }
    }

    /// Returns whether an item is checked
    pub fn checked(&self, item: i32) -> bool {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_checked(self.inner, item) != 0 }
    }

    /// Check selected item
    pub fn set_checked(&mut self, item: i32) {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_set_checked(self.inner, item) }
    }

    /// Check all of the items
    pub fn check_all(&mut self) {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_check_all(self.inner) }
    }

    /// Check none of the items
    pub fn check_none(&mut self) {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_check_none(self.inner) }
    }

    /// Returns the selected line, returns 0 if no line is selected
    pub fn value(&self) -> i32 {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_value(self.inner) as i32 }
    }

    /// Get the text of the item
    pub fn text(&self, item: i32) -> Option<String> {
        assert!(!self.was_deleted());
        unsafe {
            let ptr = Fl_Check_Browser_text(self.inner, item);
            if ptr.is_null() {
                None
            } else {
                Some(CStr::from_ptr(ptr).to_string_lossy().to_string())
            }
        }
    }

    /// Gets the text font
    pub fn text_font(&self) -> Font {
        assert!(!self.was_deleted());
        unsafe { std::mem::transmute(Fl_Check_Browser_text_font(self.inner)) }
    }

    /// Sets the text font
    pub fn set_text_font(&mut self, f: Font) {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_set_text_font(self.inner, f.bits() as i32) }
    }

    /// Gets the text size
    pub fn text_size(&self) -> i32 {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_text_size(self.inner) as i32 }
    }

    /// Sets the text size
    pub fn set_text_size(&mut self, s: i32) {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_set_text_size(self.inner, s as i32) }
    }

    /// Gets the text's color
    pub fn text_color(&self) -> Color {
        assert!(!self.was_deleted());
        unsafe { std::mem::transmute(Fl_Check_Browser_text_color(self.inner)) }
    }

    /// Sets the text's color
    pub fn set_text_color(&mut self, color: Color) {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_set_text_color(self.inner, color.bits() as u32) }
    }

    /// Gets the vertical scroll position of the list as a pixel position
    pub fn position(&self) -> i32 {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_position(self.inner) as i32 }
    }

    /// Sets the vertical scroll position of the list as a pixel position
    pub fn set_position(&mut self, pos: i32) {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_set_position(self.inner, pos as i32) }
    }

    /// Gets the horizontal scroll position of the list as a pixel position
    pub fn hposition(&self) -> i32 {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_hposition(self.inner) as i32 }
    }

    /// Sets the horizontal scroll position of the list as a pixel position
    pub fn set_hposition(&mut self, pos: i32) {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_set_hposition(self.inner, pos as i32) }
    }

    /// Returns the type of scrollbar associated with the browser
    pub fn has_scrollbar(&self) -> BrowserScrollbar {
        assert!(!self.was_deleted());
        // NOTE(review): transmute assumes the C side returns values in 0..=7
        // matching `BrowserScrollbar`'s discriminants — confirm against FLTK.
        unsafe { mem::transmute(Fl_Check_Browser_has_scrollbar(self.inner)) }
    }

    /// Sets the type of scrollbar associated with the browser
    pub fn set_has_scrollbar(&mut self, mode: BrowserScrollbar) {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_set_has_scrollbar(self.inner, mode as raw::c_uchar) }
    }

    /// Gets the scrollbar size
    pub fn scrollbar_size(&self) -> i32 {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_scrollbar_size(self.inner) as i32 }
    }

    /// Sets the scrollbar size
    pub fn set_scrollbar_size(&mut self, new_size: i32) {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_set_scrollbar_size(self.inner, new_size as i32) }
    }

    /// Sort browser elements
    pub fn sort(&mut self) {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Browser_sort(self.inner) }
    }

    /// Returns the vertical scrollbar
    pub fn scrollbar(&self) -> Box<dyn ValuatorExt> {
        assert!(!self.was_deleted());
        unsafe {
            let ptr = Fl_Check_Browser_scrollbar(self.inner);
            assert!(!ptr.is_null());
            Box::new(crate::valuator::Scrollbar::from_widget_ptr(
                ptr as *mut fltk_sys::widget::Fl_Widget,
            ))
        }
    }

    /// Returns the horizontal scrollbar
    pub fn hscrollbar(&self) -> Box<dyn ValuatorExt> {
        assert!(!self.was_deleted());
        unsafe {
            let ptr = Fl_Check_Browser_hscrollbar(self.inner);
            assert!(!ptr.is_null());
            Box::new(crate::valuator::Scrollbar::from_widget_ptr(
                ptr as *mut fltk_sys::widget::Fl_Widget,
            ))
        }
    }
}
use crate::lib::environment::Environment; use crate::lib::error::DfxResult; use crate::lib::models::canister_id_store::CanisterIdStore; use clap::Clap; use ic_types::principal::Principal; /// Prints the identifier of a canister. #[derive(Clap)] pub struct CanisterIdOpts { /// Specifies the name of the canister. canister: String, } pub async fn exec(env: &dyn Environment, opts: CanisterIdOpts) -> DfxResult { env.get_config_or_anyhow()?; let canister_name = opts.canister.as_str(); let canister_id_store = CanisterIdStore::for_env(env)?; let canister_id = Principal::from_text(canister_name).or_else(|_| canister_id_store.get(canister_name))?; println!("{}", Principal::to_text(&canister_id)); Ok(()) }
extern crate mpd_rs_interface;
extern crate colored;
extern crate mpd;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate serde;
extern crate xdg;

use colored::*;
use mpd::Client;
use std::net::TcpStream;
use std::env;
use std::fs::File;
use std::io::Write;

use mpd_rs_interface::{get_tag, next, pause, play, prev, stop, update};

// TODO: Try a timeout.

/// On-disk configuration (serialized as `mpd_rsi.json`).
#[derive(Serialize, Deserialize, Debug)]
struct Conf {
    // Host of the MPD server, e.g. "127.0.0.1".
    mpd_host: String,
    // Port of the MPD server, e.g. "6600".
    mpd_port: String,
    // Music directory; left empty by the generated default config.
    music_dir: String,
}

// Prints the given identifier's string value in bright green bold.
macro_rules! coloured_print {
    ($plain_string: ident) => {println!("{}", $plain_string.bright_green().bold())};
}

/// Connects to MPD using the configured host:port and dispatches the CLI command.
fn main() {
    let conn: Client<TcpStream> = Client::connect(get_conf()).unwrap();
    parse_cmd_args(conn);
}

/// Loads `mpd_rsi.json` from the XDG config directory (prefix "mpd-ctrl"),
/// creating a default config file if none exists.
/// Returns the MPD address as a "host:port" string.
fn get_conf() -> String {
    let conf_name = "mpd_rsi.json";
    let xdg_dirs = xdg::BaseDirectories::with_prefix("mpd-ctrl").unwrap();
    let conf_path = xdg_dirs.find_config_file(&conf_name);
    let conf: Conf;
    match conf_path {
        // Immediately-invoked closures are used so each arm can build the
        // Conf from multiple statements and `return` it as a value.
        Some(conf_file) => conf = || -> Conf {
            let json = File::open(conf_file).unwrap();
            return serde_json::from_reader(json).unwrap();
        }(),
        None => conf = || -> Conf {
            // No config yet: write a default one and tell the user to fill
            // in `music_dir`.
            let new_conf = Conf {
                mpd_host: "127.0.0.1".to_owned(),
                mpd_port: "6600".to_owned(),
                music_dir: "".to_owned(),
            };
            let conf_json = serde_json::to_string(&new_conf).unwrap();
            let new_conf_path = xdg_dirs.place_config_file(&conf_name).unwrap();
            let mut new_file = File::create(&new_conf_path).unwrap();
            println!("Configuration file not found!\nCreating a new one...\n\
                      Please put your music directory as the value of music_dir in mpd_rsi.json!\n");
            new_file.write_all(&conf_json.as_bytes()).unwrap();
            return new_conf;
        }(),
    }
    return format!("{}:{}", &conf.mpd_host, &conf.mpd_port);
}

/// Prints title, album, artist, duration and file of the current song,
/// colorized via `coloured_print!`.
fn get_current_info(mut conn: Client) {
    let title = get_tag(&mut conn, "title");
    let album = get_tag(&mut conn, "album");
    let artist = get_tag(&mut conn, "artist");
    let duration = get_tag(&mut conn, "duration");
    let file = get_tag(&mut conn, "file");
    let coloured_string = format!("{}\n{}\n{}\n{}\n{}\n", title, album, artist, duration, file);
    coloured_print!(coloured_string);
}

/// Prints the command summary.
fn show_help() {
    println!(
        "h ⇾ Show This\n\
         play | s ⇾ Play\n\
         pause | p ⇾ Pause\n\
         stop ⇾ Stop\n\
         next | ns ⇾ Next Song\n\
         prev | ps ⇾ Previous Song\n\
         update ⇾ Update Database\n\
         info | i ⇾ Get Current Song's Information\n"
    );
}

/// Dispatches the single (case-insensitive) CLI argument to the matching MPD
/// action; with no argument (or more than one) prints help / does nothing.
fn parse_cmd_args(conn: Client) {
    let args: Vec<String> = env::args().collect();
    match args.len() {
        1 => show_help(),
        2 => {
            let cmd = &args[1].to_lowercase();
            match &cmd[..] {
                "h" => show_help(),
                "play" | "s" => play(conn),
                "pause" | "p" => pause(conn),
                "stop" => stop(conn),
                "next" | "ns" => next(conn),
                "prev" | "ps" => prev(conn),
                // `update` returns a value whose buffer is cleared here —
                // NOTE(review): presumably to discard the job id; confirm.
                "update" => update(conn).clear(),
                "info" | "i" => get_current_info(conn),
                _ => ()
            }
        }
        _ => { () }
    }
}
use gfx;
use gfx::ui;
use io::timer;
use realtime;

/// Demo entry point: clears the screen to white, sets up TIMER0 for sleep
/// support, and draws a box through the UI layer.
pub fn main() {
    // White background (R, G, B all 0xFF).
    gfx::clear_screen(0xFF, 0xFF, 0xFF);
    let timer = timer::Timer::new(lease!(TIMER0), 0, 0, timer::Prescaler::Div1, None);
    // Kept alive in `_sleep_timer` — presumably registers the timer so
    // `realtime` can sleep on it; confirm against realtime's docs.
    let _sleep_timer = realtime::SleepTimer::new(&timer);

    // Black (RGB [0, 0, 0]) UI on the top screen.
    let ui = ui::Ui::new(gfx::top_screen, [0, 0, 0]);
    // assumes (10, 10) is the subbox's dimensions — TODO confirm.
    let center_box = ui.subbox((10, 10));
    ui.draw_box(center_box);
}
use crate::{
    constants::SUBREDDIT_MEMES,
    types::{Error, PoiseContext},
    utils::{apis::reddit_random_post, discord::reply_embed},
};

/// Fetches a random post from one of `subreddits` and replies with an embed.
///
/// When `image` is true the post's URL is embedded as an image; otherwise the
/// post's selftext becomes the embed description. The footer shows the
/// prefixed subreddit name and the up/down vote counts.
async fn reddit_command(
    ctx: PoiseContext<'_>,
    subreddits: &[&str],
    image: bool,
) -> Result<(), Error> {
    let post = reddit_random_post(subreddits, image).await?;

    reply_embed(ctx, |e| {
        e.title(&post.title);
        // Build an absolute URL from the post's permalink.
        e.url(format!("https://www.reddit.com{}", &post.permalink));
        if image {
            e.image(&post.url);
        } else {
            e.description(&post.selftext);
        }
        e.footer(|f| {
            f.icon_url("https://www.redditstatic.com/desktop2x/img/favicon/favicon-32x32.png");
            f.text(format!(
                "{} | 🔼: {} 🔽: {}",
                &post.subreddit_name_prefixed, &post.ups, &post.downs
            ))
        })
    })
    .await?;

    Ok(())
}

// NOTE: the doc comments below are user-visible slash-command descriptions
// (picked up by #[poise::command]) — do not reword casually.

/// Gets random meme from reddit.
#[poise::command(slash_command, defer_response)]
pub async fn meme(ctx: PoiseContext<'_>) -> Result<(), Error> {
    reddit_command(ctx, &SUBREDDIT_MEMES, true).await
}

/// Gets random image post from the subreddit given as argument.
///
/// Usage: `reddit_image dankmemes`
#[poise::command(slash_command, defer_response)]
pub async fn reddit_image(
    ctx: PoiseContext<'_>,
    #[description = "Subreddit"] subreddit: String,
) -> Result<(), Error> {
    reddit_command(ctx, &[&subreddit], true).await
}

/// Gets random text post from the subreddit given as argument.
///
/// Usage: `reddit_text copypasta`
#[poise::command(slash_command, defer_response)]
pub async fn reddit_text(
    ctx: PoiseContext<'_>,
    #[description = "Subreddit"] subreddit: String,
) -> Result<(), Error> {
    reddit_command(ctx, &[&subreddit], false).await
}
use handlegraph::{
    handle::{Edge, Handle},
    mutablehandlegraph::*,
    pathhandlegraph::*,
};

use gfa::mmap::MmapGFA;
use handlegraph::packedgraph::PackedGraph;

use gfa::gfa::Line;

use anyhow::Result;

use rustc_hash::FxHashMap;

#[allow(unused_imports)]
use log::{debug, error, info, trace, warn};

/// Builds a `PackedGraph` from a memory-mapped GFA file: segments become
/// nodes, links become edges, and P-lines become paths (filled in parallel).
///
/// Segment names are assumed to be numeric; if the file uses id 0, all ids
/// are shifted up by one so every handle id is >= 1.
pub fn packed_graph_from_mmap(mmap_gfa: &mut MmapGFA) -> Result<PackedGraph> {
    let indices = mmap_gfa.build_index()?;

    // let mut graph =
    //     PackedGraph::with_expected_node_count(indices.segments.len());
    let mut graph = PackedGraph::default();

    info!(
        "loading GFA with {} nodes, {} edges",
        indices.segments.len(),
        indices.links.len()
    );

    // First pass over segments: find the numeric id range so we know whether
    // an offset is needed.
    let mut min_id = std::usize::MAX;
    let mut max_id = 0;

    for &offset in indices.segments.iter() {
        let _line = mmap_gfa.read_line_at(offset.0)?;
        let name = mmap_gfa.current_line_name().unwrap();
        let name_str = std::str::from_utf8(name).unwrap();
        let id = name_str.parse::<usize>().unwrap();
        min_id = id.min(min_id);
        max_id = id.max(max_id);
    }

    // Handle ids must be non-zero; shift everything up when the GFA uses id 0.
    let id_offset = if min_id == 0 { 1 } else { 0 };

    info!("adding nodes");
    for &offset in indices.segments.iter() {
        let _line = mmap_gfa.read_line_at(offset.0)?;
        let segment = mmap_gfa.parse_current_line()?;
        if let gfa::gfa::Line::Segment(segment) = segment {
            let id = (segment.name + id_offset) as u64;
            graph.create_handle(&segment.sequence, id);
        }
    }

    info!("adding edges");
    // Lazily parse each L-line into an Edge; unparsable lines are skipped
    // via the `.ok()?` early-outs in `filter_map`.
    let edges_iter = indices.links.iter().filter_map(|&offset| {
        let _line = mmap_gfa.read_line_at(offset).ok()?;
        let link = mmap_gfa.parse_current_line().ok()?;

        if let gfa::gfa::Line::Link(link) = link {
            let from_id = (link.from_segment + id_offset) as u64;
            let to_id = (link.to_segment + id_offset) as u64;

            let from = Handle::new(from_id, link.from_orient);
            let to = Handle::new(to_id, link.to_orient);
            Some(Edge(from, to))
        } else {
            None
        }
    });

    graph.create_edges_iter(edges_iter);

    // Map each created path id back to the (offset, length) of its GFA line
    // so the parallel fill below can re-slice and re-parse the raw bytes.
    let mut path_ids: FxHashMap<PathId, (usize, usize)> = FxHashMap::default();
    path_ids.reserve(indices.paths.len());

    info!("adding paths");
    for &offset in indices.paths.iter() {
        let line = mmap_gfa.read_line_at(offset)?;
        let length = line.len();
        if let Some(path_name) = mmap_gfa.current_line_name() {
            let path_id = graph.create_path(path_name, false).unwrap();
            path_ids.insert(path_id, (offset, length));
        }
    }

    info!("created path handles");

    let mmap_gfa_bytes = mmap_gfa.get_ref();
    let parser = mmap_gfa.get_parser();

    // Fill every path's steps; each worker re-parses its P-line from the
    // mmap'd bytes and streams handles through the channel sender.
    graph.with_all_paths_mut_ctx_chn_new(|path_id, sender, path_ref| {
        let &(offset, length) = path_ids.get(&path_id).unwrap();
        let end = offset + length;
        let line = &mmap_gfa_bytes[offset..end];

        if let Some(Line::Path(path)) = parser.parse_gfa_line(line).ok() {
            path_ref.append_handles_iter_chn(
                sender,
                path.iter().map(|(node, orient)| {
                    let node = node + id_offset;
                    Handle::new(node, orient)
                }),
            );
        }
    });

    Ok(graph)
}
use crate::Hasher as HasherImpl;
use ares::hashers::Hasher;
use ares::iv::Iv;
use ares::raw_key::RawKey;
use std::io::Error;

/// Holds the user-supplied encryption key text and converts it to `RawKey`s.
pub struct Input {
    input: String,
}

impl Input {
    /// Prompts interactively for the encryption key (with confirmation).
    #[allow(dead_code)]
    fn from_dialoguer() -> Result<Self, Error> {
        let input = dialoguer::PasswordInput::new()
            .with_prompt("Encryption key")
            .with_confirmation("Confirm encryption key", "Inputs do not match")
            .interact()?;
        Ok(Input::make(input))
    }

    /// Reads the key from `key.txt` in the working directory — the
    /// non-interactive path used by test builds (see `make_from_cfg`).
    #[allow(dead_code)]
    fn from_file() -> Result<Self, Error> {
        let input = String::from_utf8(std::fs::read("key.txt").unwrap()).unwrap();
        Ok(Input::make(input))
    }

    /// Wraps an already-obtained key string.
    pub fn make(input: String) -> Self {
        Input { input }
    }

    /// Chooses the key source at compile time: file in `cfg(test)` builds,
    /// interactive prompt otherwise.
    pub fn make_from_cfg() -> Result<Self, Error> {
        #[cfg(test)]
        return Input::from_file();
        #[cfg(not(test))]
        return Input::from_dialoguer();
    }

    /// Derives a `RawKey` from the key text using the configured hasher.
    pub fn to_raw_key(&self) -> RawKey {
        RawKey::from_string::<HasherImpl>(&self.input)
    }

    /// Derives a `RawKey` from the key text with an explicit IV.
    pub fn to_raw_key_iv(&self, iv: Iv) -> RawKey {
        RawKey::make(HasherImpl::make(&self.input), iv)
    }
}
use std::rc::Rc;
use std::cell::RefCell;

use request::Request;

// Shared, mutable handle to a request.
type RcRequest = Rc<RefCell<Request>>;
type NewRequest = RcRequest;
type OldRequest = RcRequest;
// Simulation timestamps (assumed: time the quantum / context switch began —
// confirm against the scheduler that constructs these states).
type QuantumStart = f64;
type CtxxStart = f64;

/// Scheduling state of a simulated CPU.
#[derive(Debug)]
pub enum CpuState {
    /// No request is running.
    Idle,
    /// A request is executing; carries the time its quantum started.
    Busy(RcRequest, QuantumStart),
    /// Switching from the old request to the new one; carries the time the
    /// context switch started.
    CtxSwitching(NewRequest, OldRequest, CtxxStart),
}

/// A simulated CPU accumulating total processing and context-switch time.
#[derive(Debug)]
pub struct Cpu {
    pub state: CpuState,
    pub total_procd_time: f64,
    pub total_ctxx_time: f64,
}

impl Cpu {
    /// Creates an idle CPU with zeroed time counters.
    pub fn new() -> Cpu {
        Cpu {
            state: CpuState::Idle,
            total_procd_time: 0.0,
            total_ctxx_time: 0.0,
        }
    }
}

// A zero-argument `new()` should come with `Default` so the type composes
// with Default-based APIs (clippy::new_without_default).
impl Default for Cpu {
    fn default() -> Self {
        Cpu::new()
    }
}
use crate::src_a::a::*; // absolute path

/// Prints "c_echo", then calls `a_echo` from module `src_a::a`.
pub fn c_echo() {
    println!("c_echo");
    a_echo();
}
/// Returns the square of the sum of the first `num` natural numbers:
/// `(1 + 2 + ... + num)^2`. An empty range (num <= 0) yields 0.
pub fn square_of_sum(num: i32) -> i32 {
    // `1..=num` is the idiomatic inclusive range (was `1..num+1`).
    (1..=num).sum::<i32>().pow(2)
}

/// Returns the sum of the squares of the first `num` natural numbers:
/// `1^2 + 2^2 + ... + num^2`. An empty range (num <= 0) yields 0.
pub fn sum_of_squares(num: i32) -> i32 {
    (1..=num).map(|x| x * x).sum()
}

/// Returns `square_of_sum(num) - sum_of_squares(num)`.
pub fn difference(num: i32) -> i32 {
    square_of_sum(num) - sum_of_squares(num)
}
use crate::component::ComponentFlags;
use std::ops::{Deref, DerefMut};

/// Unique borrow of an entity's component
pub struct Mut<'a, T> {
    pub(crate) value: &'a mut T,
    pub(crate) flags: &'a mut ComponentFlags,
}

impl<'a, T> Deref for Mut<'a, T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        self.value
    }
}

impl<'a, T> DerefMut for Mut<'a, T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut T {
        // Any mutable dereference sets MUTATED — even if the caller never
        // actually writes through the returned reference.
        self.flags.insert(ComponentFlags::MUTATED);
        self.value
    }
}

impl<'a, T: core::fmt::Debug> core::fmt::Debug for Mut<'a, T> {
    // Delegates to the inner value's Debug; the flags are not shown.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.value.fmt(f)
    }
}

impl<'w, T> Mut<'w, T> {
    /// Returns true if (and only if) this component has been added since the start of the frame.
    pub fn added(&self) -> bool {
        self.flags.contains(ComponentFlags::ADDED)
    }

    /// Returns true if (and only if) this component has been mutated since the start of the frame.
    pub fn mutated(&self) -> bool {
        self.flags.contains(ComponentFlags::MUTATED)
    }

    /// Returns true if (and only if) this component has been either mutated or added since the start of the frame.
    pub fn changed(&self) -> bool {
        self.flags
            .intersects(ComponentFlags::ADDED | ComponentFlags::MUTATED)
    }
}
use crate::error::*; pub fn split_type_name(name: &str) -> ParseResult<(&str, &str)> { let index = name.rfind('.').ok_or_else(|| ParseError::InvalidTypeName)?; Ok((&name[0..index], &name[index + 1..])) }
use super::*;

// xterm-256 color indices used to colorize worker names — presumably chosen
// for legibility on common terminal backgrounds; TODO confirm.
static VALID_COLORS: [u8; 75] = [
    20, 21, 26, 27, 33, 38, 39, 40, 41, 42, 43, 44, 45, 56, 57, 62, 63, 68, 69, 74, 75, 76, 77,
    78, 79, 80, 81, 92, 93, 98, 99, 112, 113, 128, 129, 134, 135, 148, 149, 160, 161, 162, 163,
    164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 178, 179, 184, 185, 196, 197, 198, 199, 200,
    201, 202, 203, 204, 205, 206, 207, 208, 209, 214, 215, 220, 221,
];

/// Prints an "[OSGOOD] ..." message to stdout.
#[macro_export]
macro_rules! log_osgood_message {
    ($($item:expr),+) => { println!("[{}] {}", ansi_term::Colour::Green.paint("OSGOOD"), format!($($item),+)); }
}

/// Prints an "[OSGOOD] ERROR: ..." message to stderr.
#[macro_export]
macro_rules! log_osgood_error {
    ($($item:expr),+) => { eprintln!("[{}] {}: {}", ansi_term::Colour::Green.paint("OSGOOD"), ansi_term::Colour::Red.paint("ERROR"), format!($($item),+)); }
}

/// Prints a per-worker "[worker] [OSGOOD] WARNING: ..." message to stderr.
#[macro_export]
macro_rules! log_worker_warning {
    ($($item:expr),+) => { eprintln!("[{}] [{}] {}: {}", $crate::worker::logging::color_name(), ansi_term::Colour::Green.paint("OSGOOD"), ansi_term::Colour::Yellow.paint("WARNING"), format!($($item),+)); }
}

/// Like `log_worker_error!` but returns the formatted String instead of
/// printing it.
#[macro_export]
macro_rules! fmt_worker_error {
    ($($item:expr),+) => { format!("[{}] [{}] {}: {}", $crate::worker::logging::color_name(), ansi_term::Colour::Green.paint("OSGOOD"), ansi_term::Colour::Red.paint("ERROR"), format!($($item),+)); }
}

/// Prints a per-worker "[worker] [OSGOOD] ERROR: ..." message to stderr.
#[macro_export]
macro_rules! log_worker_error {
    ($($item:expr),+) => { eprintln!("[{}] [{}] {}: {}", $crate::worker::logging::color_name(), ansi_term::Colour::Green.paint("OSGOOD"), ansi_term::Colour::Red.paint("ERROR"), format!($($item),+)); }
}

// The following wrap the `log` crate's level macros, prefixing the colorized
// worker name.
#[macro_export]
macro_rules! log_info {
    ($($item:expr),+) => { info!("[{}] {}", $crate::worker::logging::color_name(), format!($($item),+)); }
}

#[macro_export]
macro_rules! log_error {
    ($($item:expr),+) => { error!("[{}] {}", $crate::worker::logging::color_name(), format!($($item),+)); }
}

#[macro_export]
macro_rules! log_debug {
    ($($item:expr),+) => { debug!("[{}] {}", $crate::worker::logging::color_name(), format!($($item),+)); }
}

#[macro_export]
macro_rules! log_trace {
    ($($item:expr),+) => { trace!("[{}] {}", $crate::worker::logging::color_name(), format!($($item),+)); }
}

#[macro_export]
macro_rules! log_warn {
    ($($item:expr),+) => { warn!("[{}] {}", $crate::worker::logging::color_name(), format!($($item),+)); }
}

// Deterministically picks a color for a worker name: sum of the name's char
// scalar values, used as an index into VALID_COLORS.
fn log_color(name: &str) -> u8 {
    let mut hash: usize = 0;
    for c in name.chars() {
        hash += c as usize;
    }
    // hash += std::process::id() as usize;
    VALID_COLORS[hash % VALID_COLORS.len()]
}

/// Returns the current worker's name wrapped in its ANSI color escape codes.
pub fn color_name() -> std::string::String {
    super::super::NAME.with(|name| {
        let name = name.borrow();
        let color = ansi_term::Colour::Fixed(log_color(&name));
        format!("{}", color.paint(name.as_str()))
    })
}
use image2ascii::image2ascii;
use std::fs::File;
use std::io::{BufWriter, Write};
use structopt::StructOpt;

// NOTE: plain `//` comments on purpose — doc comments on a StructOpt
// struct/fields would change the generated --help text.
#[derive(StructOpt, Debug)]
struct CmdOpt {
    // Input image path.
    #[structopt(short, long)]
    input: String,
    // Output file; prints to stdout when omitted.
    #[structopt(short, long)]
    output: Option<String>,
    // Output width in characters.
    #[structopt(short, long)]
    width: u32,
    // Contrast value passed through to image2ascii.
    #[structopt(short, long, default_value = "30.0")]
    contrast: f32,
}

/// Converts the input image to ASCII art and writes the lines to the output
/// file (buffered) or to stdout.
fn main() {
    let opt = CmdOpt::from_args();
    // `Some`/`None` are in the prelude; no need for the `Option::` paths.
    let c2d = image2ascii(&opt.input, opt.width, Some(opt.contrast), None).unwrap();
    let lines = c2d.to_lines();

    if let Some(output_file) = opt.output {
        println!("output to file {}", output_file);
        let mut out = BufWriter::new(File::create(output_file).unwrap());
        for line in lines {
            // `write_all` guarantees the whole buffer is written; the old
            // `write` could legally write only a prefix, and its byte count
            // was being discarded.
            out.write_all(line.as_bytes()).unwrap();
            out.write_all(b"\n").unwrap();
        }
        out.flush().unwrap();
    } else {
        for line in lines {
            println!("{}", line);
        }
    }
}
// Chip-8 Emulator // Written by Austin Bricker, 2019 // Uses Rust + SDL2 // TODO: // Add screen buffer to avoid flickering; OR the X most recent screen buffers together // Hit detection in Pong is still off. Improperly drawing sprite XY locations? // Add sound extern crate sdl2; use chip8_core::cpu::*; use rand::Rng; use std::{env, process}; use std::io::Read; use std::fs::File; use sdl2::pixels::Color; use sdl2::event::Event; use sdl2::keyboard::Keycode; use sdl2::rect::Rect; // Should scale be a struct? const SCALE_X: u32 = 16; const SCALE_Y: u32 = 16; const WIN_W: u32 = (SCREEN_W as u32) * SCALE_X; const WIN_H: u32 = (SCREEN_H as u32) * SCALE_Y; pub fn main() { let args: Vec<_> = env::args().collect(); if args.len() == 1 { println!("cargo run GAME_NAME"); process::exit(1); } let mut paused = false; // Setup SDL let sdl_context = sdl2::init().unwrap(); let video_subsystem = sdl_context.video().unwrap(); let window = video_subsystem.window(&args[1], WIN_W, WIN_H).position_centered().opengl().build().unwrap(); let mut canvas = window.into_canvas().build().unwrap(); canvas.clear(); canvas.present(); let mut event_pump = sdl_context.event_pump().unwrap(); // Load program let mut chip8 = Cpu::init(); // Set random seed let seed = rand::thread_rng().gen_range(0, 0xffffffff); chip8.set_seed(seed); // Load game load_game(&mut chip8, &args[1]); 'gameloop: loop { // Check for key presses for event in event_pump.poll_iter() { match event { Event::Quit{..} | Event::KeyDown{keycode: Some(Keycode::Escape), ..} => { break 'gameloop; }, Event::KeyDown{keycode: Some(Keycode::Space), ..} => { paused = !paused; if paused { println!("Paused"); } }, Event::KeyDown{keycode: Some(Keycode::N), ..} => { if paused { chip8.tick(); draw_screen(&chip8, &mut canvas); } } _ => {} } check_keypressed(&mut chip8, &event); check_keyreleased(&mut chip8, &event); } // Game loop if !paused { chip8.tick(); draw_screen(&chip8, &mut canvas); } } } fn load_game(cpu: &mut Cpu, path: &str) { let mut index 
= 0x200; // Read ROM contents in let rom = File::open(path).expect("Unable to read file."); // Iterate through byte by byte, setting in self.memory for byte in rom.bytes() { cpu.set_byte(byte.unwrap(), index); index += 1; } } fn draw_screen(cpu: &Cpu, canvas: &mut sdl2::render::Canvas<sdl2::video::Window>) { canvas.clear(); for i in 0..(SCREEN_H * SCREEN_W) { let x = (i % SCREEN_W) as u32; let y = (i / SCREEN_W) as u32; let pixel = cpu.gfx[i]; if pixel == 1 { canvas.set_draw_color(Color::RGB(255, 255, 255)); } else { canvas.set_draw_color(Color::RGB(0, 0, 0)); } let rect = Rect::new((x * SCALE_X) as i32, (y * SCALE_Y) as i32, 8 * SCALE_X, SCALE_Y); canvas.fill_rect(rect).expect("Unable to draw rect"); } canvas.present(); } /* Chip-8 Keyboard +---+---+---+---+ +---+---+---+---+ | 1 | 2 | 3 | C | | 1 | 2 | 3 | 4 | +---+---+---+---+ +---+---+---+---+ | 4 | 5 | 6 | D | | Q | W | E | R | +---+---+---+---+ => +---+---+---+---+ | 7 | 8 | 9 | E | | A | S | D | F | +---+---+---+---+ +---+---+---+---+ | A | 0 | B | F | | Z | X | C | V | +---+---+---+---+ +---+---+---+---+ */ fn check_keypressed(cpu: &mut Cpu, event: &sdl2::event::Event) { // TODO: Find a way to make this more compact match event { Event::KeyDown{keycode: Some(Keycode::Num1), ..} => { cpu.keys[1] = true; }, Event::KeyDown{keycode: Some(Keycode::Num2), ..} => { cpu.keys[2] = true; }, Event::KeyDown{keycode: Some(Keycode::Num3), ..} => { cpu.keys[3] = true; }, Event::KeyDown{keycode: Some(Keycode::Num4), ..} => { cpu.keys[0xC] = true; }, Event::KeyDown{keycode: Some(Keycode::Q), ..} => { cpu.keys[4] = true; }, Event::KeyDown{keycode: Some(Keycode::W), ..} => { cpu.keys[5] = true; }, Event::KeyDown{keycode: Some(Keycode::E), ..} => { cpu.keys[6] = true; }, Event::KeyDown{keycode: Some(Keycode::R), ..} => { cpu.keys[0xD] = true; }, Event::KeyDown{keycode: Some(Keycode::A), ..} => { cpu.keys[7] = true; }, Event::KeyDown{keycode: Some(Keycode::S), ..} => { cpu.keys[8] = true; }, Event::KeyDown{keycode: 
Some(Keycode::D), ..} => { cpu.keys[9] = true; }, Event::KeyDown{keycode: Some(Keycode::F), ..} => { cpu.keys[0xE] = true; }, Event::KeyDown{keycode: Some(Keycode::Z), ..} => { cpu.keys[0xA] = true; }, Event::KeyDown{keycode: Some(Keycode::X), ..} => { cpu.keys[0] = true; }, Event::KeyDown{keycode: Some(Keycode::C), ..} => { cpu.keys[0xB] = true; }, Event::KeyDown{keycode: Some(Keycode::V), ..} => { cpu.keys[0xF] = true; }, _ => {} } } fn check_keyreleased(cpu: &mut Cpu, event: &sdl2::event::Event) { match event { Event::KeyUp{keycode: Some(Keycode::Num1), ..} => { cpu.keys[1] = false; }, Event::KeyUp{keycode: Some(Keycode::Num2), ..} => { cpu.keys[2] = false; }, Event::KeyUp{keycode: Some(Keycode::Num3), ..} => { cpu.keys[3] = false; }, Event::KeyUp{keycode: Some(Keycode::Num4), ..} => { cpu.keys[0xC] = false; }, Event::KeyUp{keycode: Some(Keycode::Q), ..} => { cpu.keys[4] = false; }, Event::KeyUp{keycode: Some(Keycode::W), ..} => { cpu.keys[5] = false; }, Event::KeyUp{keycode: Some(Keycode::E), ..} => { cpu.keys[6] = false; }, Event::KeyUp{keycode: Some(Keycode::R), ..} => { cpu.keys[0xD] = false; }, Event::KeyUp{keycode: Some(Keycode::A), ..} => { cpu.keys[7] = false; }, Event::KeyUp{keycode: Some(Keycode::S), ..} => { cpu.keys[8] = false; }, Event::KeyUp{keycode: Some(Keycode::D), ..} => { cpu.keys[9] = false; }, Event::KeyUp{keycode: Some(Keycode::F), ..} => { cpu.keys[0xE] = false; }, Event::KeyUp{keycode: Some(Keycode::Z), ..} => { cpu.keys[0xA] = false; }, Event::KeyUp{keycode: Some(Keycode::X), ..} => { cpu.keys[0] = false; }, Event::KeyUp{keycode: Some(Keycode::C), ..} => { cpu.keys[0xB] = false; }, Event::KeyUp{keycode: Some(Keycode::V), ..} => { cpu.keys[0xF] = false; }, _ => {} } }
use once_cell::sync as once_cell; use regex; use std::iter; pub fn solve_1() { let pairs = parse_lines(include_str!("input.txt")); let passing = pairs .iter() .filter(|(policy, password)| policy.check_count(password)); println!("{} passwords pass their old policies.", passing.count()); } pub fn solve_2() { let pairs = parse_lines(include_str!("input.txt")); let passing = pairs .iter() .filter(|(policy, password)| policy.check_pos(password)); println!("{} passwords pass their new policies.", passing.count()); } #[derive(Debug, Copy, Clone, PartialEq, Eq)] struct Policy { character: char, min: usize, max: usize, } impl Policy { pub fn check_count(&self, password: &str) -> bool { let occurrences = password.chars().filter(|c| *c == self.character).count(); self.min <= occurrences && occurrences <= self.max } pub fn check_pos(&self, password: &str) -> bool { let chars = iter::once('\0').chain(password.chars()); let one = chars.clone().nth(self.min).unwrap() == self.character; let two = chars.clone().nth(self.max).unwrap() == self.character; one != two } } static LINE_REGEX: once_cell::Lazy<regex::Regex> = once_cell::Lazy::new(|| regex::Regex::new(r"(\d+)-(\d+) (\w): (\w+)").unwrap()); fn parse_lines(input: &str) -> Vec<(Policy, &str)> { LINE_REGEX .captures_iter(input) .map(|cap| { ( Policy { min: cap.get(1).unwrap().as_str().parse().unwrap(), max: cap.get(2).unwrap().as_str().parse().unwrap(), character: cap.get(3).unwrap().as_str().chars().next().unwrap(), }, cap.get(4).unwrap().as_str(), ) }) .collect() } #[cfg(test)] mod tests { use super::*; #[test] fn it_parses() { let input = "1-3 a: abcde"; let policy = Policy { character: 'a', min: 1, max: 3, }; assert_eq!(vec![(policy, "abcde")], parse_lines(input)); } #[test] fn example_1_works() { let input = r"1-3 a: abcde 1-3 b: cdefg 2-9 c: ccccccccc"; let expected = vec![true, false, true]; assert_eq!( expected, parse_lines(input) .iter() .map(|(policy, password)| policy.check_count(password)) .collect::<Vec<bool>>() ) } 
#[test] fn example_2_works() { let input = r"1-3 a: abcde 1-3 b: cdefg 2-9 c: ccccccccc"; let expected = vec![true, false, false]; assert_eq!( expected, parse_lines(input) .iter() .map(|(policy, password)| policy.check_pos(password)) .collect::<Vec<bool>>() ) } }
//! Lorem ipsum generator. //! //! This crate generates pseudo-Latin [lorem ipsum placeholder //! text][wiki]. The traditional lorem ipsum text start like this: //! //! > Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do //! > eiusmod tempor incididunt ut labore et dolore magna aliqua. //! //! This text is in the [`LOREM_IPSUM`] constant. Random looking text //! like the above can be generated using the [`lipsum`] function. The //! function allows you to generate as much text as desired and each //! invocation will generate different text. //! //! The random looking text is generated using a [Markov chain] of //! order two, which simply means that the next word is based on the //! previous two words in the input texts. The Markov chain can be //! used with other input texts by creating an instance of //! [`MarkovChain`] and calling its [`learn`] method. //! //! [wiki]: https://en.wikipedia.org/wiki/Lorem_ipsum //! [`lipsum`]: fn.lipsum.html //! [`MarkovChain`]: struct.MarkovChain.html //! [`learn`]: struct.MarkovChain.html#method.learn //! [Markov chain]: https://en.wikipedia.org/wiki/Markov_chain #![doc(html_root_url = "https://docs.rs/lipsum/0.9.0")] #![forbid(unsafe_code)] #![deny(missing_docs)] use rand::seq::SliceRandom; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha20Rng; use std::collections::HashMap; /// A bigram is simply two consecutive words. pub type Bigram<'a> = (&'a str, &'a str); /// Simple order two Markov chain implementation. /// /// The [Markov chain] is a chain of order two, which means that it /// will use the previous two words (a bigram) when predicting the /// next word. This is normally enough to generate random text that /// looks somewhat plausible. The implementation is based on /// [Generating arbitrary text with Markov chains in Rust][blog post]. 
/// /// [Markov chain]: https://en.wikipedia.org/wiki/Markov_chain /// [blog post]: https://blakewilliams.me/posts/generating-arbitrary-text-with-markov-chains-in-rust #[derive(Debug, Clone, Default)] pub struct MarkovChain<'a> { map: HashMap<Bigram<'a>, Vec<&'a str>>, keys: Vec<Bigram<'a>>, } impl<'a> MarkovChain<'a> { /// Create a new empty Markov chain. /// /// # Examples /// /// ``` /// # fn main() { /// use lipsum::MarkovChain; /// use rand::SeedableRng; /// use rand_chacha::ChaCha20Rng; /// /// let mut chain = MarkovChain::new(); /// chain.learn("infra-red red orange yellow green blue indigo x-ray"); /// /// let mut rng = ChaCha20Rng::seed_from_u64(0); /// /// // The chain jumps consistently like this: /// assert_eq!(chain.generate_with_rng(&mut rng, 1), "Orange."); /// assert_eq!(chain.generate_with_rng(&mut rng, 1), "Infra-red."); /// assert_eq!(chain.generate_with_rng(&mut rng, 1), "Yellow."); /// # } /// ``` pub fn new() -> MarkovChain<'a> { Default::default() } /// Add new text to the Markov chain. This can be called several /// times to build up the chain. /// /// # Examples /// /// ``` /// use lipsum::MarkovChain; /// /// let mut chain = MarkovChain::new(); /// chain.learn("red green blue"); /// assert_eq!(chain.words(("red", "green")), Some(&vec!["blue"])); /// /// chain.learn("red green yellow"); /// assert_eq!(chain.words(("red", "green")), Some(&vec!["blue", "yellow"])); /// ``` pub fn learn(&mut self, sentence: &'a str) { let words = sentence.split_whitespace().collect::<Vec<&str>>(); for window in words.windows(3) { let (a, b, c) = (window[0], window[1], window[2]); self.map.entry((a, b)).or_insert_with(Vec::new).push(c); } // Sync the keys with the current map. self.keys = self.map.keys().cloned().collect(); self.keys.sort_unstable(); } /// Returs the number of states in the Markov chain. 
/// /// # Examples /// /// ``` /// use lipsum::MarkovChain; /// /// let mut chain = MarkovChain::new(); /// assert_eq!(chain.len(), 0); /// /// chain.learn("red orange yellow green blue indigo"); /// assert_eq!(chain.len(), 4); /// ``` #[inline] pub fn len(&self) -> usize { self.map.len() } /// Returns `true` if the Markov chain has no states. /// /// # Examples /// /// ``` /// use lipsum::MarkovChain; /// /// let mut chain = MarkovChain::new(); /// assert!(chain.is_empty()); /// /// chain.learn("foo bar baz"); /// assert!(!chain.is_empty()); /// ``` pub fn is_empty(&self) -> bool { self.len() == 0 } /// Get the possible words following the given bigram, or `None` /// if the state is invalid. /// /// # Examples /// /// ``` /// use lipsum::MarkovChain; /// /// let mut chain = MarkovChain::new(); /// chain.learn("red green blue"); /// assert_eq!(chain.words(("red", "green")), Some(&vec!["blue"])); /// assert_eq!(chain.words(("foo", "bar")), None); /// ``` pub fn words(&self, state: Bigram<'a>) -> Option<&Vec<&str>> { self.map.get(&state) } /// Generate a sentence with `n` words of lorem ipsum text. The /// sentence will start from a random point in the Markov chain /// generated using the specified random number generator, /// and a `.` will be added as necessary to form a full sentence. /// /// See [`generate_with_rng_from`] if you want to control the /// starting point for the generated text and see [`iter_with_rng`] /// if you simply want a sequence of words. /// /// # Examples /// /// Generating the sounds of a grandfather clock: /// /// ``` /// use lipsum::MarkovChain; /// use rand_chacha::ChaCha20Rng; /// use rand::SeedableRng; /// /// let mut chain = MarkovChain::new(); /// chain.learn("Tick, Tock, Tick, Tock, Ding! Tick, Tock, Ding! Ding!"); /// println!("{}", chain.generate_with_rng(ChaCha20Rng::seed_from_u64(0), 15)); /// ``` /// /// The output looks like this: /// /// > Ding! Tick, Tock, Tick, Tock, Ding! Ding! Tock, Ding! 
Tick, /// > Tock, Tick, Tock, Tick, Tock. /// /// [`generate_with_rng_from`]: struct.MarkovChain.html#method.generate_with_rng_from /// [`iter_with_rng`]: struct.MarkovChain.html#method.iter_with_rng pub fn generate_with_rng<R: Rng>(&self, rng: R, n: usize) -> String { join_words(self.iter_with_rng(rng).take(n)) } /// Generate a sentence with `n` words of lorem ipsum text. The sentence /// will start from a predetermined point in the Markov chain generated /// using the default random number generator and a `.` will be added as /// necessary to form a full sentence. /// /// See [`generate_from`] if you want to control the starting point for the /// generated text and see [`iter`] if you simply want a sequence of words. /// /// # Examples /// /// Generating the sounds of a grandfather clock: /// /// ``` /// use lipsum::MarkovChain; /// /// let mut chain = MarkovChain::new(); /// chain.learn("Tick, Tock, Tick, Tock, Ding! Tick, Tock, Ding! Ding!"); /// println!("{}", chain.generate(15)); /// ``` /// /// The output looks like this: /// /// > Ding! Tick, Tock, Tick, Tock, Ding! Ding! Tock, Ding! Tick, /// > Tock, Tick, Tock, Tick, Tock. /// /// [`generate_from`]: struct.MarkovChain.html#method.generate_from /// [`iter`]: struct.MarkovChain.html#method.iter pub fn generate(&self, n: usize) -> String { self.generate_with_rng(default_rng(), n) } /// Generate a sentence with `n` words of lorem ipsum text. The /// sentence will start from the given bigram and a `.` will be /// added as necessary to form a full sentence. /// /// Use [`generate_with_rng`] if the starting point is not important. See /// [`iter_with_rng_from`] if you want a sequence of words that you can /// format yourself. 
/// /// [`generate_with_rng`]: struct.MarkovChain.html#method.generate_with_rng /// [`iter_with_rng_from`]: struct.MarkovChain.html#method.iter_with_rng_from pub fn generate_with_rng_from<R: Rng>(&self, rng: R, n: usize, from: Bigram<'a>) -> String { join_words(self.iter_with_rng_from(rng, from).take(n)) } /// Generate a sentence with `n` words of lorem ipsum text. The /// sentence will start from the given bigram and a `.` will be /// added as necessary to form a full sentence. /// /// Use [`generate`] if the starting point is not important. See /// [`iter_from`] if you want a sequence of words that you can /// format yourself. /// /// [`generate`]: struct.MarkovChain.html#method.generate /// [`iter_from`]: struct.MarkovChain.html#method.iter_from pub fn generate_from(&self, n: usize, from: Bigram<'a>) -> String { self.generate_with_rng_from(default_rng(), n, from) } /// Make a never-ending iterator over the words in the Markov /// chain. The iterator starts at a random point in the chain. pub fn iter_with_rng<R: Rng>(&self, mut rng: R) -> Words<'_, R> { let initial_bigram = if self.is_empty() { ("", "") } else { *self.keys.choose(&mut rng).unwrap() }; self.iter_with_rng_from(rng, initial_bigram) } /// Make a never-ending iterator over the words in the Markov chain. The /// iterator starts at a predetermined point in the chain. pub fn iter(&self) -> Words<'_, impl Rng> { self.iter_with_rng(default_rng()) } /// Make a never-ending iterator over the words in the Markov /// chain. The iterator starts at the given bigram. pub fn iter_with_rng_from<R: Rng>(&self, rng: R, from: Bigram<'a>) -> Words<'_, R> { Words { map: &self.map, rng, keys: &self.keys, state: from, } } /// Make a never-ending iterator over the words in the Markov /// chain. The iterator starts at the given bigram. pub fn iter_from(&self, from: Bigram<'a>) -> Words<'_, impl Rng> { self.iter_with_rng_from(default_rng(), from) } } /// Provide a default random number generator. 
This generator is seeded and will /// always produce the same sequence of numbers. The seed is chosen to yield /// good results for the included Markov chain. fn default_rng() -> impl Rng { ChaCha20Rng::seed_from_u64(97) } /// Never-ending iterator over words in the Markov chain. /// /// Generated with the [`iter`] or [`iter_from`] methods. /// /// [`iter`]: struct.MarkovChain.html#method.iter /// [`iter_from`]: struct.MarkovChain.html#method.iter_from pub struct Words<'a, R: Rng> { map: &'a HashMap<Bigram<'a>, Vec<&'a str>>, rng: R, keys: &'a Vec<Bigram<'a>>, state: Bigram<'a>, } impl<'a, R: Rng> Iterator for Words<'a, R> { type Item = &'a str; fn next(&mut self) -> Option<&'a str> { if self.map.is_empty() { return None; } let result = Some(self.state.0); while !self.map.contains_key(&self.state) { self.state = *self.keys.choose(&mut self.rng).unwrap(); } let next_words = &self.map[&self.state]; let next = next_words.choose(&mut self.rng).unwrap(); self.state = (self.state.1, next); result } } /// Check if `c` is an ASCII punctuation character. fn is_ascii_punctuation(c: char) -> bool { c.is_ascii_punctuation() } /// Capitalize the first character in a string. fn capitalize(word: &str) -> String { let idx = match word.chars().next() { Some(c) => c.len_utf8(), None => 0, }; let mut result = String::with_capacity(word.len()); result.push_str(&word[..idx].to_uppercase()); result.push_str(&word[idx..]); result } /// Join words from an iterator. The first word is always capitalized /// and the generated sentence will end with `'.'` if it doesn't /// already end with some other ASCII punctuation character. fn join_words<'a, I: Iterator<Item = &'a str>>(mut words: I) -> String { match words.next() { None => String::new(), Some(word) => { // Punctuation characters which ends a sentence. let punctuation: &[char] = &['.', '!', '?']; let mut sentence = capitalize(word); let mut needs_cap = sentence.ends_with(punctuation); // Add remaining words. 
for word in words { sentence.push(' '); if needs_cap { sentence.push_str(&capitalize(word)); } else { sentence.push_str(word); } needs_cap = word.ends_with(punctuation); } // Ensure the sentence ends with either one of ".!?". if !sentence.ends_with(punctuation) { // Trim all trailing punctuation characters to avoid // adding '.' after a ',' or similar. let idx = sentence.trim_end_matches(is_ascii_punctuation).len(); sentence.truncate(idx); sentence.push('.'); } sentence } } } /// The traditional lorem ipsum text as given in [Wikipedia]. Using /// this text alone for a Markov chain of order two doesn't work very /// well since each bigram (two consequtive words) is followed by just /// one other word. In other words, the Markov chain will always /// produce the same output and recreate the lorem ipsum text /// precisely. However, combining it with the full text in /// [`LIBER_PRIMUS`] works well. /// /// [Wikipedia]: https://en.wikipedia.org/wiki/Lorem_ipsum /// [`LIBER_PRIMUS`]: constant.LIBER_PRIMUS.html pub const LOREM_IPSUM: &str = include_str!("lorem-ipsum.txt"); /// The first book in Cicero's work De finibus bonorum et malorum ("On /// the ends of good and evil"). The lorem ipsum text in /// [`LOREM_IPSUM`] is derived from part of this text. /// /// [`LOREM_IPSUM`]: constant.LOREM_IPSUM.html pub const LIBER_PRIMUS: &str = include_str!("liber-primus.txt"); thread_local! { // Markov chain generating lorem ipsum text. static LOREM_IPSUM_CHAIN: MarkovChain<'static> = { let mut chain = MarkovChain::new(); // The cost of learning increases as more and more text is // added, so we start with the smallest text. chain.learn(LOREM_IPSUM); chain.learn(LIBER_PRIMUS); chain } } /// Generate `n` words of lorem ipsum text. The output will always start with /// "Lorem ipsum". /// /// The text continues with the standard lorem ipsum text from [`LOREM_IPSUM`] /// and becomes randomly generated but deterministic if more than 18 words is /// requested. 
See [`lipsum_words`] if fully random text is needed. /// /// # Examples /// /// ``` /// use lipsum::lipsum; /// /// assert_eq!(lipsum(7), "Lorem ipsum dolor sit amet, consectetur adipiscing."); /// ``` /// /// [`LOREM_IPSUM`]: constant.LOREM_IPSUM.html /// [`lipsum_words`]: fn.lipsum_words.html pub fn lipsum(n: usize) -> String { LOREM_IPSUM_CHAIN.with(|chain| chain.generate_from(n, ("Lorem", "ipsum"))) } /// Generate `n` words of lorem ipsum text with a custom RNG. The output will /// always start with "Lorem ipsum". /// /// A custom RNG allows to base the markov chain on a different random number /// sequence. This also allows using a regular [`thread_rng`] random number /// generator. If that generator is used, the text will differ in each /// invocation. /// /// # Examples /// /// ``` /// use lipsum::lipsum_with_rng; /// use rand::thread_rng; /// /// println!("{}", lipsum_with_rng(thread_rng(), 23)); /// // -> "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do /// // eiusmod tempor incididunt ut labore et dolore magnam aliquam /// // quaerat voluptatem. Ut enim." /// ``` /// /// [`thread_rng`]: https://docs.rs/rand/latest/rand/fn.thread_rng.html pub fn lipsum_with_rng(rng: impl Rng, n: usize) -> String { LOREM_IPSUM_CHAIN.with(|chain| chain.generate_with_rng_from(rng, n, ("Lorem", "ipsum"))) } /// Generate `n` words of lorem ipsum text. /// /// The text is deterministically sampled from a Markov chain based on /// [`LOREM_IPSUM`]. Multiple sentences may be generated, depending on the /// punctuation of the words being selected. /// /// # Examples /// /// ``` /// use lipsum::lipsum_words; /// /// assert_eq!(lipsum_words(6), "Ullus investigandi veri, nisi inveneris, et."); /// ``` /// /// [`LOREM_IPSUM`]: constant.LOREM_IPSUM.html pub fn lipsum_words(n: usize) -> String { LOREM_IPSUM_CHAIN.with(|chain| chain.generate(n)) } /// Generate `n` words of lorem ipsum text with a custom RNG. 
/// /// A custom RNG allows to base the markov chain on a different random number /// sequence. This also allows using a regular [`thread_rng`] random number /// generator. If that generator is used, the text will differ in each /// invocation. /// /// # Examples /// /// ``` /// use lipsum::lipsum_words_with_rng; /// use rand::thread_rng; /// /// println!("{}", lipsum_words_with_rng(thread_rng(), 7)); /// // -> "Quot homines, tot sententiae; falli igitur possumus." /// ``` /// /// [`thread_rng`]: https://docs.rs/rand/latest/rand/fn.thread_rng.html pub fn lipsum_words_with_rng(rng: impl Rng, n: usize) -> String { LOREM_IPSUM_CHAIN.with(|chain| chain.generate_with_rng(rng, n)) } /// Minimum number of words to include in a title. const TITLE_MIN_WORDS: usize = 3; /// Maximum number of words to include in a title. const TITLE_MAX_WORDS: usize = 8; /// Words shorter than this size are not capitalized. const TITLE_SMALL_WORD: usize = 3; /// Generate a short lorem ipsum text with words in title case. /// /// The words are capitalized and stripped for punctuation characters. /// /// # Examples /// /// ``` /// use lipsum::lipsum_title; /// /// println!("{}", lipsum_title()); /// ``` /// /// This will generate a string like /// /// > Grate Meminit et Praesentibus /// /// which should be suitable for use in a document title for section /// heading. pub fn lipsum_title() -> String { LOREM_IPSUM_CHAIN.with(|chain| { let n = default_rng().gen_range(TITLE_MIN_WORDS..TITLE_MAX_WORDS); // The average word length with our corpus is 7.6 bytes so // this capacity will avoid most allocations. let mut title = String::with_capacity(8 * n); let words = chain .iter() .map(|word| word.trim_matches(is_ascii_punctuation)) .filter(|word| !word.is_empty()) .take(n); for (i, word) in words.enumerate() { if i > 0 { title.push(' '); } // Capitalize the first word and all long words. 
if i == 0 || word.len() > TITLE_SMALL_WORD { title.push_str(&capitalize(word)); } else { title.push_str(word); } } title }) } #[cfg(test)] mod tests { use super::*; use rand::{thread_rng, SeedableRng}; use rand_chacha::ChaCha20Rng; #[test] fn starts_with_lorem_ipsum() { assert_eq!(&lipsum(10)[..11], "Lorem ipsum"); } #[test] fn generate_zero_words() { assert_eq!(lipsum(0).split_whitespace().count(), 0); } #[test] fn generate_one_word() { assert_eq!(lipsum(1).split_whitespace().count(), 1); } #[test] fn generate_two_words() { assert_eq!(lipsum(2).split_whitespace().count(), 2); } #[test] fn starts_differently() { // Check that calls to lipsum_words don't always start with // "Lorem ipsum". let idx = "Lorem ipsum".len(); assert_ne!( &lipsum_words_with_rng(thread_rng(), 5)[..idx], &lipsum_words_with_rng(thread_rng(), 5)[..idx] ); } #[test] fn generate_title() { for word in lipsum_title().split_whitespace() { assert!( !word.starts_with(is_ascii_punctuation) && !word.ends_with(is_ascii_punctuation), "Unexpected punctuation: {:?}", word ); if word.len() > TITLE_SMALL_WORD { assert!( word.starts_with(char::is_uppercase), "Expected small word to be capitalized: {:?}", word ); } } } #[test] fn capitalize_after_punctiation() { // The Markov Chain will yield a "habitut." as the second word. However, // the following "voluptatem" is not capitalized, which does not make // much sense, given that it appears after a full stop. The `join_words` // must ensure that every word appearing after sentence-ending // punctuation is capitalized. assert_eq!( lipsum_words_with_rng(ChaCha20Rng::seed_from_u64(5), 9), "Nullam habuit. Voluptatem cum summum bonum in voluptate est." ); } #[test] fn empty_chain() { let chain = MarkovChain::new(); assert_eq!(chain.generate(10), ""); } #[test] fn generate_from() { let mut chain = MarkovChain::new(); chain.learn("red orange yellow green blue indigo violet"); assert_eq!( chain.generate_from(5, ("orange", "yellow")), "Orange yellow green blue indigo." 
); } #[test] fn generate_last_bigram() { // The bigram "yyy zzz" will not be present in the Markov // chain's map, and so we will not generate "xxx yyy zzz" as // one would expect. The chain moves from state "xxx yyy" to // "yyy zzz", but sees that as invalid state and resets itself // back to "xxx yyy". let mut chain = MarkovChain::new(); chain.learn("xxx yyy zzz"); assert_ne!(chain.generate_from(3, ("xxx", "yyy")), "xxx yyy zzz"); } #[test] fn generate_from_no_panic() { // No panic when asked to generate a chain from a starting // point that doesn't exist in the chain. let mut chain = MarkovChain::new(); chain.learn("foo bar baz"); chain.generate_from(3, ("xxx", "yyy")); } #[test] fn chain_map() { let mut chain = MarkovChain::new(); chain.learn("foo bar baz quuz"); let map = &chain.map; assert_eq!(map.len(), 2); assert_eq!(map[&("foo", "bar")], vec!["baz"]); assert_eq!(map[&("bar", "baz")], vec!["quuz"]); } #[test] fn new_with_rng() { let rng = ChaCha20Rng::seed_from_u64(1234); let mut chain = MarkovChain::new(); chain.learn("foo bar x y z"); chain.learn("foo bar a b c"); assert_eq!( chain.generate_with_rng(rng, 15), "A b bar a b a b bar a b x y b y x." ); } }
// NOTE(review): svd2rust-style auto-generated register API for the
// CLK_OUTPUT_FAST register (fast clock output selection). Changes normally
// belong in the SVD description, not in this file.
#[doc = "Reader of register CLK_OUTPUT_FAST"]
pub type R = crate::R<u32, super::CLK_OUTPUT_FAST>;
#[doc = "Writer for register CLK_OUTPUT_FAST"]
pub type W = crate::W<u32, super::CLK_OUTPUT_FAST>;
#[doc = "Register CLK_OUTPUT_FAST `reset()`'s with value 0"]
impl crate::ResetValue for super::CLK_OUTPUT_FAST {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Select signal for fast clock output #0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum FAST_SEL0_A {
    #[doc = "0: Disabled - output is 0. For power savings, clocks are blocked before entering any muxes, including PATH_SEL0 and HFCLK_SEL0."]
    NC,
    #[doc = "1: External Crystal Oscillator (ECO)"]
    ECO,
    #[doc = "2: External clock input (EXTCLK)"]
    EXTCLK,
    #[doc = "3: Alternate High-Frequency (ALTHF) clock input to SRSS"]
    ALTHF,
    #[doc = "4: Timer clock. It is grouped with the fast clocks because it may be a gated version of a fast clock, and therefore may have a short high pulse."]
    TIMERCLK,
    #[doc = "5: Selects the clock path chosen by PATH_SEL0 field"]
    PATH_SEL0,
    #[doc = "6: Selects the output of the HFCLK_SEL0 mux"]
    HFCLK_SEL0,
    #[doc = "7: Selects the output of CLK_OUTPUT_SLOW.SLOW_SEL0"]
    SLOW_SEL0,
}
impl From<FAST_SEL0_A> for u8 {
    #[inline(always)]
    fn from(variant: FAST_SEL0_A) -> Self {
        match variant {
            FAST_SEL0_A::NC => 0,
            FAST_SEL0_A::ECO => 1,
            FAST_SEL0_A::EXTCLK => 2,
            FAST_SEL0_A::ALTHF => 3,
            FAST_SEL0_A::TIMERCLK => 4,
            FAST_SEL0_A::PATH_SEL0 => 5,
            FAST_SEL0_A::HFCLK_SEL0 => 6,
            FAST_SEL0_A::SLOW_SEL0 => 7,
        }
    }
}
#[doc = "Reader of field `FAST_SEL0`"]
pub type FAST_SEL0_R = crate::R<u8, FAST_SEL0_A>;
impl FAST_SEL0_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u8, FAST_SEL0_A> {
        use crate::Variant::*;
        match self.bits {
            0 => Val(FAST_SEL0_A::NC),
            1 => Val(FAST_SEL0_A::ECO),
            2 => Val(FAST_SEL0_A::EXTCLK),
            3 => Val(FAST_SEL0_A::ALTHF),
            4 => Val(FAST_SEL0_A::TIMERCLK),
            5 => Val(FAST_SEL0_A::PATH_SEL0),
            6 => Val(FAST_SEL0_A::HFCLK_SEL0),
            7 => Val(FAST_SEL0_A::SLOW_SEL0),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `NC`"]
    #[inline(always)]
    pub fn is_nc(&self) -> bool {
        *self == FAST_SEL0_A::NC
    }
    #[doc = "Checks if the value of the field is `ECO`"]
    #[inline(always)]
    pub fn is_eco(&self) -> bool {
        *self == FAST_SEL0_A::ECO
    }
    #[doc = "Checks if the value of the field is `EXTCLK`"]
    #[inline(always)]
    pub fn is_extclk(&self) -> bool {
        *self == FAST_SEL0_A::EXTCLK
    }
    #[doc = "Checks if the value of the field is `ALTHF`"]
    #[inline(always)]
    pub fn is_althf(&self) -> bool {
        *self == FAST_SEL0_A::ALTHF
    }
    #[doc = "Checks if the value of the field is `TIMERCLK`"]
    #[inline(always)]
    pub fn is_timerclk(&self) -> bool {
        *self == FAST_SEL0_A::TIMERCLK
    }
    #[doc = "Checks if the value of the field is `PATH_SEL0`"]
    #[inline(always)]
    pub fn is_path_sel0(&self) -> bool {
        *self == FAST_SEL0_A::PATH_SEL0
    }
    #[doc = "Checks if the value of the field is `HFCLK_SEL0`"]
    #[inline(always)]
    pub fn is_hfclk_sel0(&self) -> bool {
        *self == FAST_SEL0_A::HFCLK_SEL0
    }
    #[doc = "Checks if the value of the field is `SLOW_SEL0`"]
    #[inline(always)]
    pub fn is_slow_sel0(&self) -> bool {
        *self == FAST_SEL0_A::SLOW_SEL0
    }
}
#[doc = "Write proxy for field `FAST_SEL0`"]
pub struct FAST_SEL0_W<'a> {
    w: &'a mut W,
}
impl<'a> FAST_SEL0_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: FAST_SEL0_A) -> &'a mut W {
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "Disabled - output is 0. For power savings, clocks are blocked before entering any muxes, including PATH_SEL0 and HFCLK_SEL0."]
    #[inline(always)]
    pub fn nc(self) -> &'a mut W {
        self.variant(FAST_SEL0_A::NC)
    }
    #[doc = "External Crystal Oscillator (ECO)"]
    #[inline(always)]
    pub fn eco(self) -> &'a mut W {
        self.variant(FAST_SEL0_A::ECO)
    }
    #[doc = "External clock input (EXTCLK)"]
    #[inline(always)]
    pub fn extclk(self) -> &'a mut W {
        self.variant(FAST_SEL0_A::EXTCLK)
    }
    #[doc = "Alternate High-Frequency (ALTHF) clock input to SRSS"]
    #[inline(always)]
    pub fn althf(self) -> &'a mut W {
        self.variant(FAST_SEL0_A::ALTHF)
    }
    #[doc = "Timer clock. It is grouped with the fast clocks because it may be a gated version of a fast clock, and therefore may have a short high pulse."]
    #[inline(always)]
    pub fn timerclk(self) -> &'a mut W {
        self.variant(FAST_SEL0_A::TIMERCLK)
    }
    #[doc = "Selects the clock path chosen by PATH_SEL0 field"]
    #[inline(always)]
    pub fn path_sel0(self) -> &'a mut W {
        self.variant(FAST_SEL0_A::PATH_SEL0)
    }
    #[doc = "Selects the output of the HFCLK_SEL0 mux"]
    #[inline(always)]
    pub fn hfclk_sel0(self) -> &'a mut W {
        self.variant(FAST_SEL0_A::HFCLK_SEL0)
    }
    #[doc = "Selects the output of CLK_OUTPUT_SLOW.SLOW_SEL0"]
    #[inline(always)]
    pub fn slow_sel0(self) -> &'a mut W {
        self.variant(FAST_SEL0_A::SLOW_SEL0)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // FAST_SEL0 occupies bits 0:3.
        self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
        self.w
    }
}
#[doc = "Reader of field `PATH_SEL0`"]
pub type PATH_SEL0_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `PATH_SEL0`"]
pub struct PATH_SEL0_W<'a> {
    w: &'a mut W,
}
impl<'a> PATH_SEL0_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // PATH_SEL0 occupies bits 4:7.
        self.w.bits = (self.w.bits & !(0x0f << 4)) | (((value as u32) & 0x0f) << 4);
        self.w
    }
}
#[doc = "Reader of field `HFCLK_SEL0`"]
pub type HFCLK_SEL0_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `HFCLK_SEL0`"]
pub struct HFCLK_SEL0_W<'a> {
    w: &'a mut W,
}
impl<'a> HFCLK_SEL0_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // HFCLK_SEL0 occupies bits 8:11.
        self.w.bits = (self.w.bits & !(0x0f << 8)) | (((value as u32) & 0x0f) << 8);
        self.w
    }
}
#[doc = "Select signal for fast clock output #1\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum FAST_SEL1_A {
    #[doc = "0: Disabled - output is 0. For power savings, clocks are blocked before entering any muxes, including PATH_SEL1 and HFCLK_SEL1."]
    NC,
    #[doc = "1: External Crystal Oscillator (ECO)"]
    ECO,
    #[doc = "2: External clock input (EXTCLK)"]
    EXTCLK,
    #[doc = "3: Alternate High-Frequency (ALTHF) clock input to SRSS"]
    ALTHF,
    #[doc = "4: Timer clock. It is grouped with the fast clocks because it may be a gated version of a fast clock, and therefore may have a short high pulse."]
    TIMERCLK,
    #[doc = "5: Selects the clock path chosen by PATH_SEL1 field"]
    PATH_SEL1,
    #[doc = "6: Selects the output of the HFCLK_SEL1 mux"]
    HFCLK_SEL1,
    #[doc = "7: Selects the output of CLK_OUTPUT_SLOW.SLOW_SEL1"]
    SLOW_SEL1,
}
impl From<FAST_SEL1_A> for u8 {
    #[inline(always)]
    fn from(variant: FAST_SEL1_A) -> Self {
        match variant {
            FAST_SEL1_A::NC => 0,
            FAST_SEL1_A::ECO => 1,
            FAST_SEL1_A::EXTCLK => 2,
            FAST_SEL1_A::ALTHF => 3,
            FAST_SEL1_A::TIMERCLK => 4,
            FAST_SEL1_A::PATH_SEL1 => 5,
            FAST_SEL1_A::HFCLK_SEL1 => 6,
            FAST_SEL1_A::SLOW_SEL1 => 7,
        }
    }
}
#[doc = "Reader of field `FAST_SEL1`"]
pub type FAST_SEL1_R = crate::R<u8, FAST_SEL1_A>;
impl FAST_SEL1_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u8, FAST_SEL1_A> {
        use crate::Variant::*;
        match self.bits {
            0 => Val(FAST_SEL1_A::NC),
            1 => Val(FAST_SEL1_A::ECO),
            2 => Val(FAST_SEL1_A::EXTCLK),
            3 => Val(FAST_SEL1_A::ALTHF),
            4 => Val(FAST_SEL1_A::TIMERCLK),
            5 => Val(FAST_SEL1_A::PATH_SEL1),
            6 => Val(FAST_SEL1_A::HFCLK_SEL1),
            7 => Val(FAST_SEL1_A::SLOW_SEL1),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `NC`"]
    #[inline(always)]
    pub fn is_nc(&self) -> bool {
        *self == FAST_SEL1_A::NC
    }
    #[doc = "Checks if the value of the field is `ECO`"]
    #[inline(always)]
    pub fn is_eco(&self) -> bool {
        *self == FAST_SEL1_A::ECO
    }
    #[doc = "Checks if the value of the field is `EXTCLK`"]
    #[inline(always)]
    pub fn is_extclk(&self) -> bool {
        *self == FAST_SEL1_A::EXTCLK
    }
    #[doc = "Checks if the value of the field is `ALTHF`"]
    #[inline(always)]
    pub fn is_althf(&self) -> bool {
        *self == FAST_SEL1_A::ALTHF
    }
    #[doc = "Checks if the value of the field is `TIMERCLK`"]
    #[inline(always)]
    pub fn is_timerclk(&self) -> bool {
        *self == FAST_SEL1_A::TIMERCLK
    }
    #[doc = "Checks if the value of the field is `PATH_SEL1`"]
    #[inline(always)]
    pub fn is_path_sel1(&self) -> bool {
        *self == FAST_SEL1_A::PATH_SEL1
    }
    #[doc = "Checks if the value of the field is `HFCLK_SEL1`"]
    #[inline(always)]
    pub fn is_hfclk_sel1(&self) -> bool {
        *self == FAST_SEL1_A::HFCLK_SEL1
    }
    #[doc = "Checks if the value of the field is `SLOW_SEL1`"]
    #[inline(always)]
    pub fn is_slow_sel1(&self) -> bool {
        *self == FAST_SEL1_A::SLOW_SEL1
    }
}
#[doc = "Write proxy for field `FAST_SEL1`"]
pub struct FAST_SEL1_W<'a> {
    w: &'a mut W,
}
impl<'a> FAST_SEL1_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: FAST_SEL1_A) -> &'a mut W {
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "Disabled - output is 0. For power savings, clocks are blocked before entering any muxes, including PATH_SEL1 and HFCLK_SEL1."]
    #[inline(always)]
    pub fn nc(self) -> &'a mut W {
        self.variant(FAST_SEL1_A::NC)
    }
    #[doc = "External Crystal Oscillator (ECO)"]
    #[inline(always)]
    pub fn eco(self) -> &'a mut W {
        self.variant(FAST_SEL1_A::ECO)
    }
    #[doc = "External clock input (EXTCLK)"]
    #[inline(always)]
    pub fn extclk(self) -> &'a mut W {
        self.variant(FAST_SEL1_A::EXTCLK)
    }
    #[doc = "Alternate High-Frequency (ALTHF) clock input to SRSS"]
    #[inline(always)]
    pub fn althf(self) -> &'a mut W {
        self.variant(FAST_SEL1_A::ALTHF)
    }
    #[doc = "Timer clock. It is grouped with the fast clocks because it may be a gated version of a fast clock, and therefore may have a short high pulse."]
    #[inline(always)]
    pub fn timerclk(self) -> &'a mut W {
        self.variant(FAST_SEL1_A::TIMERCLK)
    }
    #[doc = "Selects the clock path chosen by PATH_SEL1 field"]
    #[inline(always)]
    pub fn path_sel1(self) -> &'a mut W {
        self.variant(FAST_SEL1_A::PATH_SEL1)
    }
    #[doc = "Selects the output of the HFCLK_SEL1 mux"]
    #[inline(always)]
    pub fn hfclk_sel1(self) -> &'a mut W {
        self.variant(FAST_SEL1_A::HFCLK_SEL1)
    }
    #[doc = "Selects the output of CLK_OUTPUT_SLOW.SLOW_SEL1"]
    #[inline(always)]
    pub fn slow_sel1(self) -> &'a mut W {
        self.variant(FAST_SEL1_A::SLOW_SEL1)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // FAST_SEL1 occupies bits 16:19.
        self.w.bits = (self.w.bits & !(0x0f << 16)) | (((value as u32) & 0x0f) << 16);
        self.w
    }
}
#[doc = "Reader of field `PATH_SEL1`"]
pub type PATH_SEL1_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `PATH_SEL1`"]
pub struct PATH_SEL1_W<'a> {
    w: &'a mut W,
}
impl<'a> PATH_SEL1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // PATH_SEL1 occupies bits 20:23.
        self.w.bits = (self.w.bits & !(0x0f << 20)) | (((value as u32) & 0x0f) << 20);
        self.w
    }
}
#[doc = "Reader of field `HFCLK_SEL1`"]
pub type HFCLK_SEL1_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `HFCLK_SEL1`"]
pub struct HFCLK_SEL1_W<'a> {
    w: &'a mut W,
}
impl<'a> HFCLK_SEL1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // HFCLK_SEL1 occupies bits 24:27.
        self.w.bits = (self.w.bits & !(0x0f << 24)) | (((value as u32) & 0x0f) << 24);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:3 - Select signal for fast clock output #0"]
    #[inline(always)]
    pub fn fast_sel0(&self) -> FAST_SEL0_R {
        FAST_SEL0_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 4:7 - Selects a clock path to use in fast clock output #0 logic. For FLL path, it connects after the bypass mux. For PLL path(s), it connects after the CLK_PLL_DDFT mux. 0: FLL output 1-15: PLL output on path1-path15 (if available)"]
    #[inline(always)]
    pub fn path_sel0(&self) -> PATH_SEL0_R {
        PATH_SEL0_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
    #[doc = "Bits 8:11 - Selects a HFCLK tree for use in fast clock output #0"]
    #[inline(always)]
    pub fn hfclk_sel0(&self) -> HFCLK_SEL0_R {
        HFCLK_SEL0_R::new(((self.bits >> 8) & 0x0f) as u8)
    }
    #[doc = "Bits 16:19 - Select signal for fast clock output #1"]
    #[inline(always)]
    pub fn fast_sel1(&self) -> FAST_SEL1_R {
        FAST_SEL1_R::new(((self.bits >> 16) & 0x0f) as u8)
    }
    #[doc = "Bits 20:23 - Selects a clock path to use in fast clock output #1 logic. For FLL path, it connects after the bypass mux. For PLL path(s), it connects after the CLK_PLL_DDFT mux. 1-15: PLL output on path1-path15 (if available)"]
    #[inline(always)]
    pub fn path_sel1(&self) -> PATH_SEL1_R {
        PATH_SEL1_R::new(((self.bits >> 20) & 0x0f) as u8)
    }
    #[doc = "Bits 24:27 - Selects a HFCLK tree for use in fast clock output #1 logic"]
    #[inline(always)]
    pub fn hfclk_sel1(&self) -> HFCLK_SEL1_R {
        HFCLK_SEL1_R::new(((self.bits >> 24) & 0x0f) as u8)
    }
}
impl W {
    #[doc = "Bits 0:3 - Select signal for fast clock output #0"]
    #[inline(always)]
    pub fn fast_sel0(&mut self) -> FAST_SEL0_W {
        FAST_SEL0_W { w: self }
    }
    #[doc = "Bits 4:7 - Selects a clock path to use in fast clock output #0 logic. For FLL path, it connects after the bypass mux. For PLL path(s), it connects after the CLK_PLL_DDFT mux. 0: FLL output 1-15: PLL output on path1-path15 (if available)"]
    #[inline(always)]
    pub fn path_sel0(&mut self) -> PATH_SEL0_W {
        PATH_SEL0_W { w: self }
    }
    #[doc = "Bits 8:11 - Selects a HFCLK tree for use in fast clock output #0"]
    #[inline(always)]
    pub fn hfclk_sel0(&mut self) -> HFCLK_SEL0_W {
        HFCLK_SEL0_W { w: self }
    }
    #[doc = "Bits 16:19 - Select signal for fast clock output #1"]
    #[inline(always)]
    pub fn fast_sel1(&mut self) -> FAST_SEL1_W {
        FAST_SEL1_W { w: self }
    }
    #[doc = "Bits 20:23 - Selects a clock path to use in fast clock output #1 logic. For FLL path, it connects after the bypass mux. For PLL path(s), it connects after the CLK_PLL_DDFT mux. 1-15: PLL output on path1-path15 (if available)"]
    #[inline(always)]
    pub fn path_sel1(&mut self) -> PATH_SEL1_W {
        PATH_SEL1_W { w: self }
    }
    #[doc = "Bits 24:27 - Selects a HFCLK tree for use in fast clock output #1 logic"]
    #[inline(always)]
    pub fn hfclk_sel1(&mut self) -> HFCLK_SEL1_W {
        HFCLK_SEL1_W { w: self }
    }
}
use std::env;

/// Loads the byte at index `next_in` from `input` and returns it placed in
/// the most-significant byte of a `u64`; all other bytes are zero.
///
/// # Panics
/// Panics if `next_in` is out of bounds for `input`.
#[inline(never)]
pub fn foo(next_in: u32, input: &[u8]) -> u64 {
    // Previously written as `0u64 | (byte << 56)` through two temporaries;
    // the OR with zero is a no-op, so the shift alone is equivalent.
    (input[next_in as usize] as u64) << 56
}

fn main() {
    // Parse every CLI argument as a byte; fail with a clear message on bad input.
    let bytes: Vec<u8> = env::args()
        .skip(1)
        .map(|arg| arg.parse::<u8>().expect("arguments must be u8 values (0-255)"))
        .collect();
    let x = foo(0, &bytes);
    println!("{:x}", x);
}
use core::marker::PhantomData; use necsim_core::{ cogs::{ Backup, CoalescenceRngSample, CoalescenceSampler, DispersalSampler, EmigrationExit, EventSampler, Habitat, MinSpeciationTrackingEventSampler, RngCore, SpeciationProbability, SpeciationSample, TurnoverRate, }, event::{DispersalEvent, PackedEvent, SpeciationEvent}, landscape::IndexedLocation, lineage::GlobalLineageReference, simulation::partial::event_sampler::PartialSimulation, }; use necsim_core_bond::{NonNegativeF64, PositiveF64}; use crate::cogs::{ coalescence_sampler::independent::IndependentCoalescenceSampler, lineage_store::independent::IndependentLineageStore, }; #[allow(clippy::module_name_repetitions)] #[cfg_attr(feature = "cuda", derive(rust_cuda::common::RustToCuda))] #[cfg_attr(feature = "cuda", r2cBound(H: rust_cuda::common::RustToCuda))] #[cfg_attr(feature = "cuda", r2cBound(G: rust_cuda::common::RustToCuda))] #[cfg_attr(feature = "cuda", r2cBound(X: rust_cuda::common::RustToCuda))] #[cfg_attr(feature = "cuda", r2cBound(D: rust_cuda::common::RustToCuda))] #[cfg_attr(feature = "cuda", r2cBound(T: rust_cuda::common::RustToCuda))] #[cfg_attr(feature = "cuda", r2cBound(N: rust_cuda::common::RustToCuda))] #[derive(Debug)] pub struct IndependentEventSampler< H: Habitat, G: RngCore, X: EmigrationExit<H, G, GlobalLineageReference, IndependentLineageStore<H>>, D: DispersalSampler<H, G>, T: TurnoverRate<H>, N: SpeciationProbability<H>, > { min_spec_sample: Option<SpeciationSample>, marker: PhantomData<(H, G, X, D, T, N)>, } impl< H: Habitat, G: RngCore, X: EmigrationExit<H, G, GlobalLineageReference, IndependentLineageStore<H>>, D: DispersalSampler<H, G>, T: TurnoverRate<H>, N: SpeciationProbability<H>, > Default for IndependentEventSampler<H, G, X, D, T, N> { fn default() -> Self { Self { min_spec_sample: None, marker: PhantomData::<(H, G, X, D, T, N)>, } } } #[contract_trait] impl< H: Habitat, G: RngCore, X: EmigrationExit<H, G, GlobalLineageReference, IndependentLineageStore<H>>, D: DispersalSampler<H, 
G>, T: TurnoverRate<H>, N: SpeciationProbability<H>, > Backup for IndependentEventSampler<H, G, X, D, T, N> { unsafe fn backup_unchecked(&self) -> Self { Self { min_spec_sample: self.min_spec_sample.clone(), marker: PhantomData::<(H, G, X, D, T, N)>, } } } #[contract_trait] impl< H: Habitat, G: RngCore, X: EmigrationExit<H, G, GlobalLineageReference, IndependentLineageStore<H>>, D: DispersalSampler<H, G>, T: TurnoverRate<H>, N: SpeciationProbability<H>, > EventSampler< H, G, GlobalLineageReference, IndependentLineageStore<H>, X, D, IndependentCoalescenceSampler<H>, T, N, > for IndependentEventSampler<H, G, X, D, T, N> { #[must_use] #[allow(clippy::type_complexity, clippy::shadow_unrelated)] #[inline] fn sample_event_for_lineage_at_indexed_location_time_or_emigrate( &mut self, lineage_reference: GlobalLineageReference, indexed_location: IndexedLocation, prior_time: NonNegativeF64, event_time: PositiveF64, simulation: &mut PartialSimulation< H, G, GlobalLineageReference, IndependentLineageStore<H>, X, D, IndependentCoalescenceSampler<H>, T, N, >, rng: &mut G, ) -> Option<PackedEvent> { use necsim_core::cogs::RngSampler; let speciation_sample = rng.sample_uniform(); let min_speciation_sample = SpeciationSample::new(indexed_location.clone(), event_time, speciation_sample); match &self.min_spec_sample { Some(spec_sample) if spec_sample <= &min_speciation_sample => (), _ => self.min_spec_sample = Some(min_speciation_sample), } let dispersal_origin = indexed_location; if speciation_sample < simulation .speciation_probability .get_speciation_probability_at_location( dispersal_origin.location(), &simulation.habitat, ) { Some( SpeciationEvent { origin: dispersal_origin, prior_time, event_time, global_lineage_reference: lineage_reference, } .into(), ) } else { let dispersal_target = simulation.dispersal_sampler.sample_dispersal_from_location( dispersal_origin.location(), &simulation.habitat, rng, ); // Check for emigration and return None iff lineage emigrated let 
(lineage_reference, dispersal_origin, dispersal_target, prior_time, event_time) = simulation.with_mut_split_emigration_exit(|emigration_exit, simulation| { emigration_exit.optionally_emigrate( lineage_reference, dispersal_origin, dispersal_target, prior_time, event_time, simulation, rng, ) })?; let (dispersal_target, interaction) = simulation .coalescence_sampler .sample_interaction_at_location( dispersal_target, &simulation.habitat, &simulation.lineage_store, CoalescenceRngSample::new(rng), ); Some( DispersalEvent { origin: dispersal_origin, prior_time, event_time, global_lineage_reference: lineage_reference, target: dispersal_target, interaction, } .into(), ) } } } impl< H: Habitat, G: RngCore, X: EmigrationExit<H, G, GlobalLineageReference, IndependentLineageStore<H>>, D: DispersalSampler<H, G>, T: TurnoverRate<H>, N: SpeciationProbability<H>, > MinSpeciationTrackingEventSampler< H, G, GlobalLineageReference, IndependentLineageStore<H>, X, D, IndependentCoalescenceSampler<H>, T, N, > for IndependentEventSampler<H, G, X, D, T, N> { fn replace_min_speciation( &mut self, new: Option<SpeciationSample>, ) -> Option<SpeciationSample> { // `core::mem::replace()` would be semantically better // - but `clone()` does not spill to local memory let old_value = self.min_spec_sample.clone(); self.min_spec_sample = new; old_value } }
//!Handles the boot command line multiboot2 tag. ///Represents the boot command line tag. #[repr(C)] struct BootCommandLine { //type = 1 tag_type: u32, size: u32, string: [u8] }
use std::{error::Error, fmt, fs::{self, File}, str}; use fnv::FnvHashMap; use std::path::{Path, PathBuf}; use std::io::Read; #[derive(Debug)] struct MimeInfoDbError(String); impl fmt::Display for MimeInfoDbError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.0) } } impl Error for MimeInfoDbError {} #[derive(Debug, PartialEq, Eq)] pub enum Mime { Generic, WithExt(String), Unknown, } pub struct MimeInfoDb { db_root_path: Option<PathBuf>, mime_map: FnvHashMap<String, Mime>, } impl MimeInfoDb { pub fn new(db_root_path: &Path) -> Self { let path_info_result = fs::metadata(db_root_path); let db_root_opt = match path_info_result { Ok(path_info) => if path_info.is_dir() { Some(db_root_path) } else { eprintln!("Warning: ignoring db_root_path, it is not a directory: {}", db_root_path.display()); None } _ => { eprintln!("Warning: ignoring non-existing db_root_path {}", db_root_path.display()); None } }; Self{ db_root_path: db_root_opt.map(PathBuf::from), mime_map: FnvHashMap::default(), } } pub fn get(&mut self, mime: &str) -> &Mime { let Self { db_root_path, mime_map } = self; let entry = mime_map.entry(mime.to_owned()); entry.or_insert_with(|| { let mime_info = match db_root_path { Some(db_root) => Self::load_mime_info(db_root, mime), None => Mime::Unknown, }; if mime_info == Mime::Unknown { // eprintln!("using secondary extension db"); match mime_db::extensions(mime) { Some(exts) => if exts.len() > 0 { Mime::WithExt(exts[0].to_owned()) } else { Mime::Generic }, None => Mime::Unknown, } } else { mime_info } }) } pub fn set(&mut self, mime: &str, ext: &str) { self.mime_map.insert(mime.to_owned(), Mime::WithExt(ext.to_owned())); } fn load_mime_info(root_path: &Path, mime: &str) -> Mime { let mime_path = root_path.join(format!("{}.xml", mime)); // eprintln!("loading {} from {}", mime, mime_path.display()); let mime_info_file = File::open(mime_path); match mime_info_file { Ok(mut file) => Self::parse_mime_info(&mut file), Err(_) => Mime::Unknown, 
} } fn extract_glob(doc: &roxmltree::Document) -> Mime { match doc.descendants().find(|n| n.tag_name().name() == "glob") { Some(node) => match node.attribute("pattern") { Some(pattern) => Mime::WithExt(pattern.trim_start_matches("*.").to_owned()), None => Mime::Generic, }, None => Mime::Generic, } } fn parse_mime_info(f: &mut File) -> Mime { use roxmltree::Document; let mut xml_str = String::new(); if let Err(_) = f.read_to_string(&mut xml_str) { return Mime::Unknown; } match Document::parse(&xml_str) { Ok(doc) => Self::extract_glob(&doc), Err(e) => panic!("Error: {}", e), } } }
use anyhow::Error; use serde::de::DeserializeOwned; use yew::format::{Json, Nothing}; use yew::services::fetch::{FetchTask, Request, Response}; use yew::services::FetchService; use yew::{html, Component, ComponentLink, Html, Properties, ShouldRender}; use yew_router::components::RouterAnchor; use yew_router::router::Router as YewRouter; #[derive(Debug, Switch, Clone, Copy)] pub enum AppRoute { #[to = "/festivals"] Festivals, #[to = "/events"] Events, #[to = "/"] Home, } pub type Anchor = RouterAnchor<AppRoute>; pub type Router = YewRouter<AppRoute>; pub type JsonResponse<T> = Response<Json<Result<T, Error>>>; pub enum FetchMsg<T> { FetchStart, FetchSucceed(T), FetchFailed, } pub struct FetchModel<T: 'static + DeserializeOwned + Table> { link: ComponentLink<Self>, fetch_task: Option<FetchTask>, objects: Vec<T>, url: String, } #[derive(Properties, Clone)] pub struct FetchProps { pub url: String, } pub trait Table { fn title() -> &'static str; fn render_header() -> Html; fn render_row(&self) -> Html; } impl<T: 'static + DeserializeOwned + Table> Component for FetchModel<T> { type Message = FetchMsg<Vec<T>>; type Properties = FetchProps; fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self { Self { link, fetch_task: None, objects: Vec::new(), url: props.url, } } fn update(&mut self, msg: Self::Message) -> ShouldRender { match msg { FetchMsg::FetchSucceed(objects) => { self.fetch_task = None; self.objects = objects; true } FetchMsg::FetchFailed => false, FetchMsg::FetchStart => { if self.fetch_task.is_none() { let request = Request::get(&*self.url).body(Nothing).unwrap(); let task = FetchService::fetch( request, self.link.callback(|response: JsonResponse<Vec<T>>| { match response.into_parts() { (_meta, Json(Ok(data))) => FetchMsg::FetchSucceed(data), (_meta, Json(Err(_error))) => FetchMsg::FetchFailed, } }), ); self.fetch_task = task.ok(); } false } } } fn change(&mut self, _props: Self::Properties) -> ShouldRender { false } fn view(&self) -> Html { 
html! { <div class="container"> <h1>{ <T as Table>::title() }</h1> <table class="table"> <thead> { <T as Table>::render_header() } </thead> <tbody> { for self.objects.iter().map(<T as Table>::render_row) } </tbody> </table> </div> } } fn rendered(&mut self, first_render: bool) { if first_render { self.link.send_message(FetchMsg::FetchStart); } } }
extern crate serde; extern crate bincode; extern crate gtk; pub mod manager; pub mod ui;
#![doc = "generated by AutoRust 0.1.0"]
// Feature-gated API versions. Each feature re-exports `models`, `operations`
// and `API_VERSION` under the same names, so exactly one `package-*` feature
// should be enabled at a time; enabling several produces conflicting
// re-exports at compile time.
#[cfg(feature = "package-2020-03-30")]
mod package_2020_03_30;
#[cfg(feature = "package-2020-03-30")]
pub use package_2020_03_30::{models, operations, API_VERSION};
#[cfg(feature = "package-2020-03")]
mod package_2020_03;
#[cfg(feature = "package-2020-03")]
pub use package_2020_03::{models, operations, API_VERSION};
#[cfg(feature = "package-2019-09")]
mod package_2019_09;
#[cfg(feature = "package-2019-09")]
pub use package_2019_09::{models, operations, API_VERSION};
#[cfg(feature = "package-2018-08-preview")]
mod package_2018_08_preview;
#[cfg(feature = "package-2018-08-preview")]
pub use package_2018_08_preview::{models, operations, API_VERSION};

/// Shared configuration for service operations: API version, HTTP client,
/// endpoint base path, and the credential used to obtain access tokens.
pub struct OperationConfig {
    /// Service API version string sent with each request.
    pub api_version: String,
    /// HTTP client used to execute requests.
    pub client: reqwest::Client,
    /// Base URL of the service endpoint.
    pub base_path: String,
    /// Credential used to acquire tokens; `None` means unauthenticated.
    pub token_credential: Option<Box<dyn azure_core::TokenCredential>>,
    /// Resource (audience) the token is requested for.
    pub token_credential_resource: String,
}
impl OperationConfig {
    /// Creates a configuration with the given credential and all other
    /// fields taken from `Default` (public Azure management endpoint).
    pub fn new(token_credential: Box<dyn azure_core::TokenCredential>) -> Self {
        Self {
            token_credential: Some(token_credential),
            ..Default::default()
        }
    }
}
impl Default for OperationConfig {
    /// Defaults target the public Azure Resource Manager endpoint with no
    /// credential configured.
    fn default() -> Self {
        Self {
            api_version: API_VERSION.to_owned(),
            client: reqwest::Client::new(),
            base_path: "https://management.azure.com".to_owned(),
            token_credential: None,
            token_credential_resource: "https://management.azure.com/".to_owned(),
        }
    }
}
#[doc = r"Register block"] #[repr(C)] pub struct RegisterBlock { #[doc = "0x00 - Voltage regulator control and status"] pub vreg: VREG, #[doc = "0x04 - brown-out detection control"] pub bod: BOD, #[doc = "0x08 - Chip reset control and status"] pub chip_reset: CHIP_RESET, } #[doc = "Voltage regulator control and status\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [vreg](vreg) module"] pub type VREG = crate::Reg<u32, _VREG>; #[allow(missing_docs)] #[doc(hidden)] pub struct _VREG; #[doc = "`read()` method returns [vreg::R](vreg::R) reader structure"] impl crate::Readable for VREG {} #[doc = "`write(|w| ..)` method takes [vreg::W](vreg::W) writer structure"] impl crate::Writable for VREG {} #[doc = "Voltage regulator control and status"] pub mod vreg; #[doc = "brown-out detection control\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [bod](bod) module"] pub type BOD = crate::Reg<u32, _BOD>; #[allow(missing_docs)] #[doc(hidden)] pub struct _BOD; #[doc = "`read()` method returns [bod::R](bod::R) reader structure"] impl crate::Readable for BOD {} #[doc = "`write(|w| ..)` method takes [bod::W](bod::W) writer structure"] impl crate::Writable for BOD {} #[doc = "brown-out detection control"] pub mod bod; #[doc = "Chip reset control and status\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [chip_reset](chip_reset) module"] pub type CHIP_RESET = crate::Reg<u32, _CHIP_RESET>; #[allow(missing_docs)] #[doc(hidden)] pub struct _CHIP_RESET; #[doc = "`read()` method returns [chip_reset::R](chip_reset::R) reader structure"] impl crate::Readable for CHIP_RESET {} #[doc = "`write(|w| ..)` method takes [chip_reset::W](chip_reset::W) writer structure"] impl crate::Writable for CHIP_RESET {} #[doc = "Chip reset control and status"] pub mod chip_reset;
extern crate hyper;
extern crate slack_api;

use self::hyper::Client;
use self::slack_api::search;

/// Walks every page of Slack search results for messages sent by `username`
/// and invokes `handler` with the text of each matched message.
///
/// # Panics
/// Panics if any Slack API request fails.
pub fn all_messages<F>(token: &str, username: &str, mut handler: F)
    where F: FnMut(&str)
{
    let client = Client::new();
    let query = format!("from:@{}", username);
    let mut page = 0;
    loop {
        let result = search::messages(
            &client,
            token,
            &query,
            None,
            None,
            None,
            Some(1000), // max results per page
            Some(page),
        );
        match result {
            Ok(search::MessagesResponse { messages, .. }) => {
                let paging = messages.paging;
                println!("Importing page {} of {}", paging.page, paging.pages);
                for message in messages.matches {
                    handler(&message.text);
                }
                // Terminate on the page the server actually *returned*. The
                // previous check compared the locally requested page number
                // (which starts at 0) against `paging.pages`, which could
                // request one page past the end before stopping.
                if paging.page >= paging.pages {
                    break;
                }
                page = paging.page + 1;
            }
            // `panic!(e)` with a non-string payload is a hard error in
            // Rust 2021; format the error instead.
            Err(e) => panic!("Slack search request failed: {:?}", e),
        }
    }
}
use super::{path_to_str, OpenBackend};
use crate::error;
use crate::EditorOpts;
use std::env;
use std::path::PathBuf;
use std::process::Command;

/// Backend that opens a playground inside a tmux split: the left pane runs
/// `cargo-playground watch`, the right pane runs the user's editor.
pub struct Tmux;

impl OpenBackend for Tmux {
    /// Opens the playground at `path` named `name`.
    ///
    /// Flow: spawn a watcher in a new horizontal tmux split, run the editor
    /// in the current pane on `src/main.rs`, then (once the editor exits)
    /// send Ctrl-C to the watcher pane to stop it.
    ///
    /// NOTE(review): assumes this is executed from within an existing tmux
    /// session — `split-window` would otherwise fail; confirm with callers.
    fn run(&mut self, mut path: PathBuf, name: &str, opts: EditorOpts) -> error::Result<()> {
        // Re-invoke this same executable in watch mode inside the new pane.
        let self_path = env::current_exe()?;
        let watch_cmd = format!(
            "{} watch {}",
            path_to_str(&self_path, "cargo-playground")?,
            name
        );
        #[rustfmt::skip]
        Command::new("tmux")
            .args(&[
                "split-window", "-h",
                "-e", "HISTFILE=/dev/null", // prevent command from going into history
                "-c", path_to_str(&path, "playground")?,
                &watch_cmd,
                ";",
                // Focus back on the left pane, where the editor will run.
                "select-pane", "-L",
            ])
            .output()?;
        // The editor's working directory is captured here, before `path` is
        // repurposed below.
        let mut editor = Command::new(opts.editor);
        editor.current_dir(&path);
        path.clear();
        // Now represents path to entrypoint (main.rs), relative to the
        // working directory set above.
        path.push("src");
        path.push("main.rs");
        // Blocks until the editor exits.
        editor.args(opts.args).arg(&path).status()?;
        #[rustfmt::skip]
        Command::new("tmux").args(&[
            "select-pane", "-R", ";", // Select the right pane
            "send-keys", "C-c", // and kill it
        ]).output()?;
        Ok(())
    }
}
// This file is part of Substrate. // Copyright (C) 2020 Parity Technologies (UK) Ltd. // SPDX-License-Identifier: Apache-2.0 // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Fuzzing for staking pallet. //! //! HFUZZ_RUN_ARGS="-n 8" cargo hfuzz run submit_solution use honggfuzz::fuzz; use mock::Test; use pallet_staking::testing_utils::*; use frame_support::{assert_ok, storage::StorageValue, traits::UnfilteredDispatchable}; use frame_system::RawOrigin; use sp_runtime::DispatchError; use sp_core::offchain::{testing::TestOffchainExt, OffchainExt}; use pallet_staking::{EraElectionStatus, ElectionStatus, Module as Staking, Call as StakingCall}; mod mock; #[repr(u32)] #[allow(dead_code)] #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum Mode { /// Initial submission. This will be rather cheap. InitialSubmission, /// A better submission that will replace the previous ones. This is the most expensive. StrongerSubmission, /// A weak submission that will be rejected. This will be rather cheap. 
WeakerSubmission, } pub fn new_test_ext(iterations: u32) -> sp_io::TestExternalities { let mut ext: sp_io::TestExternalities = frame_system::GenesisConfig::default() .build_storage::<mock::Test>() .map(Into::into) .expect("Failed to create test externalities."); let (offchain, offchain_state) = TestOffchainExt::new(); let mut seed = [0u8; 32]; seed[0..4].copy_from_slice(&iterations.to_le_bytes()); offchain_state.write().seed = seed; ext.register_extension(OffchainExt::new(offchain)); ext } fn main() { let to_range = |x: u32, a: u32, b: u32| { let collapsed = x % b; if collapsed >= a { collapsed } else { collapsed + a } }; loop { fuzz!(|data: (u32, u32, u32, u32, u32)| { let (mut num_validators, mut num_nominators, mut edge_per_voter, mut to_elect, mode_u32) = data; // always run with 5 iterations. let mut ext = new_test_ext(5); let mode: Mode = unsafe { std::mem::transmute(mode_u32) }; num_validators = to_range(num_validators, 50, 1000); num_nominators = to_range(num_nominators, 50, 2000); edge_per_voter = to_range(edge_per_voter, 1, 16); to_elect = to_range(to_elect, 20, num_validators); let do_reduce = true; println!("+++ instance with params {} / {} / {} / {} / {:?}({})", num_nominators, num_validators, edge_per_voter, to_elect, mode, mode_u32, ); ext.execute_with(|| { // initial setup init_active_era(); assert_ok!(create_validators_with_nominators_for_era::<Test>( num_validators, num_nominators, edge_per_voter as usize, true, None, )); <EraElectionStatus<Test>>::put(ElectionStatus::Open(1)); assert!(<Staking<Test>>::create_stakers_snapshot().0); let origin = RawOrigin::Signed(create_funded_user::<Test>("fuzzer", 0, 100)); // stuff to submit let (winners, compact, score, size) = match mode { Mode::InitialSubmission => { // No need to setup anything get_seq_phragmen_solution::<Test>(do_reduce) }, Mode::StrongerSubmission => { let (winners, compact, score, size) = get_weak_solution::<Test>(false); println!("Weak on chain score = {:?}", score); assert_ok!( 
<Staking<Test>>::submit_election_solution( origin.clone().into(), winners, compact, score, current_era::<Test>(), size, ) ); get_seq_phragmen_solution::<Test>(do_reduce) }, Mode::WeakerSubmission => { let (winners, compact, score, size) = get_seq_phragmen_solution::<Test>(do_reduce); println!("Strong on chain score = {:?}", score); assert_ok!( <Staking<Test>>::submit_election_solution( origin.clone().into(), winners, compact, score, current_era::<Test>(), size, ) ); get_weak_solution::<Test>(false) } }; // must have chosen correct number of winners. assert_eq!(winners.len() as u32, <Staking<Test>>::validator_count()); // final call and origin let call = StakingCall::<Test>::submit_election_solution( winners, compact, score, current_era::<Test>(), size, ); // actually submit match mode { Mode::WeakerSubmission => { assert_eq!( call.dispatch_bypass_filter(origin.into()).unwrap_err().error, DispatchError::Module { index: 0, error: 16, message: Some("OffchainElectionWeakSubmission"), }, ); }, // NOTE: so exhaustive pattern doesn't work here.. maybe some rust issue? // or due to `#[repr(u32)]`? Mode::InitialSubmission | Mode::StrongerSubmission => { assert_ok!(call.dispatch_bypass_filter(origin.into())); } }; }) }); } }
extern crate time;
extern crate rayon;

use std::io;
use std::env;

// When true, print timing measurements instead of the found pairs.
const OUTPUT_TIMES : bool = false;

/// Fills `sums[i]` with the sum of all divisors (including 1 and the number
/// itself) of `start + i`. Recursively splits the slice and hands the halves
/// to rayon until chunks are <= 50 elements, then computes directly.
fn factorsums(sums: &mut [u64], start: u64) {
    // Either split in two or calculate current part of array
    if sums.len() > 50 {
        let split = sums.len()/2;
        let (left, right) = sums.split_at_mut(split);
        let split = split as u64;
        // Right half starts at `start + split` since it covers the upper
        // numbers of this range.
        rayon::join(|| factorsums(left, start), || factorsums(right, start+split));
    } else {
        for i in 0..sums.len() {
            sums[i] = 0;
            let cur_number = i as u64 + start;
            // Trial division only up to sqrt(n); each factor found below the
            // root also contributes its cofactor above the root.
            let limit = (cur_number as f64).sqrt() as u64 + 1;
            for factor in 1..limit {
                if cur_number % factor == 0 {
                    sums[i] += factor;
                    // Avoid double-counting the root of a perfect square.
                    if factor*factor != cur_number {
                        sums[i] += cur_number/factor;
                    }
                }
            }
        }
    }
}

/// Returns all pairs (a, b) in [mystart, myend) x (a, globalend) whose
/// divisor-sum ratios match, i.e. sigma(a)/a == sigma(b)/b, tested via
/// cross-multiplication to stay in integer arithmetic. `sums` is indexed
/// relative to `globalstart`. Splits recursively for rayon like `factorsums`.
fn find_matching(sums: &[u64], mystart: u64, myend: u64, globalstart: u64, globalend: u64) -> Vec<(u64, u64)> {
    // Either split recursively or actually execute some work
    if myend-mystart > 50 {
        let split = (myend-mystart) / 2 + mystart;
        let (mut a, b) = rayon::join(|| find_matching(sums, mystart, split, globalstart, globalend),
                                     || find_matching(sums, split, myend, globalstart, globalend));
        a.extend(b);
        return a;
    } else {
        let mut v = Vec::new();
        for a in mystart..myend {
            for b in (a+1)..globalend {
                let ia = (a - globalstart) as usize;
                let ib = (b - globalstart) as usize;
                // sums[ia]/a == sums[ib]/b, cross-multiplied to avoid floats.
                if sums[ia]*b == sums[ib]*a {v.push((a, b));}
            }
        }
        return v;
    }
}

/// Computes divisor sums for [start, end), searches for "friendly" pairs
/// (equal abundancy ratio), and prints either the pairs or timing info
/// depending on OUTPUT_TIMES.
fn find_mut_friendly(start: u64, end: u64) {
    let begin = time::now();
    let mut sums : Vec<u64> = vec![0; (end-start) as usize];
    factorsums(sums.as_mut_slice(), start);
    // Rebind immutably: the sums are read-only from here on.
    let sums = sums;
    let before_search = time::now();
    if OUTPUT_TIMES {println!("{} - Start searching", before_search - begin);}
    let pairs = find_matching(sums.as_slice(), start, end, start, end);
    if OUTPUT_TIMES {println!("{} - Total", time::now() - before_search);}
    else {
        println!("Numbers {} to {}", start, end);
        for item in pairs {
            println!("{} and {} are FRIENDLY", item.0, item.1);
        }
    }
}

/// Reads "start end" pairs from stdin in a loop and searches each range.
/// The thread count may be given as the first CLI argument (default 1).
/// Entering "0 0" terminates the loop.
fn main() {
    let numthreads = if let Some(n) = env::args().nth(1) {
        n.parse::<usize>().unwrap_or(1)
    } else {1};
    rayon::initialize(rayon::Configuration::new().set_num_threads(numthreads)).expect("Could not initialize rayon environment");
    loop {
        let mut input = String::new();
        io::stdin().read_line(&mut input).expect("Could not read stdin");
        let input = input;
        let v : Vec<u64> = input.trim().split(' ').map(|x| x.parse::<u64>().unwrap() ).collect();
        if v.len() < 2 {
            println!("Please enter at least two numbers!");
            continue;
        }
        let start = v[0];
        // `end` is made exclusive by adding 1 to the user's inclusive bound.
        let end = v[1]+1;
        // "0 0" sentinel -> exit.
        if end == 1 && start == 0 {break;}
        find_mut_friendly(start, end);
    }
}
use chrono::{Datelike, Duration, Utc, Weekday, NaiveDate};
use serenity::builder::CreateEmbed;
use serenity::framework::standard::CommandResult;
use serenity::model::prelude::{ChannelId, Message, MessageId, ReactionType, UserId};
use serenity::prelude::Context;
use std::ops::Add;

use crate::reactions;
use crate::types::{Pagination, PaginationContainer, PaginationInfo};

/// Sends `embed` to `channel_id` and attaches the given `reactions` to the
/// resulting message. Returns the sent message.
#[inline]
pub async fn send_embed_message(
    context: &Context,
    channel_id: &ChannelId,
    embed: &CreateEmbed,
    reactions: Vec<ReactionType>,
) -> CommandResult<Message> {
    let sent = channel_id
        .send_message(&context, |m| {
            // Copy the caller-built embed into the builder-provided one.
            m.embed(|e| {
                e.clone_from(embed);
                e
            })
            .reactions(reactions)
        })
        .await?;
    Ok(sent)
}

/// Registers `pagination` for `message_id` in the shared pagination store,
/// keyed so that only `author_id` can page through it later.
pub async fn add_pagination_to_store<P>(
    context: &Context,
    pagination: P,
    message_id: MessageId,
    author_id: UserId,
) where
    P: Pagination + 'static,
{
    let data = context.data.write().await;
    // NOTE(review): assumes PaginationContainer was inserted at startup;
    // `unwrap` will panic otherwise — confirm with the client setup code.
    let container = data.get::<PaginationContainer>().unwrap();
    let pagination_info = PaginationInfo::new(author_id, pagination);
    container.write().await.insert(message_id, pagination_info);
}

/// Maps a single digit (0-9) to its Discord keycap emoji shortcode.
///
/// # Panics
/// Panics via `unreachable!` for inputs above 9.
pub fn num_to_emoji(num: u32) -> String {
    match num {
        0 => ":zero:",
        1 => ":one:",
        2 => ":two:",
        3 => ":three:",
        4 => ":four:",
        5 => ":five:",
        6 => ":six:",
        7 => ":seven:",
        8 => ":eight:",
        9 => ":nine:",
        _ => unreachable!("Input should not be a number above 9."),
    }
    .to_string()
}

/// Maps the digit reactions 1-7 to weekdays Monday-Sunday; any other
/// reaction yields `None`.
pub fn reaction_to_weekday(reaction: &str) -> Option<Weekday> {
    match reaction {
        reactions::ONE => Some(Weekday::Mon),
        reactions::TWO => Some(Weekday::Tue),
        reactions::THREE => Some(Weekday::Wed),
        reactions::FOUR => Some(Weekday::Thu),
        reactions::FIVE => Some(Weekday::Fri),
        reactions::SIX => Some(Weekday::Sat),
        reactions::SEVEN => Some(Weekday::Sun),
        _ => None,
    }
}

/// Returns the next calendar date (UTC) that falls on `weekday`, counting
/// today as a candidate: if today is that weekday, today is returned.
pub fn weekday_to_date(weekday: Weekday) -> NaiveDate {
    let mut date = Utc::now().date_naive();
    while date.weekday() != weekday {
        date = date.add(Duration::days(1));
    }
    date
}
/// Demonstrates iterator adaptors over a slice: `take_while`, a plain
/// iterator, and `map`. The adaptor closures receive `&&i32`, so either a
/// double dereference or a `&&x` pattern is needed to reach the value.
fn main() {
    let values = [-1, 0, 1];

    // take_while yields elements until the predicate first fails.
    let mut negatives = values.iter().take_while(|&&v| v < 0);
    assert_eq!(negatives.next(), Some(&-1));
    assert_eq!(negatives.next(), None);

    // A bare iterator yields references to the elements.
    let mut all = values.iter();
    assert_eq!(all.next(), Some(&-1));

    // map can dereference and transform, producing owned values.
    let mut doubled = values.iter().map(|&v| v * 2);
    assert_eq!(doubled.next(), Some(-2));
}
use serenity::framework::standard::{Args, CommandError};
use serenity::model::channel::Message;
use serenity::prelude::Context;

use super::alias_from_arg_or_channel_name;
use crate::db::*;

#[cfg(test)]
mod tests;

/// Deletes the server registered under `alias` from the database,
/// converting any database error into a `CommandError`.
fn remove_server_helper(db_conn: &DbConnection, alias: &str) -> Result<(), CommandError> {
    db_conn.remove_server(&alias).map_err(CommandError::from)?;
    Ok(())
}

/// Command handler: removes a registered game server.
///
/// The alias comes from the first argument, or is derived from the channel
/// name when no argument is given. Extra arguments are rejected so a typo
/// does not silently target the wrong server.
pub fn remove_server(
    context: &mut Context,
    message: &Message,
    mut args: Args,
) -> Result<(), CommandError> {
    let alias = alias_from_arg_or_channel_name(&mut args, &message)?;
    if !args.is_empty() {
        return Err(CommandError::from(
            "Too many arguments. TIP: spaces in arguments need to be quoted \"like this\"",
        ));
    }
    let data = context.data.lock();
    let db_conn = data.get::<DbConnectionKey>().ok_or("No DB connection")?;
    remove_server_helper(db_conn, &alias)?;
    // Best-effort confirmation; a failed reply is deliberately ignored.
    let _ = message.reply(&format!("successfully removed server {}", alias));
    Ok(())
}
use self::unify::TypeVarId;
use crate::hir::{EnumDefId, HirData, StructDefId};
use derive_more::Display;

mod infer;
mod unify;

pub use self::infer::{infer, InferenceId, InferenceResult, VarMode};

/// A semantic type as manipulated by the type checker.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Type {
    Primitive(PrimitiveType),
    Fn(FnType),
    Struct(StructDefId),
    Enum(EnumDefId),
    Tuple(Vec<Self>),
    /// A not-yet-solved inference variable.
    Infer(InferType),
    /// Placeholder for a type that could not be determined.
    Unknown,
}

impl From<PrimitiveType> for Type {
    fn from(other: PrimitiveType) -> Self {
        Self::Primitive(other)
    }
}

impl From<FnType> for Type {
    fn from(other: FnType) -> Self {
        Self::Fn(other)
    }
}

impl From<InferType> for Type {
    fn from(other: InferType) -> Self {
        Self::Infer(other)
    }
}

impl Type {
    /// Is this an integral primitive (`Int`, `Char` or `Bool`)?
    pub const fn is_integral(&self) -> bool {
        matches!(
            self,
            Type::Primitive(PrimitiveType::Int | PrimitiveType::Char | PrimitiveType::Bool)
        )
    }

    /// Can comparisons (==, !=) be performed on this type?
    pub const fn is_eq(&self) -> bool {
        matches!(self, Type::Primitive(_) | Type::Infer(_) | Type::Unknown)
    }

    /// Can ordering (<, <=, >, >=) be performed on this type?
    pub const fn is_ord(&self) -> bool {
        matches!(self, Type::Primitive(_) | Type::Infer(_) | Type::Unknown)
    }

    /// Can arithmetic (+, -, *, /) be performed on this type?
    pub const fn is_num(&self) -> bool {
        matches!(
            self,
            Type::Primitive(PrimitiveType::Int | PrimitiveType::Float)
                | Type::Infer(_)
                | Type::Unknown
        )
    }

    pub const fn is_int(&self) -> bool {
        matches!(self, Type::Primitive(PrimitiveType::Int))
    }

    pub const fn is_float(&self) -> bool {
        matches!(self, Type::Primitive(PrimitiveType::Float))
    }

    /// Renders the type for display, resolving struct/enum names via `hir`.
    pub fn to_string(&self, hir: &HirData) -> String {
        match self {
            Type::Primitive(ty) => ty.to_string(),
            Type::Fn(ty) => ty.to_string(hir),
            Type::Struct(struct_id) => {
                let struct_def = &hir[*struct_id];
                let name = &hir[struct_def.name];
                name.as_str().into()
            }
            Type::Enum(enum_id) => {
                let enum_def = &hir[*enum_id];
                let name = &hir[enum_def.name];
                name.as_str().into()
            }
            Type::Tuple(tys) => {
                let inner = tys
                    .iter()
                    .map(|ty| ty.to_string(hir))
                    .collect::<Vec<_>>()
                    .join(", ");
                // A 1-tuple needs the trailing comma to stay unambiguous.
                if tys.len() == 1 {
                    format!("({inner},)")
                } else {
                    format!("({inner})")
                }
            }
            Type::Infer(_) | Type::Unknown => "{unknown}".into(),
        }
    }
}

/// A function type: parameter types plus boxed return type.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FnType {
    pub params: Vec<Type>,
    pub ret: Box<Type>,
}

impl FnType {
    pub fn new(params: &[Type], ret: Type) -> Self {
        Self {
            params: params.to_vec(),
            ret: Box::new(ret),
        }
    }

    /// Renders as `(P1, P2) -> R`.
    pub fn to_string(&self, hir: &HirData) -> String {
        let params = self
            .params
            .iter()
            .map(|ty| ty.to_string(hir))
            .collect::<Vec<_>>()
            .join(", ");
        let ret = self.ret.to_string(hir);
        format!("({params}) -> {ret}")
    }
}

/// Built-in scalar types.
#[derive(Debug, Display, Copy, Clone, PartialEq, Eq)]
pub enum PrimitiveType {
    Bool,
    Int,
    Float,
    Char,
    String,
    Never,
}

impl Type {
    pub const UNIT: Self = Self::Tuple(vec![]);
    pub const BOOL: Self = Self::Primitive(PrimitiveType::Bool);
    pub const INT: Self = Self::Primitive(PrimitiveType::Int);
    pub const FLOAT: Self = Self::Primitive(PrimitiveType::Float);
    pub const CHAR: Self = Self::Primitive(PrimitiveType::Char);
    pub const STRING: Self = Self::Primitive(PrimitiveType::String);
    pub const NEVER: Self = Self::Primitive(PrimitiveType::Never);

    pub fn function(params: Vec<Self>, ret: Self) -> Self {
        Self::Fn(FnType {
            params,
            // `Box::new` instead of the unstable (and since removed)
            // `box` placement syntax, so this compiles on stable Rust.
            ret: Box::new(ret),
        })
    }

    pub const fn as_fn(&self) -> Option<&FnType> {
        match self {
            Self::Fn(func) => Some(func),
            _ => None,
        }
    }

    pub fn as_tuple(&self) -> Option<&[Self]> {
        match self {
            Self::Tuple(tys) => Some(tys),
            _ => None,
        }
    }

    pub const fn as_struct(&self) -> Option<StructDefId> {
        match self {
            Self::Struct(id) => Some(*id),
            _ => None,
        }
    }

    pub const fn as_enum(&self) -> Option<EnumDefId> {
        match self {
            Self::Enum(id) => Some(*id),
            _ => None,
        }
    }

    /// Applies `f` to this type and, recursively (pre-order), to every
    /// nested type: tuple elements, function params and return.
    fn walk_mut(&mut self, f: &mut impl FnMut(&mut Self)) {
        f(self);
        match self {
            Self::Tuple(tys) => {
                for ty in tys {
                    ty.walk_mut(f)
                }
            }
            Self::Fn(FnType { params, ret }) => {
                for ty in params {
                    ty.walk_mut(f)
                }
                ret.walk_mut(f)
            }
            _ => {}
        }
    }
}

impl Type {
    /// Rebuilds the type by mapping `f` over itself and every nested type.
    fn fold(mut self, f: &mut impl FnMut(Self) -> Self) -> Self {
        self.walk_mut(&mut |ty_mut| {
            // Park `Unknown` in the slot temporarily so the old value can
            // be passed to `f` by value.
            let ty = std::mem::replace(ty_mut, Self::Unknown);
            *ty_mut = f(ty);
        });
        self
    }

    /// `true` for every type except user-defined structs and enums.
    pub const fn is_stack(&self) -> bool {
        !matches!(self, Self::Enum(_) | Self::Struct(_))
    }
}

/// A type whose value is still being inferred.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum InferType {
    Var(TypeVarId),
}

impl InferType {
    /// The type an unsolved inference variable degrades to.
    const fn fallback_value(self) -> Type {
        match self {
            Self::Var(_) => Type::Unknown,
        }
    }

    const fn to_inner(self) -> TypeVarId {
        match self {
            Self::Var(ty) => ty,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use insta::*;

    macro_rules! test_infer {
        ($name:ident, $src:expr, $expected:expr) => {
            #[test]
            fn $name() {
                test_infer($src, &$expected)
            }
        };
    }

    /// Infers the first function's return type and checks it against
    /// `expected`; also snapshots the full inference result with `insta`.
    fn test_infer(src: &str, expected: &Type) {
        let syntax = walrus_parser::parse(src);
        let hir = crate::hir::lower(&syntax);
        let scopes = crate::scopes::scopes(&hir);
        let types = infer(hir.clone(), scopes);
        let first_fn = &hir.hir.fn_defs.iter().next().unwrap();
        let ret_type = &types.type_of_fn[first_fn.0].ret;
        assert_eq!(ret_type.as_ref(), expected);

        let mut settings = insta::Settings::new();
        settings.set_snapshot_path("../snapshots/infer");
        settings.set_prepend_module_to_snapshot(false);
        settings.bind(|| assert_debug_snapshot!(types));
    }

    test_infer!(empty_fn, r#"fn f() {}"#, Type::UNIT);
    test_infer!(infer_ret_type, r#"fn f() -> _ {}"#, Type::UNIT);
    test_infer!(bool_lit, r#"fn f() -> _ {true}"#, Type::BOOL);
    test_infer!(int_lit, r#"fn f() -> _ {1}"#, Type::INT);
    test_infer!(float_lit, r#"fn f() -> _ {1.0}"#, Type::FLOAT);
    test_infer!(char_lit, r#"fn f() -> _ {'a'}"#, Type::CHAR);
    test_infer!(unit_stmt, r#"fn f() -> _ {1;}"#, Type::UNIT);
    test_infer!(let_var, r#"fn f() -> _ {let x = 5; x}"#, Type::INT);
    test_infer!(
        fn_var,
        r#"
fn f() -> _ {g}
fn g() -> _ {1}
"#,
        Type::function(vec![], Type::INT)
    );
    test_infer!(
        if_then_else,
        r#"fn f() -> _ {if true {1} else {0}}"#,
        Type::INT
    );
    test_infer!(
        tuple,
        r#"fn f() -> _ {(1,false)}"#,
        Type::Tuple(vec![Type::INT, Type::BOOL])
    );
    test_infer!(
        tuple_destructure,
        r#"fn f() -> _ {let (x, y) = (1, false); (y, x)}"#,
        Type::Tuple(vec![Type::BOOL, Type::INT])
    );
    test_infer!(
        tuple_field,
        r#"fn f() -> _ {let x = (1, false); (x.1, x.0)}"#,
        Type::Tuple(vec![Type::BOOL, Type::INT])
    );
    test_infer!(
        struct_constructor,
        r#"
struct Foo {
    x: Int,
    y: Bool,
}
fn f() -> _ {
    Foo { x: 0, y: false }
}
"#,
        Type::Struct(StructDefId::new(0))
    );
    test_infer!(
        enum_constructor,
        r#"
enum Foo {
    X { x: Int },
    Y { y: Bool },
}
fn f() -> _ {
    Foo::X { x: 0 }
}
"#,
        Type::Enum(EnumDefId::new(0))
    );
    test_infer!(
        struct_destructure,
        r#"fn f() -> _ {let (x, y) = (1, false); (y, x)}"#,
        Type::Tuple(vec![Type::BOOL, Type::INT])
    );
    test_infer!(
        struct_field,
        r#"
struct Foo {
    x: Int,
    y: Bool,
}
fn f() -> _ {
    let foo = Foo { x: 0, y: false };
    (foo.x, foo.y)
}
"#,
        Type::Tuple(vec![Type::INT, Type::BOOL])
    );
    test_infer!(
        lambda,
        r#"fn f() -> _ { (x: Int) => false }"#,
        Type::function(vec![Type::INT], Type::BOOL)
    );
    test_infer!(
        lambda2,
        r#"fn f() -> _ { (x) => x + 1 }"#,
        Type::function(vec![Type::INT], Type::INT)
    );
    test_infer!(
        lambda_call,
        r#"
fn f() -> _ {
    let id = (x) => x;
    id(1)
}"#,
        Type::INT
    );
    test_infer!(unary_sub, r#"fn f() -> _ {-0}"#, Type::INT);
    test_infer!(unary_add, r#"fn f() -> _ {+0}"#, Type::INT);
    test_infer!(cmp, r#"fn f() -> _ {0 == 1}"#, Type::BOOL);
    test_infer!(loop_never, r#"fn f() -> Never { loop {} }"#, Type::NEVER);
    test_infer!(loop_unit, r#"fn f() -> _ { loop { break } }"#, Type::UNIT);
    test_infer!(loop_int, r#"fn f() -> _ { loop { break 1 } }"#, Type::INT);
    test_infer!(return_unit, r#"fn f() -> _ { return }"#, Type::UNIT);
    test_infer!(return_int, r#"fn f() -> _ { return 1 }"#, Type::INT);
    test_infer!(
        lambda_return_unit,
        r#"fn f() -> _ { () => return }"#,
        Type::function(vec![], Type::UNIT)
    );
    test_infer!(
        lambda_return_int,
        r#"fn f() -> _ { () => return 1 }"#,
        Type::function(vec![], Type::INT)
    );
    test_infer!(
        fn_params,
        r#"
fn f() -> _ { g }
fn g(_: Int) -> _ {}
"#,
        Type::function(vec![Type::INT], Type::UNIT)
    );
    test_infer!(
        factorial,
        r#"
fn f() -> _ { factorial }
fn factorial(x: _) -> _ {
    if x == 0 { 1 } else { x * factorial(x-1) }
}
"#,
        Type::function(vec![Type::INT], Type::INT)
    );
    test_infer!(
        add,
        r#"
fn f() -> _ { add }
fn add(x: _, y: _) -> _ { x + y + 1 }
"#,
        Type::function(vec![Type::INT, Type::INT], Type::INT)
    );
    test_infer!(
        parity,
        r#"
fn f() -> _ { (is_odd, is_even) }
fn is_odd(x: _) -> _ {
    if x == 0 { false } else { is_even(x - 1) }
}
fn is_even(x: _) -> _ {
    if x == 0 { true } else { is_odd(x - 1) }
}
"#,
        Type::Tuple(vec![
            Type::function(vec![Type::INT], Type::BOOL),
            Type::function(vec![Type::INT], Type::BOOL),
        ])
    );
    test_infer!(
        match_expr,
        r#"
fn f() -> _ {
    match 5 {
        x => x,
    }
}"#,
        Type::INT
    );
    test_infer!(
        coerce_if_branches_then,
        r#"
fn f() -> _ {
    if true {loop{}} else {5}
}
"#,
        Type::INT
    );
    test_infer!(
        coerce_if_branches_else,
        r#"
fn f() -> _ {
    if true {5} else {loop{}}
}
"#,
        Type::INT
    );
    test_infer!(
        coerce_match_branches1,
        r#"
fn f() -> _ {
    match 5 {
        foo => 5,
        bar => loop {},
    }
}
"#,
        Type::INT
    );
    test_infer!(
        coerce_match_branches2,
        r#"
fn f() -> _ {
    match 5 {
        foo => loop {},
        bar => 5,
    }
}
"#,
        Type::INT
    );
    test_infer!(
        struct_pat,
        r#"
struct Foo {x: Int, y: Int}
fn main() -> _ {
    let Foo {y,x} = Foo{x: 5, y: 10};
    x
}
"#,
        Type::INT
    );
    test_infer!(
        named_struct_pat,
        r#"
struct Foo {x: Int, y: Int}
fn main() -> _ {
    let Foo {y,x: z} = Foo{x: 5, y: 10};
    z
}
"#,
        Type::INT
    );
}
//< Helpers that are useful for tests and doctests. use super::rsrc::*; use super::loader; use super::arch::Arch; use super::loaders::sc::ShellcodeLoader; use super::workspace::Workspace; /// Helper to construct a 32-bit Windows shellcode workspace from raw bytes. /// /// It may panic when the workspace cannot be created/loaded. /// Therefore, this is best used for tests. /// /// ``` /// use lancelot::test; /// use lancelot::arch::*; /// /// let ws = test::get_shellcode32_workspace(b"\xEB\xFE"); /// assert_eq!(ws.read_u8(RVA(0x0)).unwrap(), 0xEB); /// ``` pub fn get_shellcode32_workspace(buf: &[u8]) -> Workspace { Workspace::from_bytes("foo.bin", buf) .with_loader(Box::new(ShellcodeLoader::new( loader::Platform::Windows, Arch::X32 ))) .load() .unwrap() } pub fn get_shellcode64_workspace(buf: &[u8]) -> Workspace { Workspace::from_bytes("foo.bin", buf) .with_loader(Box::new(ShellcodeLoader::new( loader::Platform::Windows, Arch::X64 ))) .load() .unwrap() } pub fn get_rsrc_workspace(rsrc: Rsrc) -> Workspace { Workspace::from_bytes("foo.bin", &get_buf(rsrc)) .load() .unwrap() } /// configure a global logger at level==DEBUG. pub fn init_logging() { let log_level = log::LevelFilter::Debug; fern::Dispatch::new() .format(move |out, message, record| { out.finish(format_args!( "{} [{:5}] {} {}", chrono::Local::now().format("%Y-%m-%d %H:%M:%S"), record.level(), if log_level == log::LevelFilter::Trace {record.target()} else {""}, message )) }) .level(log_level) .chain(std::io::stderr()) .filter(|metadata| { !metadata.target().starts_with("goblin::pe") }) .apply() .expect("failed to configure logging"); }
extern crate actix_web;
#[macro_use]
extern crate rust_embed;
extern crate mime_guess;

use std::{env, io};

use actix_web::body::Body;
use actix_web::{web, App, HttpRequest, HttpResponse, HttpServer};
use mime_guess::from_path;
use std::borrow::Cow;

// Assets embedded into the binary at compile time from `face/public/`.
#[derive(RustEmbed)]
#[folder = "face/public/"]
struct Asset;

/// Looks up `path` among the embedded assets and serves it with a MIME
/// type guessed from the file extension; responds 404 when missing.
fn handle_embedded_file(path: &str) -> HttpResponse {
    match Asset::get(path) {
        Some(content) => {
            // The embedded content may be borrowed or owned; either form
            // converts into a response `Body`.
            let body: Body = match content {
                Cow::Borrowed(bytes) => bytes.into(),
                Cow::Owned(bytes) => bytes.into(),
            };
            HttpResponse::Ok().content_type(from_path(path).first_or_octet_stream().as_ref()).body(body)
        }
        None => HttpResponse::NotFound().body("404 Not Found"),
    }
}

/// Serves the embedded `index.html` for the site root.
async fn index(_req: HttpRequest) -> HttpResponse {
    handle_embedded_file("index.html")
}

/// Serves any other embedded asset addressed by the request path.
async fn dist(req: HttpRequest) -> HttpResponse {
    let path = &req.path()["/".len()..]; // strip only the leading `/` so the remainder matches an embedded asset path
    handle_embedded_file(path)
}

#[actix_rt::main]
async fn main() -> io::Result<()> {
    /*
    if open::that("http://localhost:8000").is_ok() {
        println!("Look at your browser !");
    };
    */
    env::set_var("RUST_LOG", "actix_web=debug,actix_server=info");
    env_logger::init();

    // Root goes to index; every other GET path falls through to `dist`.
    HttpServer::new(|| {
        App::new()
            .service(web::resource("/").route(web::get().to(index)))
            .service(web::resource("/{_:.*}").route(web::get().to(dist)))
    })
    .bind("0.0.0.0:8000")
    .unwrap()
    .run()
    .await
}
//! "lw" log-watcher utility //! LW docs #![forbid(unsafe_code)] #![deny( missing_docs, unstable_features, missing_debug_implementations, missing_copy_implementations, trivial_casts, trivial_numeric_casts, unused_import_braces, unused_qualifications, bad_style, const_err, dead_code, improper_ctypes, non_shorthand_field_patterns, no_mangle_generic_items, overflowing_literals, path_statements, patterns_in_fns_without_body, private_in_public, unconditional_recursion, unused, unused_allocation, unused_comparisons, unused_parens, while_true, missing_debug_implementations, missing_docs, trivial_casts, trivial_numeric_casts, unused_extern_crates, unused_import_braces, unused_qualifications )] /// Use MiMalloc as default allocator: #[global_allocator] static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; #[macro_use] extern crate log; use config::Config; use kqueue2::{Ident::*, *}; use std::{ collections::HashMap, env, fs::{metadata, File}, io::{prelude::*, BufReader, SeekFrom}, path::Path, process::exit, thread, }; use chrono::Local; use colored::Colorize; use fern::Dispatch; use std::time::Duration; use walkdir::WalkDir; mod config; /// FileAndPosition alias type for HashMap of File path and file cursor position (in bytes) type FileAndPosition = HashMap<String, u64>; /// Resursively filter out all unreadable/unaccessible/inproper and handle proper files fn walkdir_recursive(kqueue_watcher: &mut Watcher, file_path: &Path, config: &Config) { WalkDir::new(&file_path) .same_file_system(false) .contents_first(true) .follow_links(config.follow_links.unwrap_or_default()) .max_open(config.max_open_files.unwrap_or_default()) .max_depth(config.max_dir_depth.unwrap_or_default()) .into_iter() .filter_map(|element| element.ok()) .for_each(|element| watch_file(kqueue_watcher, element.path())); } fn main() { let config = Config::load(); let log_level = config.get_log_level(); let output = config.output.clone().unwrap_or_default(); // read paths given as arguments: let paths_to_watch: 
Vec<String> = env::args() .skip(1) // first arg is $0 .collect(); // mutable hashmap keeping position of all watched files: let mut watched_file_states = FileAndPosition::new(); // mutable kqueue watcher: let mut kqueue_watcher = Watcher::new().expect("Could not create kq watcher!"); // name of the last logged file: let mut last_file = String::new(); // Dispatch logger: Dispatch::new() .format(|out, message, _record| { out.finish(format_args!( "{}: {}", Local::now().to_rfc3339().black(), message )) }) .level(log_level) .chain(File::open(output.clone()).unwrap_or_else(|_| { panic!("{}: Couldn't open: {}!", "FATAL ERROR".red(), output.cyan()) })) .apply() .expect("Couldn't initialize Fern logger!"); debug!("Watching paths: {}", paths_to_watch.join(", ")); if paths_to_watch.is_empty() { error!("FATAL ERROR: {}", "No paths specified as arguments! You have to specify at least a single directory/file to watch!".red()); exit(1) } // initial watches for specified dirs/files: paths_to_watch.into_iter().for_each(|a_path| { // Handle case when given a file as argument walkdir_recursive(&mut kqueue_watcher, Path::new(&a_path), &config); }); // handle events dynamically, including new files loop { watch_the_watcher(&mut kqueue_watcher); while let Some(an_event) = kqueue_watcher.iter().next() { debug!("Watched files: {}", watched_file_states.len()); match an_event.ident { Filename(_file_descriptor, abs_file_name) => { process_file_event( &abs_file_name, &mut kqueue_watcher, &mut watched_file_states, &mut last_file, &config, ); // handle_config_changes(&mut log_level); watch_the_watcher(&mut kqueue_watcher); } event => warn!("Unknown event: {}", format!("{:?}", event).cyan()), } } // throttle 100ms thread::sleep(Duration::from_millis(100)); } } // /// Hot reload configuration // fn _handle_config_changes(log_level: &mut LevelFilter) { // let level = Config::load().get_log_level(); // if level != *log_level { // info!("Changing log level to: {}", format!("{:?}", level).cyan()); // 
*log_level = level // } // } /// Process file with event fn process_file_event( abs_file_name: &str, kqueue_watcher: &mut Watcher, watched_file_states: &mut FileAndPosition, last_file: &mut String, config: &Config, ) { let file_path = Path::new(&abs_file_name); match metadata(file_path) { Ok(file_metadata) => { if file_metadata.is_dir() { trace!("{}: {}", "+DirLoad".magenta(), abs_file_name.cyan()); walkdir_recursive(kqueue_watcher, file_path, config); } else { trace!("{}: {}", "+FileWatchHandle".magenta(), abs_file_name.cyan()); calculate_position_and_handle( file_metadata.len(), watched_file_states, abs_file_name, last_file, config, ); } } Err(error_cause) => { // handle situation when logs are wiped out and unavailable to read anymore kqueue_watcher .remove_filename(file_path, EventFilter::EVFILT_VNODE) .map(|e| { trace!("{}: {}", "-Watch".magenta(), abs_file_name.cyan()); e }) .unwrap_or_else(|error| { error!( "Could not remove watch on file: {:?}. Error cause: {}", abs_file_name.cyan(), error.to_string().red() ) }); // try to build list if path exists if file_path.exists() { if file_path.is_dir() { trace!("{}: {}", "+DirLoad".magenta(), abs_file_name.cyan()); walkdir_recursive(kqueue_watcher, file_path, config); } else if file_path.is_file() { watch_file(kqueue_watcher, file_path); } } else { debug!( "Dropped watch on file/dir: {}. Last value: {}. 
Error cause: {}", format!("{:?}", &file_path).cyan(), format!( "{}", watched_file_states .remove(abs_file_name) .unwrap_or_default() ) .cyan(), format!("{}", &error_cause).red() ); } } }; debug!( "Watched files list: [{}]", format!("{:?}", watched_file_states).cyan() ); } /// Process file position and handle the event fn calculate_position_and_handle( file_size: u64, watched_file_states: &mut FileAndPosition, abs_file_name: &str, last_file: &mut String, config: &Config, ) { let tail_bytes = config.tail_bytes.unwrap_or_default(); let initial_file_position = if file_size + 1 > tail_bytes && !watched_file_states.contains_key(abs_file_name) { file_size - tail_bytes } else { *watched_file_states.get(abs_file_name).unwrap_or(&0) }; if watched_file_states.contains_key(abs_file_name) { let current_position = *watched_file_states .get(abs_file_name) .unwrap_or(&initial_file_position); handle_file_event(current_position, file_size, abs_file_name, last_file); let _removed = watched_file_states .remove(abs_file_name) .unwrap_or_default(); watched_file_states.insert(abs_file_name.to_string(), file_size); } else { watched_file_states.insert(abs_file_name.to_string(), initial_file_position); handle_file_event(initial_file_position, file_size, abs_file_name, last_file); } } /// Kqueue wrapper for watch() fn watch_the_watcher(kqueue_watcher: &mut Watcher) { trace!("{}: watch()", "+Trigger".magenta()); kqueue_watcher.watch().unwrap_or_default(); } /// kqueue flags, from: /usr/include/sys/event.h /// NOTE_DELETE 0x00000001 /* vnode was removed */ /// NOTE_WRITE 0x00000002 /* data contents changed */ /// NOTE_EXTEND 0x00000004 /* size increased */ /// NOTE_ATTRIB 0x00000008 /* attributes changed */ /// NOTE_LINK 0x00000010 /* link count changed */ /// NOTE_RENAME 0x00000020 /* vnode was renamed */ /// NOTE_REVOKE 0x00000040 /* vnode access was revoked */ /// /// Add watch on specified file path fn watch_file(kqueue_watcher: &mut Watcher, file: &Path) { kqueue_watcher 
.remove_filename(file, EventFilter::EVFILT_VNODE) .map(|e| { trace!("{}: {}", "-Watch".magenta(), format!("{:?}", file).cyan()); e }) .unwrap_or_default(); kqueue_watcher .add_filename( &file, EventFilter::EVFILT_VNODE, NOTE_WRITE | NOTE_LINK | NOTE_RENAME | NOTE_DELETE | NOTE_EXTEND // | NOTE_ATTRIB // | NOTE_REVOKE, ) .map(|e| { trace!("{}: {}", "+Watch".magenta(), format!("{:?}", file).cyan()); e }) .unwrap_or_else(|error_cause| { error!( "Could not watch file: {}. Caused by: {}", format!("{:?}", file).cyan(), error_cause.to_string().red() ) }); } /// Handle action triggered by an event fn handle_file_event( file_position: u64, file_size: u64, file_path: &str, last_file: &mut String, ) { let watched_file = file_path.to_string(); { debug!( "Watched file position: {}, file size: {}, file name: {}", format!("{}", file_position).cyan(), format!("{}", file_size).cyan(), watched_file.cyan() ); trace!( "{}: {} {}", "+EventHandle".magenta(), watched_file.cyan(), format!("@{}", file_position).black() ); // print header only when file is at beginning and not often than N bytes after previous one (limits header spam) if file_position == 0 || *last_file != watched_file { println!(); println!(); // just start new entry after \n\n info!( "{} {}", watched_file.blue(), format!("@{}", file_position).black() ); } // print content of file that triggered the event if file_position < file_size { let content = seek_file_to_position_and_read(&watched_file, file_position); println!("{}", content.join("\n")); } } *last_file = watched_file; } /// Set file position in bytes and print new file contents fn seek_file_to_position_and_read(file_to_watch: &str, file_position: u64) -> Vec<String> { match File::open(&file_to_watch) { Ok(some_file) => { let mut cursor = BufReader::new(some_file); cursor.seek(SeekFrom::Start(file_position)).unwrap_or(0); let lines_out: Vec<_> = cursor.lines().filter_map(|line| line.ok()).collect(); trace!("Lines out: '{}'", format!("{:?}", lines_out).cyan()); if 
lines_out.is_empty() { vec![String::from("* binary file modification *")] } else { lines_out } } Err(error_cause) => { error!( "Couldn't open file: {}. Caused by: {}", file_to_watch.cyan(), error_cause.to_string().red() ); vec![] } } }
use http::{HeaderMap, HeaderValue}; use hyper::{Body, Response, StatusCode, Uri}; use std::collections::BTreeMap; pub async fn body_to_str(body: &mut Body) -> Option<String> { match body.next().await { Some(Ok(chunk)) => { let bytes = chunk.into_bytes(); let json_body = String::from_utf8(bytes.as_ref().to_vec()); match json_body { Ok(json) => Some(json), Err(e) => { eprintln!("body_to_str failed: {}", e); None } } } _ => None, } } pub fn ok() -> Result<Response<Body>, hyper::Error> { Ok(Response::builder() .status(StatusCode::OK) .body(Body::empty()) .unwrap()) } pub fn bad_request() -> Result<Response<Body>, hyper::Error> { Ok(Response::builder() .status(StatusCode::BAD_REQUEST) .body(Body::empty()) .unwrap()) } pub fn not_found() -> Result<Response<Body>, hyper::Error> { Ok(Response::builder() .status(StatusCode::NOT_FOUND) .body(Body::empty()) .unwrap()) } pub fn decode_query_params(uri: &Uri) -> BTreeMap<String, String> { match uri.query() { Some(query) => { let mut m = BTreeMap::new(); for (k, v) in query .split("&") .filter(|s| s.trim().len() > 0) .map(|s| split_str_to_pair(s, "=")) { m.insert(k, v); } m } None => BTreeMap::new(), } } fn split_str_to_pair(s: &str, splitter: &str) -> (String, String) { let vec = s.splitn(2, splitter).collect::<Vec<&str>>(); let left = vec.get(0).map(|s| s.to_owned()).unwrap().to_owned(); let right = vec.get(1).map(|s| s.to_owned()).unwrap_or("").to_owned(); (left, right) } pub fn decode_headers(header_map: &HeaderMap<HeaderValue>) -> BTreeMap<String, Option<String>> { let mut headers: BTreeMap<String, Option<String>> = BTreeMap::new(); for (name, value) in header_map.iter() { headers.insert( name.as_str().to_owned(), value.to_str().ok().map(str::to_owned), ); } headers }
/// Iterator adapters for working with `Result` iterators
pub mod optfilter;

/// Common interfaces and formatting rules across repository objects
pub mod repoobject;
mod commands;
mod config;
mod utils;

#[macro_use]
extern crate serde;
#[macro_use]
extern crate failure;

use exitfailure::ExitFailure;
use std::path::PathBuf;
use structopt::StructOpt;

/// Manages a list of projects throughout your file system
#[derive(StructOpt)]
#[structopt(name = "projects-cli")]
struct App {
    #[structopt(subcommand)]
    cmd: Command,
}

// CLI subcommands. NOTE: the existing `///` comments double as the
// `--help` text rendered by structopt, so they are user-facing strings.
#[derive(StructOpt)]
enum Command {
    /// Track projects in the given directory
    #[structopt(name = "track")]
    Track {
        // defaults to the current directory when no path is given
        #[structopt(parse(from_os_str), default_value = ".")]
        path: PathBuf,
    },
    /// Stops tracking projects in the given directory
    #[structopt(name = "remove")]
    Remove {
        #[structopt(parse(from_os_str))]
        /// The path to stop tracking. If no path is passed, we'll display a list or track directories to select from.
        path: Option<PathBuf>,
    },
    /// Displays the current list of tracked directories
    #[structopt(name = "list")]
    List {
        // when set, presumably prints full paths instead of names —
        // TODO confirm in commands::list
        #[structopt(long = "paths")]
        paths: bool,
    },
    /// Displays searchable menu of all projects. Will return the selected project's path
    #[structopt(name = "select")]
    Select,
    /// Prints a shell script that can be used to enable jumping to project directories
    #[structopt(name = "init")]
    Init {
        /// What shell are you initializing in? Right now only supports "bash"
        #[structopt(default_value = "bash")]
        shell: commands::Shells,
        /// Don't automatically define the "p" alias
        #[structopt(long = "no-alias")]
        no_alias: bool,
    },
}

/// Loads the configuration, parses CLI arguments, and dispatches to the
/// matching command handler. Errors surface via `ExitFailure`.
fn main() -> Result<(), ExitFailure> {
    let config = config::load()?;
    let app = App::from_args();

    match app.cmd {
        Command::Track { path } => commands::track(path, config)?,
        Command::Remove { path } => commands::remove(path, config)?,
        Command::List { paths } => commands::list(paths, config)?,
        Command::Select => commands::select(config)?,
        Command::Init { shell, no_alias } => commands::init(shell, no_alias)?,
    };

    Ok(())
}
/// Shannon entropy of a byte sequence, in bits per byte.
pub trait ShannonEntropy {
    /// Returns the Shannon entropy of `self` (0.0 for empty or uniform
    /// data, up to 8.0 for a perfectly even byte distribution).
    fn shannon_entropy(&self) -> f64;
}

impl ShannonEntropy for [u8] {
    fn shannon_entropy(&self) -> f64 {
        // Histogram of byte values.
        let mut frequencies: [usize; 256] = [0; 256];

        // Get byte frequencies
        for byte in self {
            frequencies[*byte as usize] += 1;
        }

        // Sum p * log2(p) over the non-zero probabilities, then negate via
        // abs(). `.iter()` rather than `.into_iter()`: on arrays the latter
        // changed meaning in Rust 2021 (values instead of references),
        // while `.iter()` is unambiguous on every edition.
        frequencies
            .iter()
            .map(|frequency| {
                // Bytes that never occur contribute nothing.
                if *frequency == 0 {
                    0.0
                } else {
                    // Normalize the frequency to a probability.
                    let frequency: f64 = (*frequency as f64) / (self.len() as f64);

                    // Individual entropy contribution (negative).
                    frequency * frequency.log2()
                }
            })
            .sum::<f64>()
            .abs()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::iter;

    /// Tests the shannon entropy function
    #[test]
    fn test_shannon_entropy() {
        // Empty slice
        assert_eq!([].shannon_entropy(), 0.0);

        // Single value
        assert_eq!([0].shannon_entropy(), 0.0);
        assert_eq!([1].shannon_entropy(), 0.0);

        // Many single values
        for exponent in 1..10 {
            // Build a slice
            let data: Vec<u8> = iter::repeat(1).take(2_usize.pow(exponent)).collect();

            // Evaluate entropy
            assert_eq!(data.shannon_entropy(), 0.0);
        }

        // Uniform distribution
        for exponent in 1..8 {
            // Build a slice
            let data: Vec<u8> = (0..2_u8.pow(exponent)).collect();

            // Evaluate entropy
            assert_eq!(data.shannon_entropy(), exponent as f64);
        }

        // TODO: more distribution tests
    }
}
extern crate agg;
use agg::Render;

/// Converts RGBA components given as floats in [0.0, 1.0] to an 8-bit
/// `Rgba8` color (each channel rounded to the nearest integer).
fn rgb64(r: f64, g: f64, b: f64, a: f64) -> agg::Rgba8 {
    agg::Rgba8::new(
        (r * 255.0).round() as u8,
        (g * 255.0).round() as u8,
        (b * 255.0).round() as u8,
        (a * 255.0).round() as u8,
    )
}

/// Renders the same triangle twice — once anti-aliased, once aliased
/// (shifted 200px to the left) — and compares the result against the
/// reference image `images/rasterizers.png`.
#[test]
fn rasterizers() {
    let (w, h) = (500, 330);
    // Triangle vertex coordinates.
    let m_x = [100. + 120., 369. + 120., 143. + 120.];
    let m_y = [60., 170., 310.0];

    let pixf = agg::Pixfmt::<agg::Rgb8>::new(w, h);
    let mut ren_base = agg::RenderingBase::new(pixf);
    ren_base.clear(agg::Rgba8::new(255, 255, 255, 255)); // white background

    //let gamma = 1.0;
    let alpha = 0.5; // both triangles are drawn half-transparent
    let mut ras = agg::RasterizerScanline::new();
    // Anti-Aliased
    {
        let mut ren_aa = agg::RenderingScanlineAASolid::with_base(&mut ren_base);
        let mut path = agg::Path::new();
        path.move_to(m_x[0], m_y[0]);
        path.line_to(m_x[1], m_y[1]);
        path.line_to(m_x[2], m_y[2]);
        path.close_polygon();
        ren_aa.color(rgb64(0.7, 0.5, 0.1, alpha));
        ras.add_path(&path);
        agg::render_scanlines(&mut ras, &mut ren_aa);
    }
    // Aliased
    {
        let mut ren_bin = agg::RenderingScanlineBinSolid::with_base(&mut ren_base);
        let mut path = agg::Path::new();
        // Same triangle, translated 200px left of the anti-aliased copy.
        path.move_to(m_x[0] - 200., m_y[0]);
        path.line_to(m_x[1] - 200., m_y[1]);
        path.line_to(m_x[2] - 200., m_y[2]);
        path.close_polygon();
        ren_bin.color(rgb64(0.1, 0.5, 0.7, alpha));
        ras.add_path(&path);
        //ras.
        agg::render_scanlines(&mut ras, &mut ren_bin);
    }
    // Write the rendering out, then diff against the checked-in reference.
    ren_base.to_file("tests/tmp/rasterizers.png").unwrap();
    assert!(agg::ppm::img_diff("tests/tmp/rasterizers.png", "images/rasterizers.png").unwrap());
}
/*!
# FM-Index Long Read Corrector v2
This library provides access to the functionality used by FMLRC2 to perform read correction using a Burrows Wheeler Transform (BWT).
Currently, the BWT is assumed to have been generated externally (typically with a tool like ropebwt2) and stored in the same numpy format as FMLRC v1.
FMLRC loads a binary representation of the BWT into memory for performing very fast queries at the cost of memory usage.
This particular implementation is accelerated over FMLRC v1 by using a cache to pre-compute common queries to the BWT.
## Example
```rust
use fmlrc::bv_bwt::BitVectorBWT;
use fmlrc::bwt_converter::convert_to_vec;
use fmlrc::ropebwt2_util::create_bwt_from_strings;
use fmlrc::string_util::convert_stoi;
use std::io::Cursor;

//example with in-memory BWT
let data: Vec<&str> = vec!["ACGT", "CCGG"];
let seq = create_bwt_from_strings(&data).unwrap();
let cursor_seq = Cursor::new(seq);
let vec_form = convert_to_vec(cursor_seq);
let mut bwt = BitVectorBWT::new();
bwt.load_vector(vec_form);
//bwt.load_numpy_file(filename); <- if in a numpy file

//do a count
let kmer: Vec<u8> = convert_stoi(&"ACGT");
let kmer_count = bwt.count_kmer(&kmer); //ACGT
assert_eq!(kmer_count, 1);
```
*/

/// Contains the bit vector implementation of the BWT
pub mod bv_bwt;
/// Contains the function for reformatting a BWT string into the expected run-length format or numpy file
pub mod bwt_converter;
/// Contains bit vector with basic rank support; other crates exist with this, but they tended to be slow for some reason
pub mod indexed_bit_vec;
/// Contains a wrapper around the rust-bio FASTA writer, but forces an ordering on the reads
pub mod ordered_fasta_writer;
/// Contains the logic for performing the read correction
pub mod read_correction;
/// Contains wrapper functions for `ropebwt2`, most will fail if `ropebwt2` is not on the PATH
pub mod ropebwt2_util;
/// Contains special statistics functions, mainly an ignored median score
pub mod stats_util;
/// Contains inline functions for converting between strings and integer formats
pub mod string_util;
use std::pin::Pin;

/// Storage that is either a still-pending future, a stored result, or
/// empty (after the result was taken out).
///
/// Only the `Future` variant is treated as structurally pinned; `Result`
/// and `Gone` are considered freely movable. Every `unsafe` block below
/// relies on that invariant.
pub enum MaybeResult<F, T> {
    /// The computation is still pending.
    Future(F),
    /// A finished value, stored until `take_result` is called.
    Result(T),
    /// The value has already been taken (see `take_result`).
    Gone,
}

impl<F, T> MaybeResult<F, T> {
    /// Pin-projection: the `Future` variant stays pinned, while `Result`
    /// is exposed as an ordinary `&mut T`.
    pub fn project(self: Pin<&mut Self>) -> MaybeResult<Pin<&mut F>, &mut T> {
        // safety: we only need to keep the `Future` pinned
        let this = unsafe { self.get_unchecked_mut() };
        match this {
            Self::Future(f) => {
                // safety: the `Future` variant is always pinned
                let f = unsafe { Pin::new_unchecked(f) };
                MaybeResult::Future(f)
            }
            Self::Result(t) => MaybeResult::Result(t),
            Self::Gone => MaybeResult::Gone,
        }
    }

    /// Returns `true` if the current variant is `Result`.
    pub fn is_result(self: Pin<&mut Self>) -> bool {
        // safety: we don't actually access any of the fields
        let this = unsafe { self.get_unchecked_mut() };
        matches!(this, Self::Result(_))
    }

    /// Overwrites the current contents with `Result(t)`.
    ///
    /// A previously stored `Future` is dropped in place by the assignment,
    /// which is what `Pin`'s drop guarantee requires.
    pub fn set_result(self: Pin<&mut Self>, t: T) {
        // safety: we're throwing away the Future, so we no longer
        // need to be pinned
        let this = unsafe { self.get_unchecked_mut() };
        *this = Self::Result(t)
    }

    /// Takes the stored value out, leaving `Gone` behind.
    ///
    /// Panics if variant isn't `Self::Result<T>`
    pub fn take_result(self: Pin<&mut Self>) -> T {
        // safety: the panic below ensures a pinned `Future` is never moved
        // out of place — only the (unpinned) `Result` payload is swapped.
        let this = unsafe { self.get_unchecked_mut() };
        if let Self::Result(_) = this {
            // okay good
        } else {
            panic!("trying to take Result when the variant isn't Result")
        }
        // Variant is known to be `Result` here, which is not structurally
        // pinned, so swapping the whole enum value is sound.
        let mut alt = Self::Gone;
        std::mem::swap(this, &mut alt);
        if let Self::Result(t) = alt {
            t
        } else {
            unreachable!()
        }
    }
}
use std::ptr; #[allow(dead_code)] // may be used more later #[repr(C, packed)] pub struct Context { rax: u64, rbx: u64, rcx: u64, rdx: u64, rbp: u64, rsi: u64, rdi: u64, r8: u64, r9: u64, r10: u64, r11: u64, r12: u64, r13: u64, r14: u64, r15: u64, // begin interrupt info error_code: u64, rip: u64, cs: u64, rflags: u64, rsp: u64, ss: u64, } #[no_mangle] pub unsafe extern "C" fn interrupt_breakpoint(context: *const Context) { let context = ptr::read(context); debug!("Breakpoint at 0x{:x}", context.rip); } #[no_mangle] pub unsafe extern "C" fn interrupt_general_protection_fault(context: *const Context) { let context = ptr::read(context); panic!("General protection fault at 0x{:x}, error 0x{:x}", context.rip, context.error_code); } #[no_mangle] pub unsafe extern "C" fn interrupt_page_fault(context: *const Context) { let context = ptr::read(context); let error = if (context.error_code & 1) != 0 { if (context.error_code & (1 << 3)) != 0 { "reserved bit set" } else if (context.error_code & (1 << 5)) != 0 { "protection key error" } else if (context.error_code & (1 << 15)) != 0 { "SGX error" } else { "other error" } } else { "non-present" }; let access_level = if (context.error_code & (1 << 2)) != 0 { "user" } else { "supervisor" }; let access_type = if (context.error_code & (1 << 4)) != 0 { "instruction fetch" } else { if (context.error_code & (1 << 1)) != 0 { "data write" } else { "data read" } }; panic!("Page fault at 0x{:x}: {} on {}-level {}", context.rip, error, access_level, access_type); } #[no_mangle] pub unsafe extern "C" fn early_interrupt_breakpoint(context: *const Context) { let context = ptr::read(context); debug!("Breakpoint at 0x{:x}", context.rip); } #[no_mangle] pub unsafe extern "C" fn early_interrupt_general_protection_fault(context: *const Context) { let context = ptr::read(context); panic!("General protection fault at 0x{:x}, error 0x{:x}", context.rip, context.error_code); } #[no_mangle] pub unsafe extern "C" fn early_interrupt_page_fault(context: 
*const Context) { let context = ptr::read(context); let error = if (context.error_code & 1) != 0 { if (context.error_code & (1 << 3)) != 0 { "reserved bit set" } else if (context.error_code & (1 << 5)) != 0 { "protection key error" } else if (context.error_code & (1 << 15)) != 0 { "SGX error" } else { "other error" } } else { "non-present" }; let access_level = if (context.error_code & (1 << 2)) != 0 { "user" } else { "supervisor" }; let access_type = if (context.error_code & (1 << 4)) != 0 { "instruction fetch" } else { if (context.error_code & (1 << 1)) != 0 { "data write" } else { "data read" } }; panic!("Page fault at 0x{:x}: {} on {}-level {}", context.rip, error, access_level, access_type); }
#[macro_use] extern crate failure; #[macro_use] extern crate log; extern crate serde_json; extern crate hyper; #[cfg(test)] extern crate serde; #[cfg(test)] #[macro_use] extern crate serde_derive; pub mod error;
use oxygengine::prelude::*;

/// The two sides a player can belong to.
/// NOTE(review): presumably a two-player game split North/South — confirm
/// against the game-rules code that consumes this component.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PlayerType {
    North,
    South,
}

/// ECS component tagging an entity as belonging to one of the two players.
#[derive(Debug, Copy, Clone)]
pub struct Player(pub PlayerType);

impl Component for Player {
    // Dense vector-backed storage for this component.
    type Storage = VecStorage<Self>;
}
use super::{u256mod, ModulusTrait};

// Addition
//
// Modular addition: (a + b) mod M. The canonical implementation works on
// references; every by-value impl below forwards to it.
impl<M: ModulusTrait> std::ops::Add for &u256mod<M> {
    type Output = u256mod<M>;

    fn add(self, other: &u256mod<M>) -> u256mod<M> {
        let sum_value = self.value + other.value;
        // Both operands are assumed already reduced (< M), so the raw sum is
        // < 2*M and at most one subtraction of M restores the invariant.
        if sum_value >= M::modulus() {
            u256mod {
                value: &sum_value - &M::modulus(),
                this_is_stupid_why: std::marker::PhantomData,
            }
        } else {
            u256mod {
                value: sum_value,
                this_is_stupid_why: std::marker::PhantomData,
            }
        }
    }
}

impl<M: ModulusTrait> std::ops::AddAssign<&u256mod<M>> for u256mod<M> {
    fn add_assign(&mut self, other: &u256mod<M>) {
        // Reborrow (`&*self`) instead of `*self + other`: the latter moves
        // out of `&mut self` and only compiles when `u256mod` is `Copy`.
        *self = &*self + other;
    }
}

// Versions with different reference combinations — all delegate to the
// reference/reference impl above.
impl<M: ModulusTrait> std::ops::Add for u256mod<M> {
    type Output = u256mod<M>;

    fn add(self, other: u256mod<M>) -> u256mod<M> {
        &self + &other
    }
}

impl<M: ModulusTrait> std::ops::Add<&u256mod<M>> for u256mod<M> {
    type Output = u256mod<M>;

    fn add(self, other: &u256mod<M>) -> u256mod<M> {
        &self + other
    }
}

impl<M: ModulusTrait> std::ops::Add<u256mod<M>> for &u256mod<M> {
    type Output = u256mod<M>;

    fn add(self, other: u256mod<M>) -> u256mod<M> {
        self + &other
    }
}

impl<M: ModulusTrait> std::ops::AddAssign for u256mod<M> {
    fn add_assign(&mut self, other: u256mod<M>) {
        // Same reborrow trick as the by-reference AddAssign above.
        *self = &*self + &other;
    }
}

#[cfg(test)]
mod tests {
    #[test]
    fn basic_add() {
        // TODO: exercise wrap-around at the modulus boundary.
    }
}
use crate::solutions::Solution;
use crate::utilities::intcode;

/// Advent of Code 2019, day 2: run the Intcode "gravity assist" program.
pub struct Day02 {}

impl Solution for Day02 {
    /// Part one: restore the "1202 program alarm" state (noun = 12,
    /// verb = 2), run the program to halt, and report position 0.
    fn part_one(&self, input: &str) -> String {
        let tape: Vec<i32> = input
            .split(',')
            .map(|t| t.parse::<i32>().unwrap())
            .collect();

        let mut vm = intcode::VirtualMachine::new(tape, None);
        vm.set_word(1, 12).unwrap();
        // `2`, not `02`: a zero-prefixed literal is easily misread as octal
        // (clippy `zero_prefixed_literal`) and the leading zero is meaningless.
        vm.set_word(2, 2).unwrap();
        vm.run().unwrap();

        vm.get_word(0).unwrap().to_string()
    }

    /// Part two: brute-force the (noun, verb) pair in 0..=99 whose program
    /// leaves `TARGET` at position 0, returning `100 * noun + verb`.
    fn part_two(&self, input: &str) -> String {
        const TARGET: i32 = 19690720;
        let tape: Vec<i32> = input
            .split(',')
            .map(|t| t.parse::<i32>().unwrap())
            .collect();

        for noun in 0..=99 {
            for verb in 0..=99 {
                // Each attempt needs a fresh copy: the VM mutates its tape.
                let mut vm = intcode::VirtualMachine::new(tape.clone(), None);
                vm.set_word(1, noun).unwrap();
                vm.set_word(2, verb).unwrap();
                vm.run().unwrap();

                let output = vm.get_word(0).unwrap();
                if output == TARGET {
                    return (100 * noun + verb).to_string();
                }
            }
        }

        panic!("Could not match target")
    }
}
// Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![warn(clippy::all, clippy::pedantic)] #![allow(clippy::missing_docs_in_private_items)] fn main() { let _: Vec<i8> = vec![5_i8; 6].iter().map(|x| *x).collect(); let _: Vec<String> = vec![String::new()].iter().map(|x| x.clone()).collect(); let _: Vec<u32> = vec![42, 43].iter().map(|&x| x).collect(); let _: Option<u64> = Some(Box::new(16)).map(|b| *b); }
mod funcs;
pub use self::funcs::*;

/// Length of the buffer callers allocate when retrieving error messages.
/// NOTE(review): assumed to match what the functions in `funcs` expect —
/// confirm against the underlying API before changing this value.
pub const ERROR_BUFFER_LENGTH: usize = 256;
use anilist::models::Staff;
use serenity::builder::CreateEmbed;
use serenity::framework::standard::CommandResult;
use serenity::model::channel::{Message, Reaction, ReactionType};
use serenity::prelude::Context;

use crate::anilist::embeds::{
    staff_overview_embed, staff_related_anime_embed, staff_related_manga_embed,
};
use crate::anilist::{AniListPagination, AniListPaginationKind, AniListStaffView};
use crate::types::PaginationResult;
use crate::{reactions, utils};

impl AniListPagination {
    /// Sends an embed for the first entry in `staff` and registers a
    /// pagination session (keyed by the sent message id and the invoking
    /// author) so later reactions can page through the results.
    pub async fn new_staff_pagination(
        context: &Context,
        message: &Message,
        staff: &[Staff],
        view: AniListStaffView,
    ) -> CommandResult {
        // Only the ids are stored in the pagination; full Staff records are
        // re-fetched on demand in `_staff_handler`.
        let ids = staff.iter().map(|staff| staff.id).collect();
        let kind = AniListPaginationKind::Staff(view);
        let pagination = AniListPagination::new(ids, kind);

        let embed = pagination.staff_embed(&staff[0]);
        let reactions = reactions::staff(staff.len());

        let sent =
            utils::send_embed_message(&context, &message.channel_id, &embed, reactions).await?;

        utils::add_pagination_to_store(&context, pagination, sent.id, message.author.id).await;

        Ok(())
    }

    /// Switches the staff view (overview / related anime / related manga)
    /// based on which reaction emoji was used; any other emoji leaves the
    /// current view unchanged.
    pub(crate) fn set_staff_view(&mut self, reaction: &Reaction) {
        self.kind = match reaction.emoji {
            ReactionType::Unicode(ref x) if x == reactions::OVERVIEW => {
                AniListPaginationKind::Staff(AniListStaffView::Overview)
            }
            ReactionType::Unicode(ref x) if x == reactions::ANIME => {
                AniListPaginationKind::Staff(AniListStaffView::RelatedAnime)
            }
            ReactionType::Unicode(ref x) if x == reactions::MANGA => {
                AniListPaginationKind::Staff(AniListStaffView::RelatedManga)
            }
            // Not a view-switching emoji: keep the current kind as-is.
            _ => return,
        }
    }

    /// Builds the embed for the current view of `staff`. Returns a default
    /// (empty) embed if this pagination isn't a staff pagination.
    pub fn staff_embed(&self, staff: &Staff) -> CreateEmbed {
        match &self.kind {
            AniListPaginationKind::Staff(view) => {
                let footer = Some(self.standard_footer());
                match view {
                    AniListStaffView::Overview => staff_overview_embed(&staff, footer),
                    AniListStaffView::RelatedAnime => staff_related_anime_embed(&staff, footer),
                    AniListStaffView::RelatedManga => staff_related_manga_embed(&staff, footer),
                }
            }
            _ => CreateEmbed::default(),
        }
    }

    /// Reaction handler: re-fetches the staff entry under the cursor and
    /// replaces the paginated message's embed with the current view.
    pub(crate) async fn _staff_handler(
        &mut self,
        context: &Context,
        reaction: &Reaction,
    ) -> PaginationResult {
        let staff = anilist::client::fetch_staff(self.ids[self.cursor]).await?;
        let embed = self.staff_embed(&staff);
        self.update_message(&context, &reaction, embed).await;
        Ok(())
    }
}
#[macro_use] extern crate maplit; pub mod character;
use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::ops::Bound;

/// A half-open byte range `[offset, offset + size)` inside some buffer.
#[derive(Eq, PartialEq, Debug, Clone)]
pub struct ChunkOffsetSize {
    pub offset: u64,
    pub size: usize,
}

impl ChunkOffsetSize {
    /// Builds a chunk descriptor from its starting offset and byte length.
    pub fn new(offset: u64, size: usize) -> Self {
        Self { offset, size }
    }

    /// One past the last byte covered by this chunk.
    pub fn end(&self) -> u64 {
        self.offset + self.size as u64
    }
}

// Chunks order primarily by offset, breaking ties on size, so that
// distinct chunks sharing an offset remain distinct keys in the map.
impl Ord for ChunkOffsetSize {
    fn cmp(&self, other: &Self) -> Ordering {
        self.offset
            .cmp(&other.offset)
            .then_with(|| self.size.cmp(&other.size))
    }
}

impl PartialOrd for ChunkOffsetSize {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

/// An ordered map from chunk ranges to values, with overlap queries.
#[derive(Default)]
pub struct ChunkMap<V> {
    btm: BTreeMap<ChunkOffsetSize, V>,
}

impl<V> ChunkMap<V> {
    /// Creates an empty map.
    pub fn new() -> Self {
        Self {
            btm: BTreeMap::new(),
        }
    }

    /// Associates `value` with the chunk range `location`.
    pub fn insert(&mut self, location: ChunkOffsetSize, value: V) {
        self.btm.insert(location, value);
    }

    /// Yields the stored entries that overlap `location`, highest offset
    /// first: it walks downward from just below `location.end()` and stops
    /// at the first chunk that ends at or before the query start.
    /// NOTE(review): the early stop is only complete if stored chunks never
    /// overlap one another — a long chunk "hidden" behind a shorter,
    /// higher-offset one would be missed. Confirm that callers only insert
    /// disjoint chunks.
    pub fn iter_overlapping(
        &self,
        location: ChunkOffsetSize,
    ) -> impl Iterator<Item = (&ChunkOffsetSize, &V)> {
        let query_start = location.offset;
        // Every candidate must start strictly before the query's end.
        let upper = Bound::Excluded(ChunkOffsetSize::new(location.end(), 0));
        self.btm
            .range((Bound::Unbounded, upper))
            .rev()
            .take_while(move |(chunk, _v)| query_start < chunk.end())
    }
}
use crate::packet::SerializedPacket;
use crate::Notification;
#[cfg(not(feature = "std"))]
use crate::PublishNotification;
use core::convert::TryInto;
use fugit::TimerDurationU32;
use fugit::TimerInstantU32;
#[cfg(not(feature = "std"))]
use heapless::{pool, pool::singleton::Pool};
use heapless::{FnvIndexMap, FnvIndexSet, IndexMap, IndexSet};
use mqttrust::encoding::v4::*;

/// Connection lifecycle: CONNECT sent (`Handshake`), CONNACK accepted
/// (`Connected`), or not/no-longer connected (`Disconnected`).
#[allow(unused)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[cfg_attr(feature = "defmt-impl", derive(defmt::Format))]
pub enum MqttConnectionStatus {
    Handshake,
    Connected,
    Disconnected,
}

/// Errors produced by the MQTT state machine.
#[derive(Debug, PartialEq)]
#[cfg_attr(feature = "defmt-impl", derive(defmt::Format))]
pub enum StateError {
    /// Broker's error reply to client's connect packet
    Connect(ConnectReturnCode),
    /// Invalid state for a given operation
    InvalidState,
    /// Received a packet (ack) which isn't asked for
    Unsolicited,
    /// Last pingreq isn't acked
    AwaitPingResp,
    /// Received a wrong packet while waiting for another packet
    WrongPacket,
    PayloadEncoding,
    InvalidUtf8,
    /// The maximum number of messages allowed to be simultaneously in-flight has been reached.
    MaxMessagesInflight,
    /// Non-zero QoS publications require PID
    PidMissing,
    InvalidHeader,
}

// Static memory pool used to box incoming publish notifications on no-std
// targets (std builds use a regular Box instead).
#[cfg(not(feature = "std"))]
pool!(
    #[allow(non_upper_case_globals)]
    BoxedPublish: PublishNotification
);

/// State of the mqtt connection.
/// Methods will just modify the state of the object without doing any network
/// operations This abstracts the functionality better so that it's easy to
/// switch between synchronous code, tokio (or) async/await
pub struct MqttState<const TIMER_HZ: u32> {
    /// Connection status
    pub connection_status: MqttConnectionStatus,
    /// Status of last ping
    pub await_pingresp: bool,
    /// Packet id of the last outgoing packet
    pub last_pid: Pid,
    /// Outgoing QoS 1, 2 publishes which aren't acked yet
    pub(crate) outgoing_pub: FnvIndexMap<u16, Inflight<TIMER_HZ, 1536>, 2>,
    /// Packet ids of released QoS 2 publishes
    pub outgoing_rel: FnvIndexSet<u16, 2>,
    /// Packet ids on incoming QoS 2 publishes
    pub incoming_pub: FnvIndexSet<u16, 2>,
    // Timestamp of the last outgoing ping, used for keep-alive tracking.
    last_ping: StartTime<TIMER_HZ>,
}

impl<const TIMER_HZ: u32> MqttState<TIMER_HZ> {
    /// Creates new mqtt state. Same state should be used during a
    /// connection for persistent sessions while new state should
    /// instantiated for clean sessions
    pub fn new() -> Self {
        #[cfg(not(feature = "std"))]
        {
            // One pool node, rounded up to the node's alignment.
            const LEN: usize = core::mem::size_of::<heapless::pool::Node<PublishNotification>>()
                + core::mem::align_of::<heapless::pool::Node<PublishNotification>>()
                - (core::mem::size_of::<heapless::pool::Node<PublishNotification>>()
                    % core::mem::align_of::<heapless::pool::Node<PublishNotification>>());
            static mut PUBLISH_MEM: [u8; LEN] = [0u8; LEN];
            BoxedPublish::grow(unsafe { &mut PUBLISH_MEM });
        }
        MqttState {
            connection_status: MqttConnectionStatus::Disconnected,
            await_pingresp: false,
            last_pid: Pid::new(),
            outgoing_pub: IndexMap::new(),
            outgoing_rel: IndexSet::new(),
            incoming_pub: IndexSet::new(),
            last_ping: StartTime::default(),
        }
    }

    /// Consolidates handling of all outgoing mqtt packet logic. Returns a
    /// packet which should be put on to the network by the eventloop
    pub fn handle_outgoing_packet<'b>(
        &mut self,
        packet: Packet<'b>,
    ) -> Result<Packet<'b>, StateError> {
        match packet {
            Packet::Pingreq => self.handle_outgoing_ping(),
            // Only Pingreq is routed through here today.
            _ => unreachable!(),
        }
    }

    /// Consolidates handling of all outgoing mqtt packet logic. Returns a
    /// packet which should be put on to the network by the eventloop
    pub fn handle_outgoing_request(
        &mut self,
        request: &mut SerializedPacket<'_>,
        now: &TimerInstantU32<TIMER_HZ>,
    ) -> Result<(), StateError> {
        match request.header()?.typ {
            PacketType::Publish => self.handle_outgoing_publish(request, now)?,
            PacketType::Subscribe => {
                let pid = self.next_pid();
                trace!("Sending Subscribe({:?})", pid);
                request.set_pid(pid)?
            }
            PacketType::Unsubscribe => {
                let pid = self.next_pid();
                trace!("Sending Unsubscribe({:?})", pid);
                request.set_pid(pid)?
            }
            _ => unreachable!(),
        }
        Ok(())
    }

    /// Consolidates handling of all incoming mqtt packets. Returns a
    /// `Notification` which for the user to consume and `Packet` which for the
    /// eventloop to put on the network E.g For incoming QoS1 publish packet,
    /// this method returns (Publish, Puback). Publish packet will be forwarded
    /// to user and Puback packet will be written to network
    pub fn handle_incoming_packet<'b>(
        &mut self,
        packet: Packet<'b>,
    ) -> Result<(Option<Notification>, Option<Packet<'static>>), StateError> {
        match packet {
            Packet::Connack(connack) => self
                .handle_incoming_connack(connack)
                .map(|()| (Notification::ConnAck.into(), None)),
            Packet::Pingresp => self.handle_incoming_pingresp(),
            Packet::Publish(publish) => self.handle_incoming_publish(publish),
            Packet::Suback(suback) => self.handle_incoming_suback(suback),
            Packet::Unsuback(pid) => self.handle_incoming_unsuback(pid),
            Packet::Puback(pid) => self.handle_incoming_puback(pid),
            Packet::Pubrec(pid) => self.handle_incoming_pubrec(pid),
            Packet::Pubrel(pid) => self.handle_incoming_pubrel(pid),
            Packet::Pubcomp(pid) => self.handle_incoming_pubcomp(pid),
            _ => {
                error!("Invalid incoming packet!");
                Ok((None, None))
            }
        }
    }

    /// Adds next packet identifier to QoS 1 and 2 publish packets and returns
    /// it by wrapping publish in packet
    fn handle_outgoing_publish(
        &mut self,
        request: &mut SerializedPacket<'_>,
        now: &TimerInstantU32<TIMER_HZ>,
    ) -> Result<(), StateError> {
        match request.header()?.qos {
            QoS::AtMostOnce => {
                // Fire-and-forget: no pid, nothing retained for retry.
                trace!("Sending Publish({:?})", QoS::AtMostOnce);
            }
            QoS::AtLeastOnce => {
                let pid = self.next_pid();
                trace!("Sending Publish({:?}, {:?})", pid, QoS::AtLeastOnce);
                self.outgoing_pub
                    .insert(pid.get(), Inflight::new(StartTime::new(*now), &request.0))
                    .map_err(|_| StateError::MaxMessagesInflight)?;
                request.set_pid(pid)?;
            }
            QoS::ExactlyOnce => {
                let pid = self.next_pid();
                trace!("Sending Publish({:?}, {:?})", pid, QoS::ExactlyOnce);
                self.outgoing_pub
                    .insert(pid.get(), Inflight::new(StartTime::new(*now), &request.0))
                    .map_err(|_| StateError::MaxMessagesInflight)?;
                request.set_pid(pid)?;
            }
        }
        Ok(())
    }

    /// Iterates through the list of stored publishes and removes the publish
    /// with the matching packet identifier. Removal is now a O(n) operation.
    /// This should be usually ok in case of acks due to ack ordering in normal
    /// conditions. But in cases where the broker doesn't guarantee the order of
    /// acks, the performance won't be optimal
    fn handle_incoming_puback(
        &mut self,
        pid: Pid,
    ) -> Result<(Option<Notification>, Option<Packet<'static>>), StateError> {
        if self.outgoing_pub.contains_key(&pid.get()) {
            let _publish = self.outgoing_pub.remove(&pid.get());

            let request = None;
            let notification = Some(Notification::Puback(pid));
            trace!("Received Puback({:?})", pid);

            Ok((notification, request))
        } else {
            // Deliberately tolerated rather than failing the connection.
            error!("Unsolicited puback packet: {:?}", pid.get());
            // Err(StateError::Unsolicited)
            Ok((None, None))
        }
    }

    fn handle_incoming_suback<'a>(
        &mut self,
        suback: Suback<'a>,
    ) -> Result<(Option<Notification>, Option<Packet<'static>>), StateError> {
        let request = None;
        trace!("Received Suback({:?})", suback.pid);
        // TODO: Add suback packet info here
        let notification = Some(Notification::Suback(suback.pid));
        Ok((notification, request))
    }

    fn handle_incoming_unsuback(
        &mut self,
        pid: Pid,
    ) -> Result<(Option<Notification>, Option<Packet<'static>>), StateError> {
        let request = None;
        trace!("Received Unsuback({:?})", pid);
        let notification = Some(Notification::Unsuback(pid));
        Ok((notification, request))
    }

    /// Iterates through the list of stored publishes and removes the publish with the
    /// matching packet identifier. Removal is now a O(n) operation. This should be
    /// usually ok in case of acks due to ack ordering in normal conditions.
    /// But in cases
    /// where the broker doesn't guarantee the order of acks, the performance won't be optimal
    fn handle_incoming_pubrec(
        &mut self,
        pid: Pid,
    ) -> Result<(Option<Notification>, Option<Packet<'static>>), StateError> {
        if self.outgoing_pub.contains_key(&pid.get()) {
            self.outgoing_pub.remove(&pid.get());
            // QoS 2 step 2: remember the pid as "released" until Pubcomp.
            self.outgoing_rel
                .insert(pid.get())
                .map_err(|_| StateError::InvalidState)?;

            let reply = Some(Packet::Pubrel(pid));
            let notification = Some(Notification::Pubrec(pid));
            Ok((notification, reply))
        } else {
            error!("Unsolicited pubrec packet: {:?}", pid.get());
            // Err(StateError::Unsolicited)
            Ok((None, None))
        }
    }

    /// Results in a publish notification in all the QoS cases. Replies with an ack
    /// in case of QoS1 and replies rec in case of QoS2 while also storing the message
    fn handle_incoming_publish<'b>(
        &mut self,
        publish: Publish<'b>,
    ) -> Result<(Option<Notification>, Option<Packet<'static>>), StateError> {
        let qospid = (publish.qos, publish.pid);

        // NOTE(review): `alloc().unwrap()` panics if the pool is exhausted —
        // the pool is grown with exactly one node in `new()`; confirm that at
        // most one publish notification is ever outstanding at a time.
        #[cfg(not(feature = "std"))]
        let boxed_publish = BoxedPublish::alloc().unwrap();
        #[cfg(not(feature = "std"))]
        let notification = Notification::Publish(boxed_publish.init(publish.try_into().unwrap()));
        #[cfg(feature = "std")]
        let notification = Notification::Publish(std::boxed::Box::new(publish.try_into().unwrap()));

        let request = match qospid {
            (QoS::AtMostOnce, _) => None,
            (QoS::AtLeastOnce, Some(pid)) => Some(Packet::Puback(pid)),
            (QoS::ExactlyOnce, Some(pid)) => {
                self.incoming_pub.insert(pid.get()).map_err(|_| {
                    error!("Failed to insert incoming pub!");
                    StateError::InvalidState
                })?;
                Some(Packet::Pubrec(pid))
            }
            // QoS 1/2 without a pid is a protocol violation.
            _ => return Err(StateError::InvalidHeader),
        };
        Ok((Some(notification), request))
    }

    fn handle_incoming_pubrel(
        &mut self,
        pid: Pid,
    ) -> Result<(Option<Notification>, Option<Packet<'static>>), StateError> {
        if self.incoming_pub.contains(&pid.get()) {
            self.incoming_pub.remove(&pid.get());
            let reply = Packet::Pubcomp(pid);
            Ok((None, Some(reply)))
        } else {
            error!("Unsolicited pubrel packet: {:?}", pid.get());
            // Err(StateError::Unsolicited)
            Ok((None, None))
        }
    }

    fn handle_incoming_pubcomp(
        &mut self,
        pid: Pid,
    ) -> Result<(Option<Notification>, Option<Packet<'static>>), StateError> {
        if self.outgoing_rel.contains(&pid.get()) {
            self.outgoing_rel.remove(&pid.get());
            let notification = Some(Notification::Pubcomp(pid));
            let reply = None;
            Ok((notification, reply))
        } else {
            error!("Unsolicited pubcomp packet: {:?}", pid.get());
            // Err(StateError::Unsolicited)
            Ok((None, None))
        }
    }

    /// check when the last control packet/pingreq packet is received and return
    /// the status which tells if keep alive time has exceeded
    /// NOTE: status will be checked for zero keepalive times also
    fn handle_outgoing_ping<'b>(&mut self) -> Result<Packet<'b>, StateError> {
        // raise error if last ping didn't receive ack
        if self.await_pingresp {
            error!("Error awaiting for last ping response");
            return Err(StateError::AwaitPingResp);
        }

        self.await_pingresp = true;
        trace!("Sending Pingreq");
        Ok(Packet::Pingreq)
    }

    fn handle_incoming_pingresp(
        &mut self,
    ) -> Result<(Option<Notification>, Option<Packet<'static>>), StateError> {
        self.await_pingresp = false;
        trace!("Received Pingresp");
        Ok((None, None))
    }

    pub(crate) fn handle_outgoing_connect(&mut self) {
        self.connection_status = MqttConnectionStatus::Handshake;
    }

    pub fn handle_incoming_connack(&mut self, connack: Connack) -> Result<(), StateError> {
        match connack.code {
            ConnectReturnCode::Accepted
                if self.connection_status == MqttConnectionStatus::Handshake =>
            {
                debug!("MQTT connected!");
                self.connection_status = MqttConnectionStatus::Connected;
                Ok(())
            }
            // Accepted while not handshaking is a protocol-state violation.
            ConnectReturnCode::Accepted
                if self.connection_status != MqttConnectionStatus::Handshake =>
            {
                error!(
                    "Invalid state. Expected = {:?}, Current = {:?}",
                    MqttConnectionStatus::Handshake, self.connection_status
                );
                self.connection_status = MqttConnectionStatus::Disconnected;
                Err(StateError::InvalidState)
            }
            code => {
                error!("Connection failed. Connection error = {:?}", code as u8);
                self.connection_status = MqttConnectionStatus::Disconnected;
                Err(StateError::Connect(code))
            }
        }
    }

    fn next_pid(&mut self) -> Pid {
        self.last_pid = self.last_pid + 1;
        self.last_pid
    }

    pub(crate) fn last_ping_entry(&mut self) -> &mut StartTime<TIMER_HZ> {
        &mut self.last_ping
    }

    // Yields the in-flight publishes whose retry interval has elapsed.
    pub(crate) fn retries(
        &mut self,
        now: TimerInstantU32<TIMER_HZ>,
        interval: TimerDurationU32<TIMER_HZ>,
    ) -> impl Iterator<Item = (&u16, &mut Inflight<TIMER_HZ, 1536>)> + '_ {
        self.outgoing_pub
            .iter_mut()
            .filter(move |(_, inflight)| inflight.last_touch.has_elapsed(&now, interval))
    }
}

/// An optional timer instant; `None` means "not started yet".
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct StartTime<const TIMER_HZ: u32>(Option<TimerInstantU32<TIMER_HZ>>);

impl<const TIMER_HZ: u32> Default for StartTime<TIMER_HZ> {
    fn default() -> Self {
        Self(None)
    }
}

impl<const TIMER_HZ: u32> StartTime<TIMER_HZ> {
    pub fn new(start_time: TimerInstantU32<TIMER_HZ>) -> Self {
        Self(start_time.into())
    }

    /// Sets the start time only if none is recorded yet.
    pub fn or_insert(&mut self, now: TimerInstantU32<TIMER_HZ>) -> &mut Self {
        self.0.get_or_insert(now);
        self
    }

    /// Unconditionally records `now` as the start time.
    pub fn insert(&mut self, now: TimerInstantU32<TIMER_HZ>) {
        self.0.replace(now);
    }
}

impl<const TIMER_HZ: u32> StartTime<TIMER_HZ> {
    /// Check whether an interval has elapsed since this start time.
    pub fn has_elapsed(
        &self,
        now: &TimerInstantU32<TIMER_HZ>,
        interval: TimerDurationU32<TIMER_HZ>,
    ) -> bool {
        if let Some(start_time) = self.0 {
            let elapse_time = start_time + interval;
            elapse_time <= *now
        } else {
            // Never started, so nothing can have elapsed.
            false
        }
    }
}

/// Client publication message data.
#[derive(Debug)]
pub(crate) struct Inflight<const TIMER_HZ: u32, const L: usize> {
    /// A publish of non-zero QoS.
    publish: heapless::Vec<u8, L>,
    /// A timestamp used for retry and expiry.
    last_touch: StartTime<TIMER_HZ>,
}

impl<const TIMER_HZ: u32, const L: usize> Inflight<TIMER_HZ, L> {
    pub(crate) fn new(last_touch: StartTime<TIMER_HZ>, publish: &[u8]) -> Self {
        // QoS 0 publishes are never retried, so storing one is a logic bug.
        assert!(
            !matches!(
                decoder::Header::new(publish[0]).unwrap().qos,
                QoS::AtMostOnce
            ),
            "Only non-zero QoSs are allowed."
        );
        Self {
            publish: heapless::Vec::from_slice(publish).unwrap(),
            last_touch,
        }
    }

    pub(crate) fn last_touch_entry(&mut self) -> &mut StartTime<TIMER_HZ> {
        &mut self.last_touch
    }
}

impl<const TIMER_HZ: u32, const L: usize> Inflight<TIMER_HZ, L> {
    /// Re-stamps the stored packet with `pid` and returns the wire bytes.
    pub(crate) fn packet<'b>(&'b mut self, pid: u16) -> Result<&'b [u8], StateError> {
        let pid = pid.try_into().map_err(|_| StateError::PayloadEncoding)?;
        let mut packet = SerializedPacket(self.publish.as_mut());
        packet.set_pid(pid)?;
        Ok(packet.to_inner())
    }
}

#[cfg(test)]
mod test {
    use super::{BoxedPublish, MqttConnectionStatus, MqttState, Packet, StateError};
    use crate::{packet::SerializedPacket, Notification};
    use core::convert::TryFrom;
    use fugit::TimerInstantU32;
    use heapless::pool::singleton::Pool;
    use mqttrust::{
        encoding::v4::{decode_slice, encode_slice, Pid},
        Publish, QoS,
    };

    // Builds a fixed publish packet; QoS 0 never carries a pid.
    fn build_publish<'a>(qos: QoS, pid: Option<u16>) -> Publish<'a> {
        let topic = "hello/world";
        let payload = &[1, 2, 3];
        let pid = match qos {
            QoS::AtMostOnce => None,
            QoS::AtLeastOnce => pid.and_then(|p| Pid::try_from(p).ok()),
            QoS::ExactlyOnce => pid.and_then(|p| Pid::try_from(p).ok()),
        };
        Publish {
            qos,
            pid,
            payload,
            dup: false,
            retain: false,
            topic_name: topic,
        }
    }

    // Fresh state plus an oversized pool so tests never exhaust it.
    fn build_mqttstate() -> MqttState<1000> {
        let state = MqttState::new();
        const LEN: usize = 1024 * 10;
        static mut PUBLISH_MEM: [u8; LEN] = [0u8; LEN];
        BoxedPublish::grow(unsafe { &mut PUBLISH_MEM });
        state
    }

    #[test]
    fn handle_outgoing_requests() {
        let buf = &mut [0u8; 256];
        let now = TimerInstantU32::from_ticks(0);
        let mut mqtt = build_mqttstate();

        // Publish
        let publish = Packet::Publish(build_publish(QoS::AtMostOnce, None));
        let len = encode_slice(&publish, buf).unwrap();

        // Packet id shouldn't be set and publish shouldn't be saved in queue
        mqtt.handle_outgoing_request(&mut SerializedPacket(&mut buf[..len]), &now)
            .unwrap();
        // assert_eq!(publish_out.qos, QoS::AtMostOnce);
        // assert_eq!(mqtt.outgoing_pub.len(), 0);

        // // Subscribe
        // let subscribe = SubscribeRequest {
        //     topics: Vec::from_slice(&[
        //         SubscribeTopic {
        //             topic_path: String::from("some/topic"),
        //             qos: QoS::AtLeastOnce,
        //         },
        //         SubscribeTopic {
        //             topic_path: String::from("some/other/topic"),
        //             qos: QoS::ExactlyOnce,
        //         },
        //     ])
        //     .unwrap(),
        // };

        // // Packet id should be set and subscribe shouldn't be saved in publish queue
        // mqtt.handle_outgoing_request(subscribe.try_into().unwrap(), buf, &now)
        //     .unwrap();
        // let mut topics_iter = subscribe_out.topics.iter();
        // assert_eq!(subscribe_out.pid, Pid::try_from(2).unwrap());
        // assert_eq!(
        //     topics_iter.next(),
        //     Some(&SubscribeTopic {
        //         qos: QoS::AtLeastOnce,
        //         topic_path: String::from("some/topic")
        //     })
        // );
        // assert_eq!(
        //     topics_iter.next(),
        //     Some(&SubscribeTopic {
        //         qos: QoS::ExactlyOnce,
        //         topic_path: String::from("some/other/topic")
        //     })
        // );
        // assert_eq!(topics_iter.next(), None);
        // assert_eq!(mqtt.outgoing_pub.len(), 0);

        // // Unsubscribe
        // let unsubscribe = UnsubscribeRequest {
        //     topics: Vec::from_slice(&[
        //         String::from("some/topic"),
        //         String::from("some/other/topic"),
        //     ])
        //     .unwrap(),
        // };

        // // Packet id should be set and subscribe shouldn't be saved in publish queue
        // let unsubscribe_out =
        //     match mqtt.handle_outgoing_request(unsubscribe.try_into().unwrap(), buf, &now) {
        //         Ok(Packet::Unsubscribe(p)) => p,
        //         _ => panic!("Invalid packet. Should've been a unsubscribe packet"),
        //     };
        // let mut topics_iter = unsubscribe_out.topics.iter();
        // assert_eq!(unsubscribe_out.pid, Pid::try_from(3).unwrap());
        // assert_eq!(topics_iter.next(), Some(&String::from("some/topic")));
        // assert_eq!(topics_iter.next(), Some(&String::from("some/other/topic")));
        // assert_eq!(topics_iter.next(), None);
        // assert_eq!(mqtt.outgoing_pub.len(), 0);
    }

    #[test]
    fn outgoing_publish_handle_should_set_pid_correctly_and_add_publish_to_queue_correctly() {
        let buf = &mut [0u8; 256];
        let now = TimerInstantU32::from_ticks(0);
        let mut mqtt = build_mqttstate();

        // QoS0 Publish
        let publish = Packet::Publish(build_publish(QoS::AtMostOnce, None));
        let len = encode_slice(&publish, buf).unwrap();
        let mut pkg = SerializedPacket(&mut buf[..len]);

        // Packet id shouldn't be set and publish shouldn't be saved in queue
        mqtt.handle_outgoing_publish(&mut pkg, &now).unwrap();
        let publish_out = match decode_slice(pkg.to_inner()).unwrap() {
            Some(Packet::Publish(p)) => p,
            _ => panic!(),
        };
        assert_eq!(publish_out.qos, QoS::AtMostOnce);
        assert_eq!(mqtt.outgoing_pub.len(), 0);

        // QoS1 Publish
        let publish = Packet::Publish(build_publish(QoS::AtLeastOnce, None));
        let len = encode_slice(&publish, buf).unwrap();
        let mut pkg = SerializedPacket(&mut buf[..len]);

        // Packet id should be set and publish should be saved in queue
        mqtt.handle_outgoing_publish(&mut pkg, &now).unwrap();
        let publish_out = match decode_slice(pkg.to_inner()).unwrap() {
            Some(Packet::Publish(p)) => p,
            _ => panic!(),
        };
        assert_eq!(publish_out.qos, QoS::AtLeastOnce);
        assert_eq!(publish_out.pid, Some(Pid::try_from(2).unwrap()));
        assert_eq!(mqtt.outgoing_pub.len(), 1);
    }

    #[test]
    fn incoming_publish_should_be_added_to_queue_correctly() {
        let mut mqtt = build_mqttstate();

        // QoS0, 1, 2 Publishes
        let publish1 = build_publish(QoS::AtMostOnce, Some(1));
        let publish2 = build_publish(QoS::AtLeastOnce, Some(2));
        let publish3 = build_publish(QoS::ExactlyOnce, Some(3));

        mqtt.handle_incoming_publish(publish1).unwrap();
        mqtt.handle_incoming_publish(publish2).unwrap();
        mqtt.handle_incoming_publish(publish3).unwrap();

        // only qos2 publish should be add to queue
        assert_eq!(mqtt.incoming_pub.len(), 1);
        assert!(mqtt.incoming_pub.contains(&3));
    }

    #[test]
    fn incoming_qos2_publish_should_send_rec_to_network_and_publish_to_user() {
        let mut mqtt = build_mqttstate();
        let publish = build_publish(QoS::ExactlyOnce, Some(1));

        let (notification, request) = mqtt.handle_incoming_publish(publish).unwrap();

        match notification {
            Some(Notification::Publish(publish)) => assert_eq!(publish.qospid, QoS::ExactlyOnce),
            _ => panic!("Invalid notification: {:?}", notification),
        }
        match request {
            Some(Packet::Pubrec(pid)) => assert_eq!(pid.get(), 1),
            _ => panic!("Invalid network request: {:?}", request),
        }
    }

    #[test]
    fn incoming_puback_should_remove_correct_publish_from_queue() {
        let mut mqtt = build_mqttstate();
        let buf = &mut [0u8; 256];
        let now = TimerInstantU32::from_ticks(0);

        let publish1 = Packet::Publish(build_publish(QoS::AtLeastOnce, None));
        let len = encode_slice(&publish1, buf).unwrap();
        let mut pkg1 = SerializedPacket(&mut buf[..len]);
        mqtt.handle_outgoing_publish(&mut pkg1, &now).unwrap();

        assert_eq!(mqtt.outgoing_pub.len(), 1);

        let backup = mqtt.outgoing_pub.get_mut(&2).unwrap().packet(1).unwrap();
        let publish_out = match decode_slice(backup).unwrap() {
            Some(Packet::Publish(p)) => p,
            _ => panic!(),
        };
        assert_eq!(publish_out.qos, QoS::AtLeastOnce);

        mqtt.handle_incoming_puback(Pid::try_from(2).unwrap())
            .unwrap();
        assert_eq!(mqtt.outgoing_pub.len(), 0);
    }

    #[test]
    fn incoming_pubrec_should_release_correct_publish_from_queue_and_add_releaseid_to_rel_queue() {
        let mut mqtt = build_mqttstate();
        let buf = &mut [0u8; 256];
        let now = TimerInstantU32::from_ticks(0);

        let publish = Packet::Publish(build_publish(QoS::ExactlyOnce, None));
        let len = encode_slice(&publish, buf).unwrap();
        let mut pkg = SerializedPacket(&mut buf[..len]);

        mqtt.handle_outgoing_publish(&mut pkg, &now).unwrap();
        mqtt.handle_incoming_pubrec(Pid::try_from(2).unwrap())
            .unwrap();

        assert_eq!(mqtt.outgoing_pub.len(), 0);
        assert_eq!(mqtt.outgoing_rel.len(), 1);

        // check if the element's pid is 2
        assert!(mqtt.outgoing_rel.contains(&2));
    }

    #[test]
    fn incoming_pubrec_should_send_release_to_network_and_nothing_to_user() {
        let mut mqtt = build_mqttstate();
        let buf = &mut [0u8; 256];
        let now = TimerInstantU32::from_ticks(0);

        let pid = Pid::try_from(2).unwrap();
        assert_eq!(pid.get(), 2);

        let publish = Packet::Publish(build_publish(QoS::ExactlyOnce, None));
        let len = encode_slice(&publish, buf).unwrap();
        let mut pkg = SerializedPacket(&mut buf[..len]);
        mqtt.handle_outgoing_publish(&mut pkg, &now).unwrap();

        let (notification, request) = mqtt.handle_incoming_pubrec(pid).unwrap();

        assert_eq!(notification, Some(Notification::Pubrec(pid)));
        assert_eq!(request, Some(Packet::Pubrel(pid)));
    }

    #[test]
    fn incoming_pubrel_should_send_comp_to_network_and_nothing_to_user() {
        let mut mqtt = build_mqttstate();
        let publish = build_publish(QoS::ExactlyOnce, Some(1));

        let pid = Pid::try_from(1).unwrap();
        assert_eq!(pid.get(), 1);

        mqtt.handle_incoming_publish(publish).unwrap();

        let (notification, request) = mqtt.handle_incoming_pubrel(pid).unwrap();
        assert_eq!(notification, None);
        assert_eq!(request, Some(Packet::Pubcomp(pid)));
    }

    #[test]
    fn incoming_pubcomp_should_release_correct_pid_from_release_queue() {
        let mut mqtt = build_mqttstate();
        let buf = &mut [0u8; 256];
        let now = TimerInstantU32::from_ticks(0);

        let publish = Packet::Publish(build_publish(QoS::ExactlyOnce, None));
        let len = encode_slice(&publish, buf).unwrap();
        let mut pkg = SerializedPacket(&mut buf[..len]);

        let pid = Pid::try_from(2).unwrap();

        mqtt.handle_outgoing_publish(&mut pkg, &now).unwrap();
        mqtt.handle_incoming_pubrec(pid).unwrap();

        mqtt.handle_incoming_pubcomp(pid).unwrap();
        assert_eq!(mqtt.outgoing_pub.len(), 0);
    }

    #[test]
    fn outgoing_ping_handle_should_throw_errors_for_no_pingresp() {
        let mut mqtt = build_mqttstate();
        let buf = &mut [0u8; 256];
        let now = TimerInstantU32::from_ticks(0);
        mqtt.connection_status = MqttConnectionStatus::Connected;
        assert_eq!(mqtt.handle_outgoing_ping(), Ok(Packet::Pingreq));
        assert!(mqtt.await_pingresp);

        // network activity other than pingresp
        let publish = Packet::Publish(build_publish(QoS::AtLeastOnce, None));
        let len = encode_slice(&publish, buf).unwrap();
        let mut pkg = SerializedPacket(&mut buf[..len]);
        mqtt.handle_outgoing_publish(&mut pkg, &now).unwrap();
        mqtt.handle_incoming_packet(Packet::Puback(Pid::try_from(2).unwrap()))
            .unwrap();

        // should throw error because we didn't get pingresp for previous ping
        assert_eq!(mqtt.handle_outgoing_ping(), Err(StateError::AwaitPingResp));
    }

    #[test]
    fn outgoing_ping_handle_should_succeed_if_pingresp_is_received() {
        let mut mqtt = build_mqttstate();
        mqtt.connection_status = MqttConnectionStatus::Connected;

        // should ping
        assert_eq!(mqtt.handle_outgoing_ping(), Ok(Packet::Pingreq));
        assert!(mqtt.await_pingresp);

        assert_eq!(
            mqtt.handle_incoming_packet(Packet::Pingresp),
            Ok((None, None))
        );
        assert!(!mqtt.await_pingresp);

        // should ping
        assert_eq!(mqtt.handle_outgoing_ping(), Ok(Packet::Pingreq));
        assert!(mqtt.await_pingresp);
    }
}
/// This file handles bindings from the DOM to the application /// It sets up bindings from the canvas into the program for example mouse /// clicks, selecting the canvas to use etc. use std::cell::RefCell; use std::rc::Rc; use wasm_bindgen::prelude::{wasm_bindgen, Closure}; use wasm_bindgen::JsCast; pub mod app; pub mod full_screen_quad; pub mod shader; use web_sys::{window, HtmlCanvasElement, HtmlElement}; // When the `wee_alloc` feature is enabled, use `wee_alloc` as the global // allocator. #[cfg(feature = "wee_alloc")] #[global_allocator] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[wasm_bindgen] extern "C" { #[wasm_bindgen(js_namespace = console)] fn log(s: &str); } fn request_animation_frame(f: &Closure<dyn FnMut()>) { window() .expect("no global window?!") .request_animation_frame(f.as_ref().unchecked_ref()) .expect("should register `requestAnimationFrame` OK"); } #[wasm_bindgen] pub struct Core { app: Rc<RefCell<app::App>>, } #[wasm_bindgen] impl Core { #[wasm_bindgen(constructor)] pub fn new() -> Self { log("WASM Started"); let window = window().unwrap(); let document = window.document().unwrap(); let canvas: HtmlCanvasElement = match document.query_selector("#viewport_3d").unwrap() { Some(container) => container.dyn_into().unwrap(), None => { log(&format!("No Canvas")); panic!("Failed to create app"); } }; let overlay: HtmlElement = match document.query_selector("#overlay").unwrap() { Some(container) => container.dyn_into().unwrap(), None => { log(&format!("No Overlay")); panic!("Failed to create overlay"); } }; overlay.set_inner_text(""); // Clear loading spinner log("Starting App"); match app::App::new(canvas) { Ok(ap) => { log("App Created"); let ap = Rc::new(RefCell::new(ap)); // Set up bindings Self { app: ap } } Err(err) => { log(&format!("{:?}", &err)); panic!("Failed to create app"); } } } #[wasm_bindgen] pub fn start(&mut self) { let f = Rc::new(RefCell::new(None)); let g = f.clone(); let ap = self.app.clone(); 
*g.borrow_mut() = Some(Closure::wrap(Box::new(move || { // Set the body's text content to how many times this // requestAnimationFrame callback has fired. ap.borrow_mut().update(); // Schedule ourself for another requestAnimationFrame callback. request_animation_frame(f.borrow().as_ref().unwrap()); }) as Box<FnMut()>)); request_animation_frame(g.borrow().as_ref().unwrap()); log("App Started"); } }
// `error_chain!` can recurse deeply #![recursion_limit = "1024"] extern crate env_logger; extern crate actix_web; extern crate postgres; extern crate oracle; extern crate serde; extern crate serde_json; extern crate inner; extern crate futures; extern crate csv; extern crate ini; extern crate bio; extern crate rayon; #[macro_use] extern crate log; #[macro_use] extern crate serde_derive; #[macro_use] extern crate error_chain; mod controller; mod router; mod database; mod models; mod errors; mod misc; mod flatten; mod query_builder; mod msa; use actix_web::middleware::Logger; use actix_web::*; pub use errors::*; use ini::Ini; pub struct State { pub db_params: database::DBParams, } fn main() { //Logging is hard coded for now, but soon can be configured through config file std::env::set_var("RUST_LOG", "actix_web=trace,iptmnet_api=trace"); env_logger::Builder::from_default_env() .default_format_timestamp(false) .default_format_module_path(false) .init(); let conf; let conf_result = Ini::load_from_file("config.ini"); match conf_result { Ok(value) => { conf = value; }, Err(error) => { error!("{}",error); std::process::exit(1); } } let db_params = misc::parse_configs(&conf); let app = move || { let app = App::with_state(State{db_params: db_params.clone()}) .middleware(Logger::new("STATUS : %s | %t | %D ms | PID: %P | %r ")); return router::init_routes(app); }; server::HttpServer::new(app) .bind("0.0.0.0:8088") .expect("Can not bind to 0.0.0.0:8080") .keep_alive(None) .run(); }
use exonum::crypto::PublicKey; #[derive(Serialize, Deserialize)] pub struct FuzzData { pub genesis: PublicKey, pub alice: PublicKey, pub bob: PublicKey, }
fn main() { println!("こんにちは"); println!("AtCoder"); }
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
// NOTE(review): machine-generated Azure NetApp Files client (AutoRust).
// Fix the generator/spec rather than hand-editing logic; only comments added.
//
// Umbrella error: one variant per operation, each transparently wrapping that
// operation's own error enum.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    Operations_List(#[from] operations::list::Error),
    #[error(transparent)]
    NetAppResource_CheckNameAvailability(#[from] net_app_resource::check_name_availability::Error),
    #[error(transparent)]
    NetAppResource_CheckFilePathAvailability(#[from] net_app_resource::check_file_path_availability::Error),
    #[error(transparent)]
    Accounts_List(#[from] accounts::list::Error),
    #[error(transparent)]
    Accounts_Get(#[from] accounts::get::Error),
    #[error(transparent)]
    Accounts_CreateOrUpdate(#[from] accounts::create_or_update::Error),
    #[error(transparent)]
    Accounts_Update(#[from] accounts::update::Error),
    #[error(transparent)]
    Accounts_Delete(#[from] accounts::delete::Error),
    #[error(transparent)]
    Pools_List(#[from] pools::list::Error),
    #[error(transparent)]
    Pools_Get(#[from] pools::get::Error),
    #[error(transparent)]
    Pools_CreateOrUpdate(#[from] pools::create_or_update::Error),
    #[error(transparent)]
    Pools_Update(#[from] pools::update::Error),
    #[error(transparent)]
    Pools_Delete(#[from] pools::delete::Error),
    #[error(transparent)]
    Volumes_List(#[from] volumes::list::Error),
    #[error(transparent)]
    Volumes_Get(#[from] volumes::get::Error),
    #[error(transparent)]
    Volumes_CreateOrUpdate(#[from] volumes::create_or_update::Error),
    #[error(transparent)]
    Volumes_Update(#[from] volumes::update::Error),
    #[error(transparent)]
    Volumes_Delete(#[from] volumes::delete::Error),
    #[error(transparent)]
    Volumes_Revert(#[from] volumes::revert::Error),
    #[error(transparent)]
    Volumes_BreakReplication(#[from] volumes::break_replication::Error),
    #[error(transparent)]
    Volumes_ReplicationStatus(#[from] volumes::replication_status::Error),
    #[error(transparent)]
    Volumes_ResyncReplication(#[from] volumes::resync_replication::Error),
    #[error(transparent)]
    Volumes_DeleteReplication(#[from] volumes::delete_replication::Error),
    #[error(transparent)]
    Volumes_AuthorizeReplication(#[from] volumes::authorize_replication::Error),
    #[error(transparent)]
    Snapshots_List(#[from] snapshots::list::Error),
    #[error(transparent)]
    Snapshots_Get(#[from] snapshots::get::Error),
    #[error(transparent)]
    Snapshots_Create(#[from] snapshots::create::Error),
    #[error(transparent)]
    Snapshots_Update(#[from] snapshots::update::Error),
    #[error(transparent)]
    Snapshots_Delete(#[from] snapshots::delete::Error),
}
pub mod operations {
    use super::{models, API_VERSION};
    // GET {base}/providers/Microsoft.NetApp/operations — list all available
    // REST operations for the provider.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::OperationListResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/providers/Microsoft.NetApp/operations", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is attached only when a token credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::OperationListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
status_code => Err(list::Error::DefaultResponse { status_code }),
        }
    }
    // Error type for `operations::list`; every generated operation carries an
    // identically-shaped enum in its own submodule.
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod net_app_resource {
    use super::{models, API_VERSION};
    // POST …/locations/{location}/checkNameAvailability — ask the service
    // whether a resource name is free.
    pub async fn check_name_availability(
        operation_config: &crate::OperationConfig,
        body: &models::ResourceNameAvailabilityRequest,
        subscription_id: &str,
        location: &str,
    ) -> std::result::Result<models::ResourceNameAvailability, check_name_availability::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.NetApp/locations/{}/checkNameAvailability",
            operation_config.base_path(),
            subscription_id,
            location
        );
        let mut url = url::Url::parse(url_str).map_err(check_name_availability::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(check_name_availability::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(check_name_availability::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(check_name_availability::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(check_name_availability::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ResourceNameAvailability = serde_json::from_slice(rsp_body)
                    .map_err(|source| check_name_availability::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(check_name_availability::Error::DefaultResponse { status_code }),
        }
    }
    pub mod check_name_availability {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // POST …/locations/{location}/checkFilePathAvailability — same request
    // shape as above, different endpoint.
    pub async fn check_file_path_availability(
        operation_config: &crate::OperationConfig,
        body: &models::ResourceNameAvailabilityRequest,
        subscription_id: &str,
        location: &str,
    ) -> std::result::Result<models::ResourceNameAvailability, check_file_path_availability::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.NetApp/locations/{}/checkFilePathAvailability",
            operation_config.base_path(),
            subscription_id,
            location
        );
        let mut url =
url::Url::parse(url_str).map_err(check_file_path_availability::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(check_file_path_availability::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(check_file_path_availability::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(check_file_path_availability::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(check_file_path_availability::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ResourceNameAvailability = serde_json::from_slice(rsp_body)
                    .map_err(|source| check_file_path_availability::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(check_file_path_availability::Error::DefaultResponse { status_code }),
        }
    }
    pub mod check_file_path_availability {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod accounts {
    use super::{models, API_VERSION};
    // GET …/resourceGroups/{rg}/providers/Microsoft.NetApp/netAppAccounts —
    // list all NetApp accounts in a resource group.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
    ) -> std::result::Result<models::NetAppAccountList, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts",
            operation_config.base_path(),
            subscription_id,
            resource_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::NetAppAccountList =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(list::Error::DefaultResponse { status_code }),
        }
    }
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // GET …/netAppAccounts/{account} — fetch a single NetApp account.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
    ) -> std::result::Result<models::NetAppAccount, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            account_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::NetAppAccount = serde_json::from_slice(rsp_body).map_err(|source|
get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(get::Error::DefaultResponse { status_code }),
        }
    }
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // PUT …/netAppAccounts/{account} — create or update; 200/201 return the
    // account body, 202 signals an asynchronous (long-running) accept.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        body: &models::NetAppAccount,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            account_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::NetAppAccount = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::NetAppAccount = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
            status_code => Err(create_or_update::Error::DefaultResponse { status_code }),
        }
    }
    pub mod create_or_update {
        use super::{models, API_VERSION};
        // Multi-status response: the caller distinguishes sync completion
        // (200/201) from async accept (202).
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::NetAppAccount),
            Created201(models::NetAppAccount),
            Accepted202,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // PATCH …/netAppAccounts/{account} — partial update of an account.
    pub async fn update(
        operation_config: &crate::OperationConfig,
        body: &models::NetAppAccountPatch,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
    ) -> std::result::Result<update::Response, update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            account_name
        );
        let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PATCH);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::NetAppAccount =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::NetAppAccount =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(update::Response::Created201(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
            status_code =>
Err(update::Error::DefaultResponse { status_code }),
        }
    }
    pub mod update {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::NetAppAccount),
            Created201(models::NetAppAccount),
            Accepted202,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // DELETE …/netAppAccounts/{account} — async delete (202) or no content (204).
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            account_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => Err(delete::Error::DefaultResponse { status_code }),
        }
    }
    pub mod delete {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Accepted202,
            NoContent204,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod pools {
    use super::{models, API_VERSION};
    // GET …/netAppAccounts/{account}/capacityPools — list pools in an account.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
    ) -> std::result::Result<models::CapacityPoolList, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            account_name
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::CapacityPoolList =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(list::Error::DefaultResponse { status_code }),
        }
    }
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // GET …/capacityPools/{pool} — fetch a single capacity pool.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        pool_name: &str,
    ) -> std::result::Result<models::CapacityPool, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}", operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::CapacityPool = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(get::Error::DefaultResponse { status_code }), } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), 
#[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update( operation_config: &crate::OperationConfig, body: &models::CapacityPool, subscription_id: &str, resource_group_name: &str, account_name: &str, pool_name: &str, ) -> std::result::Result<create_or_update::Response, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}", operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(body).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::CapacityPool = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: 
// (continued) 201 Created branch of capacity_pools::create_or_update.
models::CapacityPool = serde_json::from_slice(rsp_body)
    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
status_code => Err(create_or_update::Error::DefaultResponse { status_code }),
}
}
// Response/Error types for capacity_pools::create_or_update.
pub mod create_or_update {
    use super::{models, API_VERSION};
    // One variant per documented success status; 202 carries no body.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::CapacityPool),
        Created201(models::CapacityPool),
        Accepted202,
    }
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
// PATCH .../capacityPools/{pool_name}: partial update via CapacityPoolPatch.
pub async fn update(
    operation_config: &crate::OperationConfig,
    body: &models::CapacityPoolPatch,
    subscription_id: &str,
    resource_group_name: &str,
    account_name: &str,
    pool_name: &str,
) -> std::result::Result<update::Response, update::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}",
        operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name
    );
    let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PATCH);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::CapacityPool =
                serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(update::Response::Ok200(rsp_value))
        }
        http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
        status_code => Err(update::Error::DefaultResponse { status_code }),
    }
}
// Response/Error types for capacity_pools::update.
pub mod update {
    use super::{models, API_VERSION};
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::CapacityPool),
        Accepted202,
    }
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
// DELETE .../capacityPools/{pool_name}: remove a capacity pool.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    // (continued) remaining parameters of capacity_pools::delete.
    account_name: &str,
    pool_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}",
        operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name
    );
    let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(delete::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
    // Delete never returns a body: only 202/204 are accepted as success.
    match rsp.status() {
        http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
        http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        status_code => Err(delete::Error::DefaultResponse { status_code }),
    }
}
// Response/Error types for capacity_pools::delete.
pub mod delete {
    use super::{models, API_VERSION};
    #[derive(Debug)]
    pub enum Response {
        Accepted202,
        NoContent204,
    }
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
// Operations on volumes inside a capacity pool.
pub mod volumes {
    use super::{models, API_VERSION};
    // GET .../capacityPools/{pool_name}/volumes: list all volumes of a pool.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        pool_name: &str,
    ) -> std::result::Result<models::VolumeList, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes",
            operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::VolumeList =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(list::Error::DefaultResponse { status_code }),
        }
    }
    // Error type for volumes::list.
    pub mod list {
        use
        // (continued) imports + Error enum of volumes::list.
        super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // GET .../volumes/{volume_name}: fetch a single volume.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        pool_name: &str,
        volume_name: &str,
    ) -> std::result::Result<models::Volume, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes/{}",
            operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name, volume_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp =
            http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::Volume =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(get::Error::DefaultResponse { status_code }),
        }
    }
    // Error type for volumes::get.
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // PUT .../volumes/{volume_name}: create or update a volume.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        body: &models::Volume,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        pool_name: &str,
        volume_name: &str,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes/{}",
            operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name, volume_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                // (continued) token acquisition for volumes::create_or_update.
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        // 200/201 carry the volume body; 202 does not.
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::Volume = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::Volume = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
            status_code => Err(create_or_update::Error::DefaultResponse { status_code }),
        }
    }
    // Response/Error types for volumes::create_or_update.
    pub mod create_or_update {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::Volume),
            Created201(models::Volume),
            Accepted202,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // PATCH .../volumes/{volume_name}: partial update via VolumePatch.
    pub async fn update(
        operation_config: &crate::OperationConfig,
        body: &models::VolumePatch,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        pool_name: &str,
        volume_name: &str,
    ) -> std::result::Result<update::Response, update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes/{}",
            operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name, volume_name
        );
        let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PATCH);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::Volume =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                // (continued) success arms of volumes::update.
                Ok(update::Response::Ok200(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
            status_code => Err(update::Error::DefaultResponse { status_code }),
        }
    }
    // Response/Error types for volumes::update.
    pub mod update {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::Volume),
            Accepted202,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // DELETE .../volumes/{volume_name}: remove a volume.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        pool_name: &str,
        volume_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes/{}",
            operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name, volume_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        // Body-less success statuses only.
        match rsp.status() {
            http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => Err(delete::Error::DefaultResponse { status_code }),
        }
    }
    // Response/Error types for volumes::delete.
    pub mod delete {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Accepted202,
            NoContent204,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // POST .../volumes/{volume_name}/revert: revert a volume using a VolumeRevert payload.
    pub async fn revert(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        pool_name: &str,
        volume_name: &str,
        body: &models::VolumeRevert,
    ) -> std::result::Result<revert::Response, revert::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes/{}/revert",
            operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name, volume_name
        );
        let mut url =
            // (continued) request construction for volumes::revert.
            url::Url::parse(url_str).map_err(revert::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(revert::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(revert::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(revert::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(revert::Error::ExecuteRequestError)?;
        // Success statuses carry no body.
        match rsp.status() {
            http::StatusCode::OK => Ok(revert::Response::Ok200),
            http::StatusCode::ACCEPTED => Ok(revert::Response::Accepted202),
            status_code => Err(revert::Error::DefaultResponse { status_code }),
        }
    }
    // Response/Error types for volumes::revert.
    pub mod revert {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // POST .../volumes/{volume_name}/breakReplication: empty POST with explicit Content-Length: 0.
    pub async fn break_replication(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        pool_name: &str,
        volume_name: &str,
    ) -> std::result::Result<break_replication::Response, break_replication::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes/{}/breakReplication",
            operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name, volume_name
        );
        let mut url = url::Url::parse(url_str).map_err(break_replication::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(break_replication::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        // Body-less POST: Content-Length is set explicitly to 0.
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(break_replication::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(break_replication::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(break_replication::Response::Ok200),
            http::StatusCode::ACCEPTED => Ok(break_replication::Response::Accepted202),
            status_code => Err(break_replication::Error::DefaultResponse { status_code }),
        }
    }
    // Response/Error types for volumes::break_replication.
    pub mod break_replication {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            // (continued) Error variants of volumes::break_replication.
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // GET .../volumes/{volume_name}/replicationStatus: read the volume's replication status.
    pub async fn replication_status(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        pool_name: &str,
        volume_name: &str,
    ) -> std::result::Result<models::ReplicationStatus, replication_status::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes/{}/replicationStatus",
            operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name, volume_name
        );
        let mut url = url::Url::parse(url_str).map_err(replication_status::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(replication_status::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(replication_status::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(replication_status::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ReplicationStatus = serde_json::from_slice(rsp_body)
                    .map_err(|source| replication_status::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(replication_status::Error::DefaultResponse { status_code }),
        }
    }
    // Error type for volumes::replication_status.
    pub mod replication_status {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error,
                // (continued) tail of volumes::replication_status's Error enum.
                bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // POST .../volumes/{volume_name}/resyncReplication: empty POST with explicit Content-Length: 0.
    pub async fn resync_replication(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        pool_name: &str,
        volume_name: &str,
    ) -> std::result::Result<resync_replication::Response, resync_replication::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes/{}/resyncReplication",
            operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name, volume_name
        );
        let mut url = url::Url::parse(url_str).map_err(resync_replication::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(resync_replication::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(resync_replication::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(resync_replication::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(resync_replication::Response::Ok200),
            http::StatusCode::ACCEPTED => Ok(resync_replication::Response::Accepted202),
            status_code => Err(resync_replication::Error::DefaultResponse { status_code }),
        }
    }
    // Response/Error types for volumes::resync_replication.
    pub mod resync_replication {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // POST .../volumes/{volume_name}/deleteReplication: empty POST with explicit Content-Length: 0.
    pub async fn delete_replication(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        pool_name: &str,
        volume_name: &str,
    ) -> std::result::Result<delete_replication::Response, delete_replication::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes/{}/deleteReplication",
            operation_config.
            // (continued) URL arguments of volumes::delete_replication.
            base_path(), subscription_id, resource_group_name, account_name, pool_name, volume_name);
        let mut url = url::Url::parse(url_str).map_err(delete_replication::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete_replication::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete_replication::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(delete_replication::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(delete_replication::Response::Ok200),
            http::StatusCode::ACCEPTED => Ok(delete_replication::Response::Accepted202),
            status_code => Err(delete_replication::Error::DefaultResponse { status_code }),
        }
    }
    // Response/Error types for volumes::delete_replication.
    pub mod delete_replication {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // POST .../volumes/{volume_name}/authorizeReplication: JSON AuthorizeRequest payload.
    pub async fn authorize_replication(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        pool_name: &str,
        volume_name: &str,
        body: &models::AuthorizeRequest,
    ) -> std::result::Result<authorize_replication::Response, authorize_replication::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes/{}/authorizeReplication",
            operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name, volume_name
        );
        let mut url = url::Url::parse(url_str).map_err(authorize_replication::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(authorize_replication::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(authorize_replication::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(authorize_replication::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(authorize_replication::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(authorize_replication::Response::Ok200),
            http::StatusCode::ACCEPTED => Ok(authorize_replication::Response::Accepted202),
            status_code =>
Err(authorize_replication::Error::DefaultResponse { status_code }), } } pub mod authorize_replication { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, Accepted202, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod snapshots { use super::{models, API_VERSION}; pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, account_name: &str, pool_name: &str, volume_name: &str, ) -> std::result::Result<models::SnapshotsList, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes/{}/snapshots", operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name, volume_name ); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } 
url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SnapshotsList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(list::Error::DefaultResponse { status_code }), } } pub mod list { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, account_name: &str, pool_name: &str, volume_name: &str, snapshot_name: &str, ) -> std::result::Result<models::Snapshot, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes/{}/snapshots/{}", operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name, volume_name, snapshot_name ); let mut url = 
url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::Snapshot = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(get::Error::DefaultResponse { status_code }), } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create( operation_config: &crate::OperationConfig, body: &models::Snapshot, subscription_id: &str, resource_group_name: 
&str, account_name: &str, pool_name: &str, volume_name: &str, snapshot_name: &str, ) -> std::result::Result<create::Response, create::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes/{}/snapshots/{}", operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name, volume_name, snapshot_name ); let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(body).map_err(create::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: models::Snapshot = serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create::Response::Created201(rsp_value)) } http::StatusCode::ACCEPTED => Ok(create::Response::Accepted202), status_code => Err(create::Error::DefaultResponse { status_code }), } } pub mod create { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Created201(models::Snapshot), Accepted202, } #[derive(Debug, thiserror :: 
Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn update( operation_config: &crate::OperationConfig, body: &models::SnapshotPatch, subscription_id: &str, resource_group_name: &str, account_name: &str, pool_name: &str, volume_name: &str, snapshot_name: &str, ) -> std::result::Result<update::Response, update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes/{}/snapshots/{}", operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name, volume_name, snapshot_name ); let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PATCH); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?; req_builder = 
req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::Snapshot = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(update::Response::Ok200(rsp_value)) } http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202), status_code => Err(update::Error::DefaultResponse { status_code }), } } pub mod update { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::Snapshot), Accepted202, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, account_name: &str, pool_name: &str, volume_name: &str, snapshot_name: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.NetApp/netAppAccounts/{}/capacityPools/{}/volumes/{}/snapshots/{}", operation_config.base_path(), subscription_id, resource_group_name, account_name, pool_name, volume_name, snapshot_name ); let mut url = 
url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => Err(delete::Error::DefaultResponse { status_code }), } } pub mod delete { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, Accepted202, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
/// Iterator that yields sphere centres of a hexagonal-style packing inside an
/// axis-aligned box.
///
/// NOTE: the field `r` stores the sphere *diameter* (it is assigned
/// `min_r * 2.0` in `fit_n_in_box` and used directly as the lattice step in
/// `next`), not the radius.
#[derive(Debug, Clone)]
pub struct SpherePacking {
    // Lower corner of the box.
    min: [f64; 3],
    // Upper corner of the box.
    max: [f64; 3],
    // Sphere diameter = lattice step along x and y.
    r: f64,
    // Centre emitted by the most recent call to `next`.
    current: [f64; 3],
    // Lateral shift of the current layer: 0.0 or half a diameter.
    current_offset: f64,
}

impl SpherePacking {
    /// Finds, by bisection, (approximately) the largest sphere radius such
    /// that at least `n` spheres fit into the box spanned by `min`/`max`,
    /// and returns an iterator over the packed centres.
    ///
    /// # Panics
    /// Panics if even the smallest considered radius (`epsilon`) cannot fit
    /// `n` spheres according to the `fit` estimate.
    pub fn fit_n_in_box(n: u64, min: [f64; 3], max: [f64; 3]) -> Self {
        let range = [max[0] - min[0], max[1] - min[1], max[2] - min[2]];
        // Initial guess: the cube root of the box volume, the natural length
        // scale of the problem.
        // BUG FIX: the original computed `(range[0] * range[1] + range[2])`,
        // which is dimensionally inconsistent (area + length); the `+` was a
        // typo for `*`.
        let mut approx_r = (range[0] * range[1] * range[2]).powf(1.0 / 3.0);
        let epsilon = 1e-5 * approx_r;
        let mut min_r = epsilon;
        let mut max_r = 2.0 * approx_r;
        assert!(fit(range, min_r) >= n);
        // Bisect on the radius: `min_r` stays feasible (>= n spheres fit) and
        // `max_r` infeasible, until the bracket shrinks below `epsilon`.
        while max_r - min_r > epsilon {
            if fit(range, approx_r) >= n {
                min_r = approx_r;
            } else {
                max_r = approx_r;
            }
            approx_r = 0.5 * (min_r + max_r);
        }
        // Start one full step to the left of the box so that the first call
        // to `next` lands exactly on `min`.
        let mut current = min;
        current[0] -= 2.0 * min_r;
        Self {
            min,
            max,
            r: min_r * 2.0, // store the diameter, see struct docs
            current,
            current_offset: 0.0,
        }
    }
}

impl Iterator for SpherePacking {
    type Item = [f64; 3];

    /// Walks the lattice x-fastest, then y, then z; every other z-layer is
    /// shifted by half a diameter in x and y (hexagonal-style stacking).
    fn next(&mut self) -> Option<[f64; 3]> {
        // Advance along x within the current row.
        if self.current[0] + self.r <= self.max[0] {
            self.current[0] += self.r;
            return Some(self.current);
        }
        // Row exhausted: start the next row along y.
        if self.current[1] + self.r <= self.max[1] {
            self.current[0] = self.min[0] + self.current_offset;
            self.current[1] += self.r;
            return Some(self.current);
        }
        // Layer exhausted: start the next layer along z, toggling the
        // half-diameter lateral offset.
        if self.current[2] + pitch(self.r) <= self.max[2] {
            self.current_offset = if self.current_offset > 0.0 {
                0.0
            } else {
                0.5 * self.r
            };
            self.current[2] += pitch(self.r);
            self.current[0] = self.min[0] + self.current_offset;
            self.current[1] = self.min[1] + self.current_offset;
            return Some(self.current);
        }
        None
    }
}

/// Vertical spacing factor sqrt(6)/3 (~0.8165) of the lattice step `r`;
/// with `r` = diameter this is the layer-to-layer distance of a hexagonal
/// close packing.
pub fn pitch(r: f64) -> f64 {
    6.0f64.sqrt() * r / 3.0
}

/// Estimates how many spheres of radius `r` fit into a box of size `range`:
/// the product term counts the unshifted layers, the second term the
/// interleaved, offset layers. (`as u64` truncates, i.e. floors, the first
/// z factor.)
fn fit(range: [f64; 3], r: f64) -> u64 {
    let p = pitch(r);
    (1 + (range[0] / (2.0 * r)).floor() as u64)
        * (1 + (range[1] / (2.0 * r)).floor() as u64)
        * (1 + (range[2] / (4.0 * p)) as u64)
        + ((range[0] - r) / (2.0 * r)).ceil() as u64
            * ((range[1] - r) / (2.0 * r)).ceil() as u64
            * ((range[2] - 2.0 * p) / (4.0 * p)).ceil() as u64
}
use crate::lbvrf::*; use crate::param::*; use crate::rand::RngCore; use crate::serde::Serdes; use crate::VRF; #[test] fn test_param_gen() { let p = LBVRF::paramgen([0; 32]); println!("{:?}", p); } #[test] fn test_hash_to_challenge() { let input = "this is a random input for testing"; let c = hash_to_challenge(input.as_ref()); let mut sum = 0; for e in c.coeff.iter() { assert!(*e <= 1 || *e >= -1, "coefficients out of range {}", *e); if *e != 0 { sum += 1; } } assert_eq!(sum, KAPPA) } #[test] fn test_lbvrf() { let seed = [0u8; 32]; // let mut rng = rand::thread_rng(); // let param = Param::init(&mut rng); let param: Param = <LBVRF as VRF>::paramgen(seed).unwrap(); let (pk, sk) = <LBVRF as VRF>::keygen(seed, param).unwrap(); let message = "this is a message that vrf signs"; let seed = [0u8; 32]; let proof = <LBVRF as VRF>::prove(message, param, pk, sk, seed).unwrap(); let mut buf: Vec<u8> = vec![]; assert!(proof.serialize(&mut buf).is_ok()); println!("{:?}", buf); let proof2 = <LBVRF as VRF>::Proof::deserialize(&mut buf[..].as_ref()).unwrap(); assert_eq!(proof, proof2); let res = <LBVRF as VRF>::verify(message, param, pk, proof).unwrap(); assert!(res.is_some()); assert_eq!(res.unwrap(), proof.v); } #[test] fn test_rs() { let mut rng = rand::thread_rng(); let mut pp_seed = [0u8; 32]; let mut key_seed = [0u8; 32]; let mut vrf_seed = [0u8; 32]; let mut t = 0; let total = 10; for _i in 0..total { rng.fill_bytes(&mut pp_seed); rng.fill_bytes(&mut key_seed); rng.fill_bytes(&mut vrf_seed); let param: Param = <LBVRF as VRF>::paramgen(pp_seed).unwrap(); let (pk, sk) = <LBVRF as VRF>::keygen(key_seed, param).unwrap(); let message = "this is a message that vrf signs"; let (proof, rs) = prove_with_rs(message, param, pk, sk, vrf_seed).unwrap(); t += rs; let res = <LBVRF as VRF>::verify(message, param, pk, proof).unwrap(); assert!(res.is_some()); assert_eq!(res.unwrap(), proof.v); } println!("rs times {} for {} vrfs", t, total); // assert!(false) }
use crate::config;
use crate::util::*;
use std::fs::File;

/// Creates a new, empty note file in the configured notes directory.
///
/// The note name is taken from the third CLI argument when present, otherwise
/// it is read interactively (via `get_input`). The file path is built as
/// `directory + name + EXTENSION` by plain string concatenation, so
/// `config::directory()` is assumed to end with a path separator -- TODO
/// confirm against the config module.
pub fn run() {
    let name = if args_len() < 3 {
        get_input()
    } else {
        get_argument(2)
    };
    match File::create(format!("{}{}{}", &config::directory(), name, config::EXTENSION)) {
        Ok(_) => println!("Note {} created.", name),
        // BUG FIX: the original discarded the I/O error (`Err(_)`), leaving
        // the user unable to tell a permissions problem from a missing
        // directory. Report the cause, and on stderr where errors belong.
        Err(e) => eprintln!("Failed to create note: {}", e),
    };
}
use crates_io_api::{SyncClient, Crate, Error, Sort, ListOptions}; use serde::{Deserialize}; use serde_json::{Deserializer}; use std::process::{Command, Stdio}; use std::io::Write; #[derive(Deserialize, Debug)] struct BuildMetadataTarget { kind: Vec<String>, crate_types: Vec<String>, name: String, } #[derive(Deserialize, Debug)] struct BuildMetadata { reason: String, package_id: String, target: Option<BuildMetadataTarget>, filenames: Option<Vec<String>>, executable: Option<String> } #[derive(Debug, PartialEq)] enum ArtifactType { Binary, RustLibrary, } #[derive(Debug)] struct Artifact { filename: String, kind: ArtifactType, size: u64, } #[derive(Debug)] struct CrateResult { deps: usize, artifacts: Vec<Artifact>, } impl std::fmt::Display for CrateResult { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let libs: Vec<&Artifact> = self.artifacts.iter().filter(|a| a.kind == ArtifactType::RustLibrary).collect(); let bins: Vec<&Artifact> = self.artifacts.iter().filter(|a| a.kind == ArtifactType::Binary).collect(); let lib = libs.first().map(|a| a.size).unwrap_or(0); let bin = bins.first().map(|a| a.size).unwrap_or(0); let lib_str = match lib { 0 => " ".to_string(), _ => format!("{:>8.2}", lib as f64 / 1024. / 1024.), }; let bin_str = match bin { 0 => " ".to_string(), _ => format!("{:>8.2}", bin as f64 / 1024. 
/ 1024.), }; let res = write!(f, " {:>6} \t {} \t {} ", self.deps, lib_str, bin_str); if libs.len() > 1 { let _ = write!(f, " WARNING: >1 lib"); } res } } #[derive(Debug)] struct PkgId { name: String, version: String, } fn top_crates(count: usize, category: Option<String>, filter_fn: Option<fn(&Crate) -> bool>) -> Result<Vec<Crate>, Error> { let mut remaining = count; let mut page = 1; let mut all_crates = Vec::<Crate>::with_capacity(count); while remaining > 0 { let client = SyncClient::new(); let options = ListOptions { sort: Sort::Downloads, per_page: 100, page: page, query: None, category: category.clone(), }; let mut crates = client.crates(options)?; crates.crates.retain(|c| match filter_fn { Some(f) => f(&c), None => true, }); all_crates.append(&mut crates.crates); remaining -= match remaining { x if x > 100 => 100, x => x, }; page += 1; } Ok(all_crates) } #[allow(dead_code)] fn search_crates(query: &str) -> Result<Vec<Crate>, Error> { let client = SyncClient::new(); let options = ListOptions { sort: Sort::Downloads, per_page: 100, page: 1, query: Some(query.to_string()), category: None, }; let crates = client.crates(options)?; Ok(crates.crates) } fn pkgid(crt: &Crate) -> Result<PkgId, std::io::Error> { let dir = format!("clone_{}", crt.id); let output = Command::new("cargo") .args(&["pkgid"]) .current_dir(&dir) .output() .expect("pkgid failed"); let output = String::from_utf8(output.stdout).expect("Unreadable pkgid output"); let substr = output.split("#").nth(1).expect("pkgid missing #"); let mut bits = substr.split(":"); Ok(PkgId { name: bits.next().expect("pkgid missing crate").trim().into(), version: bits.next().expect("pkgid missing version").trim().into(), }) } fn artifacts(_crt: &Crate, metadata: &str, pkgid: &PkgId) -> Result<Vec<Artifact>, std::io::Error> { let mut artifacts: Vec<Artifact> = vec!(); let pkgid_str = format!("{} {}", pkgid.name, pkgid.version); let stream = Deserializer::from_str(metadata).into_iter::<BuildMetadata>(); let mut 
meta_objs: Vec<BuildMetadata> = vec!(); for value in stream { let m: BuildMetadata = value.expect("Fail parsing metadata json"); if m.package_id.starts_with(&pkgid_str) && m.reason == "compiler-artifact" && m.target.is_some() { meta_objs.push(m); } } for m in &meta_objs { let target = m.target.as_ref().unwrap(); if target.kind.iter().any(|x| x == "bin") { if let Some(exe) = &m.executable { artifacts.push(Artifact { filename: exe.clone(), kind: ArtifactType::Binary, size: 0, }); } } if target.kind.iter().any(|x| x == "lib" || x == "rlib") { if let Some(filenames) = &m.filenames { for file in filenames { if file.ends_with(".rlib") { artifacts.push(Artifact { filename: file.clone(), kind: ArtifactType::RustLibrary, size: 0, }); } } } } } for mut arty in &mut artifacts { if let Ok(file) = std::fs::File::open(&arty.filename) { if let Ok(stat) = file.metadata() { arty.size = stat.len(); } } } Ok(artifacts) } fn analyze_crate(crt: &Crate) -> Result<CrateResult, std::io::Error> { let dir = format!("clone_{}", crt.id); let repo = crt.repository.as_ref().ok_or(std::io::ErrorKind::NotFound)?; // Always provide a username/password so git fails fast if one is required. 
let repo = repo.replace("https://", "https://dummy_user:dummy_password@"); let _result = Command::new("git") .args(&["clone", "--recursive", "--quiet", &repo, &dir]) .stdin(Stdio::null()) .stderr(Stdio::null()) .status() .expect("clone failed"); if !std::path::Path::new(&dir).exists() { return Err(std::io::ErrorKind::Other.into()); } let cargo_toml_path = format!("{}/Cargo.toml", dir); if !std::path::Path::new(&cargo_toml_path).exists() { return Err(std::io::ErrorKind::Other.into()); } let result = Command::new("cargo") .args(&["build", "--release", "--message-format=json"]) .current_dir(&dir) .stderr(Stdio::null()) .output() .expect("build failed"); if !result.status.success() { return Err(std::io::ErrorKind::Other.into()); } let metadata = String::from_utf8(result.stdout).expect("Unreadable pkgid output"); // $ cargo tree --no-indent -a |sort |uniq -c |sort -nr |wc -l let mut cargo_result = Command::new("cargo") .current_dir(&dir) .args(&["tree", "--no-indent", "--no-dev-dependencies", "-a"]) .stdin(Stdio::null()) .stdout(Stdio::piped()) .stderr(Stdio::null()) .spawn() .expect("tree failed"); let cargo_out = cargo_result.stdout.take().expect("Cargo tree stdout failed"); let mut sort_result = Command::new("sort") .current_dir(&dir) .stdin(Stdio::from(cargo_out)) .stdout(Stdio::piped()) .spawn() .expect("sort failed"); let sort_out = sort_result.stdout.take().expect("sort stdout failed"); let mut awk_result = Command::new("awk") .current_dir(&dir) .args(&["{print $1}"]) .stdin(Stdio::from(sort_out)) .stdout(Stdio::piped()) .spawn() .expect("awk failed"); let awk_out = awk_result.stdout.take().expect("awk stdout failed"); let mut uniq_result = Command::new("uniq") .current_dir(&dir) .args(&["-c"]) .stdin(Stdio::from(awk_out)) .stdout(Stdio::piped()) .spawn() .expect("uniq failed"); let uniq_out = uniq_result.stdout.take().expect("uniq stdout failed"); let sort2_result = Command::new("sort") .current_dir(&dir) .args(&["-nr"]) .stdin(Stdio::from(uniq_out)) 
.stdout(Stdio::piped()) .spawn() .expect("sort failed"); let output = sort2_result.wait_with_output().expect("sort failed"); let _ = cargo_result.wait(); let _ = sort_result.wait(); let _ = awk_result.wait(); let _ = uniq_result.wait(); let output = String::from_utf8(output.stdout).expect("Unreadable output"); // Subtract 1 for the root crate. let dep_count = match output.lines().count() { e if e > 0 => e - 1, _ => return Err(std::io::ErrorKind::Other.into()), }; let artifacts = artifacts(crt, &metadata, &pkgid(crt)?)?; Ok(CrateResult { deps: dep_count, artifacts: artifacts, }) } #[derive(Debug, Default)] struct Statistics { count: usize, mean: f64, median: f64, stddev: f64, max: usize, } #[derive(Debug)] struct BatchStatistics { deps: Statistics, libs: Statistics, bins: Statistics, } fn statistics(crates: &Vec<CrateResult>) -> Result<BatchStatistics, std::io::Error> { let deps: Vec<usize> = crates.iter().filter_map(|c| match c.deps { 0 => None, c => Some(c), }).collect(); let deps_f64: Vec<f64> = deps.iter().map(|v| *v as f64).collect(); let libs: Vec<u64> = crates.iter().filter_map(|c| { c.artifacts.iter().filter_map(|a| { match a.kind { ArtifactType::RustLibrary => Some(a.size), _ => None, } }).next() }).collect(); let libs_f64: Vec<f64> = libs.iter().map(|v| *v as f64).collect(); let bins: Vec<u64> = crates.iter().filter_map(|c| { c.artifacts.iter().filter_map(|a| { match a.kind { ArtifactType::Binary => Some(a.size), _ => None, } }).next() }).collect(); let bins_f64: Vec<f64> = bins.iter().map(|v| *v as f64).collect(); Ok(BatchStatistics { deps: Statistics { count: deps.len(), mean: statistical::mean(deps_f64.as_slice()), median: statistical::median(deps_f64.as_slice()), stddev: statistical::standard_deviation(deps_f64.as_slice(), None), max: *deps.iter().max().unwrap_or(&0), }, libs: Statistics { count: libs.len(), mean: statistical::mean(libs_f64.as_slice()), median: statistical::median(libs_f64.as_slice()), stddev: 
statistical::standard_deviation(libs_f64.as_slice(), None), max: *libs.iter().max().unwrap_or(&0) as usize, }, bins: Statistics { count: bins.len(), mean: statistical::mean(bins_f64.as_slice()), median: statistical::median(bins_f64.as_slice()), stddev: statistical::standard_deviation(bins_f64.as_slice(), None), max: *bins.iter().max().unwrap_or(&0) as usize, }, }) } fn analyze(crates: Vec<Crate>) { let blacklist = [ "rustc-ap-rustc_cratesio_shim", // all of rust compiler "rustc-ap-rustc_target", "rustc-ap-serialize", "rustc-ap-rustc_data_structures", "rustc-ap-syntax_pos", "rustc-ap-syntax", "rustc-ap-rustc_errors", // these are identical to rls-analysis "rls-data", "rls-span", "rls-vfs", // these are identical to actix-http "actix-files", "actix-http-test", "actix-web", "actix-web-httpauth", // identical to winapi "winapi-build", "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", // identical to rand "rand_xorshift", "rand_pcg", "rand_os", "rand_jitter", "rand_isaac", "rand_hc", "rand_core", "rand_chacha", // identical to wayland-client "wayland-commons", "wayland-kbd", "wayland-protocols", "wayland-scanner", "wayland-server", "wayland-window", // identical to clone_tokio "clone_tokio-codec", "clone_tokio-core", "clone_tokio-curl", "clone_tokio-current-thread", "clone_tokio-executor", "clone_tokio-fs", "clone_tokio-io", "clone_tokio-proto", "clone_tokio-reactor", "clone_tokio-service", "clone_tokio-signal", "clone_tokio-sync", "clone_tokio-tcp", "clone_tokio-threadpool", "clone_tokio-timer", "clone_tokio-tls", "clone_tokio-trace-core", "clone_tokio-tungstenite", "clone_tokio-udp", "clone_tokio-uds", ]; // Buckets of 1, up to 20 let mut buckets: [u8; 22] = [ 0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0, 0, 0, ]; let mut results: Vec<CrateResult> = vec!(); println!(""); println!("{:<32}: {:>6} \t {:>7} \t {:>7} ", "CRATE", "DEPS", "LIB (MB)", "BIN (MB)"); println!("{}", std::iter::repeat("-").take(73).collect::<String>()); let crate_count = crates.len(); for 
(idx,c) in crates.iter().enumerate() { if !blacklist.contains(&c.id.as_str()) { let progress = format!("[{:>3}/{}]", idx, crate_count); print!("{:<10} {:<21}: ", progress, c.id.chars().take(21).collect::<String>()); let _ = std::io::stdout().flush(); match analyze_crate(c) { Err(_) => { println!(""); }, Ok(res) => { println!("{}", res); match res.deps { e if e <= 20 => buckets[e] += 1, _ => buckets[21] += 1, } results.push(res); }, } } } let stats = statistics(&results).expect("failed to generate statistics"); println!(""); println!("Number of crates analyzed: {}", results.len()); println!(""); println!("Dependencies:"); println!(" count: {}", stats.deps.count); println!(" mean: {:.2} +/- {:.2}", stats.deps.mean, stats.deps.stddev); println!(" median: {:.2}", stats.deps.median); println!(" maximum: {}", stats.deps.max); println!(""); println!("Library size:"); println!(" count: {}", stats.libs.count); println!(" mean: {:.2} +/- {:.2} [{:.2} MB + / {:.2} MB]", stats.libs.mean, stats.libs.stddev, stats.libs.mean / 1024. / 1024., stats.libs.stddev / 1024. / 1024.); println!(" median: {:.2} [{:.2} MB]", stats.libs.median, stats.libs.median / 1024. / 1024.); println!(" maximum: {} [{:.2} MB]", stats.libs.max, stats.libs.max as f64 / 1024. / 1024.); println!(""); println!("Binary size:"); println!(" count: {}", stats.bins.count); println!(" mean: {:.2} +/- {:.2} [{:.2} MB + / {:.2} MB]", stats.bins.mean, stats.bins.stddev, stats.bins.mean / 1024. / 1024., stats.bins.stddev / 1024. / 1024.); println!(" median: {:.2} [{:.2} MB]", stats.bins.median, stats.bins.median / 1024. / 1024.); println!(" maximum: {} [{:.2} MB]", stats.bins.max, stats.bins.max as f64 / 1024. 
/ 1024.); println!(""); println!("Dependency count histogram (buckets 0-20 by 1, 20+):"); for (i, count) in buckets.iter().enumerate() { let idx = match i { 21 => "> 20".to_string(), _ => format!("{:>4}", i), }; print!("{} ({:>5.1}%): ", idx, 100.0 * (*count as f64) / results.len() as f64); println!("{}", ['*'].iter().cycle().take(*count as usize).collect::<String>()); } // Buckets of 10, up to 200 let mut buckets: [u8; 21] = [ 0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0, 0, ]; for res in &results { match res.deps { e if e < 200 => buckets[e / 10] += 1, _ => buckets[20] += 1, } } println!(""); println!("Dependency count histogram (buckets 0-200 by 10, 200+):"); for (i, count) in buckets.iter().enumerate() { let idx = match i { 20 => " > 200".to_string(), _ => format!("{:>3} - {:>3}", 10*i, 10*(i+1)), }; print!("{} ({:>5.1}%): ", idx, 100.0 * (*count as f64) / results.len() as f64); println!("{}", ['*'].iter().cycle().take(std::cmp::min(50, *count as usize)).collect::<String>()); } println!(""); } fn main() { println!("========== 200 command-line-utilities crates =========="); let crates = top_crates(200, Some("command-line-utilities".into()), None).unwrap(); analyze(crates); println!("========== 100 graphics crates =========="); let crates = top_crates(100, Some("graphics".into()), None).unwrap(); analyze(crates); println!("========== 100 gui crates =========="); let crates = top_crates(100, Some("gui".into()), None).unwrap(); analyze(crates); println!("========== 100 web-programming crates =========="); let crates = top_crates(100, Some("web-programming".into()), None).unwrap(); analyze(crates); println!("========== Top 400 crates =========="); let crates = top_crates(400, None, None).unwrap(); analyze(crates); }
extern crate percent_encoding;

use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};

/// Characters percent-encoded in addition to C0 controls (the URL
/// "fragment" encode set: space, double quote, angle brackets, backtick).
const FRAGMENT: &AsciiSet = &CONTROLS.add(b' ').add(b'"').add(b'<').add(b'>').add(b'`');

/// Builds a Startpage search URL for `query`, percent-encoding the query
/// with the `FRAGMENT` set first.
pub fn construct_startpage_search_url(query: &str) -> String {
    let encoded = utf8_percent_encode(query, FRAGMENT);
    format!("https://startpage.com/search?q={}", encoded)
}

#[cfg(test)]
mod tests {
    use super::*;

    // A query without reserved characters passes through unchanged.
    #[test]
    fn test_construct_startpage_search_url() {
        assert_eq!(
            construct_startpage_search_url("hello"),
            "https://startpage.com/search?q=hello"
        );
    }

    // Spaces are percent-encoded as %20.
    #[test]
    fn test_construct_startpage_search_url_with_encoding() {
        assert_eq!(
            construct_startpage_search_url("hello world"),
            "https://startpage.com/search?q=hello%20world"
        );
    }
}
extern crate minigrep_v2;

use minigrep_v2::Config;

// Builds the shared search configuration: query "to" against poem.txt.
fn config_for(case_sensitive: bool) -> Config {
    Config {
        query: String::from("to"),
        path: String::from("poem.txt"),
        case_sensitive,
    }
}

// Case-sensitive search matches only lines containing lowercase "to".
#[test]
fn performs_case_sensitive_search() {
    let expected = vec![
        String::from("Are you nobody, too?"),
        String::from("How dreary to be somebody!"),
    ];
    assert_eq!(minigrep_v2::run(config_for(true)).unwrap(), expected)
}

// Case-insensitive search additionally matches lines containing "To".
#[test]
fn performs_case_insensitive_search() {
    let expected = vec![
        String::from("Are you nobody, too?"),
        String::from("How dreary to be somebody!"),
        String::from("To tell your name the livelong day"),
        String::from("To an admiring bog!"),
    ];
    assert_eq!(minigrep_v2::run(config_for(false)).unwrap(), expected)
}
/// Module to attack substitution cipher texts.
///
/// This module uses a word pattern matching method to guess probable key used to cipher
/// a text using substitution algorithm.
///
/// You should be aware that to be successful charset used for attack should be the
/// same used to cipher. Besides, this module tries to guess if deciphered text is
/// the good one comparing it with words from a language dictionary. If original
/// message was in a language you don't have a dictionary for, then correct key
/// won't be detected.
use crate::{ErrorKind, Result, ResultExt, Error};
use crate::attack::dictionaries::{get_words_from_text, Dictionary, get_word_pattern};
// `crate::FromStr` provides the `char::fromStr` helper used throughout this
// module and its tests (see the commented-out local version further below).
use crate::FromStr;
use crate::cipher::substitution::decipher;
use std::collections::{HashMap, HashSet};
// use std::convert::From;
use std::fmt::Debug;
// use std::fmt;
// use std::iter::FromIterator;
use rayon::prelude::*;

/// Creates a mapping instance using a content description similar to python dicts.
///
/// For instance:
/// ```rust
/// let mut current_mapping = mapping!(TEST_CHARSET,
///                                    {"1" : {"a", "b"},
///                                     "2" : {"c"},
///                                     "3" : {"d"},
///                                     "4" : {"d", "f"},
///                                     "5" : {"c", "h"}});
/// ```
///
/// # Parameters:
/// * charset: Charset used for substitution method. Both ends, ciphering
/// and deciphering, should use the same charset or original text won't be properly
/// recovered.
/// * content: Python dict like structure whose keys are cipherletters and values are python
/// set like lists with letter candidates.
///
/// # Returns:
/// * A Mapping instance loaded with mapping dict content.
macro_rules! mapping {
    // Arm 1: every key carries a non-empty candidate set, e.g. {"1": {"a", "b"}}.
    ( $charset:expr , { $($key:tt : {$($value:tt), +}), + } ) => {
        {
            let mut mapping_content: HashMap<char, Option<HashSet<char>>> = HashMap::new();
            $(
                // Each candidate literal is converted to a char via crate::FromStr.
                let values_list = vec![$(char::fromStr($value)), +];
                let values_iter = values_list.into_iter();
                // HashSet::from_iter needs std::iter::FromIterator in scope
                // (part of the prelude since the 2021 edition).
                mapping_content.insert(char::fromStr($key), Some(HashSet::from_iter(values_iter)));
            )+
            let mapping = Mapping::new(&mapping_content, $charset);
            mapping
        }
    };
    // Arm 2: keys with explicitly empty candidate sets, e.g. {"1": {}} -> None.
    ( $charset:expr , { $($key:tt : {}), + } ) => {
        {
            let mut mapping_content: HashMap<char, Option<HashSet<char>>> = HashMap::new();
            $(
                mapping_content.insert(char::fromStr($key), None);
            )+
            let mapping = Mapping::new(&mapping_content, $charset);
            mapping
        }
    };
}

// trait FromStr<T> {
//     fn fromStr(s: T) -> Self;
// }
//
// impl FromStr<&str> for char {
//     fn fromStr(s: &str) -> Self {
//         s.chars().next().expect(format!("Could not create char from given string: {}", s).as_str())
//     }
// }

/// Get substitution ciphered text key.
///
/// Uses a word pattern matching technique to identify used language.
///
/// **You should not use this function. Use *hack_substitution_mp* instead.** This
/// function is slower than *mp* one because is sequential while the other uses a
/// multiprocessing approach. This function only stay here to allow comparisons
/// between sequential and multiprocessing approaches.
///
/// # Parameters:
/// * ciphered_text: Text to be deciphered.
/// * charset: Charset used for substitution method. Both ends, ciphering
/// and deciphering, should use the same charset or original text won't be properly
/// recovered.
/// * database_path: Absolute pathname to database file. Usually you don't
/// set this parameter, but it is useful for tests.
///
/// # Returns:
/// * A tuple with substitution key found and success probability.
pub fn hack_substitution<T, U>(ciphered_text: T, charset: U) -> Result<(String, f64)>
    where T: AsRef<str>,
          U: AsRef<str> {
    let ciphered_words = get_words_from_text(&ciphered_text);
    let available_languages = Dictionary::get_dictionaries_names()
        .chain_err(|| ErrorKind::DatabaseError(String::from("We could not get dictionaries names.")))?;
    // For every candidate key keep the best probability seen across all languages.
    let mut keys_found: HashMap<String, f64> = HashMap::new();
    for language in available_languages {
        let language_probabilities = get_keys_probabilities(&ciphered_text, &charset,
                                                            &ciphered_words, &language)?;
        for (key, &probability) in language_probabilities.iter() {
            let best_so_far = keys_found.entry(key.clone()).or_insert(probability);
            if probability > *best_so_far {
                *best_so_far = probability;
            }
        }
    }
    let (best_key, best_probability) = get_best_key(&keys_found);
    Ok((best_key, best_probability))
}

/// Compute candidate keys and their success probabilities for one language.
fn get_keys_probabilities<T, U>(ciphered_text: &T, charset: &U, ciphered_words: &HashSet<String>,
                                language: &String) -> Result<HashMap<String, f64>>
    where T: AsRef<str>,
          U: AsRef<str> {
    let (possible_mappings, _) = get_possible_mappings(&language, &ciphered_words, &charset)?;
    assess_candidate_keys(&ciphered_text, &language, &possible_mappings, &charset)
}

/// Get substitution ciphered text key.
///
/// Uses a word pattern matching technique to identify used language.
///
/// **You should use this function instead of *hack_substitution*.**
///
/// Whereas *hack_substitution* uses a sequential approach, this function uses
/// multiprocessing to improve performance.
///
/// # Parameters:
/// * ciphered_text: Text to be deciphered.
/// * charset: Charset used for substitution method. Both ends, ciphering
/// and deciphering, should use the same charset or original text won't be properly
/// recovered.
/// * database_path: Absolute pathname to database file. Usually you don't
/// set this parameter, but it is useful for tests.
///
/// # Returns:
/// * A tuple with substitution key found and success probability.
pub fn hack_substitution_mp(ciphered_text: &str, charset: &str) -> Result<(String, f64)> {
    let ciphered_words = get_words_from_text(&ciphered_text);
    let available_languages = Dictionary::get_dictionaries_names()
        .chain_err(|| ErrorKind::DatabaseError(String::from("We could not get dictionaries names.")))?;
    let mut keys_found: HashMap<String, f64> = HashMap::new();
    // Assess every language in parallel; each job yields its own key -> probability map.
    let languages_probabilities: Vec<Result<HashMap<String, f64>>> = available_languages.par_iter()
        .map(|language| get_keys_probabilities(&ciphered_text, &charset, &ciphered_words, &language))
        .collect();
    for language_probability in languages_probabilities {
        let probabilities = language_probability?;
        // BUGFIX: this used `keys_found.extend(...)`, which let a later language
        // overwrite a key with a *lower* probability. Mirror the sequential
        // hack_substitution() and keep the best probability seen for every key.
        for (key, probability) in probabilities {
            let best_so_far = keys_found.entry(key).or_insert(probability);
            if probability > *best_so_far {
                *best_so_far = probability;
            }
        }
    }
    let (best_key, best_probability) = get_best_key(&keys_found);
    Ok((best_key, best_probability))
}

/// Get every possible mapping for given ciphered words in given language.
///
/// # Parameters:
/// * language: Language to compare with ciphered words.
/// * ciphered_words: Words whose patterns needs to be compared with those from language dictionary.
/// * charset: Charset used for substitution method. Both ends, ciphering
/// and deciphering, should use the same charset or original text won't be properly
/// recovered.
///
/// # Returns:
/// * Tuple with a Vec of possible mapping found and a string with language name where those
/// mappings were found.
fn get_possible_mappings<T, U, V>(language: T, ciphered_words: &HashSet<U>, charset: V)
    -> Result<(Vec<Mapping>, String)>
    where T: AsRef<str>,
          U: AsRef<str>,
          V: AsRef<str> {
    // Build one global mapping from every ciphered word, prune it, then expand
    // it into the list of fully-resolved candidate mappings.
    let mut global_mapping = generate_language_mapping(&language, ciphered_words, &charset)
        .chain_err(|| "Error generating language mapping.")?;
    global_mapping.clean_redundancies();
    Ok((global_mapping.get_possible_mappings(), language.as_ref().to_string()))
}

/// Generate a mapping with all letter candidates in given language for every cipherletter.
///
/// # Parameters:
/// * language: Language to look letter candidates into.
/// * ciphered_words: Every cipherword in message.
/// * charset: Charset used for substitution. Both ends, ciphering
/// and deciphering, should use the same charset or original text won't be properly
/// recovered.
///
/// # Returns:
/// * Mapping loaded with all candidates in given language.
fn generate_language_mapping<T, U, V>(language: T, ciphered_words: &HashSet<U>, charset: V) -> Result<Mapping>
    where T: AsRef<str>,
          U: AsRef<str>,
          V: AsRef<str> {
    let dictionary = Dictionary::new(&language, false)?;
    let mut language_mapping = Mapping::new_empty(&charset);
    // Each word narrows down the global mapping through its own partial mapping.
    for word in ciphered_words {
        let word_mapping = get_word_mapping(&charset, word, &dictionary)?;
        language_mapping.reduce_mapping(&word_mapping);
    }
    Ok(language_mapping)
}

/// Create a mapping with characters candidates for given ciphered word.
///
/// # Parameters:
/// * charset: Charset used for substitution method. Both ends, ciphering
/// and deciphering, should use the same charset or original text won't be properly
/// recovered.
/// * ciphered_word: Ciphered word used to find words with similar patterns.
/// * dictionary: Dictionary to extract from words with the same pattern than ciphered word.
///
/// # Returns:
/// * A Mapping class instance.
fn get_word_mapping<T, U>(charset: T, ciphered_word: U, dictionary: &Dictionary) -> Result<Mapping> where T: AsRef<str>, U: AsRef<str> { let mut word_mapping = Mapping::new_empty(&charset); let ciphered_word_pattern: String = get_word_pattern(&ciphered_word); let word_candidates = dictionary.get_words_with_pattern(&ciphered_word_pattern) .chain_err(|| ErrorKind::NoMappingAvailable(ciphered_word.as_ref().to_string(), dictionary.language.clone()))?; for (index, char) in ciphered_word.as_ref().chars().enumerate() { // let char_string = char.to_string(); for word_candidate in word_candidates.iter() { if let Some(selected_char) = word_candidate.chars().nth(index) { word_mapping.add(char, selected_char); // word_mapping.add(&char.to_string(), selected_char.to_string()); } } } Ok(word_mapping) } /// Assess every possible mapping and get how many recovered words are identifiable /// in any language dictionary. /// /// # Parameters: /// * ciphered_text: Text to be deciphered. /// * language: Language to compare with recovered words. /// * possible_mappings: Possible cipherletter mappings for given text. /// * charset: Charset used for substitution method. Both ends, ciphering /// and deciphering, should use the same charset or original text won't be properly /// recovered. /// /// # Returns: /// * A HashMap whose keys are tested keys and values are a 0 to 1 float with /// comparison sucess for given language. 1 means every deciphered word using /// tested key can be found in given language dictionary. 
fn assess_candidate_keys<T, U, V>(ciphered_text: T, language: U, possible_mappings: &Vec<Mapping>,
                                  charset: V) -> Result<HashMap<String, f64>>
    where T: AsRef<str>,
          U: AsRef<str>,
          V: AsRef<str> {
    let mut keys_found: HashMap<String, f64> = HashMap::new();
    for possible_mapping in possible_mappings {
        match assess_possible_mapping(possible_mapping, &language, &ciphered_text, &charset) {
            Ok((key, probability)) => { keys_found.insert(key, probability); },
            // Inconsistent mappings naturally produce malformed keys (wrong
            // length or repeated characters); those candidates are silently
            // skipped instead of aborting the whole assessment. Any other
            // error is a real failure and is propagated via bail!.
            // NOTE: `Error(kind, state)` is the error_chain tuple-struct pattern.
            Err(e) => match e {
                Error(ErrorKind::WrongKeyLength(_, _), _) => continue,
                Error(ErrorKind::WrongKeyRepeatedCharacters(_), _) => continue,
                error => bail!(error)
            }
        };
    }
    Ok(keys_found)
}

/// Convert mapping to a substitution key and check if that key deciphers messages in words
/// from any know dictionary.
///
/// # Parameters:
/// * possible_mapping: Mapping reduced to maximum.
/// * language: Language to compare with recovered words.
/// * ciphered_text: Text to be deciphered.
/// * charset: Charset used for substitution method. Both ends, ciphering
/// and deciphering, should use the same charset or original text won't be properly
/// recovered.
///
/// # Returns:
/// * A tuple with key generated from given mapping and a 0 to 1 float with
/// comparison success for given language. 1 means every deciphered word using
/// tested key can be found in given language dictionary.
fn assess_possible_mapping<T, U, V>(possible_mapping: &Mapping, language: T, ciphered_text: U,
                                    charset: V) -> Result<(String, f64)>
    where T: AsRef<str>,
          U: AsRef<str>,
          V: AsRef<str> {
    let key = possible_mapping.generate_key_string();
    let success = assess_substitution_key(&ciphered_text, &key, &language, &charset)?;
    Ok((key, success))
}

/// Decipher text with given key and try to find out if returned text can be identified with given
/// language.
///
/// If given key does not comply with coherence rules then it is silently discarded
/// returning 0.
///
/// # Parameters:
/// * ciphered_text: Text to be deciphered.
/// * key: Key to decipher *ciphered_text*.
/// * language: Language to compare got text.
/// * charset: Charset used for substitution. Both ends, ciphering
/// and deciphering, should use the same charset or original text won't be properly
/// recovered.
/// # Returns:
/// * Float from 0 to 1. The higher the frequency of presence of words in language
/// the higher of this probability.
fn assess_substitution_key<T, U, V, W>(ciphered_text: T, key: U, language: V, charset: W) -> Result<f64>
    where T: AsRef<str>,
          U: AsRef<str>,
          V: AsRef<str>,
          W: AsRef<str> {
    // Decipher with the candidate key, then score the recovered words against
    // the language dictionary.
    let recovered_text = decipher(ciphered_text.as_ref(), key.as_ref(), charset.as_ref())?;
    let recovered_words = get_words_from_text(&recovered_text);
    get_candidates_frequency_at_language(&recovered_words, &language)
}

/// Get frequency of presence of words in given language.
///
/// # Parameters:
/// * words: Text words.
/// * language: Language you want to look into.
///
/// # Returns:
/// * Float from 0 to 1. The higher the frequency of presence of words in language
/// the higher of this probability.
fn get_candidates_frequency_at_language<T>(words: &HashSet<String>, language: T) -> Result<f64>
    where T: AsRef<str> {
    let dictionary = Dictionary::new(language.as_ref(), false)?;
    Ok(dictionary.get_words_presence(&words))
}

/// Get key with maximum probability
///
/// # Parameters:
/// * keys_found: Dict with cipher keys as dict keys and their corresponding probabilities as float values.
///
/// # Returns:
/// * Tuple with best key and its corresponding probability.
fn get_best_key(keys_found: &HashMap<String, f64>) -> (String, f64) {
    // Strictly-greater comparison: on ties the first key found wins, and an
    // empty input yields ("", 0.0), exactly as before.
    let mut winner: (String, f64) = (String::new(), 0.0);
    for (key, &probability) in keys_found {
        if probability > winner.1 {
            winner = (key.clone(), probability);
        }
    }
    winner
}

/// Type to manage possible candidates to substitute every cipherletter in charset.
///
/// You can use it as a dict whose keys are letters and values are sets with substitution
/// letters candidates.
#[derive(Debug)]
struct Mapping {
    // Key: cipherletter. Value: None (no candidates yet) or the set of
    // clear-text candidates collected so far.
    mapping: HashMap<char, Option<HashSet<char>>>,
    // Charset shared by cipherer and decipherer.
    charset: String
}

impl Mapping {

    /// Create empty mapping for cipher letters
    ///
    /// # Parameters:
    /// * charset: Charset used for substitution method. Both ends, ciphering
    /// and deciphering, should use the same charset or original text won't be properly
    /// recovered.
    fn init_mapping(&mut self){
        // Every charset character starts with no candidates at all (None).
        for char in self.charset.chars() {
            self.mapping.insert(char, None);
        }
    }

    /// Create a mapping with all character mappings empty.
    ///
    /// # Parameter:
    /// * charset: Charset used for substitution method. Both ends, ciphering
    /// and deciphering, should use the same charset or original text won't be properly
    /// recovered.
    ///
    /// # Returns:
    /// * An empty Mapping instance.
    pub fn new_empty<T>(charset: T) -> Self
        where T: AsRef<str> {
        let mut mapping = Self {
            mapping: HashMap::new(),
            charset: charset.as_ref().to_string()
        };
        mapping.init_mapping();
        mapping
    }

    /// Create a mapping loaded with given mapping dict.
    ///
    /// # Parameters:
    /// * mapping_dict: Content to load.
    /// * charset: Charset used for substitution method. Both ends, ciphering
    /// and deciphering, should use the same charset or original text won't be properly
    /// recovered.
    ///
    /// # Returns:
    /// * A Mapping instance loaded with mapping dict content.
    pub fn new<T>(mapping_dict: &HashMap<char, Option<HashSet<char>>>, charset: T)-> Self
        where T: AsRef<str> {
        let mut mapping = Self::new_empty(charset);
        mapping.load_content(mapping_dict);
        mapping
    }

    /// Populates this mapping using a HashMap.
    ///
    /// HashMaps's keys are cipherletters and values are sets of mapping char candidates.
    ///
    /// Given mapping should use the same charset as this one. Differing cipherletters
    /// will be discarded.
    ///
    /// # Parameters:
    /// * mapping_dict: Content to load.
    fn load_content(&mut self, mapping_dict: &HashMap<char, Option<HashSet<char>>>) {
        // NOTE(review): keys_list is built from mapping_dict's *own* keys, so the
        // contains() test below is always true — the documented "differing
        // cipherletters will be discarded" filtering is not actually enforced.
        // Left as-is because callers (e.g. the mapping! macro with test charsets)
        // rely on foreign keys being copied in. TODO confirm intended behavior.
        let keys_list: Vec<&char> = mapping_dict.keys().map(|x| x).collect();
        for (key, value) in mapping_dict.iter() {
            if keys_list.contains(&key){
                match value {
                    Some(mapping_set) => {
                        let mapping_set_clone: HashSet<char> = mapping_set.iter().map(|x| *x).collect();
                        self.mapping.insert(*key, Some(mapping_set_clone));
                    },
                    // None values are NOT copied: an existing entry keeps whatever it had.
                    None => { }
                }
            }
        }
    }

    /// Get current mapping content.
    ///
    /// # Returns:
    /// * Dict's keys are cipherletters and values are sets of mapping char candidates.
    fn get_current_content(&self)-> &HashMap<char, Option<HashSet<char>>> {
        &self.mapping
    }

    /// Get this mapping cipherletters.
    ///
    /// # Returns:
    /// * A list with cipherletters registered in this mapping.
    fn cipherletters(&self)-> Vec<char>{
        let cipherletters_list: Vec<char> = self.mapping.keys().cloned().collect();
        cipherletters_list
    }

    /// Generate an string to be used as a substitution key.
    ///
    /// If any cipherletter has no substitutions alternative then the same cipherletter
    /// is used for substitution. Also, be aware that first candidate for every
    /// cipherletter will be chosen so use this method when mapping is completely
    /// reduced.
    ///
    /// # Returns:
    /// * Generated key string.
    fn generate_key_string(&self)-> String {
        // O(charset x mapping) scan: for every clear char, find the first
        // cipherletter whose candidate set contains it.
        let mut key_list: Vec<char> = Vec::new();
        for clear_char in self.charset.chars() {
            let mut char_found = false;
            for (&key, value_set) in self.mapping.iter() {
                match value_set {
                    Some(set) => {
                        // Use this method with already reduced mappings because only
                        // first element of every set will be taken.
                        let value = set.get_first_element().unwrap();
                        if value == clear_char {
                            char_found = true;
                            key_list.push(key);
                            break;
                        }
                    },
                    None => continue
                }
            }
            // Fallback: cipherletter maps to itself when no candidate claims it.
            if !char_found {
                key_list.push(clear_char);
            }
        }
        let mut string_to_return = String::new();
        key_list.iter().for_each(|x| string_to_return.push_str(x.to_string().as_str()));
        string_to_return
    }

    /// Return every possible mapping from an unresolved mapping.
    ///
    /// An unresolved mapping is one that has more than one possibility in any of
    /// its chars.
    ///
    /// # Parameters:
    /// * mapping: A character mapping.
    ///
    /// # Returns:
    /// * A list of mapping candidates.
    fn get_possible_mappings(&self)-> Vec<Mapping> {
        self._get_possible_mappings(None)
    }

    /// Utility recursive method used by get_possible_mappings().
    ///
    /// # Parameters:
    /// * mapping: A character mapping.
    ///
    /// # Returns:
    /// * A list of mapping candidates.
    fn _get_possible_mappings(&self, mapping: Option<&Mapping>)-> Vec<Mapping> {
        let mut mapping_list: Vec<Mapping> = Vec::new();
        // First call clones self's content; recursive calls clone the shrinking
        // working copy. Each level pops one cipherletter and combines its
        // candidates with every partial mapping from the deeper levels
        // (cartesian-product expansion).
        let mut step_mapping = match mapping {
            None => Mapping::new(self.get_current_content(), &self.charset),
            Some(start_mapping) => start_mapping.clone()
        };
        if let Ok((char, candidates)) = step_mapping.pop_item() {
            let partial_mappings = self._get_possible_mappings(Some(&mut step_mapping));
            match candidates {
                Some(set) => {
                    for candidate in set.iter() {
                        for partial_mapping in partial_mappings.iter() {
                            let mut current_mapping = Mapping::new_empty(&self.charset);
                            current_mapping.add(char, *candidate);
                            current_mapping.load_content(partial_mapping.get_current_content());
                            mapping_list.push(current_mapping);
                        }
                    }
                },
                None => {
                    // No candidates for this cipherletter: propagate it with an
                    // explicitly empty set (second arm of mapping!).
                    for partial_mapping in partial_mappings.iter() {
                        let char_string = char.to_string();
                        let char_str = char_string.as_str();
                        let mut current_mapping = mapping!(&self.charset, {char_str : {}});
                        current_mapping.load_content(partial_mapping.get_current_content());
                        mapping_list.push(current_mapping);
                    }
                }
            };
            return mapping_list
        } else {
            // Recursion base case: nothing left to pop.
            return vec![Mapping::new_empty(&self.charset),];
        }
    }

    /// Apply given word mapping to reduce this mapping.
    ///
    /// # Parameters:
    /// * word_mapping: Partial mapping for an individual word.
    fn reduce_mapping(&mut self, word_mapping: &Mapping) {
        for cipherletter in self.cipherletters() {
            // Unwrap here is safe because we are using cipherletters.
            if let Some(set) = self.get(cipherletter).unwrap() {
                // Previous candidates present for cipherletter so reducing needed.
                if let Some(word_cipherletters_mapping_option) = word_mapping.get(cipherletter) {
                    match word_cipherletters_mapping_option {
                        Some(word_cipherletter_mapping) => {
                            // Keep only candidates both mappings agree on.
                            let new_candidates_set: HashSet<char> = set.intersection(word_cipherletter_mapping).map(|x| *x).collect();
                            self.set(cipherletter, Some(new_candidates_set));
                        },
                        None => {}
                    };
                }
            } else {
                // No previous candidates present for cipherletter so just copy word mapping.
                if let Some(word_cipherletters_mapping_option) = word_mapping.get(cipherletter) {
                    match word_cipherletters_mapping_option {
                        Some(word_cipherletter_mapping) => {
                            self.set(cipherletter, Some(word_cipherletter_mapping.clone()));
                        },
                        None => {}
                    };
                }
            }
        }
    }

    /// Remove redundancies from mapping.
    ///
    /// If any cipherletter has been reduced to just one candidate, then that
    /// candidate should not be in any other cipherletter. Leaving it would produce
    /// an inconsistent deciphering key with repeated characters.
    pub fn clean_redundancies(&mut self){
        // Single-candidate cipherletters lock in their candidate...
        let candidates_to_remove: Vec<char> = self.mapping.values()
            .filter(|&x| if let Some(set) = x { if set.len() == 1 { true } else { false } } else { false })
            .map(|x| {
                let set: &HashSet<char> = x.as_ref().unwrap(); // Unwrap is not dangerous here because we filtered to be sure set has at least 1 element.
                set.get_first_element().unwrap()
            })
            .collect();
        // ...and that candidate is purged from every multi-candidate set.
        // NOTE(review): this is a single pass — a removal that leaves a set
        // with one element does not cascade. Presumably callers re-run if
        // needed; TODO confirm.
        let keys_to_check: Vec<char> = self.mapping.keys().cloned()
            .filter(|x| if let Some(Some(set)) = self.mapping.get(x) { if set.len() > 1 { true } else { false } } else { false })
            .collect();
        for key_to_check in keys_to_check {
            let set_option = self.mapping.get_mut(&key_to_check).unwrap();
            let set = set_option.as_mut().unwrap();
            set.retain(|x| !candidates_to_remove.contains(x))
        }
    }

    /// Get candidates for given cipherletter.
    ///
    /// If the mapping did not have this cipherletter present, [`None`] is returned.
///
/// # Parameters:
/// * key: Cipherletter to get candidates from.
///
/// # Returns:
/// * Current candidates set or None if cipherletter is not present.
fn get(&self, key: char) -> Option<&Option<HashSet<char>>> {
    self.mapping.get(&key)
}

/// Inserts a cipherletter-candidates pair into the mapping.
///
/// If the mapping did not have this cipherletter present, [`None`] is returned, but key and
/// value are inserted.
///
/// If the mapping did have this cipherletter present, the value is updated, and the old
/// value is returned. The key is not updated, though.
///
/// # Parameters:
/// * key: Cipherletter to update.
/// * value: New value to insert.
///
/// # Returns:
/// * Old value or None if key was not found.
fn set(&mut self, key: char, value: Option<HashSet<char>>) -> Option<Option<HashSet<char>>> {
    self.mapping.insert(key, value)
}

/// Remove and return a cipherletter and its candidates from current mapping.
///
/// # Returns:
/// * A tuple with selected cipherletter and its candidates.
fn pop_item(&mut self) -> Result<(char, Option<HashSet<char>>)> {
    // Take an arbitrary key (HashMap iteration order), remove it and hand
    // both key and candidates back; an empty mapping is an error.
    match self.mapping.keys().next().cloned() {
        Some(cipherletter) => {
            // remove() cannot fail: the key was just taken from the map itself.
            let candidates = self.mapping.remove(&cipherletter).unwrap();
            Ok((cipherletter, candidates))
        },
        None => Err(ErrorKind::EmptyMapping.into())
    }
}

/// Insert a new candidate into an existing mapping.
///
/// Whereas set() assigns an entire HashSet to cipherletter, this method only adds a new candidate
/// to existing cipherletter.
///
/// # Parameters:
/// * key: Cipherletter to update.
/// * value: Candidate to insert.
// fn add<T, U>(&mut self, key: T, value: U) // where T: AsRef<str>, // U: AsRef<str> { fn add(&mut self, key: char, value: char) { let entry = self.mapping.entry(key).or_insert(None); match entry { Some(content) => { content.insert(value); }, None => { let mut new_content: HashSet<char> = HashSet::new(); new_content.insert(value); *entry = Some(new_content); } }; } /// Create a new set at given cipherletter just with one candidate. /// /// # Parameters: /// * key: Cipherletter to update. /// * value: Candidate to insert. fn create_new_single_entry(&mut self, key: char, value: char) { let mut new_candidates_set: HashSet<char> = HashSet::new(); new_candidates_set.insert(value); self.mapping.insert(key, Some(new_candidates_set)); } } impl PartialEq for Mapping { fn eq(&self, other: &Self) -> bool { if self.charset == other.charset && self.mapping == other.mapping { true } else { false } } } impl Clone for Mapping { fn clone(&self) -> Self { Self { mapping: self.mapping.clone(), charset: self.charset.clone() } } } trait Extractor { type Item; /// Get first N elements from collections. /// /// # Parameters: /// * n: How many elements to return. /// /// # Returns: /// * A list of elements. fn get_n_elements(&self, n: usize) -> Option<Vec<Self::Item>>; /// Get first element from collections. /// /// # Returns: /// * An element. 
fn get_first_element(&self) -> Option<Self::Item>;
}

impl Extractor for HashSet<char> {
    type Item = char;

    /// Collect elements one by one and succeed as soon as `n` have been
    /// gathered; a set with fewer than `n` elements yields None.
    fn get_n_elements(&self, n: usize) -> Option<Vec<char>> {
        let mut extracted: Vec<char> = Vec::new();
        for &element in self.iter() {
            extracted.push(element);
            if extracted.len() >= n {
                return Some(extracted);
            }
        }
        None
    }

    /// Delegate to get_n_elements(1) and unwrap the single element, if any.
    fn get_first_element(&self) -> Option<Self::Item> {
        match self.get_n_elements(1) {
            Some(elements) => elements.first().copied(),
            None => None
        }
    }
}

#[cfg(test)]
pub mod tests {
    use super::*;
    use std::env;
    use std::fs::File;
    use std::time::Instant;
    use crate::attack::dictionaries::tests::LoadedDictionaries;
    use crate::cipher::substitution::{cipher, decipher};
    use std::io::Read;
    use std::path::PathBuf;
    use std::iter::FromIterator;

    pub const TEST_CHARSET: &'static str = "abcdefghijklmnopqrstuvwxyz";
    pub const TEST_KEY: &'static str =     "lfwoayuisvkmnxpbdcrjtqeghz";
    const TEST_CHARSET_SPANISH: &'static str = "abcdefghijklmnopqrstuvwxyzáéíóúñ";
    const TEST_KEY_SPANISH: &'static str =     "lfwoayuisvkmnxpbdcrjtqeghzñúóíéá";
    pub const ENGLISH_TEXT_WITH_PUNCTUATIONS_MARKS: &'static str = "resources/english_book_c1.txt";
    const SPANISH_TEXT_WITH_PUNCTUATIONS_MARKS: &'static str = "resources/spanish_book_c1.txt";

    /// One ciphering scenario: a text fixture plus the language, key and
    /// charset used to cipher and recover it.
    struct TestSet {
        text_file: &'static str,
        language: &'static str,
        key: &'static str,
        charset: &'static str
    }

    impl TestSet {
        fn new(text_file: &'static str, language: &'static str, key: &'static str,
               charset: &'static str)-> Self {
            Self { text_file, language, key, charset }
        }
    }

    /// Creates a candidates set valid to be assigned to a Mapping key.
///
/// For instance:
/// ```rust
/// let mut current_mapping = mapping!(TEST_CHARSET,
///                                    {"1" : {"a", "b"},
///                                    "2" : {"c"},
///                                    "3" : {"d"},
///                                    "4" : {"d", "f"},
///                                    "5" : {"c", "h"}});
/// current_mapping["4"] = candidates!("r", "p", "x");
/// ```
///
/// # Parameters:
/// * A list of &str chars to be included as candidates.
///
/// # Returns:
/// * A HashSet ready to be assigned to a Mapping key.
macro_rules! candidates {
    ( $($value:tt), + ) => {
        {
            let mut candidates_set = HashSet::new();
            $(
                candidates_set.insert(char::fromStr($value));
            )+
            // Wrapped in Some because Mapping values are Option<HashSet<char>>.
            Some(candidates_set)
        }
    };
}

// End-to-end test of the sequential hack: cipher a known text, recover the key
// and check it deciphers back to the original.
#[test]
fn test_hack_substitution() {
    let test_sets = vec![
        TestSet::new(ENGLISH_TEXT_WITH_PUNCTUATIONS_MARKS,
                     "english",
                     TEST_KEY,
                     TEST_CHARSET),
        TestSet::new(SPANISH_TEXT_WITH_PUNCTUATIONS_MARKS,
                     "spanish",
                     TEST_KEY_SPANISH,
                     TEST_CHARSET_SPANISH)
    ];
    // NOTE(review): loaded_dictionaries is never read; presumably its
    // constructor populates the dictionary database as a side effect and the
    // binding keeps it alive for the test's duration — TODO confirm.
    let loaded_dictionaries = LoadedDictionaries::new();
    for set in test_sets {
        let text = get_text_to_cipher(&set);
        // NOTE(review): `Err(E)` uses a non-snake-case binding and
        // `assert!(false, E)` passes a non-literal message to panic!
        // (deprecated; a hard error in the 2021 edition).
        let ciphered_text = match cipher(&text, &set.key, &set.charset) {
            Ok(text) => text,
            Err(E) => {assert!(false, E); String::new()}
        };
        let timer = Instant::now();
        let found_key = hack_substitution(&ciphered_text, &set.charset)
            .expect("Error running hacking_substitution().");
        assert_found_key(&found_key, &set.key, &ciphered_text, &text, &set.charset);
        println!("{}", format!("\n\nElapsed time with hack_substitution: {:.2} seconds.",
                               timer.elapsed().as_secs_f64()));
    }
}

// Same scenario as test_hack_substitution but through the parallel
// (rayon-based) implementation.
#[test]
fn test_hack_substitution_mp() {
    let test_sets = vec![
        TestSet::new(ENGLISH_TEXT_WITH_PUNCTUATIONS_MARKS,
                     "english",
                     TEST_KEY,
                     TEST_CHARSET),
        TestSet::new(SPANISH_TEXT_WITH_PUNCTUATIONS_MARKS,
                     "spanish",
                     TEST_KEY_SPANISH,
                     TEST_CHARSET_SPANISH)
    ];
    let loaded_dictionaries = LoadedDictionaries::new();
    for set in test_sets {
        let text = get_text_to_cipher(&set);
        let ciphered_text = match cipher(&text, &set.key, &set.charset) {
            Ok(text) => text,
            Err(E) => {assert!(false, E); String::new()}
        };
        let timer = Instant::now();
        let found_key = hack_substitution_mp(&ciphered_text,
&set.charset)
            .expect("Error running hacking_substitution().");
        assert_found_key(&found_key, &set.key, &ciphered_text, &text, &set.charset);
        // println! formats directly; the intermediate format!() was redundant.
        println!("\n\nElapsed time with hack_substitution: {:.2} seconds.",
                 timer.elapsed().as_secs_f64());
    }
}

/// Read the plain-text fixture for the given test set, resolved against the
/// current working directory.
///
/// The old `Err(E) => {assert!(false, E); ...}` arms passed a non-literal to
/// panic! (deprecated, rejected by the 2021 edition) and needed unreachable
/// dummy fallbacks such as `File::create("/tmp")`; explicit panics with
/// formatted messages replace both.
fn get_text_to_cipher(set: &TestSet) -> String {
    let mut text_file_pathname = env::current_dir()
        .unwrap_or_else(|e| panic!("Could not get current dir: {}", e));
    text_file_pathname.push(set.text_file);
    let mut text_file = File::open(&text_file_pathname)
        .unwrap_or_else(|e| panic!("Could not open {:?}: {}", text_file_pathname, e));
    let mut text = String::new();
    text_file.read_to_string(&mut text)
        .unwrap_or_else(|e| panic!("Could not read {:?}: {}", text_file_pathname, e));
    text
}

/// Assert that the recovered key matches the key used to cipher AND that it
/// actually deciphers the ciphered text back to the original.
fn assert_found_key<U, V, W, X>(found_key: &(String, f64), tested_key: U, ciphered_text: V,
                                original_text: W, charset: X)
    where U: AsRef<str>,
          V: AsRef<str>,
          W: AsRef<str>,
          X: AsRef<str> {
    assert_eq!(found_key.0, tested_key.as_ref());
    let deciphered_text = decipher(ciphered_text.as_ref(), &found_key.0, charset.as_ref())
        .unwrap_or_else(|e| panic!("Decipher failed: {}", e));
    assert_eq!(deciphered_text, original_text.as_ref());
}

/// Single-candidate sets ("c", "d") must purge their candidate from every
/// multi-candidate set.
#[test]
fn test_clean_redundancies() {
    let mut current_mapping = mapping!(TEST_CHARSET,
                                       {"1" : {"a", "b"},
                                        "2" : {"c"},
                                        "3" : {"d"},
                                        "4" : {"d", "f"},
                                        "5" : {"c", "h"}});
    let expected_mapping = mapping!(TEST_CHARSET,
                                    {"1" : {"a", "b"},
                                     "2" : {"c"},
                                     "3" : {"d"},
                                     "4" : {"f"},
                                     "5" : {"h"}});
    current_mapping.clean_redundancies();
    assert_eq!(expected_mapping, current_mapping)
}

/// Mapped cipherletters substitute their candidate; unmapped charset chars
/// fall back to themselves in the generated key string.
#[test]
fn test_generate_key_string() {
    let expected_keystring = "ABCDEFGHIJKLMNOPQRSTUVWXYZfghijfghijklmnopqrstuvwxyz";
    let test_charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
    let mapping = mapping!(test_charset,
                           {"f": {"a"},
                            "g": {"b"},
                            "h": {"c"},
                            "i": {"d"},
                            "j": {"e"}});
    let returned_keystring = mapping.generate_key_string();
    assert_eq!(expected_keystring, returned_keystring)
}

#[test]
fn
test_get_n_elements() {
        let mut set: HashSet<char> = HashSet::new();
        set.insert(char::fromStr("a"));
        set.insert(char::fromStr("b"));
        set.insert(char::fromStr("c"));
        // Only the length can be asserted: HashSet iteration order is unspecified.
        match set.get_n_elements(2) {
            Some(list) => {
                assert_eq!(list.len(), 2);
            },
            None => {
                assert!(false, "No element was extracted.");
            }
        }
    }

    #[test]
    fn test_get_first_element() {
        let mut set: HashSet<char> = HashSet::new();
        set.insert(char::fromStr("a"));
        match set.get_first_element() {
            Some(element) => {
                assert_eq!(element, char::fromStr("a"));
            },
            None => {
                assert!(false, "No element was extracted.");
            }
        }
    }

    // The cartesian-product expansion of 2x1x1x2x2 candidates must yield all
    // 8 fully-resolved mappings, in any order.
    #[test]
    fn test_get_possible_mappings() {
        let mapping = mapping!(TEST_CHARSET, {"1": {"a", "b"},
                                              "2": {"c"},
                                              "3": {"d"},
                                              "4": {"e", "f"},
                                              "5": {"g", "h"}});
        let expected_list = vec![
            mapping!(TEST_CHARSET, {"1": {"a"}, "2": {"c"}, "3": {"d"}, "4": {"e"}, "5": {"g"}}),
            mapping!(TEST_CHARSET, {"1": {"a"}, "2": {"c"}, "3": {"d"}, "4": {"f"}, "5": {"g"}}),
            mapping!(TEST_CHARSET, {"1": {"b"}, "2": {"c"}, "3": {"d"}, "4": {"e"}, "5": {"g"}}),
            mapping!(TEST_CHARSET, {"1": {"b"}, "2": {"c"}, "3": {"d"}, "4": {"f"}, "5": {"g"}}),
            mapping!(TEST_CHARSET, {"1": {"a"}, "2": {"c"}, "3": {"d"}, "4": {"e"}, "5": {"h"}}),
            mapping!(TEST_CHARSET, {"1": {"a"}, "2": {"c"}, "3": {"d"}, "4": {"f"}, "5": {"h"}}),
            mapping!(TEST_CHARSET, {"1": {"b"}, "2": {"c"}, "3": {"d"}, "4": {"e"}, "5": {"h"}}),
            mapping!(TEST_CHARSET, {"1": {"b"}, "2": {"c"}, "3": {"d"}, "4": {"f"}, "5": {"h"}}),
        ];
        let recovered_mappings = mapping.get_possible_mappings();
        assert_eq!(expected_list.len(), recovered_mappings.len());
        assert!(expected_list.iter().all(|_mapping| recovered_mappings.contains(&_mapping)));
    }

    // Same as above, but with an extra cipherletter holding no candidates
    // (None): it must not multiply the number of expanded mappings.
    #[test]
    fn test_get_possible_mappings_with_empties() {
        // NOTE(review): non-snake-case local; '0' is also not part of this
        // charset, so set() registers a cipherletter outside it — intentional
        // for this test, presumably.
        let THIS_TEST_CHARSET = "12345";
        let mut mapping = mapping!(THIS_TEST_CHARSET, {"1": {"a", "b"},
                                                       "2": {"c"},
                                                       "3": {"d"},
                                                       "4": {"e", "f"},
                                                       "5": {"g", "h"}});
        mapping.set(char::fromStr("0"), None);
        let mut expected_mapping_1 = mapping!(THIS_TEST_CHARSET, {"1": {"a"}, "2": {"c"}, "3": {"d"},
                                                                  "4": {"e"}, "5": {"g"}});
        let mut expected_mapping_2 = mapping!(THIS_TEST_CHARSET, {"1": {"a"}, "2": {"c"}, "3": {"d"},
                                                                  "4": {"f"}, "5": {"g"}});
        let mut expected_mapping_3 = mapping!(THIS_TEST_CHARSET, {"1": {"b"}, "2": {"c"}, "3": {"d"},
                                                                  "4": {"e"}, "5": {"g"}});
        let mut expected_mapping_4 = mapping!(THIS_TEST_CHARSET, {"1": {"b"}, "2": {"c"}, "3": {"d"},
                                                                  "4": {"f"}, "5": {"g"}});
        let mut expected_mapping_5 = mapping!(THIS_TEST_CHARSET, {"1": {"a"}, "2": {"c"}, "3": {"d"},
                                                                  "4": {"e"}, "5": {"h"}});
        let mut expected_mapping_6 = mapping!(THIS_TEST_CHARSET, {"1": {"a"}, "2": {"c"}, "3": {"d"},
                                                                  "4": {"f"}, "5": {"h"}});
        let mut expected_mapping_7 = mapping!(THIS_TEST_CHARSET, {"1": {"b"}, "2": {"c"}, "3": {"d"},
                                                                  "4": {"e"}, "5": {"h"}});
        let mut expected_mapping_8 = mapping!(THIS_TEST_CHARSET, {"1": {"b"}, "2": {"c"}, "3": {"d"},
                                                                  "4": {"f"}, "5": {"h"}});
        let mut expected_list = vec![
            expected_mapping_1,
            expected_mapping_2,
            expected_mapping_3,
            expected_mapping_4,
            expected_mapping_5,
            expected_mapping_6,
            expected_mapping_7,
            expected_mapping_8,
        ];
        let recovered_mappings = mapping.get_possible_mappings();
        assert_eq!(expected_list.len(), recovered_mappings.len());
        // NOTE(review): `missing` is computed but never read — debugging aid
        // left in place; the assert below repeats the same check.
        let missing: Vec<Mapping> = expected_list.iter().cloned().filter(|_mapping| !recovered_mappings.contains(&_mapping)).collect();
        assert!(expected_list.iter().all(|_mapping| recovered_mappings.contains(&_mapping)));
    }

    // Reducing with a second partial mapping must intersect candidate sets and
    // leave untouched the cipherletters it says nothing about.
    #[test]
    fn test_reduce_mapping() {
        let mut mapping = mapping!(TEST_CHARSET, {"1": {"a", "b"},
                                                  "2": {"c"},
                                                  "3": {"d"},
                                                  "4": {"e", "f", "g"},
                                                  "5": {"h"}});
        let mapping_2 = mapping!(TEST_CHARSET, {"1": {"a"},
                                                "2": {"c"},
                                                "4": {"e", "g"},
                                                "5": {"h"}});
        let expected_reduced_mapping = mapping!(TEST_CHARSET, {"1": {"a"},
                                                               "2": {"c"},
                                                               "3": {"d"},
                                                               "4": {"e", "g"},
                                                               "5": {"h"}});
        mapping.reduce_mapping(&mapping_2);
        assert_eq!(mapping, expected_reduced_mapping, "Mapping was not reduced as expected.");
    }

    #[test]
    fn test_mapping_get() {
        let mut mapping = mapping!(TEST_CHARSET, {"1": {"a", "b"},
                                                  "2": {"c"},
                                                  "3": {"d"},
                                                  "4": {"e", "f", "g"},
                                                  "5": {"h"}});
        let content = mapping.get(char::fromStr("2")).unwrap().as_ref().expect("Error retrieving key.");
        let content_string = content.get_first_element().expect("Error retrieving content.");
        assert_eq!(char::fromStr("c"), content_string);
    }

    // set() must replace the whole candidate set stored at a cipherletter.
    #[test]
    fn test_mapping_set() {
        let mut mapping = mapping!(TEST_CHARSET, {"1": {"a", "b"},
                                                  "2": {"c"},
                                                  "3": {"d"},
                                                  "4": {"e", "f", "g"},
                                                  "5": {"h"}});
        mapping.set(char::fromStr("4"), candidates!("r", "t"));
        let content = mapping.get(char::fromStr("4")).unwrap().as_ref().expect("Error retrieving key.");
        let content_list = content.get_n_elements(2).expect("Error retrieving content.");
        assert!(vec!["r", "t"].iter().all(|candidate| content_list.contains(&char::fromStr(candidate))));
    }

    // add() must extend an existing candidate set instead of replacing it.
    #[test]
    fn test_mapping_add() {
        let mut mapping = mapping!(TEST_CHARSET, {"1": {"a", "b"},
                                                  "2": {"c"},
                                                  "3": {"d"},
                                                  "4": {"e", "f", "g"},
                                                  "5": {"h"}});
        mapping.add(char::fromStr("4"), char::fromStr("x"));
        let content = mapping.get(char::fromStr("4")).unwrap().as_ref().expect("Error retrieving key.");
        let expected_length: usize = 4;
        assert_eq!(expected_length, content.len(),
                   "Content has {} while we were expecting {}.", content.len(), expected_length);
        let content_list = content.get_n_elements(expected_length).expect("Error retrieving content.");
        assert!(vec!["e", "f", "g", "x"].iter().all(|candidate| content_list.contains(&char::fromStr(candidate))));
    }

    #[test]
    fn test_popitem() {
        let mut mapping = mapping!(TEST_CHARSET, {"1": {"a", "b"},
                                                  "2": {"c"},
                                                  "3": {"d"},
                                                  "4": {"e", "f", "g"},
                                                  "5": {"h"}});
        // Test correct item extraction.
let original_content = mapping.get_current_content().clone(); let original_keys: Vec<&char> = original_content.keys().collect(); let (extracted_cipherletter, extracted_candidates) = mapping.pop_item() .expect("Error extracting item."); assert!(original_keys.contains(&&extracted_cipherletter), format!("Extracted key {} was not among original ones.", &extracted_cipherletter)); // Test extraction reduces length. let resulting_keys = mapping.cipherletters(); let original_keys_length = original_keys.len().to_string(); let resulting_keys_length = resulting_keys.len().to_string(); assert_eq!(resulting_keys.len(), original_keys.len() - 1, "Original keys length of {} is {} after pop", original_keys_length.as_str(), resulting_keys_length.as_str()); assert!(!resulting_keys.contains(&extracted_cipherletter), format!("Extracted cipherletter {} was not removed from mapping", &extracted_cipherletter)); // Test extraction from empty mapping generates an error. mapping = Mapping { mapping: HashMap::new(), charset: "".to_string() }; if let Err(E) = mapping.pop_item() { match Error::from(E) { Error(ErrorKind::EmptyMapping, _) => assert!(true), error => assert!(false, format!("Raised error was not the one \ we were expecting but {} instead", error)) } } else { assert!(false, "No error was raised when extracting from empty mapping.") } } }
use std::borrow::Cow;
use std::sync::Arc;

use command_data_derive::MenuCommand;
use discorsd::{async_trait, BotState};
use discorsd::commands::*;
use discorsd::errors::BotError;
use discorsd::http::channel::{create_message, MessageChannelExt};
use discorsd::model::interaction::{ButtonPressData, GuildUser, InteractionUser, MenuSelectData};
use itertools::Itertools;

use crate::avalon::characters::Character;
use crate::avalon::config::AvalonConfig;
use crate::Bot;

/// `/setup`: posts the Avalon configuration message (join/leave button plus a
/// role-selection menu) in the invoking channel.
#[derive(Debug, Clone, Copy)]
pub struct SetupCommand;

// fn message(config: &AvalonConfig) -> CreateMessage {
//     config.embed().into()
//     // create_message(|m| {
//     //     let players_list = if config.players.is_empty() {
//     //         "None".to_string()
//     //     } else {
//     //         config.players.iter().list_grammatically(|u| u.ping(), "and")
//     //     };
//     //     // todo: list number of MoM/LS
//     //     let roles_list = if config.roles.is_empty() {
//     //         "None".to_string()
//     //     } else {
//     //         config.roles.iter().list_grammatically(|c| c.name().to_string(), "and")
//     //     };
//     //     let content = format!(
//     //         "**Avalon Setup**\n\
//     //         Players: {}\n\
//     //         Roles: {}",
//     //         players_list,
//     //         roles_list,
//     //     );
//     //     m.content(content);
//     // })
// }

#[async_trait]
impl SlashCommand for SetupCommand {
    type Bot = Bot;
    type Data = ();
    type Use = Deferred;
    const NAME: &'static str = "setup";

    fn description(&self) -> Cow<'static, str> {
        "setup a game of Avalon".into()
    }

    /// Sends the setup message with its button and menu, defers the
    /// interaction, then renders the initial (default) config embed.
    async fn run(&self,
                 state: Arc<BotState<<Self as SlashCommand>::Bot>>,
                 interaction: InteractionUse<AppCommandData, Unused>,
                 (): Self::Data,
    ) -> Result<InteractionUse<AppCommandData, Self::Use>, BotError> {
        interaction.channel.send(&state, create_message(|m| {
            m.content("config");
            m.button(&state, JoinButton::default(), |b| b.label("join/leave game"));
            // At most 6 special roles may be picked at once.
            m.menu(&state, RolesMenu::default(), |m| m.max_values(6));
        })).await?;
        let interaction = interaction.defer(&state).await?;
        let mut config = AvalonConfig::default();
        config.update_embed(&state, &interaction).await?;
        Ok(interaction)
    }
}

/// Button adding the pressing user to the game's player list.
#[derive(Clone, Default)]
struct JoinButton(AvalonConfig);

#[async_trait]
impl ButtonCommand for JoinButton {
    type Bot = Bot;

    async fn run(&self,
                 state: Arc<BotState<Self::Bot>>,
                 interaction: InteractionUse<ButtonPressData, Unused>,
    ) -> Result<InteractionUse<ButtonPressData, Used>, BotError> {
        if let InteractionUser::Guild(GuildUser { id: _id, member, locale }) = &interaction.source {
            {
                // Look up the config stored for this button instance and
                // record the presser as a player.
                let mut guard = state.buttons.write().unwrap();
                let config = &mut guard
                    .get_mut(&interaction.data.custom_id)
                    .unwrap()
                    .downcast_mut::<Self>()
                    .unwrap()
                    .0;
                config.players.push(member.clone());
                // config.update_embed(&state, )
                // NOTE(review): unfinished — this panics after mutating the
                // config; the embed refresh (and the "leave" half of
                // join/leave) is still to be implemented.
                todo!()
            }
            interaction.defer_update(&state).await.map_err(|e| e.into())
        } else {
            unreachable!("should not have /setup in dms")
        }
    }
}

/// Select menu choosing which optional special characters are in the game.
#[derive(Clone, Default)]
struct RolesMenu(AvalonConfig);

#[derive(MenuCommand, Debug, Copy, Clone)]
enum Role {
    Assassin,
    Merlin,
    Mordred,
    Morgana,
    Oberon,
    Percival,
}

impl From<&'_ Role> for Character {
    fn from(role: &'_ Role) -> Self {
        match role {
            Role::Assassin => Self::Assassin,
            Role::Merlin => Self::Merlin,
            Role::Mordred => Self::Mordred,
            Role::Morgana => Self::Morgana,
            Role::Oberon => Self::Oberon,
            Role::Percival => Self::Percival,
        }
    }
}

#[async_trait]
impl MenuCommand for RolesMenu {
    type Bot = Bot;
    type Data = Role;

    async fn run(
        &self,
        state: Arc<BotState<Self::Bot>>,
        interaction: InteractionUse<MenuSelectData, Unused>,
        data: Vec<Self::Data>,
    ) -> Result<InteractionUse<MenuSelectData, Used>, BotError> {
        let embed = {
            // Replace the stored role list with the new menu selection, then
            // build the refreshed embed while the lock is still held.
            let mut guard = state.menus.write().unwrap();
            let config = &mut guard
                .get_mut(&interaction.data.custom_id)
                .unwrap()
                .downcast_mut::<Self>()
                .unwrap()
                .0;
            config.roles = data.iter()
                .map(Character::from)
                .collect();
            config.embed()
        };
        interaction.update(&state, embed).await
            .map_err(Into::into)
    }
}
#[cfg(feature = "grpc_support")] tonic::include_proto!("as/r#as"); #[cfg(feature = "grpc_support")] pub mod external; pub mod integration;
use alloc::collections::BTreeMap;
use alloc::string::String;
use alloc::vec::Vec;
use ferr_os_librust::{io, syscall};

//mod build_tree;
pub mod lexer;
//use build_tree::command::{Command, Connector};
pub mod command;
use command::{Command, Connector, Redirect};
pub mod parser;

/// Entry point of the shell: lex and parse one input line, then execute the
/// resulting command tree against the mutable environment `env`.
pub fn bash(string: String, env: &mut BTreeMap<String, String>) {
    unsafe { syscall::debug(0, 0); }
    let mut lexbuf = lexer::Lexbuf::new(string);
    unsafe { syscall::debug(0, 1); }
    match parser::inputunit(lexer::token, &mut lexbuf) {
        Err(_) => io::_print(&String::from("parsing error\n")),
        Ok(command) => {
            unsafe { exec(command, env) };
        },
    }
    /*match lexer::decompose(string) { Err(()) => io::_print(&String::from("Could not parse it\n")), Ok(vector) => match build_tree::build_tree(vector) { Err(()) => io::_print(&String::from("Could not parse formula\n")), Ok(command) => { unsafe { exec(command, env, false) }; } }, }*/
}

/// Execute a command tree and return its exit status (0 = success).
///
/// Child processes are connected to the parent through `/dev/fifo` handles so
/// the parent can forward stdin and watch for the kill byte (12).
unsafe fn exec(command: Command, env: &mut BTreeMap<String, String>) -> usize {
    match command {
        Command::Nothing => {
            0
        }
        Command::SimpleCommand(cmd) => {
            // `name = value` (tokenized as three words) updates the environment.
            if cmd.cmd_line.len() >= 2 && cmd.cmd_line[1] == "=" {
                if cmd.cmd_line.len() > 2 {
                    env.insert(
                        String::from(&cmd.cmd_line[0]),
                        String::from(&cmd.cmd_line[2]),
                    );
                } else {
                    env.insert(String::from(&cmd.cmd_line[0]), String::from(""));
                }
                0
            } else if cmd.cmd_line.len() == 0 {
                0
            } else {
                let prog_name = &cmd.cmd_line[0];
                if prog_name.len() == 0 {
                    io::_print(&String::from("Should not happen compute->exec\n"));
                    1
                } else if prog_name.len() > 1
                    && prog_name.as_bytes()[0] == b'.'
                    && prog_name.as_bytes()[1] == b'/' {
                    // `./prog`: resolve relative to $PWD and run it in a child
                    // (or in-process when backgrounded, id == 0).
                    let mut pwd;
                    match env.get("PWD") {
                        Some(n) => pwd = String::from(n),
                        None => pwd = String::new(),
                    }
                    let id;
                    let fd = syscall::open(&String::from("/dev/fifo"), io::OpenFlags::ORD | io::OpenFlags::OWR);
                    if cmd.cmd_bg {
                        id = 0;
                    } else {
                        id = syscall::fork();
                    }
                    if id == 0 {
                        if !cmd.cmd_bg {
                            // Foreground child reads stdin via the fifo.
                            syscall::dup2(io::STD_IN, fd);
                        }
                        syscall::close(fd);
                        do_redirects(&cmd.cmd_redirects, &pwd);
                        let mut name = String::from(&pwd);
                        for c in prog_name.bytes().skip(2) {
                            name.push(c as char);
                        }
                        // The working directory is appended as an extra argv entry.
                        let mut args = cmd.cmd_line;
                        args.push(String::from(&pwd));
                        run(&name, &args);
                        io::_print(&String::from("Program not found\n"));
                        syscall::exit(1)
                    } else {
                        await_end_and_kill(id, fd)
                    }
                } else {
                    if cmd.cmd_line[0] == "cd" {
                        // Builtin `cd`: store the new directory (normalized to a
                        // trailing '/') in $PWD; no filesystem check is done.
                        if cmd.cmd_line.len() > 1 && cmd.cmd_line[1].len() > 0 {
                            let mut name = String::from(&cmd.cmd_line[1]);
                            if name.as_bytes()[name.len() - 1] != b'/' {
                                name.push('/');
                            }
                            if name.as_bytes()[0] == b'/' {
                                env.insert(String::from("PWD"), String::from(&name));
                            } else {
                                let mut pwd;
                                match env.get("PWD") {
                                    None => {pwd = String::from("/")},
                                    Some(p) => {pwd = String::from(p)},
                                }
                                pwd.push_str(&name);
                                env.insert(String::from("PWD"), String::from(&pwd));
                            }
                        } else {
                            env.insert(String::from("PWD"), String::from("/"));
                        }
                        0
                    } else if cmd.cmd_line[0] == "exit" {
                        // Builtin `exit`: any argument other than "0" exits with 1.
                        if cmd.cmd_line.len() >= 2 {
                            let i = &cmd.cmd_line[1];
                            if i == "0" {
                                syscall::exit(0)
                            } else {
                                syscall::exit(1)
                            }
                        } else {
                            syscall::exit(0)
                        }
                    } else if let Some(name_list_raw) = env.get("PATH") {
                        // Try every $PATH entry (':'-separated) in the child.
                        let fd = syscall::open(&String::from("/dev/fifo"), io::OpenFlags::OWR | io::OpenFlags::ORD);
                        let id;
                        if cmd.cmd_bg { id = 0 } else { id = syscall::fork() };
                        if id == 0 {
                            if !cmd.cmd_bg {
                                syscall::dup2(io::STD_IN, fd);
                            }
                            syscall::close(fd);
                            let pwd;
                            if let Some(name) = env.get("PWD") {
                                pwd = String::from(name);
                            } else {
                                pwd = String::new();
                            }
                            do_redirects(&cmd.cmd_redirects, &pwd);
                            let name_list = String::from(name_list_raw);
                            for name_raw in name_list.split(":") {
                                let mut name = String::from(name_raw);
                                if name.as_bytes()[name.len() - 1] != b'/' {
                                    name.push('/');
                                }
                                for c in prog_name.bytes() {
                                    name.push(c as char);
                                }
                                let mut args = Vec::new();
                                for a in cmd.cmd_line.iter() {
                                    args.push(String::from(a))
                                }
                                args.push(String::from(&pwd));
                                // `run` only returns if this candidate failed.
                                run(&name, &args);
                            }
                            io::_print(&String::from("Program not found\n"));
                            syscall::exit(1)
                        } else {
                            await_end_and_kill(id, fd)
                        }
                    } else {
                        io::_print(&String::from("Variable PATH not defined\n"));
                        1
                    }
                }
            }
        }
        Command::Connection(cmd1, connect, cmd2) => {
            match connect {
                // NOTE(review): `Seq` runs cmd2 only when cmd1 succeeded and
                // `Or` only when it failed — i.e. `Seq` behaves like `&&`.
                // Confirm this matches the parser's intent for `;`.
                Connector::Seq => {
                    let output = exec(*cmd1, env);
                    if output == 0 {
                        exec(*cmd2, env)
                    } else {
                        output
                    }
                },
                // `And` runs both commands concurrently in two forked
                // children sharing one fifo for stdin forwarding.
                Connector::And => {
                    let fd = syscall::open(&String::from("/dev/fifo"), io::OpenFlags::ORD | io::OpenFlags::OWR);
                    let proc_1 = syscall::fork();
                    if proc_1 == 0 {
                        syscall::dup2(io::STD_IN, fd);
                        syscall::close(fd);
                        syscall::exit(exec(*cmd1, env))
                    } else {
                        let proc_2 = syscall::fork();
                        if proc_2 == 0 {
                            syscall::dup2(io::STD_IN, fd);
                            syscall::close(fd);
                            syscall::exit(exec(*cmd2, env))
                        } else {
                            syscall::close(fd);
                            await_end2_and_kill(proc_1, proc_2, fd)
                        }
                    }
                },
                Connector::Or => {
                    let output = exec(*cmd1, env);
                    if output != 0 {
                        exec(*cmd2, env)
                    } else {
                        output
                    }
                }
                // `a | b`: fd carries a's stdout to b's stdin; fd2 forwards
                // the shell's stdin to a.
                Connector::Pipe => {
                    let fd = syscall::open(&String::from("/dev/fifo"), io::OpenFlags::ORD | io::OpenFlags::OWR);
                    let fd2 = syscall::open(&String::from("/dev/fifo"), io::OpenFlags::ORD | io::OpenFlags::OWR);
                    let proc_1 = syscall::fork();
                    if proc_1 == 0 {
                        syscall::dup2(io::STD_OUT, fd);
                        syscall::close(fd);
                        syscall::dup2(io::STD_IN, fd2);
                        syscall::close(fd2);
                        syscall::exit(exec(*cmd1, env))
                    } else {
                        let proc_2 = syscall::fork();
                        if proc_2 == 0 {
                            syscall::close(fd2);
                            syscall::dup2(io::STD_IN, fd);
                            syscall::close(fd);
                            syscall::exit(exec(*cmd2, env))
                        } else {
                            syscall::close(fd);
                            await_end2_and_kill(proc_1, proc_2, fd2)
                        }
                    }
                }
            }
        }
        // The condition runs in a fork; the chosen branch runs in this process
        // so that environment changes in the branch persist.
        Command::If(cmd_if, cmd_then, cmd_else) => {
            let id = syscall::fork();
            let fd = syscall::open(&String::from("/dev/fifo"), io::OpenFlags::ORD | io::OpenFlags::OWR);
            if id == 0 {
                syscall::dup2(io::STD_IN, fd);
                syscall::close(fd);
                let v = exec(*cmd_if, env);
                syscall::exit(v)
            } else {
                let v = await_end_and_kill(id, fd);
                if v == 0 {
                    exec(*cmd_then, env)
                } else {
                    exec(*cmd_else, env)
                }
            }
        }
        // Not implemented
    }
}

// Block until either process ends, then wait for the other and return its status.
unsafe fn wait_end(proc_1: usize, proc_2: usize) -> usize {
    loop {
        let (i1, _) = syscall::listen_proc(proc_1);
        if i1 == proc_1 {
            return syscall::await_end(proc_2)
        }
        let (i1, _) = syscall::listen_proc(proc_2);
        if i1 == proc_2 {
            return syscall::await_end(proc_1)
        }
        syscall::sleep();
    }
}

// Apply `<`, `>` and `>>` redirections by opening the target file and
// dup2-ing it onto stdin/stdout. Relative paths are resolved against `pwd`.
unsafe fn do_redirects(redirects: &Vec<Redirect>, pwd: &String) {
    for r in redirects.iter() {
        let file_name;
        let fd_target;
        let rights;
        match r {
            Redirect::Input(s) => {
                file_name = s;
                fd_target = io::STD_IN;
                rights = io::OpenFlags::ORD;
            },
            Redirect::Output(s) => {
                file_name = s;
                fd_target = io::STD_OUT;
                rights = io::OpenFlags::OWR | io::OpenFlags::OCREAT;
            },
            Redirect::OutputAppend(s) => {
                file_name = s;
                fd_target = io::STD_OUT;
                rights = io::OpenFlags::OWR | io::OpenFlags::OCREAT | io::OpenFlags::OAPPEND;
                syscall::debug(0, 2);
            }
        };
        if file_name.len() > 0 {
            let mut file;
            if file_name.as_bytes()[0] == b'/' {
                file = String::from(file_name);
            } else {
                if file_name.len() > 1 && &file_name.as_bytes()[0..1] == "./".as_bytes() {
                    // NOTE(review): this compares ONE byte against the
                    // two-byte "./", so it can never be true and "./x" paths
                    // fall through to the plain-relative branch (keeping the
                    // "./"). The slice should presumably be [0..2].
                    file = String::from(pwd);
                    for b in file_name.bytes().skip(2) {
                        file.push(b as char);
                    }
                } else {
                    file = String::from(pwd);
                    for b in file_name.bytes() {
                        file.push(b as char);
                    }
                }
            }
            let fd = syscall::open(&file, rights);
            syscall::dup2(fd_target, fd);
            syscall::close(fd);
        } else {
            syscall::exit(1);
        }
    }
}

// Try to execute `path`: ELF images are exec'd directly; returns 1 (without
// exiting) when the file cannot be opened or is not runnable.
unsafe fn run(path: &String, args: &Vec<String>) -> usize {
    let fd = syscall::open(path, io::OpenFlags::OXCUTE);
    if fd == usize::MAX {
        1
    } else {
        let start = io::read_input(fd, 512);
        if start.len() == 0 {
            1
        } else if start.len() > 4 && &start[0..4] == "\x7FELF".as_bytes() {
            syscall::close(fd);
            syscall::exec(path, args)
        } else if start.len() > 4 && &start[0..2] == "#!/".as_bytes() {
            // NOTE(review): compares 2 bytes against the 3-byte "#!/" — this
            // can never be equal, so the shebang branch is unreachable.
            let mut name = String::from(start[2] as char);
            for i in 3..start.len() {
                if start[i] == b'\n' {
                    break
                } else {
                    name.push(start[i] as char);
                }
            }
            syscall::exec(&name, args)
        } else {
            io::_print(&String::from("Only ELF has been implemented\n"));
            1
        }
    }
}

// Forward stdin to the child via `fd` until it terminates; byte 12 (^L,
// treated as the kill key) requests killing the child. Returns its status.
unsafe fn await_end_and_kill(id: usize, fd: usize) -> usize {
    let mut data: [u8; 512] = [0; 512];
    loop {
        let size = syscall::read(io::STD_IN, &mut data as *mut u8, 512);
        let mut kill = false;
        for i in 0..size {
            if data[i] == 12 {
                kill = true;
                io::_print(&String::from("Should kill user\n"));
            }
        }
        syscall::write(fd, &data as *const u8, size);
        syscall::sleep();
        if kill {
            syscall::close(fd);
            // Grace period before the hard kill.
            for _ in 0..5 {
                syscall::sleep();
            }
            let (id2, v) = syscall::listen_proc(id);
            if id2 == id {
                return v;
            } else {
                syscall::kill(id);
                return syscall::await_end(id);
            }
        }
        let (id2, v) = syscall::listen_proc(id);
        if id2 == id {
            syscall::close(fd);
            return v;
        }
    }
}

// Same as `await_end_and_kill` but supervises two children; once one exits it
// degrades to supervising the survivor.
unsafe fn await_end2_and_kill(id1: usize, id2: usize, fd: usize) -> usize {
    let mut data: [u8; 512] = [0; 512];
    loop {
        let size = syscall::read(io::STD_IN, &mut data as *mut u8, 512);
        let mut kill = false;
        for i in 0..size {
            if data[i] == 12 {
                kill = true;
                io::_print(&String::from("Should kill user\n"));
            }
        }
        syscall::write(fd, &data as *const u8, size);
        syscall::sleep();
        if kill {
            syscall::close(fd);
            for _ in 0..5 {
                syscall::sleep();
            }
            let (id1bis, _) = syscall::listen_proc(id1);
            if id1bis == id1 {
                let (id2bis, v) = syscall::listen_proc(id2);
                if id2bis == id2 {
                    return v;
                } else {
                    syscall::kill(id2);
                    return syscall::await_end(id2);
                }
            } else {
                syscall::kill(id1);
                let (id2bis, _) = syscall::listen_proc(id2);
                if id2bis == id2 {
                    return syscall::await_end(id1);
                } else {
                    syscall::kill(id2);
                    return wait_end(id1, id2);
                }
            }
        }
        let (id1bis, _) = syscall::listen_proc(id1);
        if id1bis == id1 {
            return await_end_and_kill(id2, fd);
        }
        let (id2bis, _) = syscall::listen_proc(id2);
        if id2bis == id2 {
            return await_end_and_kill(id1, fd);
        }
    }
}
/********************************************** > File Name : build_tree.rs > Author : lunar > Email : lunar_ubuntu@qq.com > Created Time : Fri 15 Apr 2022 02:02:52 PM CST > Location : Shanghai > Copyright@ https://github.com/xiaoqixian **********************************************/ use std::rc::Rc; use std::cell::RefCell; // Definition for a binary tree node. #[derive(Debug, PartialEq, Eq)] pub struct TreeNode { pub val: i32, pub left: Option<Rc<RefCell<TreeNode>>>, pub right: Option<Rc<RefCell<TreeNode>>>, } impl TreeNode { #[inline] pub fn new(val: i32) -> Self { TreeNode { val, left: None, right: None } } } struct Solution; /* * preorder: [root, [left child tree], [right child tree]] * inorder: [[left child tree], root, [right child tree]] */ impl Solution { pub fn build_tree(preorder: Vec<i32>, inorder: Vec<i32>) -> Option<Rc<RefCell<TreeNode>>> { if preorder.is_empty() {return None;} Self::_build_tree(&preorder, &inorder, (0, preorder.len()-1), (0, inorder.len()-1)) } fn _build_tree(preorder: &Vec<i32>, inorder: &Vec<i32>, pre_bound: (usize, usize), in_bound: (usize, usize)) -> Option<Rc<RefCell<TreeNode>>> { let root = Rc::new(RefCell::new(TreeNode::new(preorder[pre_bound.0]))); if pre_bound.0 == pre_bound.1 {return Some(root);} let in_root_index = { let mut i = in_bound.0; let root_val = preorder[pre_bound.0]; while inorder[i] != root_val {i += 1;} i }; let mut left_tree_len: usize = 0; if in_root_index > in_bound.0 { left_tree_len = in_root_index - in_bound.0; (*root.borrow_mut()).left = Self::_build_tree(preorder, inorder, (pre_bound.0 + 1, pre_bound.0 + left_tree_len), (in_bound.0, in_root_index - 1)); } if in_root_index < in_bound.1 { let right_tree_len = in_bound.1 - in_root_index; (*root.borrow_mut()).right = Self::_build_tree(preorder, inorder, (pre_bound.0 + left_tree_len + 1, pre_bound.1), (in_root_index + 1, in_bound.1)); } Some(root) } }
// Copyright 2019 Red Hat, Inc. All Rights Reserved. // SPDX-License-Identifier: (BSD-3-Clause OR Apache-2.0) #[cfg(all(feature = "virtio-v4_14_0", not(feature = "virtio-v5_0_0")))] mod bindings_v4_14_0; #[cfg(feature = "virtio-v5_0_0")] mod bindings_v5_0_0; // Major hack to have a default version in case no feature is specified: // If no version is specified by using the features, just use the latest one // which currently is 5.0. #[cfg(all(not(feature = "virtio-v4_14_0"), not(feature = "virtio-v5_0_0")))] mod bindings_v5_0_0; pub mod bindings { #[cfg(all(feature = "virtio-v4_14_0", not(feature = "virtio-v5_0_0")))] pub use super::bindings_v4_14_0::*; #[cfg(feature = "virtio-v5_0_0")] pub use super::bindings_v5_0_0::*; #[cfg(all(not(feature = "virtio-v4_14_0"), not(feature = "virtio-v5_0_0")))] pub use super::bindings_v5_0_0::*; }
use anyhow::Result; fn main() -> Result<()> { // let door_key = 5764801; // let card_key = 17807724; // let door_key = 11562782; let card_key = 18108497; let mut door_loop = 0; let mut card_loop = 0; let mut found = 0; let mut v = 1; for loop_size in 1.. { v = round(v, 7); if v == door_key { println!("door loop {}", loop_size); door_loop = loop_size; found += 1 } else if v == card_key { println!("card loop {}", loop_size); card_loop = loop_size; found += 1 } if found == 2 { break } } dbg!(door_loop, card_loop); v = 1; for _ in 0..card_loop { v = round(v, door_key); } dbg!(v); Ok(()) } fn round(n: isize, sn: isize) -> isize { (n * sn) % 20201227 }
// father of all kinds of renderengine like opengl, dx12

use gfx;
use rand::{self, Rng};

//use super::types::*;
//use super::types::ColorFormat;
use render::types::ColorFormat;

gfx_defines!{
    vertex Vertex {
        position: [f32; 2] = "a_Position",
    }

    // color format: 0xRRGGBBAA
    vertex Instance {
        translate: [f32; 2] = "a_Translate",
        color: u32 = "a_Color",
    }

    constant Locals {
        scale: f32 = "u_Scale",
    }

    pipeline pipe {
        vertex: gfx::VertexBuffer<Vertex> = (),
        instance: gfx::InstanceBuffer<Instance> = (),
        scale: gfx::Global<f32> = "u_Scale",
        locals: gfx::ConstantBuffer<Locals> = "Locals",
        out: gfx::RenderTarget<ColorFormat> = "Target0",
    }
}

// Lay out instances_per_length^2 quads on a uniform grid (row-major, starting
// at the lower-left of clip space), each with a random color.
fn fill_instances(instances: &mut [Instance], instances_per_length: u32, size: f32) {
    let gap = 0.4 / (instances_per_length + 1) as f32;
    println!("gap: {}", gap);

    let begin = -1. + gap + (size / 2.);
    let mut translate = [begin, begin];
    let mut rng = rand::StdRng::new().unwrap();

    let length = instances_per_length as usize;
    for x in 0..length {
        for y in 0..length {
            let i = x * length + y;
            instances[i] = Instance {
                translate: translate,
                color: rng.next_u32(),
            };
            translate[1] += size + gap;
        }
        translate[1] = begin;
        translate[0] += size + gap;
    }
}

const MAX_INSTANCE_COUNT: usize = 2048;

/// Instanced-quad renderer: one shared quad mesh drawn `instance_count`
/// times with per-instance translation and color.
pub struct RenderEngine<R: gfx::Resources> {
    pso: gfx::PipelineState<R, pipe::Meta>,
    data: pipe::Data<R>,
    slice: gfx::Slice<R>,
    // CPU-visible staging buffer holding the instance data.
    upload: gfx::handle::Buffer<R, Instance>,
    uploading: bool, // TODO: not needed if we have the encoder everywhere
}

impl<R> RenderEngine<R>
    where R: gfx::Resources
{
    // Build the pipeline, generate the instance grid into an upload buffer,
    // and create the GPU-side vertex/instance/constant buffers. The actual
    // upload-to-GPU copy is deferred to the first `render` call.
    fn new<F>(factory: &mut F,
              color_format: gfx::handle::RenderTargetView<R, (gfx::format::R8_G8_B8_A8, gfx::format::Unorm)>)
              -> Self
        where F: gfx::Factory<R> + gfx::traits::FactoryExt<R>
    {
        let pso = factory.create_pipeline_simple(include_bytes!("../../artist/shader/instancing_150.glslv"),
                                                 include_bytes!("../../artist/shader/instancing_150.glslf"),
                                                 pipe::new())
            .unwrap();

        let instances_per_length: u32 = 32;
        println!("{} instances per length", instances_per_length);
        let instance_count = instances_per_length * instances_per_length;
        println!("{} instances", instance_count);
        assert!(instance_count as usize <= MAX_INSTANCE_COUNT);
        let size = 1.6 / instances_per_length as f32;
        println!("size: {}", size);

        let upload = factory.create_upload_buffer(instance_count as usize).unwrap();
        {
            // Scope the mapping so it is released before further factory use.
            let mut writer = factory.write_mapping(&upload).unwrap();
            fill_instances(&mut writer, instances_per_length, size);
        }

        let instances = factory.create_buffer(instance_count as usize,
                                              gfx::buffer::Role::Vertex,
                                              gfx::memory::Usage::Data,
                                              gfx::TRANSFER_DST)
            .unwrap();

        let (quad_vertices, mut slice) = factory.create_vertex_buffer_with_slice(&QUAD_VERTICES, &QUAD_INDICES[..]);
        slice.instances = Some((instance_count, 0));

        let locals = Locals { scale: size };

        RenderEngine {
            pso: pso,
            data: pipe::Data {
                vertex: quad_vertices,
                instance: instances,
                scale: size,
                locals: factory.create_buffer_immutable(&[locals],
                                                        gfx::buffer::Role::Constant,
                                                        gfx::Bind::empty())
                    .unwrap(),
                out: color_format,
            },
            slice: slice,
            upload: upload,
            uploading: true,
        }
    }

    fn render<C: gfx::CommandBuffer<R>>(&mut self, encoder: &mut gfx::Encoder<R, C>) {
        // First frame only: copy the staged instance data to the GPU buffer.
        if self.uploading {
            encoder.copy_buffer(&self.upload, &self.data.instance, 0, 0, self.upload.len())
                .unwrap();
            self.uploading = false;
        }
        encoder.clear(&self.data.out, [0.1, 0.2, 0.3, 1.0]);
        encoder.draw(&self.slice, &self.pso, &self.data);
    }
}

// TODO move to config
const QUAD_VERTICES: [Vertex; 4] = [Vertex { position: [-0.5, 0.5] },
                                    Vertex { position: [-0.5, -0.5] },
                                    Vertex { position: [0.5, -0.5] },
                                    Vertex { position: [0.5, 0.5] }];
const QUAD_INDICES: [u16; 6] = [0, 1, 2, 2, 3, 0];
use std::net::UdpSocket; use std::net::{SocketAddr, ToSocketAddrs}; use docopt::Docopt; const USAGE: &'static str = "\ Simle tool for receive UDP datagrams. Usage: zudp help zudp listen <hostport> zudp send <hostport> <data> "; const BUFFER_SIZE: usize = 2048; const MAX_PRINT_SIZE: usize = 256; const ALL_INTERFACER: &str = "0:0"; fn listen(addr: &SocketAddr) -> std::io::Result<()> { let socket = UdpSocket::bind(addr)?; loop { let mut buf = [0; BUFFER_SIZE]; let (size, src) = socket.recv_from(&mut buf)?; let print_size = core::cmp::min(size, MAX_PRINT_SIZE); match std::str::from_utf8(&buf[..print_size]) { Ok(data) => println!("{} {} {:?}", src, size, data), Err(_) => println!("{} {}", src, size), } } } fn send(addr: &SocketAddr, data: &str) -> std::io::Result<()> { let socket = UdpSocket::bind(ALL_INTERFACER)?; socket.send_to(data.as_bytes(), &addr)?; Ok(()) } fn main() -> std::io::Result<()> { match Docopt::new(USAGE).unwrap().parse() { Ok(args) => { if args.get_bool("help") { println!("{}", USAGE); } else if args.get_bool("listen") { let addr = args.get_str("<hostport>") .to_socket_addrs()? .next() .unwrap(); listen(&addr)?; } else if args.get_bool("send") { let addr = args.get_str("<hostport>") .to_socket_addrs()? .next() .unwrap(); send(&addr, args.get_str("<data>"))?; } else { panic!("{:?}", args); } }, Err(e) => e.exit(), }; Ok(()) }
extern crate dmbc; extern crate exonum; extern crate exonum_testkit; extern crate hyper; extern crate iron; extern crate iron_test; extern crate mount; extern crate serde_json; pub mod dmbc_testkit; use std::collections::HashMap; use dmbc_testkit::{DmbcTestApiBuilder, DmbcTestKitApi}; use exonum::crypto; use hyper::status::StatusCode; use dmbc::currency::api::fees::FeesResponseBody; use dmbc::currency::configuration::{Configuration, TransactionFees, TransactionPermissions}; use dmbc::currency::transactions::builders::transaction; #[test] fn fees_for_delete_assets() { let transaction_fee = 1000; let config_fees = TransactionFees::with_default_key(0, 0, transaction_fee, 0, 0, 0); let permissions = TransactionPermissions::default(); let testkit = DmbcTestApiBuilder::new() .with_configuration(Configuration::new(config_fees, permissions)) .create(); let api = testkit.api(); let meta_data = "asset"; let (public_key, secret_key) = crypto::gen_keypair(); let tx_delete_assets = transaction::Builder::new() .keypair(public_key, secret_key) .tx_del_assets() .add_asset(meta_data, 5) .seed(85) .build(); let (status, response) = api.post_fee(&tx_delete_assets); let mut expected = HashMap::new(); expected.insert(public_key, transaction_fee); assert_eq!(status, StatusCode::Ok); assert_eq!(response, Ok(Ok(FeesResponseBody { fees: expected }))); }
use crate::built_info;
use crate::provider::ClientLedger;
use directory_client::presence::providers::MixProviderPresence;
use directory_client::requests::presence_providers_post::PresenceMixProviderPoster;
use directory_client::DirectoryClient;
use log::{debug, error};
use std::time::Duration;
use tokio::runtime::Handle;
use tokio::task::JoinHandle;

/// Settings required to build a `Notifier`.
pub struct NotifierConfig {
    location: String,
    directory_server: String,
    mix_announce_host: String,
    clients_announce_host: String,
    pub_key_string: String,
    // Interval between two consecutive presence notifications.
    sending_delay: Duration,
}

impl NotifierConfig {
    pub fn new(
        location: String,
        directory_server: String,
        mix_announce_host: String,
        clients_announce_host: String,
        pub_key_string: String,
        sending_delay: Duration,
    ) -> Self {
        NotifierConfig {
            location,
            directory_server,
            mix_announce_host,
            clients_announce_host,
            pub_key_string,
            sending_delay,
        }
    }
}

/// Periodically announces this provider's presence (listeners, public key,
/// registered clients) to the directory server.
pub struct Notifier {
    location: String,
    net_client: directory_client::Client,
    client_ledger: ClientLedger,
    sending_delay: Duration,
    client_listener: String,
    mixnet_listener: String,
    pub_key_string: String,
}

impl Notifier {
    pub fn new(config: NotifierConfig, client_ledger: ClientLedger) -> Notifier {
        let directory_client_cfg = directory_client::Config {
            base_url: config.directory_server,
        };
        let net_client = directory_client::Client::new(directory_client_cfg);

        Notifier {
            client_ledger,
            net_client,
            location: config.location,
            client_listener: config.clients_announce_host,
            mixnet_listener: config.mix_announce_host,
            pub_key_string: config.pub_key_string,
            sending_delay: config.sending_delay,
        }
    }

    // Snapshot the current state into a presence report. `last_seen` is sent
    // as 0 — presumably filled in by the directory server; confirm.
    async fn make_presence(&self) -> MixProviderPresence {
        MixProviderPresence {
            location: self.location.clone(),
            client_listener: self.client_listener.clone(),
            mixnet_listener: self.mixnet_listener.clone(),
            pub_key: self.pub_key_string.clone(),
            registered_clients: self.client_ledger.current_clients().await,
            last_seen: 0,
            version: built_info::PKG_VERSION.to_string(),
        }
    }

    /// Post one presence report; failures are logged, never propagated.
    pub fn notify(&self, presence: MixProviderPresence) {
        match self.net_client.presence_providers_post.post(&presence) {
            Err(err) => error!("failed to send presence - {:?}", err),
            Ok(_) => debug!("sent presence information"),
        }
    }

    /// Spawn the endless notification loop onto `handle`, consuming `self`.
    /// The delay timer is started before the work so the loop ticks at a
    /// fixed period rather than period-plus-work-time.
    pub fn start(self, handle: &Handle) -> JoinHandle<()> {
        handle.spawn(async move {
            loop {
                // set the deadline in the future
                let sending_delay = tokio::time::delay_for(self.sending_delay);
                let presence = self.make_presence().await;
                self.notify(presence);
                // wait for however much is left
                sending_delay.await;
            }
        })
    }
}
use sha2::Digest; use serde::Deserialize; use serde::ser::{Serialize, Serializer, SerializeStruct}; use serde_json::json; use crate::transaction::*; /// ```rust /// use bc::block::*; /// let x = "hello".to_string(); /// assert_eq!( /// string2hash_string(&x), /// "9b71d224bd62f3785d96d46ad3ea3d73319bfbc2890caadae2dff72519673ca72323c3d99ba5c11d7c7acc6e14b8c5da0c4663475c2e5c3adef46f73bcdec043" /// ); /// ``` pub fn string2hash_string(string: &std::string::String)-> std::string::String { format!( "{:x}", sha2::Sha512::digest(string.as_bytes()) ) } #[derive(Debug, Clone, Deserialize)] pub struct Block { pub index: u64, pub time: i64, pub proof: i64, pub previous_hash: std::string::String, pub transactions: Vec<Transaction>, } impl Block { pub fn to_json(&self) -> std::string::String{ let a = json!({ "index": self.index, "previous_hash": self.previous_hash, "proof": self.proof, "time": self.time, "transactions": self.transactions, }).to_string(); a } } impl Serialize for Block { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { // 5 is the number of fields in the struct. let mut state = serializer.serialize_struct("Block", 5)?; state.serialize_field("index", &self.index)?; state.serialize_field("time", &self.time)?; state.serialize_field("proof", &self.proof)?; state.serialize_field("previous_hash", &self.previous_hash)?; state.serialize_field("transactions", &self.transactions)?; state.end() } }
// Withdraw instruction for the fund program: checks access control, then
// deducts `amount` from the fund and transfers SPL tokens from the fund
// vault to the withdrawer.

use crate::access_control;
use fund::{
    accounts::{vault::TokenVault, Fund},
    error::{FundError, FundErrorCode},
};
use serum_common::pack::Pack;
use solana_program::{
    account_info::{next_account_info, AccountInfo},
    msg, program,
    pubkey::Pubkey,
};
use spl_token::instruction;
use std::convert::Into;

/// Entry point for the withdraw instruction.
///
/// Expected account order: vault, fund, withdrawer (signer),
/// vault authority (PDA), SPL token program.
pub fn handler(
    program_id: &Pubkey,
    accounts: &[AccountInfo],
    amount: u64,
) -> Result<(), FundError> {
    msg!("handler: withdraw");

    let acc_infos = &mut accounts.iter();

    let vault_acc_info = next_account_info(acc_infos)?;
    let fund_acc_info = next_account_info(acc_infos)?;
    let withdraw_acc_info = next_account_info(acc_infos)?;
    let vault_authority_acc_info = next_account_info(acc_infos)?;
    let token_program_acc_info = next_account_info(acc_infos)?;

    // Validate accounts and withdrawal preconditions before touching state.
    access_control(AccessControlRequest {
        program_id,
        amount,
        fund_acc_info,
        withdraw_acc_info,
        vault_acc_info,
        vault_authority_acc_info,
    })?;

    // Mutate the fund account in place; the closure runs the actual
    // balance deduction and token transfer.
    Fund::unpack_mut(
        &mut fund_acc_info.try_borrow_mut_data()?,
        &mut |fund_acc: &mut Fund| {
            state_transistion(StateTransistionRequest {
                fund_acc,
                fund_acc_info,
                withdraw_acc_info,
                vault_acc_info,
                vault_authority_acc_info,
                token_program_acc_info,
                amount,
            })
            .map_err(Into::into)
        },
    )?;

    Ok(())
}

/// Validate the withdraw request: signer check, fund/vault ownership,
/// fund closed, and sufficient balance.
fn access_control(req: AccessControlRequest) -> Result<(), FundError> {
    let AccessControlRequest {
        program_id,
        amount,
        fund_acc_info,
        withdraw_acc_info,
        vault_acc_info,
        vault_authority_acc_info,
    } = req;

    // The withdrawer must have signed the transaction.
    if !withdraw_acc_info.is_signer {
        return Err(FundErrorCode::Unauthorized.into());
    }

    {
        let fund = access_control::fund(fund_acc_info, program_id)?;
        // Checks the vault/authority belong to this fund; the returned
        // value itself is unused, only the validation matters.
        let _ = access_control::vault_join(
            vault_acc_info,
            vault_authority_acc_info,
            fund_acc_info,
            program_id,
        )?;
        // Withdrawals are only allowed once the fund is closed.
        if fund.open {
            return Err(FundErrorCode::FundOpen.into());
        }
        if amount > fund.balance {
            return Err(FundErrorCode::InsufficientBalance)?;
        }
    }

    // NOTE(review): the Result of this check is discarded (`let _ =`, no
    // `?`), so a failure here does NOT abort the withdraw — confirm whether
    // this is intentional (advisory check) or a dropped error.
    let _ = access_control::withdraw(program_id, fund_acc_info, withdraw_acc_info);

    msg!("access control withdraw success");

    Ok(())
}

/// Apply the withdraw: deduct from the fund's tracked balance, then CPI a
/// token transfer from the vault to the withdrawer, signed by the vault
/// authority PDA.
fn state_transistion(req: StateTransistionRequest) -> Result<(), FundError> {
    let StateTransistionRequest {
        fund_acc,
        fund_acc_info,
        withdraw_acc_info,
        vault_acc_info,
        vault_authority_acc_info,
        token_program_acc_info,
        amount,
    } = req;

    {
        // Update the fund's internal accounting first.
        fund_acc.deduct(amount);

        // transfer from program account to owner of fund
        msg!("invoking token transfer");
        let withdraw_instruction = instruction::transfer(
            &spl_token::ID,
            vault_acc_info.key,
            withdraw_acc_info.key,
            &vault_authority_acc_info.key,
            &[],
            amount,
        )?;

        // PDA seeds let the program sign as the vault authority.
        let signer_seeds = TokenVault::signer_seeds(fund_acc_info.key, &fund_acc.nonce);
        program::invoke_signed(
            &withdraw_instruction,
            &[
                vault_acc_info.clone(),
                withdraw_acc_info.clone(),
                vault_authority_acc_info.clone(),
                token_program_acc_info.clone(),
            ],
            &[&signer_seeds],
        )?;
    }

    msg!("state transition withdraw success");

    Ok(())
}

/// Accounts and parameters needed by `access_control`.
struct AccessControlRequest<'a, 'b> {
    program_id: &'a Pubkey,
    amount: u64,
    fund_acc_info: &'a AccountInfo<'b>,
    withdraw_acc_info: &'a AccountInfo<'b>,
    vault_acc_info: &'a AccountInfo<'b>,
    vault_authority_acc_info: &'a AccountInfo<'b>,
}

/// Accounts and parameters needed by `state_transistion`.
struct StateTransistionRequest<'a, 'b, 'c> {
    fund_acc: &'c mut Fund,
    fund_acc_info: &'a AccountInfo<'b>,
    withdraw_acc_info: &'a AccountInfo<'b>,
    vault_acc_info: &'a AccountInfo<'b>,
    vault_authority_acc_info: &'a AccountInfo<'b>,
    token_program_acc_info: &'a AccountInfo<'b>,
    amount: u64,
}
/// Entry point: exercises `add_one` and prints the result.
/// (Fixed a stray double semicolon and removed dead commented-out code.)
fn main() {
    println!("{:?}", add_one(9));
}

/// Print a single integer using Debug formatting.
fn print_number(x: i64) {
    println!("{:?}", x);
}

/// Print the sum of two integers.
fn print_sum(x: i64, y: i64) {
    println!("{:?}", x + y);
}

/// Return `x + 1`.
fn add_one(x: i64) -> i64 {
    x + 1
}
// Linux sandbox runner: spawns a confined process (namespaces, cgroups,
// seccomp, ...), waits for it, and optionally writes a metadata report
// (time/memory/exit status) to a file.

mod cgroup;
mod config_linux;
mod error;
mod join;
mod mount;
mod namespace;
mod prctl;
mod process;
mod seccomp;
mod selinux;
mod systemd;
#[macro_use]
mod system;
mod user;

use crate::config;
use crate::process::ProcessStatus;
use error::Result;
use nix::sys::signal::Signal;
use nix::unistd::Pid;
use std::io::Write;
use std::time::{Duration, SystemTime};

/// A containerized Linux process and the resources attached to it.
#[derive(Debug)]
pub struct LinuxProcess {
    // name of container, used in cgroup creation.
    name: String,
    // Sandbox configuration (limits, mounts, ...).
    config: config::Config,
    // Command line to execute inside the sandbox.
    command: Vec<String>,
    // (from, to) fd remappings applied in the child.
    mapped_fds: Vec<(i32, i32)>,
    // fds kept open across the exec.
    preserved_fds: Vec<i32>,
    // Pid of the spawned process, once started.
    pid: Option<Pid>,
    rootless_euid: bool,
    // Cgroup backing this process, if one was created.
    cgroup: Option<cgroups_rs::Cgroup>,
    status: ProcessStatus,
}

/// Public entry point: run `commands` under `config`, optionally writing a
/// metadata report to `out_meta`. Thin wrapper over `run_impl`.
pub fn run(
    config: &config::Config,
    commands: Vec<&str>,
    out_meta: Option<String>,
    mapped_fds: Vec<(i32, i32)>,
    preserved_fds: Vec<i32>,
) -> Result<()> {
    run_impl(config, commands, out_meta, mapped_fds, preserved_fds)?;
    Ok(())
}

/// Start the sandboxed process, wait for it, kill any stragglers, and
/// collect the exit status + resource usage into `out_meta` (if given).
fn run_impl(
    config: &config::Config,
    commands: Vec<&str>,
    out_meta: Option<String>,
    mapped_fds: Vec<(i32, i32)>,
    preserved_fds: Vec<i32>,
) -> Result<()> {
    let mut process = LinuxProcess::new(
        // Cgroup name made unique per invocation via our pid + nanosecond
        // timestamp, so concurrent runs don't collide.
        format!(
            "runc/{}_{}",
            nix::unistd::getpid(),
            SystemTime::now()
                .duration_since(SystemTime::UNIX_EPOCH)
                .unwrap()
                .as_nanos()
        ),
        config.clone(),
        commands.iter().map(|s| s.to_string()).collect(),
        mapped_fds,
        preserved_fds,
    );
    process.start()?;
    // Wall-clock measurement brackets the wait, not the setup above.
    let start_time = SystemTime::now();
    let process_status = process.wait()?;
    let end_time = SystemTime::now();
    // clean all processes.
    process.kill(nix::sys::signal::Signal::SIGKILL)?;
    let exitcode: i32;
    let signal: Option<Signal>;
    match process_status {
        ProcessStatus::Exited(exitcode_) => {
            exitcode = exitcode_ as i32;
            signal = None
        }
        ProcessStatus::Signaled(signal_) => {
            // Shell convention: signal death maps to 128 + signal number.
            exitcode = signal_ as i32 + 128;
            signal = Some(signal_)
        }
        _ => {
            // wait() only returns Exited or Signaled terminal states.
            unreachable!()
        }
    }
    if let Some(out_meta_f) = &out_meta {
        let f = std::fs::File::create(&out_meta_f);
        if f.is_err() {
            return Err(error::Error::WriteOutMeta {
                path: out_meta_f.into(),
                error: f.unwrap_err(),
            });
        }
        collect_status(
            &mut f.unwrap(),
            config,
            &process,
            exitcode,
            signal,
            end_time.duration_since(start_time).unwrap(),
        );
    }
    Ok(())
}

/// Write the run report (memory, cpu/wall time, exit status, OOM/TLE
/// verdicts) to `out_meta` in a simple `key: value` line format.
///
/// NOTE(review): all `writeln!` Results are ignored — write failures to the
/// meta file are silently dropped; presumably best-effort reporting, but
/// confirm that is intended.
fn collect_status(
    out_meta: &mut std::fs::File,
    config: &config::Config,
    process: &LinuxProcess,
    exitcode: i32,
    signal: Option<Signal>,
    wall_time: Duration,
) {
    let mut cpu_time = 0.0;
    let mut oom = false;
    let mut tle = false;
    // Resource accounting comes from the cgroup controllers, if present.
    if let Some(cgroup) = &process.cgroup {
        if let Some(mem) = cgroup.controller_of::<cgroups_rs::memory::MemController>() {
            let memstat = mem.memory_stat();
            writeln!(out_meta, "memory-bytes: {}", memstat.max_usage_in_bytes);
            // Any oom_kill event marks the run as out-of-memory.
            if memstat.oom_control.oom_kill > 0 {
                oom = true;
            }
        }
        if let Some(cpuacct) = cgroup.controller_of::<cgroups_rs::cpuacct::CpuAcctController>() {
            let acct = cpuacct.cpuacct();
            // cpuacct values are in nanoseconds; report seconds.
            writeln!(out_meta, "user-time: {}", acct.usage_user as f64 / 1e9);
            writeln!(out_meta, "sys-time: {}", acct.usage_sys as f64 / 1e9);
            writeln!(out_meta, "cpu-time: {}", acct.usage as f64 / 1e9);
            cpu_time = acct.usage as f64 / 1e9;
            if let Some(limit) = config.limits.cpu_limit {
                if cpu_time > limit {
                    tle = true;
                }
            }
        }
    }
    writeln!(out_meta, "wall-time: {}", wall_time.as_secs_f64());
    writeln!(out_meta, "exit-code: {}", exitcode);
    if let Some(limit) = config.limits.wall_limit {
        if wall_time.as_secs_f64() > limit {
            tle = true;
        }
    } else if let Some(limit) = config.limits.cpu_limit {
        // wall_limit is assumed as triple of cpu_limit
        if wall_time.as_secs_f64() > 3.0 * limit {
            tle = true;
        }
    }
    if let Some(sig) = signal {
        writeln!(out_meta, "signal: {}", sig as libc::c_int);
    }
    if oom {
        writeln!(out_meta, "memory-result: oom");
    } else {
        writeln!(out_meta, "memory-result:");
    }
    if tle {
        writeln!(out_meta, "time-result: hard-timelimit");
    } else {
        writeln!(out_meta, "time-result:");
    }
}