text
stringlengths
8
4.13M
use std::iter::Peekable; use std::str::Chars; pub fn encode(source: &str) -> String { let mut encoded = String::new(); let mut rle_state = (None, 0); for char in source.chars() { match rle_state { (None, _) => rle_state = (Some(char), 1), (Some(rle_char), ref mut count) if char == rle_char => *count += 1, (Some(prev_char), prev_count) => { encoded += &format!( "{}{}", if prev_count > 1 { prev_count.to_string() } else { String::new() }, prev_char ); rle_state = (Some(char), 1); } } } if let (Some(char), count) = rle_state { encoded += &format!("{}{}", if count > 1 { count.to_string() } else { String::new() }, char); } encoded } pub fn decode(source: &str) -> String { let mut decoded = String::new(); let mut iter = source.chars().peekable(); // If the stdlib had a peek-friendly take_while, this would be a lot simpler... let parse_num = |iter: &mut Peekable<Chars>| { let mut num_str = String::new(); while let Some(char) = iter.peek() { if !char.is_ascii_digit() { break; } num_str.push(iter.next().unwrap()); } num_str.parse::<u32>().unwrap_or(0) }; while let Some(_) = iter.peek() { let num = std::cmp::max(parse_num(&mut iter), 1); let char = iter.next().unwrap(); decoded += &char.to_string().repeat(num as usize); } decoded }
/* Copyright 2019-2023 Didier Plaindoux Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #[macro_use] extern crate bencher; use bencher::{Bencher, black_box}; use celma_core::parser::and::AndOperation; use celma_core::parser::char::{digit, space}; use celma_core::parser::core::eos; use celma_core::parser::literal::delimited_string; use celma_core::parser::parser::Parse; use celma_core::parser::response::Response::{Reject, Success}; use celma_core::stream::position::Position; use celma_core::stream::stream::Stream; use celma_macro::parsec_rules; use celma_core::stream::char_stream::CharStream; #[derive(Clone)] pub enum JSON { Number(f64), String(String), Null, Bool(bool), Array(Vec<JSON>), Object(Vec<(String, JSON)>), } fn mk_vec<E>(a: Option<(E, Vec<E>)>) -> Vec<E> { if a.is_none() { Vec::new() } else { let (a, v) = a.unwrap(); let mut r = v; r.insert(0, a); r } } fn mk_string(a: Vec<char>) -> String { a.into_iter().collect::<String>() } fn mk_f64(a: Vec<char>) -> f64 { mk_string(a).parse().unwrap() } parsec_rules!( let json:{JSON} = S _=(array | object | string | null | boolean | number) S let number:{JSON} = f=NUMBER -> {JSON::Number(f)} let string:{JSON} = s=STRING -> {JSON::String(s)} let null:{JSON} = "null" -> {JSON::Null} let boolean:{JSON} = b=("true"|"false") -> {JSON::Bool(b=="true")} let array:{JSON} = ('[' S a=(_=json _=(',' _=json)*)? ']') -> {JSON::Array(mk_vec(a))} let object:{JSON} = ('{' S a=(_=attr _=(',' _=attr)*)? 
'}') -> {JSON::Object(mk_vec(a))} let attr:{(String,JSON)} = (S s=STRING S ":" j=json) ); parsec_rules!( let STRING:{String} = delimited_string let NUMBER:{f64} = c=#(INT ('.' NAT)? (('E'|'e') INT)?) -> {mk_f64(c)} let INT:{()} = ('-'|'+')? NAT -> {} let NAT:{()} = digit+ -> {} let S:{()} = space* -> {} ); // ------------------------------------------------------------------------------------------------- // JSon benchmarks // ------------------------------------------------------------------------------------------------- fn json_data(b: &mut Bencher) { let data = include_str!("data/data.json"); b.bytes = data.len() as u64; parse(b, data) } // ------------------------------------------------------------------------------------------------- fn json_canada_pest(b: &mut Bencher) { let data = include_str!("data/canada_pest.json"); b.bytes = data.len() as u64; parse(b, data) } // ------------------------------------------------------------------------------------------------- fn json_canada_nom(b: &mut Bencher) { let data = include_str!("data/canada_nom.json"); b.bytes = data.len() as u64; parse(b, data) } // ------------------------------------------------------------------------------------------------- fn json_apache(b: &mut Bencher) { let data = include_str!("data/apache_builds.json"); b.bytes = data.len() as u64; parse(b, data) } // ------------------------------------------------------------------------------------------------- fn parse(b: &mut Bencher, buffer: &str) { let stream = CharStream::new_with_position(buffer, <usize>::new()); b.iter(|| { let response = json().and_left(eos()).parse(black_box(stream.clone())); match response { Success(_, _, _) => (), Reject(s, _) => panic!("parse error at {:?}", s.position()), } }); } benchmark_group!( benches, json_data, json_canada_pest, json_canada_nom, json_apache ); benchmark_main!(benches);
// Copyright 2018 Vlad Yermakov // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use super::Natural; use std::convert::From; use std::fmt::{self, Display, Formatter}; use std::ops::{Add, Div, Mul, Neg, Rem, Sub}; #[derive(Ord, PartialOrd, Eq, PartialEq, Debug, Copy, Clone)] pub struct Integer(i128); // TODO: --//-- impl Integer { pub fn new<T: Into<i128>>(int: T) -> Integer { Integer(int.into()) } pub fn abs(&self) -> Natural { if self.0 >= 0 { Natural::new(self.0) } else { Natural::new(-self.0) } } pub fn value(&self) -> i128 { self.0 } } impl Display for Integer { fn fmt(&self, f: &mut Formatter) -> fmt::Result { self.0.fmt(f) } } impl<T: Into<i128>> From<T> for Integer { fn from(some: T) -> Integer { Integer::new(some.into()) } } impl Neg for Integer { type Output = Integer; fn neg(self) -> Integer { Integer::new(-self.0) } } #[macro_export] macro_rules! integer { ($int:tt) => { $crate::numbers::Integer::new($int as i128) }; (- $int:tt) => { $crate::numbers::Integer::new(-$int as i128) }; } impl_std_ops_for_tuple_struct! { Integer: @all } impl_std_ops_for_tuple_struct! { Integer: Rem(rem, %) } impl_default! { Integer, integer!(0) }
extern crate gl; extern crate half; extern crate nalgebra; extern crate sdl2; extern crate vec_2_10_10_10; #[macro_use] extern crate failure; #[macro_use] extern crate render_gl_derive; mod debug; pub mod render_gl; pub mod resources; mod triangle; mod grid; use triangle::Uniform; use failure::err_msg; use nalgebra as na; use crate::resources::Resources; use std::path::Path; use std::time::{Duration, SystemTime}; const WINDOW_WIDTH: u32 = 1600; const WINDOW_HEIGHT: u32 = 900; fn main() { if let Err(e) = run() { println!("{}", debug::failure_to_string(e)); } } fn run() -> Result<(), failure::Error> { // Window and OpenGL init let res = Resources::from_relative_exe_path(Path::new("res")).unwrap(); let sdl = sdl2::init().map_err(err_msg)?; let video_subsystem = sdl.video().map_err(err_msg)?; let gl_attr = video_subsystem.gl_attr(); gl_attr.set_context_profile(sdl2::video::GLProfile::Core); gl_attr.set_context_version(4, 1); let window = video_subsystem .window("Playground", WINDOW_WIDTH, WINDOW_HEIGHT) .opengl() .build()?; let _gl_context = window.gl_create_context().map_err(err_msg)?; let gl = gl::Gl::load_with(|s| { video_subsystem.gl_get_proc_address(s) as *const std::os::raw::c_void }); let mut viewport = render_gl::Viewport::for_window(WINDOW_WIDTH as i32, WINDOW_HEIGHT as i32); // Setting up screen let color_buffer = render_gl::ColorBuffer::from_color(na::Vector3::new(0.0, 0.0, 0.0)); let triangle = triangle::Triangle::new(&res, &gl)?; viewport.set_used(&gl); color_buffer.set_used(&gl); // Setting up for the loop let time0 = SystemTime::now(); // main loop let mut event_pump = sdl.event_pump().map_err(err_msg)?; 'main: loop { // Input for event in event_pump.poll_iter() { match event { sdl2::event::Event::Quit { .. } => break 'main, sdl2::event::Event::Window { win_event: sdl2::event::WindowEvent::Resized(w, h), .. 
} => { viewport.update_size(w, h); viewport.set_used(&gl); } _ => {} } } // updating let time_since_start: f32 = (time0.elapsed()?.as_micros() as f32) / 1_000_000f32; // uniforms triangle.set_uniform("iResolution".to_string(), Uniform::Float2(WINDOW_WIDTH as f32, WINDOW_HEIGHT as f32), &gl); triangle.set_uniform("iTime".to_string(), Uniform::Float(time_since_start), &gl); // rendering color_buffer.clear(&gl); triangle.render(&gl); window.gl_swap_window(); } Ok(()) }
use std::{ num::{ParseFloatError, ParseIntError}, }; /// Possible error types when classifying with one of the SVMs. #[derive(Debug)] pub enum Error { /// This can be emitted when creating a SVM from a [`ModelFile`](crate::ModelFile). For models generated by /// libSVM's `svm-train`, the most common reason this occurs is skipping attributes. /// All attributes must be in sequential order 0, 1, 2, ..., n. If they are not, this /// error will be emitted. For more details see the documentation provided in [`ModelFile`](crate::ModelFile). AttributesUnordered { /// The index process that was not a direct successor of the previous index. Can be used for /// easier debugging the model file. index: u32, /// The value of the given index. Can be used for debugging in conjunction with `index`. value: f32, /// The last index processed. If everything were alright, then `index` should equal /// `last_index + 1`. last_index: u32, }, /// This error can be emitted by [`Predict::predict_probability`](crate::Predict::predict_probability) in case the model loaded by /// [`ModelFile`](crate::ModelFile) was not trained with probability estimates (`svm-train -b 1`). NoProbabilities, /// Can be emitted by [`Predict::predict_probability`](crate::Predict::predict_probability) when predicting probabilities /// and the internal iteration limit was exceeded. IterationsExceeded, /// If the model does not have a `gamma` set this error may be raised. NoGamma, /// If the model does not have a `coef0` set this error may be raised. NoCoef0, /// If the model does not have a `degree` set this error may be raised. NoDegree, /// Wrapper for internal parsing error when unifiying error handling. Parsing(String), /// A required attribute was not found. 
MissingRequiredAttribute, } // impl<'a, T> From<Error<'a, T>> for Error { // fn from(_: Error<'a, T>) -> Self { // Error::ParsingError // } // } impl From<ParseFloatError> for Error { fn from(_e: ParseFloatError) -> Self { Error::Parsing("ParseFloatError".to_owned()) } } impl From<ParseIntError> for Error { fn from(_: ParseIntError) -> Self { Error::Parsing("ParseIntError".to_owned()) } }
use std::future::Future; use futures::future::Abortable; pub use futures::future::{Aborted, AbortHandle}; pub fn abortable<F, T>(future: F) -> ( impl Future<Output = Result<T, Aborted>>, AbortHandle ) where F: Future<Output = T> { let (abort_handle, abort_registration) = AbortHandle::new_pair(); let future = Abortable::new(future, abort_registration); (future, abort_handle) }
use crate::node_client; use crate::node_client::NodeClient; use crate::single_disk_farm::Handlers; use futures::channel::mpsc; use futures::StreamExt; use memmap2::Mmap; use parking_lot::RwLock; use std::io; use std::sync::Arc; use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::{PublicKey, SectorIndex, Solution}; use subspace_erasure_coding::ErasureCoding; use subspace_farmer_components::auditing::audit_sector; use subspace_farmer_components::proving; use subspace_farmer_components::sector::SectorMetadataChecksummed; use subspace_proof_of_space::Table; use subspace_rpc_primitives::{SlotInfo, SolutionResponse}; use thiserror::Error; use tracing::{debug, error, trace}; /// Self-imposed limit for number of solutions that farmer will not go over per challenge. /// /// Only useful for initial network bootstrapping where due to initial plot size there might be too /// many solutions. const SOLUTIONS_LIMIT: usize = 1; /// Errors that happen during farming #[derive(Debug, Error)] pub enum FarmingError { /// Failed to subscribe to slot info notifications #[error("Failed to substribe to slot info notifications: {error}")] FailedToSubscribeSlotInfo { /// Lower-level error error: node_client::Error, }, /// Failed to retrieve farmer info #[error("Failed to retrieve farmer info: {error}")] FailedToGetFarmerInfo { /// Lower-level error error: node_client::Error, }, /// Failed to create memory mapping for metadata #[error("Failed to create memory mapping for metadata: {error}")] FailedToMapMetadata { /// Lower-level error error: io::Error, }, /// Failed to submit solutions response #[error("Failed to submit solutions response: {error}")] FailedToSubmitSolutionsResponse { /// Lower-level error error: node_client::Error, }, /// Low-level proving error #[error("Low-level proving error: {0}")] LowLevelProving(#[from] proving::ProvingError), /// I/O error occurred #[error("I/O error: {0}")] Io(#[from] io::Error), } /// Starts farming process. 
/// /// NOTE: Returned future is async, but does blocking operations and should be running in dedicated /// thread. // False-positive, we do drop lock before .await #[allow(clippy::await_holding_lock)] #[allow(clippy::too_many_arguments)] pub(super) async fn farming<NC, PosTable>( public_key: PublicKey, reward_address: PublicKey, node_client: NC, sector_size: usize, plot_mmap: Mmap, sectors_metadata: Arc<RwLock<Vec<SectorMetadataChecksummed>>>, kzg: Kzg, erasure_coding: ErasureCoding, handlers: Arc<Handlers>, modifying_sector_index: Arc<RwLock<Option<SectorIndex>>>, mut slot_info_notifications: mpsc::Receiver<SlotInfo>, ) -> Result<(), FarmingError> where NC: NodeClient, PosTable: Table, { let mut table_generator = PosTable::generator(); while let Some(slot_info) = slot_info_notifications.next().await { let slot = slot_info.slot_number; let sectors_metadata = sectors_metadata.read(); let sector_count = sectors_metadata.len(); debug!(%slot, %sector_count, "Reading sectors"); let modifying_sector_guard = modifying_sector_index.read(); let maybe_sector_being_modified = modifying_sector_guard.as_ref().copied(); let mut solutions = Vec::<Solution<PublicKey, PublicKey>>::new(); for ((sector_index, sector_metadata), sector) in (0..) .zip(&*sectors_metadata) .zip(plot_mmap.chunks_exact(sector_size)) { if maybe_sector_being_modified == Some(sector_index) { // Skip sector that is being modified right now continue; } trace!(%slot, %sector_index, "Auditing sector"); let maybe_solution_candidates = audit_sector( &public_key, sector_index, &slot_info.global_challenge, slot_info.voting_solution_range, sector, sector_metadata, ); let Some(solution_candidates) = maybe_solution_candidates else { continue; }; for maybe_solution in solution_candidates.into_iter::<_, PosTable>( &reward_address, &kzg, &erasure_coding, &mut table_generator, )? 
{ let solution = match maybe_solution { Ok(solution) => solution, Err(error) => { error!(%slot, %sector_index, %error, "Failed to prove"); // Do not error completely on disk corruption or other // reasons why proving might fail continue; } }; debug!(%slot, %sector_index, "Solution found"); trace!(?solution, "Solution found"); solutions.push(solution); if solutions.len() >= SOLUTIONS_LIMIT { break; } } if solutions.len() >= SOLUTIONS_LIMIT { break; } // TODO: It is known that decoding is slow now and we'll only be // able to decode a single sector within time slot reliably, in the // future we may want allow more than one sector to be valid within // the same disk plot. if !solutions.is_empty() { break; } } drop(sectors_metadata); drop(modifying_sector_guard); let response = SolutionResponse { slot_number: slot_info.slot_number, solutions, }; handlers.solution.call_simple(&response); node_client .submit_solution_response(response) .await .map_err(|error| FarmingError::FailedToSubmitSolutionsResponse { error })?; } Ok(()) }
extern crate ctrlc; extern crate e2d2; extern crate tcp_proxy; extern crate time; #[macro_use] extern crate log; extern crate env_logger; extern crate ipnet; extern crate netfcts; extern crate separator; use std::sync::Arc; use std::sync::atomic::{AtomicBool, Ordering}; use std::time::Duration; use std::thread; use std::io::{Read, BufWriter, Write}; use std::net::{SocketAddr, TcpListener, TcpStream}; use std::sync::mpsc::RecvTimeoutError; use std::fs::File; use std::collections::HashMap; use std::vec::Vec; use std::error::Error; use std::mem; use std::process; use separator::Separatable; use e2d2::native::zcsi::*; use e2d2::interface::{ PmdPort, }; use e2d2::scheduler::StandaloneScheduler; use netfcts::tcp_common::{ReleaseCause, TcpStatistics, L234Data, TcpState}; use netfcts::system::{get_mac_from_ifname}; use netfcts::io::{ print_tcp_counters, print_rx_tx_counters}; use netfcts::conrecord::{HasTcpState, ConRecord}; use netfcts::{RunTime, Store64}; use tcp_proxy::{ProxyConnection, Extension, ProxyMode, Configuration}; use tcp_proxy::{setup_pipes_delayed_proxy}; use netfcts::comm::{MessageFrom, MessageTo}; #[test] fn delayed_binding_proxy() { // cannot directly read toml file from command line, as cargo test owns it. Thus we take a detour and read it from a file. 
const INDIRECTION_FILE: &str = "./tests/toml_file.txt"; let mut run_time: RunTime<Configuration, Store64<Extension>> = match RunTime::init_indirectly(INDIRECTION_FILE) { Ok(run_time) => run_time, Err(err) => panic!("failed to initialize RunTime {}", err), }; // setup flowdirector for physical ports: run_time.setup_flowdirector().expect("failed to setup flowdirector"); let run_configuration = run_time.run_configuration.clone(); let configuration = &run_configuration.engine_configuration; if run_configuration.engine_configuration.test_size.is_none() { error!( "missing parameter 'test_size' in configuration file {}", run_time.toml_filename() ); process::exit(1); }; let running = Arc::new(AtomicBool::new(true)); let r = running.clone(); ctrlc::set_handler(move || { info!("received SIGINT or SIGTERM"); r.store(false, Ordering::SeqCst); }) .expect("error setting Ctrl-C handler"); info!("Testing client to server connections of ProxyEngine .."); let l234data: Vec<L234Data> = run_configuration .engine_configuration .targets .iter() .enumerate() .map(|(i, srv_cfg)| L234Data { mac: srv_cfg .mac .unwrap_or_else(|| get_mac_from_ifname(srv_cfg.linux_if.as_ref().unwrap()).unwrap()), ip: u32::from(srv_cfg.ip), port: srv_cfg.port, server_id: srv_cfg.id.clone(), index: i, }) .collect(); let configuration_cloned = configuration.clone(); let l234data_clone = l234data.clone(); // this is the closure, which selects the target server to use for a new TCP connection let f_by_payload = move |c: &mut ProxyConnection| { let s = String::from_utf8(c.payload_packet.as_ref().unwrap().get_payload(2).to_vec()).unwrap(); // read first item in string and convert to usize: let stars: usize = s.split(" ").next().unwrap().parse().unwrap(); let remainder = stars % l234data_clone.len(); c.set_server_index(remainder as u8); debug!("selecting {}", configuration_cloned.targets[remainder].id); }; let no_servers = l234data.len(); let mut last_server: u8 = 0; let _f_round_robbin = move |c: &mut 
ProxyConnection| { if (last_server as usize) < no_servers - 1 { last_server += 1; } else { last_server = 0; } c.set_server_index(last_server); debug!("round robin select {}", last_server); }; // this is the closure, which may modify the payload of client to server packets in a TCP connection let f_process_payload_c_s = |_c: &mut ProxyConnection, _payload: &mut [u8], _tailroom: usize| { /* if let IResult::Done(_, c_tag) = parse_tag(payload) { let userdata: &mut MyData = &mut c.userdata .as_mut() .unwrap() .mut_userdata() .downcast_mut() .unwrap(); userdata.c2s_count += payload.len(); debug!( "c->s (tailroom { }, {:?}): {:?}", tailroom, userdata, c_tag, ); } unsafe { let payload_sz = payload.len(); } let p_payload= payload[0] as *mut u8; process_payload(p_payload, payload_sz, tailroom); } */ }; run_time.start_schedulers().expect("cannot start schedulers"); if *run_configuration .engine_configuration .engine .mode .as_ref() .unwrap_or(&ProxyMode::Delayed) == ProxyMode::Delayed { let run_configuration_cloned = run_configuration.clone(); run_time .install_pipeline_on_cores(Box::new( move |core: i32, pmd_ports: HashMap<String, Arc<PmdPort>>, s: &mut StandaloneScheduler| { setup_pipes_delayed_proxy( core, pmd_ports, s, run_configuration_cloned.clone(), l234data.clone(), f_by_payload.clone(), f_process_payload_c_s.clone(), ); }, )) .expect("cannot install pipelines");; } else { // simple proxy error!("simple proxy still not implemented"); } let cores = run_time.context().unwrap().active_cores.clone(); let associated_ports: Vec<_> = run_time .context() .unwrap() .ports .values() .filter(|p| p.is_physical() && p.kni_name().is_some()) .map(|p| &run_time.context().unwrap().ports[p.kni_name().as_ref().unwrap().clone()]) .collect(); let proxy_addr = ( associated_ports[0] .net_spec() .as_ref() .unwrap() .ip_net .as_ref() .unwrap() .addr(), configuration.engine.port, ); // start the run_time receive thread run_time.start(); let (mtx, reply_mrx) = 
run_time.get_main_channel().expect("cannot get main channel"); mtx.send(MessageFrom::StartEngine).unwrap(); thread::sleep(Duration::from_millis(2000 as u64)); debug!( "Connection record sizes = {} + {} + {}", mem::size_of::<ProxyConnection>(), mem::size_of::<ConRecord>(), mem::size_of::<Extension>() ); debug!("before run: available mbufs in memory pool= {:6}", unsafe { mbuf_avail_count() }); // give threads some time to do initialization work thread::sleep(Duration::from_millis(1000 as u64)); // set up servers for server in configuration.targets.clone() { let target_port = server.port; // moved into thread let target_ip = server.ip; let id = server.id; thread::spawn(move || match TcpListener::bind((target_ip, target_port)) { Ok(listener1) => { debug!("bound server {} to {}:{}", id, target_ip, target_port); for stream in listener1.incoming() { let mut stream = stream.unwrap(); let mut buf = [0u8; 256]; stream.read(&mut buf[..]).unwrap(); debug!("server {} received: {}", id, String::from_utf8(buf.to_vec()).unwrap()); stream .write(&format!("Thank You from {}", id).to_string().into_bytes()) .unwrap(); } } _ => { panic!("failed to bind server {} to {}:{}", id, target_ip, target_port); } }); } thread::sleep(Duration::from_millis(500 as u64)); // wait for the servers if log_enabled!(log::Level::Debug) { unsafe { fdir_get_infos(1u16); } } // emulate clients let timeout = Duration::from_millis(2000 as u64); for ntry in 0..configuration.test_size.unwrap() { match TcpStream::connect_timeout(&SocketAddr::from(proxy_addr), timeout) { Ok(mut stream) => { debug!("test connection {}: TCP connect to proxy successful", ntry); stream.set_write_timeout(Some(timeout)).unwrap(); stream.set_read_timeout(Some(timeout)).unwrap(); match stream.write(&format!("{} stars", ntry).to_string().into_bytes()) { Ok(_) => { debug!("successfully send {} stars", ntry); let mut buf = [0u8; 256]; match stream.read(&mut buf[..]) { Ok(_) => debug!("on try {} we received {}", ntry, 
String::from_utf8(buf.to_vec()).unwrap()), _ => { panic!("timeout on connection {} while waiting for answer", ntry); } }; } _ => { panic!("error when writing to test connection {}", ntry); } } } _ => { panic!("test connection {}: 3-way handshake with proxy failed", ntry); } } } thread::sleep(Duration::from_millis(200)); // Sleep for a bit mtx.send(MessageFrom::PrintPerformance(cores)).unwrap(); thread::sleep(Duration::from_millis(1000 as u64)); mtx.send(MessageFrom::FetchCounter).unwrap(); if configuration.engine.detailed_records.unwrap_or(false) { mtx.send(MessageFrom::FetchCRecords).unwrap(); } let mut tcp_counters_c = HashMap::new(); let mut tcp_counters_s = HashMap::new(); let mut con_records = HashMap::new(); loop { match reply_mrx.recv_timeout(Duration::from_millis(1000)) { Ok(MessageTo::Counter(pipeline_id, tcp_counter_c, tcp_counter_s, rx_tx_stats)) => { print_tcp_counters(&pipeline_id, &tcp_counter_c, &tcp_counter_s); if rx_tx_stats.is_some() { print_rx_tx_counters(&pipeline_id, &rx_tx_stats.unwrap()); } tcp_counters_c.insert(pipeline_id.clone(), tcp_counter_c); tcp_counters_s.insert(pipeline_id, tcp_counter_s); } Ok(MessageTo::CRecords(pipeline_id, Some(recv_con_records), _)) => { debug!("{}: received {} CRecords", pipeline_id, recv_con_records.len(),); con_records.insert(pipeline_id, recv_con_records); } Ok(_m) => error!("illegal MessageTo received from reply_to_main channel"), Err(RecvTimeoutError::Timeout) => { break; } Err(e) => { error!("error receiving from reply_to_main channel (reply_mrx): {}", e); break; } } } info!("after run: available mbufs in memory pool= {:6}", unsafe { mbuf_avail_count() }); println!("\nTask Performance Data:\n"); if configuration.engine.detailed_records.unwrap_or(false) { let mut completed_count_c = 0; let mut completed_count_s = 0; for (_p, con_recs) in &con_records { for c in con_recs.iter_0() { if (c.release_cause() == ReleaseCause::PassiveClose || c.release_cause() == ReleaseCause::ActiveClose) && c.last_state() == 
TcpState::Closed { completed_count_c += 1 }; } for c in con_recs.iter_1() { if (c.release_cause() == ReleaseCause::PassiveClose || c.release_cause() == ReleaseCause::ActiveClose) && c.last_state() == TcpState::Closed { completed_count_s += 1 }; } } println!("\ncompleted connections c/s: {}/{}\n", completed_count_c, completed_count_s); // write connection records into file let file = match File::create("c_records.txt") { Err(why) => panic!("couldn't create c_records.txt: {}", why.description()), Ok(file) => file, }; let mut f = BufWriter::new(file); for (p, c_records) in con_records { f.write_all(format!("Pipeline {}:\n", p).as_bytes()) .expect("cannot write c_records"); if c_records.len() > 0 { let mut completed_count = 0; let mut min = c_records.iter_0().last().unwrap().clone(); let mut max = min.clone(); c_records.iter().enumerate().for_each(|(i, (c, e))| { let line = format!("{:6}: {}\n {}\n", i, c, e); f.write_all(line.as_bytes()).expect("cannot write c_records"); if (c.release_cause() == ReleaseCause::PassiveClose || c.release_cause() == ReleaseCause::ActiveClose) && c.states().last().unwrap() == &TcpState::Closed { completed_count += 1 } if c.get_first_stamp().unwrap_or(u64::max_value()) < min.get_first_stamp().unwrap_or(u64::max_value()) { min = c.clone() } if c.get_last_stamp().unwrap_or(0) > max.get_last_stamp().unwrap_or(0) { max = c.clone() } if i == (c_records.len() - 1) && min.get_first_stamp().is_some() && max.get_last_stamp().is_some() { let total = max.get_last_stamp().unwrap() - min.get_first_stamp().unwrap(); info!( "total used cycles= {}, per connection = {}", total.separated_string(), (total / (i as u64 + 1)).separated_string() ); } }); assert_eq!( completed_count, tcp_counters_s.get(&p).unwrap()[TcpStatistics::SentSyn] + tcp_counters_c.get(&p).unwrap()[TcpStatistics::SentSyn] ); } } f.flush().expect("cannot flush BufWriter"); assert_eq!(configuration.test_size.unwrap(), completed_count_c); assert_eq!(configuration.test_size.unwrap(), 
completed_count_s); } for (p, counters) in tcp_counters_s { assert_eq!(counters[TcpStatistics::SentSyn], counters[TcpStatistics::SentSynAck2]); assert_eq!(counters[TcpStatistics::SentSynAck2], counters[TcpStatistics::RecvSynAck]); assert_eq!( counters[TcpStatistics::RecvFin] + counters[TcpStatistics::RecvFinPssv], tcp_counters_c.get(&p).unwrap()[TcpStatistics::RecvFinPssv] + tcp_counters_c.get(&p).unwrap()[TcpStatistics::RecvFin] ); assert!( tcp_counters_c.get(&p).unwrap()[TcpStatistics::SentFin] + tcp_counters_c.get(&p).unwrap()[TcpStatistics::SentFinPssv] <= tcp_counters_c.get(&p).unwrap()[TcpStatistics::RecvAck4Fin] ); assert!( counters[TcpStatistics::SentFin] + counters[TcpStatistics::SentFinPssv] <= counters[TcpStatistics::RecvAck4Fin] ); assert_eq!(counters[TcpStatistics::SentSyn], counters[TcpStatistics::SentPayload]); assert_eq!( tcp_counters_c.get(&p).unwrap()[TcpStatistics::RecvSyn], tcp_counters_c.get(&p).unwrap()[TcpStatistics::RecvPayload] ); } mtx.send(MessageFrom::Exit).unwrap(); thread::sleep(Duration::from_millis(2000)); info!("terminating ProxyEngine ..."); println!("\nPASSED\n"); std::process::exit(0); }
struct Solution; const LETTERS: [&[u8]; 8] = [ b"abc", b"def", b"ghi", b"jkl", b"mno", b"pqrs", b"tuv", b"wxyz", ]; const LETTER_BASE: u8 = '2' as u8; impl Solution { pub fn letter_combinations(digits: String) -> Vec<String> { let mut ans: Vec<String> = Vec::new(); if digits.len() > 0 { Self::dfs(&digits.as_bytes(), 0, String::new(), &mut ans); } ans } fn dfs(digits: &[u8], next_idx: usize, prefix: String, ans: &mut Vec<String>) { if next_idx == digits.len() { ans.push(prefix); return; } for c in LETTERS[(digits[next_idx] - LETTER_BASE) as usize] { let mut new_prefix = prefix.clone(); new_prefix.push(*c as char); Self::dfs(digits, next_idx + 1, new_prefix, ans) } } } #[cfg(test)] mod test { use super::*; #[test] fn test_letter_combinations() { let mut ans = Solution::letter_combinations(String::from("23")); ans.sort(); let mut want: Vec<String> = vec!["ad", "ae", "af", "bd", "be", "bf", "cd", "ce", "cf"] .iter() .map(|s| String::from(*s)) .collect(); want.sort(); assert_eq!(ans, want); } #[test] fn test_letter_combinations_empty_input() { let ans = Solution::letter_combinations(String::new()); assert_eq!(ans, Vec::<String>::new()); } }
use bc::block::{Block, LightBlock, NewBlock};
use bc::{BlockChain, BLOCKCHAIN};
use colored::*;
use ctx::{remote_context, CONTEXT, RemoteContext};
use grpcio::{ChannelBuilder, EnvBuilder, CallOption, Error};
use proto::byzan::{BlockIdx, BlockTill};
use proto::byzan_grpc::BlockChainClient;
use serde_json;
use std::sync::Arc;
use std::time::Duration;

/// Kind of peer request. Only block announcements (`New`) exist so far.
pub enum RequestType {
    New = 0,
}

/// Wire format of a peer request: the request kind, the sender's network
/// context (host/port to call back on), and the announced head block.
#[derive(Serialize, Deserialize)]
pub struct Request {
    pub req_type: u32,
    pub context: RemoteContext,
    pub block: LightBlock,
}

/// Serializes a request for `block` into its JSON wire representation.
/// `unwrap` treats serialization failure as unreachable (all fields derive
/// `Serialize`).
pub fn payload(req_type: RequestType, block: LightBlock) -> String {
    let req = create_req(req_type, block);
    serde_json::to_string(&req).unwrap()
}

/// Chain synchronization against the peer described in `req`.
///
/// If the peer's chain is longer than ours — or equally long with a different
/// head hash (a fork at the same height) — walk backwards to the most recent
/// common ancestor, fetch the peer's suffix, and splice it onto the local
/// chain. Returns the new local head (if any blocks were imported), `Ok(None)`
/// when no sync was needed, or an error string from any RPC/local lookup.
pub fn negotiate(req: &Request) -> Result<Option<LightBlock>, String> {
    let req_hash = req.block.self_hash.clone();
    // Chain length implied by the announced block: index + 1.
    let req_len = req.block.idx + 1;
    let (blockchain_hash, blockchain_len) = try!(local_bc_last());
    if req_len > blockchain_len as u32
        || (req_len == blockchain_len as u32 && req_hash != blockchain_hash)
    {
        // Scan from our head downwards for the most recent common ancestor.
        for idx in (0..blockchain_len).rev() {
            let b = try!(local_b_by_idx(idx));
            let r = try!(remote_b_by_idx(req, idx));
            if b.self_hash == r.self_hash {
                // Fetch everything the peer has after the common ancestor.
                let tb = try!(till(&req.context, idx + 1));
                if tb.len() > 0 {
                    let mut imported = tb.len();
                    //
                    // blockchain's lock is acquired from here
                    //
                    let mut blockchain = BLOCKCHAIN.lock().unwrap();
                    // Drop our local suffix past the ancestor, but keep it as
                    // "new block" candidates so its payload can be re-queued.
                    let c = blockchain.cut(idx + 1);
                    let nb: Vec<NewBlock> = c.iter().map(|b| NewBlock::from(b.clone())).collect();
                    for t in tb {
                        if let Err(e) = blockchain.push_block(t) {
                            warn!("[push] the block is ignored for the reason: {}", e);
                            imported -= 1;
                        }
                    }
                    for n in nb {
                        if let Err(_) = blockchain.push_new_block(n) {
                            // If one block is ignored, everything after it can
                            // probably be ignored as well.
                            // warn!("[push new] the block is ignored for the reason: {}", e);
                            imported -= 1;
                        }
                    }
                    // NOTE(review): `imported` mixes decrements from both
                    // loops, so the logged count can undercount — confirm
                    // whether that is intended.
                    let local = local().unwrap();
                    info!(
                        "---> [{}:{}] imported {} blocks",
                        local.0,
                        local.1,
                        imported.to_string().cyan().bold()
                    );
                    let lb: Option<LightBlock> = match blockchain.last() {
                        Some(b) => Some(b.into()),
                        None => None,
                    };
                    return Ok(lb);
                    //
                    // blockchain's lock is acquired till here
                    //
                }
                // Common ancestor found but peer sent nothing newer: give up.
                break;
            }
        }
    }
    Ok(None)
}

/// Builds a `Request` stamped with this node's own network context.
fn create_req(req_type: RequestType, block: LightBlock) -> Request {
    Request {
        req_type: req_type as u32,
        context: remote_context(),
        block: block,
    }
}

/// Opens a gRPC client to the peer described by `context`.
/// NOTE(review): a fresh channel + environment is built per call; consider
/// pooling if RPC volume grows.
fn client(context: &RemoteContext) -> BlockChainClient {
    let remote = format!("{}:{}", &context.bind_host, context.bind_port);
    let env = Arc::new(EnvBuilder::new().build());
    let ch = ChannelBuilder::new(env).connect(&remote);
    let client = BlockChainClient::new(ch);
    client
}

/// Call options shared by all peer RPCs: a 500 ms deadline.
fn opt() -> CallOption {
    let opt = CallOption::default();
    opt.timeout(Duration::from_millis(500))
}

/// This node's own bind address as (host, port).
fn local() -> Result<(String, u16), String> {
    let ctx = CONTEXT.read().unwrap();
    Ok((ctx.bind_host.clone(), ctx.bind_port))
}

/// Hash and length (head index + 1) of the local chain.
/// NOTE(review): `last().unwrap()` panics on an empty chain — assumes a
/// genesis block always exists.
fn local_bc_last() -> Result<(String, u32), String> {
    let bc = BLOCKCHAIN.lock().unwrap().last().unwrap();
    Ok((bc.self_hash.clone(), bc.idx + 1 as u32))
}

/// Local block lookup by index; errors if the index is absent.
fn local_b_by_idx(idx: u32) -> Result<Block, String> {
    match BLOCKCHAIN.lock().unwrap().get_by_idx(idx) {
        Some(b) => {
            return Ok(b);
        }
        None => {
            return Err(format!("failed to get block at {} on localhost", idx));
        }
    }
}

/// Remote block lookup by index on the peer that sent `req`.
fn remote_b_by_idx(req: &Request, idx: u32) -> Result<Block, String> {
    let mut b_idx = BlockIdx::new();
    b_idx.set_idx(idx);
    match client(&req.context).get_by_idx_opt(&b_idx, opt()) {
        Ok(b) => Ok(Block::from(b.get_block().clone())),
        Err(e) => {
            return Err(handle_error(&format!("failed to get block at {}", idx), &req.context, e));
        },
    }
}

/// Fetches every block from index `idx` (inclusive) to the peer's head.
fn till(context: &RemoteContext, idx: u32) -> Result<Vec<Block>, String> {
    let mut block_till = BlockTill::new();
    block_till.set_first(idx as u32);
    match client(&context).till_opt(&block_till, opt()) {
        Ok(blocks) => {
            let blocks: Vec<Block> = blocks
                .get_blocks()
                .to_vec()
                .iter()
                .map(|b| Block::from(b.clone()))
                .collect();
            return Ok(blocks);
        }
        Err(e) => {
            return Err(handle_error(&format!("failed to exec till {}", idx), &context, e));
        }
    }
}

/// Formats a gRPC failure into a human-readable error string, extracting the
/// RPC status detail when one is available.
fn handle_error(s: &String, context: &RemoteContext, e: Error) -> String {
    let detail = match e {
        Error::RpcFailure(s) => {
            match s.details {
                Some(d) => d,
                None => String::from("unknown"),
            }
        },
        _ => String::from("unknown")
    };
    format!("{} on {}:{} ({})", s, context.bind_host, context.bind_port, detail)
}
extern crate libc;

use prodbg::*;

// Experimental FFI layer mirroring the C plugin ABI of the prodbg host.
// NOTE(review): `#[repr(C)]` on a *module* has no effect (repr applies to
// types) — presumably intended for the structs inside; confirm and remove.
#[repr(C)]
pub mod prodbg {
    use std::mem::transmute;
    use libc::*;

    // C-side reader vtable: opaque state plus one callback.
    pub struct CPDReaderAPI {
        private_data: *mut c_void,
        read_u8: extern fn(data: *mut c_void),
    }

    // Safe-ish Rust handle over the C reader vtable.
    pub struct Reader {
        c_reader_api: *mut CPDReaderAPI,
    }

    impl Reader {
        // Forwards to the C callback, passing the C side's own private data.
        pub fn read_u8(&self) {
            unsafe {
                ((*self.c_reader_api).read_u8)((*self.c_reader_api).private_data)
            }
        }
    }

    // Contract a Rust debugger backend must implement.
    pub trait Backend {
        fn new() -> Self;
        fn update(&mut self);
        //fn update(&mut self, reader: &Reader);
    }

    // Function table handed to the C host; one entry per lifecycle hook.
    #[repr(C)]
    pub struct CBackendCallbacks {
        pub create_instance: fn() -> *mut c_void,
        pub destroy_instance: fn(*mut c_void),
        pub update: fn(*mut c_void),
    }

    // Boxes a fresh backend and leaks it as an opaque pointer for the C side.
    // NOTE(review): relies on transmute of Box<T> to *mut c_void — pairs with
    // destroy_instance below; confirm no other code frees this pointer.
    pub fn create_instance<T: Backend>() -> *mut c_void {
        let instance = unsafe { transmute(Box::new(T::new())) };
        println!("Lets create instance!");
        instance
    }

    // Reclaims the Box created above; dropping it frees the backend.
    pub fn destroy_instance<T: Backend>(ptr: *mut c_void) {
        let instance: Box<T> = unsafe{ transmute(ptr) };
        // implicitly dropped
    }

    // Reborrows the opaque pointer as the concrete backend and ticks it.
    pub fn update_instance<T: Backend>(ptr: *mut c_void) {
        let backend: &mut T = unsafe { &mut *(ptr as *mut T) };
        backend.update()
    }

    // Host-provided registration hook plus its private data.
    pub struct PluginHandler {
        pub private_data: *mut c_void,
        pub c_register_plugin: extern fn(plugin: *mut c_void, priv_data: *mut c_void),
    }

    impl PluginHandler {
        // Hands the callback table to the host via the registration callback.
        fn register_plugin(&self, plugin: &mut CBackendCallbacks) {
            unsafe {
                (self.c_register_plugin)(transmute(plugin), (self.private_data));
            }
        }
    }
}

// NOTE(review): this empty `init_plugin` and the `pub fn init_plugin` further
// down share one name at module scope — that is a duplicate-definition error;
// one of the two presumably should be removed or renamed.
extern fn init_plugin(plugin_handler: &mut prodbg::PluginHandler) {}

// C entry point: wraps the host's registration callback in a PluginHandler
// and delegates to the Rust-side init.
#[no_mangle]
pub extern fn InitPlugin(cb: extern fn(plugin: *mut c_void, data: *mut c_void), priv_data: *mut c_void) {
    let mut plugin_handler = prodbg::PluginHandler { private_data : priv_data, c_register_plugin : cb };
    init_plugin(&mut plugin_handler);
    //cb(priv_data);
}

// Builds a CBackendCallbacks table wired to the generic shims for type $x.
macro_rules! define_plugin {
    ($x:ty) => {
        {
            let mut plugin = prodbg::CBackendCallbacks {
                create_instance: prodbg::create_instance::<$x>,
                destroy_instance: prodbg::destroy_instance::<$x>,
                update: prodbg::update_instance::<$x>
            };
            plugin
        }
    }
}

// Toy backend used to exercise the plugin plumbing.
struct MyBackend {
    some_data: i32,
}

impl prodbg::Backend for MyBackend {
    fn new() -> Self {
        MyBackend { some_data: 0 }
    }

    //fn update(&mut self, reader: &prodbg::Reader)
    fn update(&mut self) {
        println!("update instance! {}", self.some_data);
        self.some_data += 1;
    }
}

// NOTE(review): besides the duplicate name (see above), `register_plugin`
// expects `&mut CBackendCallbacks` but is passed `plugin` by value here —
// likely needs `&mut plugin`. Confirm against the intended ABI.
pub fn init_plugin(plugin_handler: &mut prodbg::PluginHandler) {
    let plugin = define_plugin!(MyBackend);
    plugin_handler.register_plugin(plugin);
}

//fn init_plugin() {
//    let plugin = define_plugin!(MyBackend);
//}

// these needs to be generated for each type
/*
#[no_mangle]
pub static mut g_backend: prodbg::CBackendCallbacks = prodbg::CBackendCallbacks {
    create_instance: prodbg::create_instance::<MyBackend>,
    destroy_instance: prodbg::destroy_instance::<MyBackend>,
    update: prodbg::update_instance::<MyBackend>
};
*/

#[test]
fn it_works() {
}
#![allow(non_camel_case_types)] #![allow(non_upper_case_globals)] #![allow(non_snake_case)] use std::{ cmp::Ordering, convert::TryFrom, fmt::{Display, Formatter, Result}, ops::{Add, Sub}, result, }; include!(concat!( env!("OUT_DIR"), "/gdb_register_bindings_generated.rs" )); /// The inner u32 is deliberately NOT pub. We don't want others to manually construct arbitrary /// GdbRegister structs. They need to go through the provided interfaces. #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)] pub struct GdbRegister(u32); pub const DREG_EAX: GdbRegister = GdbRegister(__DREG_EAX); pub const DREG_ECX: GdbRegister = GdbRegister(__DREG_ECX); pub const DREG_EDX: GdbRegister = GdbRegister(__DREG_EDX); pub const DREG_EBX: GdbRegister = GdbRegister(__DREG_EBX); pub const DREG_ESP: GdbRegister = GdbRegister(__DREG_ESP); pub const DREG_EBP: GdbRegister = GdbRegister(__DREG_EBP); pub const DREG_ESI: GdbRegister = GdbRegister(__DREG_ESI); pub const DREG_EDI: GdbRegister = GdbRegister(__DREG_EDI); pub const DREG_EIP: GdbRegister = GdbRegister(__DREG_EIP); pub const DREG_EFLAGS: GdbRegister = GdbRegister(__DREG_EFLAGS); pub const DREG_CS: GdbRegister = GdbRegister(__DREG_CS); pub const DREG_SS: GdbRegister = GdbRegister(__DREG_SS); pub const DREG_DS: GdbRegister = GdbRegister(__DREG_DS); pub const DREG_ES: GdbRegister = GdbRegister(__DREG_ES); pub const DREG_FS: GdbRegister = GdbRegister(__DREG_FS); pub const DREG_GS: GdbRegister = GdbRegister(__DREG_GS); pub const DREG_FIRST_FXSAVE_REG: GdbRegister = GdbRegister(__DREG_FIRST_FXSAVE_REG); pub const DREG_ST0: GdbRegister = GdbRegister(__DREG_ST0); pub const DREG_ST1: GdbRegister = GdbRegister(__DREG_ST1); pub const DREG_ST2: GdbRegister = GdbRegister(__DREG_ST2); pub const DREG_ST3: GdbRegister = GdbRegister(__DREG_ST3); pub const DREG_ST4: GdbRegister = GdbRegister(__DREG_ST4); pub const DREG_ST5: GdbRegister = GdbRegister(__DREG_ST5); pub const DREG_ST6: GdbRegister = GdbRegister(__DREG_ST6); pub const DREG_ST7: 
GdbRegister = GdbRegister(__DREG_ST7); pub const DREG_FCTRL: GdbRegister = GdbRegister(__DREG_FCTRL); pub const DREG_FSTAT: GdbRegister = GdbRegister(__DREG_FSTAT); pub const DREG_FTAG: GdbRegister = GdbRegister(__DREG_FTAG); pub const DREG_FISEG: GdbRegister = GdbRegister(__DREG_FISEG); pub const DREG_FIOFF: GdbRegister = GdbRegister(__DREG_FIOFF); pub const DREG_FOSEG: GdbRegister = GdbRegister(__DREG_FOSEG); pub const DREG_FOOFF: GdbRegister = GdbRegister(__DREG_FOOFF); pub const DREG_FOP: GdbRegister = GdbRegister(__DREG_FOP); pub const DREG_XMM0: GdbRegister = GdbRegister(__DREG_XMM0); pub const DREG_XMM1: GdbRegister = GdbRegister(__DREG_XMM1); pub const DREG_XMM2: GdbRegister = GdbRegister(__DREG_XMM2); pub const DREG_XMM3: GdbRegister = GdbRegister(__DREG_XMM3); pub const DREG_XMM4: GdbRegister = GdbRegister(__DREG_XMM4); pub const DREG_XMM5: GdbRegister = GdbRegister(__DREG_XMM5); pub const DREG_XMM6: GdbRegister = GdbRegister(__DREG_XMM6); pub const DREG_XMM7: GdbRegister = GdbRegister(__DREG_XMM7); pub const DREG_MXCSR: GdbRegister = GdbRegister(__DREG_MXCSR); pub const DREG_LAST_FXSAVE_REG: GdbRegister = GdbRegister(__DREG_LAST_FXSAVE_REG); pub const DREG_ORIG_EAX: GdbRegister = GdbRegister(__DREG_ORIG_EAX); pub const DREG_YMM0H: GdbRegister = GdbRegister(__DREG_YMM0H); pub const DREG_YMM1H: GdbRegister = GdbRegister(__DREG_YMM1H); pub const DREG_YMM2H: GdbRegister = GdbRegister(__DREG_YMM2H); pub const DREG_YMM3H: GdbRegister = GdbRegister(__DREG_YMM3H); pub const DREG_YMM4H: GdbRegister = GdbRegister(__DREG_YMM4H); pub const DREG_YMM5H: GdbRegister = GdbRegister(__DREG_YMM5H); pub const DREG_YMM6H: GdbRegister = GdbRegister(__DREG_YMM6H); pub const DREG_YMM7H: GdbRegister = GdbRegister(__DREG_YMM7H); pub const DREG_RAX: GdbRegister = GdbRegister(__DREG_RAX); pub const DREG_RBX: GdbRegister = GdbRegister(__DREG_RBX); pub const DREG_RCX: GdbRegister = GdbRegister(__DREG_RCX); pub const DREG_RDX: GdbRegister = GdbRegister(__DREG_RDX); pub const DREG_RSI: 
GdbRegister = GdbRegister(__DREG_RSI); pub const DREG_RDI: GdbRegister = GdbRegister(__DREG_RDI); pub const DREG_RBP: GdbRegister = GdbRegister(__DREG_RBP); pub const DREG_RSP: GdbRegister = GdbRegister(__DREG_RSP); pub const DREG_R8: GdbRegister = GdbRegister(__DREG_R8); pub const DREG_R9: GdbRegister = GdbRegister(__DREG_R9); pub const DREG_R10: GdbRegister = GdbRegister(__DREG_R10); pub const DREG_R11: GdbRegister = GdbRegister(__DREG_R11); pub const DREG_R12: GdbRegister = GdbRegister(__DREG_R12); pub const DREG_R13: GdbRegister = GdbRegister(__DREG_R13); pub const DREG_R14: GdbRegister = GdbRegister(__DREG_R14); pub const DREG_R15: GdbRegister = GdbRegister(__DREG_R15); pub const DREG_RIP: GdbRegister = GdbRegister(__DREG_RIP); pub const DREG_64_EFLAGS: GdbRegister = GdbRegister(__DREG_64_EFLAGS); pub const DREG_64_CS: GdbRegister = GdbRegister(__DREG_64_CS); pub const DREG_64_SS: GdbRegister = GdbRegister(__DREG_64_SS); pub const DREG_64_DS: GdbRegister = GdbRegister(__DREG_64_DS); pub const DREG_64_ES: GdbRegister = GdbRegister(__DREG_64_ES); pub const DREG_64_FS: GdbRegister = GdbRegister(__DREG_64_FS); pub const DREG_64_GS: GdbRegister = GdbRegister(__DREG_64_GS); pub const DREG_64_FIRST_FXSAVE_REG: GdbRegister = GdbRegister(__DREG_64_FIRST_FXSAVE_REG); pub const DREG_64_ST0: GdbRegister = GdbRegister(__DREG_64_ST0); pub const DREG_64_ST1: GdbRegister = GdbRegister(__DREG_64_ST1); pub const DREG_64_ST2: GdbRegister = GdbRegister(__DREG_64_ST2); pub const DREG_64_ST3: GdbRegister = GdbRegister(__DREG_64_ST3); pub const DREG_64_ST4: GdbRegister = GdbRegister(__DREG_64_ST4); pub const DREG_64_ST5: GdbRegister = GdbRegister(__DREG_64_ST5); pub const DREG_64_ST6: GdbRegister = GdbRegister(__DREG_64_ST6); pub const DREG_64_ST7: GdbRegister = GdbRegister(__DREG_64_ST7); pub const DREG_64_FCTRL: GdbRegister = GdbRegister(__DREG_64_FCTRL); pub const DREG_64_FSTAT: GdbRegister = GdbRegister(__DREG_64_FSTAT); pub const DREG_64_FTAG: GdbRegister = 
GdbRegister(__DREG_64_FTAG); pub const DREG_64_FISEG: GdbRegister = GdbRegister(__DREG_64_FISEG); pub const DREG_64_FIOFF: GdbRegister = GdbRegister(__DREG_64_FIOFF); pub const DREG_64_FOSEG: GdbRegister = GdbRegister(__DREG_64_FOSEG); pub const DREG_64_FOOFF: GdbRegister = GdbRegister(__DREG_64_FOOFF); pub const DREG_64_FOP: GdbRegister = GdbRegister(__DREG_64_FOP); pub const DREG_64_XMM0: GdbRegister = GdbRegister(__DREG_64_XMM0); pub const DREG_64_XMM1: GdbRegister = GdbRegister(__DREG_64_XMM1); pub const DREG_64_XMM2: GdbRegister = GdbRegister(__DREG_64_XMM2); pub const DREG_64_XMM3: GdbRegister = GdbRegister(__DREG_64_XMM3); pub const DREG_64_XMM4: GdbRegister = GdbRegister(__DREG_64_XMM4); pub const DREG_64_XMM5: GdbRegister = GdbRegister(__DREG_64_XMM5); pub const DREG_64_XMM6: GdbRegister = GdbRegister(__DREG_64_XMM6); pub const DREG_64_XMM7: GdbRegister = GdbRegister(__DREG_64_XMM7); pub const DREG_64_XMM8: GdbRegister = GdbRegister(__DREG_64_XMM8); pub const DREG_64_XMM9: GdbRegister = GdbRegister(__DREG_64_XMM9); pub const DREG_64_XMM10: GdbRegister = GdbRegister(__DREG_64_XMM10); pub const DREG_64_XMM11: GdbRegister = GdbRegister(__DREG_64_XMM11); pub const DREG_64_XMM12: GdbRegister = GdbRegister(__DREG_64_XMM12); pub const DREG_64_XMM13: GdbRegister = GdbRegister(__DREG_64_XMM13); pub const DREG_64_XMM14: GdbRegister = GdbRegister(__DREG_64_XMM14); pub const DREG_64_XMM15: GdbRegister = GdbRegister(__DREG_64_XMM15); pub const DREG_64_MXCSR: GdbRegister = GdbRegister(__DREG_64_MXCSR); pub const DREG_64_LAST_FXSAVE_REG: GdbRegister = GdbRegister(__DREG_64_LAST_FXSAVE_REG); pub const DREG_ORIG_RAX: GdbRegister = GdbRegister(__DREG_ORIG_RAX); pub const DREG_FS_BASE: GdbRegister = GdbRegister(__DREG_FS_BASE); pub const DREG_GS_BASE: GdbRegister = GdbRegister(__DREG_GS_BASE); pub const DREG_64_YMM0H: GdbRegister = GdbRegister(__DREG_64_YMM0H); pub const DREG_64_YMM1H: GdbRegister = GdbRegister(__DREG_64_YMM1H); pub const DREG_64_YMM2H: GdbRegister = 
GdbRegister(__DREG_64_YMM2H); pub const DREG_64_YMM3H: GdbRegister = GdbRegister(__DREG_64_YMM3H); pub const DREG_64_YMM4H: GdbRegister = GdbRegister(__DREG_64_YMM4H); pub const DREG_64_YMM5H: GdbRegister = GdbRegister(__DREG_64_YMM5H); pub const DREG_64_YMM6H: GdbRegister = GdbRegister(__DREG_64_YMM6H); pub const DREG_64_YMM7H: GdbRegister = GdbRegister(__DREG_64_YMM7H); pub const DREG_64_YMM8H: GdbRegister = GdbRegister(__DREG_64_YMM8H); pub const DREG_64_YMM9H: GdbRegister = GdbRegister(__DREG_64_YMM9H); pub const DREG_64_YMM10H: GdbRegister = GdbRegister(__DREG_64_YMM10H); pub const DREG_64_YMM11H: GdbRegister = GdbRegister(__DREG_64_YMM11H); pub const DREG_64_YMM12H: GdbRegister = GdbRegister(__DREG_64_YMM12H); pub const DREG_64_YMM13H: GdbRegister = GdbRegister(__DREG_64_YMM13H); pub const DREG_64_YMM14H: GdbRegister = GdbRegister(__DREG_64_YMM14H); pub const DREG_64_YMM15H: GdbRegister = GdbRegister(__DREG_64_YMM15H); impl Display for GdbRegister { fn fmt(&self, f: &mut Formatter<'_>) -> Result { write!(f, "{}", self.0) } } impl GdbRegister { pub fn as_usize(&self) -> usize { self.0 as usize } } impl TryFrom<u32> for GdbRegister { type Error = (); fn try_from(regno: u32) -> result::Result<Self, Self::Error> { if regno < __DREG_NUM_LINUX_X86_64 { Ok(Self(regno)) } else { Err(()) } } } impl Into<usize> for GdbRegister { fn into(self) -> usize { self.as_usize() } } impl Add<Self> for GdbRegister { type Output = result::Result<GdbRegister, <GdbRegister as TryFrom<u32>>::Error>; fn add(self, rhs: Self) -> Self::Output { GdbRegister::try_from(self.0 + rhs.0) } } impl Sub<Self> for GdbRegister { type Output = result::Result<GdbRegister, <GdbRegister as TryFrom<u32>>::Error>; fn sub(self, rhs: Self) -> Self::Output { GdbRegister::try_from(self.0 - rhs.0) } } impl Add<u32> for GdbRegister { type Output = result::Result<GdbRegister, <GdbRegister as TryFrom<u32>>::Error>; fn add(self, rhs: u32) -> Self::Output { GdbRegister::try_from(self.0 + rhs) } } impl Sub<u32> for 
GdbRegister { type Output = result::Result<GdbRegister, <GdbRegister as TryFrom<u32>>::Error>; fn sub(self, rhs: u32) -> Self::Output { GdbRegister::try_from(self.0 - rhs) } } impl PartialOrd<u32> for GdbRegister { fn partial_cmp(&self, other: &u32) -> Option<Ordering> { if self.0 < *other { Some(Ordering::Less) } else if self.0 == *other { Some(Ordering::Equal) } else { Some(Ordering::Greater) } } } impl PartialEq<u32> for GdbRegister { fn eq(&self, other: &u32) -> bool { self.0 == *other } }
//! 🦇 BATS! 🦇 //! //! CLI wrapper use bats::Bats; use gumdrop::Options; fn main() { Bats::parse_args_default_or_exit().run(); }
use super::Error;

/// Outcome of a validation pass.
///
/// The `Default` value (an empty `errors` vector) represents success; any
/// accumulated `Error`s mean validation failed.
#[derive(Debug, Default)]
pub struct ValidationResult {
    /// Every validation error collected during the run; empty on success.
    pub errors: Vec<Error>,
}
// NOTE(review): svd2rust-generated accessors for the timer CCMR2 register in
// input-capture mode. Prefer regenerating from the SVD over hand-editing.
#[doc = "Register `CCMR2_Input` reader"]
pub type R = crate::R<CCMR2_INPUT_SPEC>;
#[doc = "Register `CCMR2_Input` writer"]
pub type W = crate::W<CCMR2_INPUT_SPEC>;

// --- CC3S field (bits 0:1): Capture/Compare 3 selection ---
#[doc = "Field `CC3S` reader - Capture/Compare 3 selection"]
pub type CC3S_R = crate::FieldReader<CC3S_A>;
#[doc = "Capture/Compare 3 selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum CC3S_A {
    #[doc = "0: CCx channel is configured as output"]
    Output = 0,
    #[doc = "1: CCx channel is configured as input, ICx is mapped on TI1"]
    Ti1 = 1,
    #[doc = "2: CCx channel is configured as input, ICx is mapped on TI2"]
    Ti2 = 2,
    #[doc = "3: CCx channel is configured as input, ICx is mapped on TRC"]
    Trc = 3,
}
impl From<CC3S_A> for u8 {
    #[inline(always)]
    fn from(variant: CC3S_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for CC3S_A {
    type Ux = u8;
}
impl CC3S_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> CC3S_A {
        match self.bits {
            0 => CC3S_A::Output,
            1 => CC3S_A::Ti1,
            2 => CC3S_A::Ti2,
            3 => CC3S_A::Trc,
            // A 2-bit field can only hold 0..=3.
            _ => unreachable!(),
        }
    }
    #[doc = "CCx channel is configured as output"]
    #[inline(always)]
    pub fn is_output(&self) -> bool {
        *self == CC3S_A::Output
    }
    #[doc = "CCx channel is configured as input, ICx is mapped on TI1"]
    #[inline(always)]
    pub fn is_ti1(&self) -> bool {
        *self == CC3S_A::Ti1
    }
    #[doc = "CCx channel is configured as input, ICx is mapped on TI2"]
    #[inline(always)]
    pub fn is_ti2(&self) -> bool {
        *self == CC3S_A::Ti2
    }
    #[doc = "CCx channel is configured as input, ICx is mapped on TRC"]
    #[inline(always)]
    pub fn is_trc(&self) -> bool {
        *self == CC3S_A::Trc
    }
}
#[doc = "Field `CC3S` writer - Capture/Compare 3 selection"]
pub type CC3S_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, CC3S_A>;
impl<'a, REG, const O: u8> CC3S_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "CCx channel is configured as output"]
    #[inline(always)]
    pub fn output(self) -> &'a mut crate::W<REG> {
        self.variant(CC3S_A::Output)
    }
    #[doc = "CCx channel is configured as input, ICx is mapped on TI1"]
    #[inline(always)]
    pub fn ti1(self) -> &'a mut crate::W<REG> {
        self.variant(CC3S_A::Ti1)
    }
    #[doc = "CCx channel is configured as input, ICx is mapped on TI2"]
    #[inline(always)]
    pub fn ti2(self) -> &'a mut crate::W<REG> {
        self.variant(CC3S_A::Ti2)
    }
    #[doc = "CCx channel is configured as input, ICx is mapped on TRC"]
    #[inline(always)]
    pub fn trc(self) -> &'a mut crate::W<REG> {
        self.variant(CC3S_A::Trc)
    }
}

// --- IC3PSC field (bits 2:3): Input capture 3 prescaler ---
#[doc = "Field `IC3PSC` reader - Input capture 3 prescaler"]
pub type IC3PSC_R = crate::FieldReader<IC3PSC_A>;
#[doc = "Input capture 3 prescaler\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum IC3PSC_A {
    #[doc = "0: CCx channel is configured as output"]
    Output = 0,
    #[doc = "1: Capture is done once every 2 events"]
    Capture2 = 1,
    #[doc = "2: Capture is done once every 4 events"]
    Capture4 = 2,
    #[doc = "3: Capture is done once every 8 events"]
    Capture8 = 3,
}
impl From<IC3PSC_A> for u8 {
    #[inline(always)]
    fn from(variant: IC3PSC_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for IC3PSC_A {
    type Ux = u8;
}
impl IC3PSC_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> IC3PSC_A {
        match self.bits {
            0 => IC3PSC_A::Output,
            1 => IC3PSC_A::Capture2,
            2 => IC3PSC_A::Capture4,
            3 => IC3PSC_A::Capture8,
            // A 2-bit field can only hold 0..=3.
            _ => unreachable!(),
        }
    }
    #[doc = "CCx channel is configured as output"]
    #[inline(always)]
    pub fn is_output(&self) -> bool {
        *self == IC3PSC_A::Output
    }
    #[doc = "Capture is done once every 2 events"]
    #[inline(always)]
    pub fn is_capture2(&self) -> bool {
        *self == IC3PSC_A::Capture2
    }
    #[doc = "Capture is done once every 4 events"]
    #[inline(always)]
    pub fn is_capture4(&self) -> bool {
        *self == IC3PSC_A::Capture4
    }
    #[doc = "Capture is done once every 8 events"]
    #[inline(always)]
    pub fn is_capture8(&self) -> bool {
        *self == IC3PSC_A::Capture8
    }
}
#[doc = "Field `IC3PSC` writer - Input capture 3 prescaler"]
pub type IC3PSC_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, IC3PSC_A>;
impl<'a, REG, const O: u8> IC3PSC_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "CCx channel is configured as output"]
    #[inline(always)]
    pub fn output(self) -> &'a mut crate::W<REG> {
        self.variant(IC3PSC_A::Output)
    }
    #[doc = "Capture is done once every 2 events"]
    #[inline(always)]
    pub fn capture2(self) -> &'a mut crate::W<REG> {
        self.variant(IC3PSC_A::Capture2)
    }
    #[doc = "Capture is done once every 4 events"]
    #[inline(always)]
    pub fn capture4(self) -> &'a mut crate::W<REG> {
        self.variant(IC3PSC_A::Capture4)
    }
    #[doc = "Capture is done once every 8 events"]
    #[inline(always)]
    pub fn capture8(self) -> &'a mut crate::W<REG> {
        self.variant(IC3PSC_A::Capture8)
    }
}

// --- IC3F field (bits 4:7): Input capture 3 filter ---
#[doc = "Field `IC3F` reader - Input capture 3 filter"]
pub type IC3F_R = crate::FieldReader<IC3F_A>;
#[doc = "Input capture 3 filter\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum IC3F_A {
    #[doc = "0: No filter, sampling is done at fDTS"]
    NoFilter = 0,
    #[doc = "1: fSAMPLING=fCK_INT, N=2"]
    FckIntN2 = 1,
    #[doc = "2: fSAMPLING=fCK_INT, N=4"]
    FckIntN4 = 2,
    #[doc = "3: fSAMPLING=fCK_INT, N=8"]
    FckIntN8 = 3,
    #[doc = "4: fSAMPLING=fDTS/2, N=6"]
    FdtsDiv2N6 = 4,
    #[doc = "5: fSAMPLING=fDTS/2, N=8"]
    FdtsDiv2N8 = 5,
    #[doc = "6: fSAMPLING=fDTS/4, N=6"]
    FdtsDiv4N6 = 6,
    #[doc = "7: fSAMPLING=fDTS/4, N=8"]
    FdtsDiv4N8 = 7,
    #[doc = "8: fSAMPLING=fDTS/8, N=6"]
    FdtsDiv8N6 = 8,
    #[doc = "9: fSAMPLING=fDTS/8, N=8"]
    FdtsDiv8N8 = 9,
    #[doc = "10: fSAMPLING=fDTS/16, N=5"]
    FdtsDiv16N5 = 10,
    #[doc = "11: fSAMPLING=fDTS/16, N=6"]
    FdtsDiv16N6 = 11,
    #[doc = "12: fSAMPLING=fDTS/16, N=8"]
    FdtsDiv16N8 = 12,
    #[doc = "13: fSAMPLING=fDTS/32, N=5"]
    FdtsDiv32N5 = 13,
    #[doc = "14: fSAMPLING=fDTS/32, N=6"]
    FdtsDiv32N6 = 14,
    #[doc = "15: fSAMPLING=fDTS/32, N=8"]
    FdtsDiv32N8 = 15,
}
impl From<IC3F_A> for u8 {
    #[inline(always)]
    fn from(variant: IC3F_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for IC3F_A {
    type Ux = u8;
}
impl IC3F_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> IC3F_A {
        match self.bits {
            0 => IC3F_A::NoFilter,
            1 => IC3F_A::FckIntN2,
            2 => IC3F_A::FckIntN4,
            3 => IC3F_A::FckIntN8,
            4 => IC3F_A::FdtsDiv2N6,
            5 => IC3F_A::FdtsDiv2N8,
            6 => IC3F_A::FdtsDiv4N6,
            7 => IC3F_A::FdtsDiv4N8,
            8 => IC3F_A::FdtsDiv8N6,
            9 => IC3F_A::FdtsDiv8N8,
            10 => IC3F_A::FdtsDiv16N5,
            11 => IC3F_A::FdtsDiv16N6,
            12 => IC3F_A::FdtsDiv16N8,
            13 => IC3F_A::FdtsDiv32N5,
            14 => IC3F_A::FdtsDiv32N6,
            15 => IC3F_A::FdtsDiv32N8,
            // A 4-bit field can only hold 0..=15.
            _ => unreachable!(),
        }
    }
    #[doc = "No filter, sampling is done at fDTS"]
    #[inline(always)]
    pub fn is_no_filter(&self) -> bool {
        *self == IC3F_A::NoFilter
    }
    #[doc = "fSAMPLING=fCK_INT, N=2"]
    #[inline(always)]
    pub fn is_fck_int_n2(&self) -> bool {
        *self == IC3F_A::FckIntN2
    }
    #[doc = "fSAMPLING=fCK_INT, N=4"]
    #[inline(always)]
    pub fn is_fck_int_n4(&self) -> bool {
        *self == IC3F_A::FckIntN4
    }
    #[doc = "fSAMPLING=fCK_INT, N=8"]
    #[inline(always)]
    pub fn is_fck_int_n8(&self) -> bool {
        *self == IC3F_A::FckIntN8
    }
    #[doc = "fSAMPLING=fDTS/2, N=6"]
    #[inline(always)]
    pub fn is_fdts_div2_n6(&self) -> bool {
        *self == IC3F_A::FdtsDiv2N6
    }
    #[doc = "fSAMPLING=fDTS/2, N=8"]
    #[inline(always)]
    pub fn is_fdts_div2_n8(&self) -> bool {
        *self == IC3F_A::FdtsDiv2N8
    }
    #[doc = "fSAMPLING=fDTS/4, N=6"]
    #[inline(always)]
    pub fn is_fdts_div4_n6(&self) -> bool {
        *self == IC3F_A::FdtsDiv4N6
    }
    #[doc = "fSAMPLING=fDTS/4, N=8"]
    #[inline(always)]
    pub fn is_fdts_div4_n8(&self) -> bool {
        *self == IC3F_A::FdtsDiv4N8
    }
    #[doc = "fSAMPLING=fDTS/8, N=6"]
    #[inline(always)]
    pub fn is_fdts_div8_n6(&self) -> bool {
        *self == IC3F_A::FdtsDiv8N6
    }
    #[doc = "fSAMPLING=fDTS/8, N=8"]
    #[inline(always)]
    pub fn is_fdts_div8_n8(&self) -> bool {
        *self == IC3F_A::FdtsDiv8N8
    }
    #[doc = "fSAMPLING=fDTS/16, N=5"]
    #[inline(always)]
    pub fn is_fdts_div16_n5(&self) -> bool {
        *self == IC3F_A::FdtsDiv16N5
    }
    #[doc = "fSAMPLING=fDTS/16, N=6"]
    #[inline(always)]
    pub fn is_fdts_div16_n6(&self) -> bool {
        *self == IC3F_A::FdtsDiv16N6
    }
    #[doc = "fSAMPLING=fDTS/16, N=8"]
    #[inline(always)]
    pub fn is_fdts_div16_n8(&self) -> bool {
        *self == IC3F_A::FdtsDiv16N8
    }
    #[doc = "fSAMPLING=fDTS/32, N=5"]
    #[inline(always)]
    pub fn is_fdts_div32_n5(&self) -> bool {
        *self == IC3F_A::FdtsDiv32N5
    }
    #[doc = "fSAMPLING=fDTS/32, N=6"]
    #[inline(always)]
    pub fn is_fdts_div32_n6(&self) -> bool {
        *self == IC3F_A::FdtsDiv32N6
    }
    #[doc = "fSAMPLING=fDTS/32, N=8"]
    #[inline(always)]
    pub fn is_fdts_div32_n8(&self) -> bool {
        *self == IC3F_A::FdtsDiv32N8
    }
}
#[doc = "Field `IC3F` writer - Input capture 3 filter"]
pub type IC3F_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 4, O, IC3F_A>;
impl<'a, REG, const O: u8> IC3F_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "No filter, sampling is done at fDTS"]
    #[inline(always)]
    pub fn no_filter(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::NoFilter)
    }
    #[doc = "fSAMPLING=fCK_INT, N=2"]
    #[inline(always)]
    pub fn fck_int_n2(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::FckIntN2)
    }
    #[doc = "fSAMPLING=fCK_INT, N=4"]
    #[inline(always)]
    pub fn fck_int_n4(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::FckIntN4)
    }
    #[doc = "fSAMPLING=fCK_INT, N=8"]
    #[inline(always)]
    pub fn fck_int_n8(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::FckIntN8)
    }
    #[doc = "fSAMPLING=fDTS/2, N=6"]
    #[inline(always)]
    pub fn fdts_div2_n6(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::FdtsDiv2N6)
    }
    #[doc = "fSAMPLING=fDTS/2, N=8"]
    #[inline(always)]
    pub fn fdts_div2_n8(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::FdtsDiv2N8)
    }
    #[doc = "fSAMPLING=fDTS/4, N=6"]
    #[inline(always)]
    pub fn fdts_div4_n6(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::FdtsDiv4N6)
    }
    #[doc = "fSAMPLING=fDTS/4, N=8"]
    #[inline(always)]
    pub fn fdts_div4_n8(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::FdtsDiv4N8)
    }
    #[doc = "fSAMPLING=fDTS/8, N=6"]
    #[inline(always)]
    pub fn fdts_div8_n6(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::FdtsDiv8N6)
    }
    #[doc = "fSAMPLING=fDTS/8, N=8"]
    #[inline(always)]
    pub fn fdts_div8_n8(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::FdtsDiv8N8)
    }
    #[doc = "fSAMPLING=fDTS/16, N=5"]
    #[inline(always)]
    pub fn fdts_div16_n5(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::FdtsDiv16N5)
    }
    #[doc = "fSAMPLING=fDTS/16, N=6"]
    #[inline(always)]
    pub fn fdts_div16_n6(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::FdtsDiv16N6)
    }
    #[doc = "fSAMPLING=fDTS/16, N=8"]
    #[inline(always)]
    pub fn fdts_div16_n8(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::FdtsDiv16N8)
    }
    #[doc = "fSAMPLING=fDTS/32, N=5"]
    #[inline(always)]
    pub fn fdts_div32_n5(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::FdtsDiv32N5)
    }
    #[doc = "fSAMPLING=fDTS/32, N=6"]
    #[inline(always)]
    pub fn fdts_div32_n6(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::FdtsDiv32N6)
    }
    #[doc = "fSAMPLING=fDTS/32, N=8"]
    #[inline(always)]
    pub fn fdts_div32_n8(self) -> &'a mut crate::W<REG> {
        self.variant(IC3F_A::FdtsDiv32N8)
    }
}

// Channel-4 fields reuse the channel-3 reader/writer types (same layout,
// different bit offsets supplied at the call sites below).
#[doc = "Field `CC4S` reader - Capture/Compare 4 selection"]
pub use CC3S_R as CC4S_R;
#[doc = "Field `CC4S` writer - Capture/Compare 4 selection"]
pub use CC3S_W as CC4S_W;
#[doc = "Field `IC4F` reader - Input capture 4 filter"]
pub use IC3F_R as IC4F_R;
#[doc = "Field `IC4F` writer - Input capture 4 filter"]
pub use IC3F_W as IC4F_W;
#[doc = "Field `IC4PSC` reader - Input capture 4 prescaler"]
pub use IC3PSC_R as IC4PSC_R;
#[doc = "Field `IC4PSC` writer - Input capture 4 prescaler"]
pub use IC3PSC_W as IC4PSC_W;

impl R {
    #[doc = "Bits 0:1 - Capture/Compare 3 selection"]
    #[inline(always)]
    pub fn cc3s(&self) -> CC3S_R {
        CC3S_R::new((self.bits & 3) as u8)
    }
    #[doc = "Bits 2:3 - Input capture 3 prescaler"]
    #[inline(always)]
    pub fn ic3psc(&self) -> IC3PSC_R {
        IC3PSC_R::new(((self.bits >> 2) & 3) as u8)
    }
    #[doc = "Bits 4:7 - Input capture 3 filter"]
    #[inline(always)]
    pub fn ic3f(&self) -> IC3F_R {
        IC3F_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
    #[doc = "Bits 8:9 - Capture/Compare 4 selection"]
    #[inline(always)]
    pub fn cc4s(&self) -> CC4S_R {
        CC4S_R::new(((self.bits >> 8) & 3) as u8)
    }
    #[doc = "Bits 10:11 - Input capture 4 prescaler"]
    #[inline(always)]
    pub fn ic4psc(&self) -> IC4PSC_R {
        IC4PSC_R::new(((self.bits >> 10) & 3) as u8)
    }
    #[doc = "Bits 12:15 - Input capture 4 filter"]
    #[inline(always)]
    pub fn ic4f(&self) -> IC4F_R {
        IC4F_R::new(((self.bits >> 12) & 0x0f) as u8)
    }
}
impl W {
    #[doc = "Bits 0:1 - Capture/Compare 3 selection"]
    #[inline(always)]
    #[must_use]
    pub fn cc3s(&mut self) -> CC3S_W<CCMR2_INPUT_SPEC, 0> {
        CC3S_W::new(self)
    }
    #[doc = "Bits 2:3 - Input capture 3 prescaler"]
    #[inline(always)]
    #[must_use]
    pub fn ic3psc(&mut self) -> IC3PSC_W<CCMR2_INPUT_SPEC, 2> {
        IC3PSC_W::new(self)
    }
    #[doc = "Bits 4:7 - Input capture 3 filter"]
    #[inline(always)]
    #[must_use]
    pub fn ic3f(&mut self) -> IC3F_W<CCMR2_INPUT_SPEC, 4> {
        IC3F_W::new(self)
    }
    #[doc = "Bits 8:9 - Capture/Compare 4 selection"]
    #[inline(always)]
    #[must_use]
    pub fn cc4s(&mut self) -> CC4S_W<CCMR2_INPUT_SPEC, 8> {
        CC4S_W::new(self)
    }
    #[doc = "Bits 10:11 - Input capture 4 prescaler"]
    #[inline(always)]
    #[must_use]
    pub fn ic4psc(&mut self) -> IC4PSC_W<CCMR2_INPUT_SPEC, 10> {
        IC4PSC_W::new(self)
    }
    #[doc = "Bits 12:15 - Input capture 4 filter"]
    #[inline(always)]
    #[must_use]
    pub fn ic4f(&mut self) -> IC4F_W<CCMR2_INPUT_SPEC, 12> {
        IC4F_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "capture/compare mode register 2 (input mode)\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccmr2_input::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccmr2_input::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CCMR2_INPUT_SPEC;
impl crate::RegisterSpec for CCMR2_INPUT_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ccmr2_input::R`](R) reader structure"]
impl crate::Readable for CCMR2_INPUT_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ccmr2_input::W`](W) writer structure"]
impl crate::Writable for CCMR2_INPUT_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CCMR2_Input to value 0"]
impl crate::Resettable for CCMR2_INPUT_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use crate::api::BabylonApi;
use crate::core::Scene;
use js_ffi::*;

/// Thin wrapper around a Babylon.js arc-rotate camera living on the JS side.
pub struct Camera {
    _js_ref: JSObject,
}

impl Camera {
    /// Creates an arc-rotate camera attached to the given scene.
    ///
    /// The camera object itself lives in JavaScript; this struct only keeps
    /// the FFI handle alive.
    pub fn new(scene: &Scene) -> Camera {
        let js_ref = BabylonApi::create_arc_rotate_camera(&scene.get_js_ref());
        Camera { _js_ref: js_ref }
    }
}
// Machine-generated (svd2rust-style) read-only accessor for the SYSCFG
// compensation cell value register (CCVR). Prefer regenerating from the SVD
// over hand-editing.
#[doc = "Register `CCVR` reader"]
pub type R = crate::R<CCVR_SPEC>;
#[doc = "Field `NCV` reader - NMOS compensation value"]
pub type NCV_R = crate::FieldReader;
#[doc = "Field `PCV` reader - PMOS compensation value"]
pub type PCV_R = crate::FieldReader;
impl R {
    #[doc = "Bits 0:3 - NMOS compensation value"]
    #[inline(always)]
    pub fn ncv(&self) -> NCV_R {
        // Low nibble (bits 0..=3) of the register word.
        NCV_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 4:7 - PMOS compensation value"]
    #[inline(always)]
    pub fn pcv(&self) -> PCV_R {
        // Next nibble (bits 4..=7).
        PCV_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
}
#[doc = "SYSCFG compensation cell value register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccvr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CCVR_SPEC;
impl crate::RegisterSpec for CCVR_SPEC {
    // Register is accessed as a 32-bit word.
    type Ux = u32;
}
#[doc = "`read()` method returns [`ccvr::R`](R) reader structure"]
impl crate::Readable for CCVR_SPEC {}
#[doc = "`reset()` method sets CCVR to value 0"]
impl crate::Resettable for CCVR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
/// Convenience wrapper around `create_player`: fetches every storage and
/// resource straight out of the `World` and forwards them.
///
/// NOTE(review): the eleven `world.write()` calls are positional — each one's
/// component type is inferred from the matching `create_player` parameter, so
/// the count and order here must track that signature exactly.
pub fn create_player_w(pos: ::na::Vector3<f32>, hook: bool, world: &::specs::World) {
    create_player(
        pos,
        hook,
        &mut world.write(),
        &mut world.write(),
        &mut world.write(),
        &mut world.write(),
        &mut world.write(),
        &mut world.write(),
        &mut world.write(),
        &mut world.write(),
        &mut world.write(),
        &mut world.write(),
        &mut world.write(),
        &mut world.write_resource(),
        &world.read_resource(),
    );
}

/// Creates a player entity at `pos` with its physics body, kill sensor,
/// weapon, and (optionally, when `hook` is true) a grappling hook plus the
/// hook's separate draw entity.
///
/// The statement order matters: the rigid body must be registered
/// (`PhysicBody::add`) before the sensor is built, because the sensor borrows
/// the body's handle from `bodies`.
pub fn create_player<'a>(
    pos: ::na::Vector3<f32>,
    hook: bool,
    players: &mut ::specs::WriteStorage<'a, ::component::Player>,
    aims: &mut ::specs::WriteStorage<'a, ::component::Aim>,
    momentums: &mut ::specs::WriteStorage<'a, ::component::Momentum>,
    bodies: &mut ::specs::WriteStorage<'a, ::component::PhysicBody>,
    sensors: &mut ::specs::WriteStorage<'a, ::component::PhysicSensor>,
    proximitors: &mut ::specs::WriteStorage<'a, ::component::Proximitor>,
    shooters: &mut ::specs::WriteStorage<'a, ::component::Shooter>,
    hooks: &mut ::specs::WriteStorage<'a, ::component::Hook>,
    weapon_animations: &mut ::specs::WriteStorage<'a, ::component::WeaponAnimation>,
    dynamic_huds: &mut ::specs::WriteStorage<'a, ::component::DynamicHud>,
    dynamic_graphics_assets: &mut ::specs::WriteStorage<'a, ::component::DynamicGraphicsAssets>,
    physic_world: &mut ::specs::FetchMut<'a, ::resource::PhysicWorld>,
    entities: &::specs::Entities,
) {
    // Player collision volume: a cylinder sized from the global config.
    let shape = ::ncollide::shape::Cylinder::new(::CONFIG.player_height, ::CONFIG.player_radius);
    // The player collides as a living player but never with killer volumes
    // directly (those are handled by the sensor below).
    let mut group = ::nphysics::object::RigidBodyCollisionGroups::new_dynamic();
    group.set_membership(&[super::ALIVE_GROUP, super::PLAYER_GROUP]);
    group.set_blacklist(&[super::KILLER_GROUP]);
    let mut body = ::nphysics::object::RigidBody::new_dynamic(shape.clone(), 1.0, 0.0, 0.0);
    let pos = ::na::Isometry3::new(pos, ::na::zero());
    body.set_transformation(pos);
    body.set_collision_groups(group);
    // Recover the mass the physics engine computed from density/shape.
    let mass = 1.0 / body.inv_mass();
    let entity = entities.create();
    players.insert(entity, ::component::Player);
    aims.insert(entity, ::component::Aim::new());
    if hook {
        // The hook gets its own draw entity so it can be rendered
        // independently of the player model.
        let (hook_primitive, hook_groups) = ::graphics::Primitive::Hook.instantiate();
        let hook_primitive_trans = ::graphics::resizer(
            ::CONFIG.player_hook_size,
            ::CONFIG.player_hook_size,
            ::CONFIG.player_hook_size,
        );
        let hook_draw_entity = entities.create();
        dynamic_graphics_assets.insert(hook_draw_entity, ::component::DynamicGraphicsAssets::new(
            hook_primitive,
            hook_groups,
            ::CONFIG.player_hook_color,
            hook_primitive_trans,
        ));
        hooks.insert(entity, ::component::Hook::new(::CONFIG.player_hook_force, hook_draw_entity));
    }
    // Movement tuning differs depending on whether the hook variant is used.
    let velocity = if hook {
        ::CONFIG.player_hook_velocity
    } else {
        ::CONFIG.player_velocity
    };
    let time_to_reach_vmax = if hook {
        ::CONFIG.player_hook_time_to_reach_vmax
    } else {
        ::CONFIG.player_time_to_reach_vmax
    };
    momentums.insert(
        entity,
        ::component::Momentum::new(
            mass,
            velocity,
            time_to_reach_vmax,
            None,
            ::CONFIG.player_ang_damping,
            ::na::zero(),
            None,
        ),
    );
    // Attach the weapon (fills shooters / weapon_animations / HUD storages).
    super::create_weapon(
        entity,
        shooters,
        weapon_animations,
        dynamic_huds,
        dynamic_graphics_assets,
        entities,
    );
    // Register the rigid body BEFORE building the sensor — the sensor needs
    // the body handle out of `bodies`.
    ::component::PhysicBody::add(entity, body, bodies, physic_world);
    // Kill-detection sensor: same cylinder, but it only listens for
    // KILLER_GROUP overlaps and is parented to the player's body.
    let sensor_shape =
        ::ncollide::shape::Cylinder::new(::CONFIG.player_height, ::CONFIG.player_radius);
    let mut sensor_group = ::nphysics::object::SensorCollisionGroups::new();
    sensor_group.set_whitelist(&[super::KILLER_GROUP]);
    let mut sensor =
        ::nphysics::object::Sensor::new(sensor_shape, Some(bodies.get(entity).unwrap().handle));
    sensor.set_collision_groups(sensor_group);
    ::component::PhysicSensor::add(entity, sensor, sensors, physic_world);
    proximitors.insert(entity, ::component::Proximitor::new());
}
#![warn(clippy::pedantic)] #![warn(clippy::cargo)] #![allow(clippy::multiple_crate_versions)] #![allow(clippy::module_name_repetitions)] #![allow(clippy::missing_panics_doc)] use js_sys::Date; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; use wasm_bindgen_futures::JsFuture; use web_sys::{HtmlInputElement, Request, RequestInit, Response}; use crate::{active_tab, goto_page}; #[wasm_bindgen] pub async fn contact() { // Set active tab. active_tab("contact"); // Go to the page. goto_page("/contact", "/api/contact.html?ver=LpidCXTXHkc", "Contact").await; } #[wasm_bindgen] pub async fn contact_info() { let window = web_sys::window().expect("No global `window` exists"); let document = window.document().expect("Should have a document on window"); // Retrieve selected contact method. let input_js: JsValue = document .get_element_by_id("info-selector") .expect("Could not find element 'info-selector'") .into(); let info_selector: HtmlInputElement = input_js.into(); let selected = info_selector.value(); let contact_info; if selected == "Phone" { contact_info = "Dude, it's ".to_owned() + &Date::new_0().get_full_year().to_string() + "."; } else if selected == "Select" { contact_info = String::new(); } else { let mut req = RequestInit::new(); req.method("GET"); let request_string = format!("/api/contact_info?method={}", selected); let request = Request::new_with_str_and_init(&request_string, &req) .expect("Request could not be created"); request .headers() .set("Accept", "text/plain") .expect("Headers could not be set"); let response = JsFuture::from(window.fetch_with_request(&request)) .await .expect("Could not cast response as JsFuture"); // `response` is a `Response` object. assert!(response.is_instance_of::<Response>()); let resp: Response = response.dyn_into().unwrap(); // Convert this other `Promise` into a rust `Future`. contact_info = JsFuture::from(resp.text().unwrap()) .await .unwrap() .as_string() .unwrap(); } // Show contact info. 
let text = document .get_element_by_id("info-text") .expect("Could not get element with id 'info-text'"); text.set_inner_html(&contact_info); } #[wasm_bindgen] pub async fn contact_copy() { let window = web_sys::window().expect("No global `window` exists"); let document = window.document().expect("Should have a document on window"); let navigator = window.navigator(); // Get the text. let text = document .get_element_by_id("info-text") .expect("Could not get element with id 'submit'") .inner_html(); if text.is_empty() { return; } let maybe_clipboard = navigator.clipboard(); let mut feedback = "Error!"; // Default to error. if let Some(clipboard) = maybe_clipboard { let copy_promise = clipboard.write_text(&text); // Convert this `Promise` into a rust `Future`. if JsFuture::from(copy_promise).await.is_ok() { feedback = "Copied!"; }; } // Show copied. let copy_button = document .get_element_by_id("copy-info") .expect("Could not get element with id 'copy-info'"); copy_button.set_inner_html(feedback); window .set_timeout_with_str_and_timeout_and_unused_0("window.busy.contact_copy_reset()", 1000) .expect("Could not set timeout for copy feedback"); } #[wasm_bindgen] pub fn contact_copy_reset() { let window = web_sys::window().expect("No global `window` exists"); let document = window.document().expect("Should have a document on window"); // Show copied. let copy_button = document .get_element_by_id("copy-info") .expect("Could not get element with id 'copy-info'"); copy_button.set_inner_html("Copy"); } #[wasm_bindgen] pub fn contact_submit() { let window = web_sys::window().expect("No global `window` exists"); let document = window.document().expect("Should have a document on window"); // Remove submit button. document .get_element_by_id("submit") .expect("Could not get element with id 'submit'") .remove(); // Show loading text. 
let loading = document .get_element_by_id("contact-loading") .expect("Could not get element with id 'contact-loading'"); loading.set_class_name("contact-loading show"); } #[wasm_bindgen] pub async fn contact_submitted() { // Set active tab. active_tab("contact"); // Go to the page. goto_page( "/contact-submitted", "/api/contact_submitted.html?ver=ypBIrFi5QPY", "Submitted", ) .await; } #[wasm_bindgen] pub async fn captcha_submit() { let window = web_sys::window().expect("No global `window` exists"); let document = window.document().expect("Should have a document on window"); let input_js: JsValue = document .get_element_by_id("captcha-chars") .expect("Could not find element 'captcha-chars'") .into(); let captcha_input: HtmlInputElement = input_js.into(); let guess = captcha_input.value(); let mut req = RequestInit::new(); req.method("GET"); let request_string = format!("/api/submit_captcha?captcha={}", guess); let request = Request::new_with_str_and_init(&request_string, &req) .expect("Request could not be created"); request .headers() .set("Accept", "text/plain") .expect("Headers could not be set"); let response = JsFuture::from(window.fetch_with_request(&request)) .await .expect("Could not cast response as JsFuture"); // `response` is a `Response` object. assert!(response.is_instance_of::<Response>()); let resp: Response = response.dyn_into().unwrap(); // Convert this other `Promise` into a rust `Future`. let response_content = JsFuture::from(resp.text().unwrap()) .await .unwrap() .as_string() .unwrap(); if response_content == "Pass" { // Hide captcha control stuff. document .get_element_by_id("captcha-buttons") .expect("Could not find element 'captcha-pass'") .remove(); document .get_element_by_id("captcha-chars") .expect("Could not find element 'captcha-pass'") .set_attribute("hidden", "true") .expect("Hidden attribute could not be set"); // Show Pass Checkmark. 
document .get_element_by_id("captcha-pass") .expect("Could not find element 'captcha-pass'") .remove_attribute("hidden") .expect("Hidden attribute not present"); // Show submit button. document .get_element_by_id("submit") .expect("Could not find element 'captcha-pass'") .remove_attribute("hidden") .expect("Hidden attribute not present"); } else { // Show try again. document .get_element_by_id("try-again") .expect("Could not find element 'captcha-pass'") .remove_attribute("hidden") .expect("Hidden attribute not present"); } } #[wasm_bindgen] pub fn captcha_refresh() { let window = web_sys::window().expect("No global `window` exists"); let document = window.document().expect("Should have a document on window"); let url = format!("/api/generate_captcha?time={}", Date::now()); document .get_element_by_id("captcha-png") .expect("Could not find element 'captcha-pass'") .set_attribute("src", &url) .expect("Could not set hidden attribute"); }
//! Crate which contain the virtual machine which executes embed_lang programs #[macro_use] extern crate log; #[cfg(test)] extern crate env_logger; #[macro_use] extern crate quick_error; #[macro_use] extern crate mopa; extern crate base; #[cfg(feature = "parser")] extern crate parser; #[cfg(feature = "check")] extern crate check; #[macro_use] pub mod api; pub mod compiler; pub mod types; pub mod vm; pub mod thread; pub mod interner; pub mod gc; pub mod stack; pub mod primitives; pub mod channel; mod reference; mod lazy; mod array; use api::ValueRef; use vm::Value; #[derive(Debug)] pub struct Variants<'a>(&'a Value); impl<'a> Variants<'a> { pub unsafe fn new(value: &Value) -> Variants { Variants(value) } pub fn as_ref(&self) -> ValueRef { ValueRef::new(self.0) } }
use std::io::{self, BufRead};
use std::collections::HashSet;

/// A raw instruction line: three-letter opcode plus a signed argument.
#[derive(Debug)]
struct Assembly {
    opcode: String,
    argument: i32,
}

#[derive(Clone, Debug)]
enum Operation {
    Acc(i32),
    Jmp(i32),
    Nop(i32),
}

/// Converts a parsed `Assembly` line into a typed `Operation`.
/// Returns `None` for unknown opcodes.
fn parse_operation(assembly: Assembly) -> Option<Operation> {
    match assembly.opcode.as_str() {
        "nop" => Some(Operation::Nop(assembly.argument)),
        "jmp" => Some(Operation::Jmp(assembly.argument)),
        "acc" => Some(Operation::Acc(assembly.argument)),
        _ => None,
    }
}

/// Machine state: program counter and accumulator.
struct Machine {
    pc: i32,
    acc: i32,
}

/// Executes the single instruction at `machine.pc`, updating `pc` and `acc`.
/// Panics if `pc` is outside `code` (same as the original `get().unwrap()`).
fn step(code: &[Operation], machine: &mut Machine) {
    match code.get(machine.pc as usize).unwrap() {
        Operation::Nop(_) => machine.pc += 1,
        Operation::Acc(acc_offset) => {
            machine.acc += acc_offset;
            machine.pc += 1;
        }
        Operation::Jmp(jmp_offset) => machine.pc += jmp_offset,
    }
}

/// Part 1: runs until an instruction is about to execute a second time, then
/// prints the accumulator.
fn run_code_until_loop(code: &[Operation]) {
    let mut machine = Machine { pc: 0, acc: 0 };
    // HashSet was already imported but unused; it replaces the previous
    // `Vec` + `contains`, which made the loop O(n^2).
    let mut visited_pcs: HashSet<i32> = HashSet::new();
    // `insert` returns false once a pc repeats — that is the loop entry.
    while visited_pcs.insert(machine.pc) {
        step(code, &mut machine);
    }
    println!("{}", machine.acc);
}

/// Part 2 helper: runs the program and prints the accumulator only if it
/// terminates (pc lands exactly on `code.len()`).
///
/// Loop detection is exact (revisiting a pc means an infinite loop, since
/// control flow depends only on pc) instead of the previous arbitrary
/// 20000-iteration cap, which could misclassify long-but-terminating
/// programs.
fn run_code_until_end(code: &[Operation]) {
    let mut machine = Machine { pc: 0, acc: 0 };
    let mut visited_pcs: HashSet<i32> = HashSet::new();
    while machine.pc != code.len() as i32 {
        if !visited_pcs.insert(machine.pc) {
            // Revisiting an instruction => infinite loop; stay silent.
            return;
        }
        step(code, &mut machine);
    }
    println!("{}", machine.acc);
}

fn main() {
    let mut code: Vec<Operation> = Vec::new();
    for wrapped_line in io::stdin().lock().lines() {
        let line = wrapped_line.unwrap();
        // Lines look like "acc +3" / "jmp -4": opcode in bytes 0..3,
        // signed argument from byte 4 on.
        let assembly = Assembly {
            opcode: line.get(0..3).unwrap().to_string(),
            argument: line.get(4..).unwrap().parse::<i32>().unwrap(),
        };
        code.push(parse_operation(assembly).unwrap());
    }
    // Part 1: accumulator value just before any instruction repeats.
    run_code_until_loop(&code);
    // Part 2: flip each nop<->jmp in turn; the variant that terminates
    // prints its accumulator.
    for (op_index, op) in code.iter().enumerate() {
        let mut new_code = code.clone();
        match op {
            Operation::Nop(offset) => new_code[op_index] = Operation::Jmp(*offset),
            Operation::Jmp(offset) => new_code[op_index] = Operation::Nop(*offset),
            _ => continue,
        }
        run_code_until_end(&new_code);
    }
}
//! Parses a shell transcript of `cd`/`ls` commands into a directory tree and
//! reports a size query over it (Advent-of-Code-2022-day-7-style input —
//! see the test fixture below).
use std::{collections::HashMap, fs};

use nom::{
    branch::alt,
    bytes::complete::tag,
    character::complete::{digit1, line_ending, not_line_ending},
    combinator::{iterator, map, value},
    sequence::{preceded, terminated},
    IResult,
};

/// A `$`-prefixed command from the transcript.
#[derive(Debug, Clone)]
enum Command {
    Cd(String),
    Ls,
}

/// A directory stored in the arena (`Vec<DirectoryNode>`); directories refer
/// to each other by arena index instead of references to sidestep borrow
/// issues with a parent pointer.
#[derive(Debug)]
struct DirectoryNode {
    // This node's own arena index, set by `add_to_storage`.
    index: Option<usize>,
    // Child entries keyed by name.
    entries: HashMap<String, EntryNode>,
    // Arena index of the parent; `None` only for the root.
    parent: Option<usize>,
}

impl DirectoryNode {
    /// Creates a detached node; it gets its `index`/`parent` when added to
    /// the arena.
    fn new() -> Self {
        Self {
            index: None,
            entries: HashMap::new(),
            parent: None,
        }
    }

    /// Pushes `directory` into the arena, wiring up its index and parent,
    /// and returns its arena index.
    fn add_to_storage(
        storage: &mut Vec<DirectoryNode>,
        mut directory: Self,
        parent: Option<usize>,
    ) -> usize {
        let index = storage.len();
        directory.index = Some(index);
        directory.parent = parent;
        storage.push(directory);
        index
    }

    /// Returns `(total size of this directory, sizes of this directory and
    /// every directory below it)`. Recurses through the arena; file sizes
    /// count toward every ancestor.
    fn directory_sizes(&self, storage: &Vec<DirectoryNode>) -> (usize, Vec<usize>) {
        let mut own_size: usize = 0;
        let mut sub_directory_sizes: Vec<usize> = Vec::new();
        for entry in self.entries.values() {
            match entry {
                EntryNode::DirectoryNode(dir) => {
                    let (size, sub_sizes) = storage[*dir].directory_sizes(storage);
                    own_size += size;
                    sub_directory_sizes.extend(sub_sizes);
                }
                EntryNode::FileNode(file) => own_size += file.size,
            }
        }
        // Include this directory's own total in the flattened list.
        sub_directory_sizes.push(own_size);
        (own_size, sub_directory_sizes)
    }
}

/// A file entry in the tree.
#[derive(Debug)]
struct FileNode {
    name: String,
    size: usize,
}

/// A tree entry: either an arena index of a subdirectory or an inline file.
#[derive(Debug)]
enum EntryNode {
    DirectoryNode(usize),
    FileNode(FileNode),
}

// Parse-time counterparts of the node types above (no arena indices yet).
struct FileInfo {
    name: String,
    size: usize,
}

enum EntryInfo {
    DirectoryInfo(String),
    FileInfo(FileInfo),
}

/// Parses a decimal number. Panics on overflow-sized digit runs.
fn number(input: &str) -> IResult<&str, usize> {
    map(digit1, |s: &str| s.parse::<usize>().unwrap())(input)
}

/// Parses the `ls` command line (including its trailing newline).
fn parse_ls(input: &str) -> IResult<&str, Command> {
    value(Command::Ls, terminated(tag("ls"), line_ending))(input)
}

/// Parses `cd <path>` (path may be `/`, `..`, or a directory name).
fn parse_cd(input: &str) -> IResult<&str, Command> {
    let (input, _) = tag("cd ")(input)?;
    let (input, path) = terminated(alt((not_line_ending, tag("/"))), line_ending)(input)?;
    Ok((input, Command::Cd(path.to_string())))
}

/// Parses one `$ `-prefixed command.
fn parse_command(input: &str) -> IResult<&str, Command> {
    preceded(tag("$ "), alt((parse_ls, parse_cd)))(input)
}

/// Parses one `dir <name>` line of `ls` output.
fn parse_directory(input: &str) -> IResult<&str, EntryInfo> {
    let (input, _) = tag("dir ")(input)?;
    let (input, name) = terminated(not_line_ending, line_ending)(input)?;
    Ok((input, EntryInfo::DirectoryInfo(name.to_string())))
}

/// Parses one `<size> <name>` line of `ls` output.
fn parse_file(input: &str) -> IResult<&str, EntryInfo> {
    let (input, size) = terminated(number, tag(" "))(input)?;
    let (input, name) = terminated(not_line_ending, line_ending)(input)?;
    Ok((
        input,
        EntryInfo::FileInfo(FileInfo {
            name: name.to_string(),
            size,
        }),
    ))
}

/// Parses a single `ls` output line (directory or file).
fn parse_entry(input: &str) -> IResult<&str, EntryInfo> {
    alt((parse_directory, parse_file))(input)
}

/// Builds the directory tree from the transcript, then returns the size of
/// the smallest directory whose deletion leaves at least 30000000 free out
/// of a 70000000-unit disk.
fn do_the_thing(input: &str) -> usize {
    // Redefine so we can mutate it locally
    let mut input = input;
    // This Vec is the storage arena, so we can have references to parents
    let mut directories = Vec::<DirectoryNode>::new();
    let root = DirectoryNode::new();
    let root = DirectoryNode::add_to_storage(&mut directories, root, None);
    let mut current_dir = root;
    while !input.is_empty() {
        let (rest, command) = parse_command(input).unwrap();
        input = rest;
        match command {
            Command::Cd(path) => match path.as_str() {
                "/" => current_dir = root,
                ".." => current_dir = directories[current_dir].parent.unwrap(),
                _ => {
                    // `cd` into a child: it must already have appeared in a
                    // previous `ls` listing.
                    let dir = directories[current_dir].entries.get(&path).unwrap();
                    match dir {
                        EntryNode::DirectoryNode(dir) => current_dir = *dir,
                        _ => panic!("Not a directory"),
                    }
                }
            },
            Command::Ls => {
                // Consume consecutive output lines until something fails to
                // parse as an entry (i.e. the next `$ ` command); `finish`
                // hands back the unparsed remainder.
                let mut entries = iterator(input, parse_entry);
                entries.for_each(|entry_info| match entry_info {
                    EntryInfo::DirectoryInfo(name) => {
                        let dir = DirectoryNode::new();
                        let dir = DirectoryNode::add_to_storage(&mut directories, dir, Some(current_dir));
                        directories[current_dir]
                            .entries
                            .insert(name, EntryNode::DirectoryNode(dir));
                    }
                    EntryInfo::FileInfo(file) => {
                        let file = FileNode {
                            name: file.name,
                            size: file.size,
                        };
                        directories[current_dir]
                            .entries
                            .insert(file.name.clone(), EntryNode::FileNode(file));
                    }
                });
                let (rest, _) = entries.finish().unwrap();
                input = rest;
            }
        }
    }
    let (total, mut directory_sizes) = directories[root].directory_sizes(&directories);
    directory_sizes.sort_unstable();
    // Smallest directory freeing enough space: free = 70000000 - (used after
    // deleting `size`); require free >= 30000000.
    directory_sizes.into_iter().find(|size| 70000000 - (total - size) >= 30000000).unwrap()
}

fn main() {
    let input = fs::read_to_string("input.txt").unwrap();
    println!("{:?}", do_the_thing(&input));
}

#[cfg(test)]
mod tests {
    use super::*;
    use test_case::test_case;

    #[test_case("$ cd /
$ ls
dir a
14848514 b.txt
8504156 c.dat
dir d
$ cd a
$ ls
dir e
29116 f
2557 g
62596 h.lst
$ cd e
$ ls
584 i
$ cd ..
$ cd ..
$ cd d
$ ls
4060174 j
8033020 d.log
5626152 d.ext
7214296 k
" => 24933642)]
    fn test(input: &str) -> usize {
        do_the_thing(&input)
    }
}
// Machine-generated (svd2rust legacy API) read-only accessor for the
// RESET_DONE register: one boolean flag per peripheral, at the bit position
// stated in each method's `#[doc]` attribute.
// NOTE(review): generated code — prefer regenerating from the SVD over
// hand-editing.
#[doc = "Reader of register RESET_DONE"]
pub type R = crate::R<u32, super::RESET_DONE>;
#[doc = "Reader of field `usbctrl`"]
pub type USBCTRL_R = crate::R<bool, bool>;
#[doc = "Reader of field `uart1`"]
pub type UART1_R = crate::R<bool, bool>;
#[doc = "Reader of field `uart0`"]
pub type UART0_R = crate::R<bool, bool>;
#[doc = "Reader of field `timer`"]
pub type TIMER_R = crate::R<bool, bool>;
#[doc = "Reader of field `tbman`"]
pub type TBMAN_R = crate::R<bool, bool>;
#[doc = "Reader of field `sysinfo`"]
pub type SYSINFO_R = crate::R<bool, bool>;
#[doc = "Reader of field `syscfg`"]
pub type SYSCFG_R = crate::R<bool, bool>;
#[doc = "Reader of field `spi1`"]
pub type SPI1_R = crate::R<bool, bool>;
#[doc = "Reader of field `spi0`"]
pub type SPI0_R = crate::R<bool, bool>;
#[doc = "Reader of field `rtc`"]
pub type RTC_R = crate::R<bool, bool>;
#[doc = "Reader of field `pwm`"]
pub type PWM_R = crate::R<bool, bool>;
#[doc = "Reader of field `pll_usb`"]
pub type PLL_USB_R = crate::R<bool, bool>;
#[doc = "Reader of field `pll_sys`"]
pub type PLL_SYS_R = crate::R<bool, bool>;
#[doc = "Reader of field `pio1`"]
pub type PIO1_R = crate::R<bool, bool>;
#[doc = "Reader of field `pio0`"]
pub type PIO0_R = crate::R<bool, bool>;
#[doc = "Reader of field `pads_qspi`"]
pub type PADS_QSPI_R = crate::R<bool, bool>;
#[doc = "Reader of field `pads_bank0`"]
pub type PADS_BANK0_R = crate::R<bool, bool>;
#[doc = "Reader of field `jtag`"]
pub type JTAG_R = crate::R<bool, bool>;
#[doc = "Reader of field `io_qspi`"]
pub type IO_QSPI_R = crate::R<bool, bool>;
#[doc = "Reader of field `io_bank0`"]
pub type IO_BANK0_R = crate::R<bool, bool>;
#[doc = "Reader of field `i2c1`"]
pub type I2C1_R = crate::R<bool, bool>;
#[doc = "Reader of field `i2c0`"]
pub type I2C0_R = crate::R<bool, bool>;
#[doc = "Reader of field `dma`"]
pub type DMA_R = crate::R<bool, bool>;
#[doc = "Reader of field `busctrl`"]
pub type BUSCTRL_R = crate::R<bool, bool>;
#[doc = "Reader of field `adc`"]
pub type ADC_R = crate::R<bool, bool>;
// Each accessor extracts one bit of the raw 32-bit value.
impl R {
    #[doc = "Bit 24"]
    #[inline(always)]
    pub fn usbctrl(&self) -> USBCTRL_R {
        USBCTRL_R::new(((self.bits >> 24) & 0x01) != 0)
    }
    #[doc = "Bit 23"]
    #[inline(always)]
    pub fn uart1(&self) -> UART1_R {
        UART1_R::new(((self.bits >> 23) & 0x01) != 0)
    }
    #[doc = "Bit 22"]
    #[inline(always)]
    pub fn uart0(&self) -> UART0_R {
        UART0_R::new(((self.bits >> 22) & 0x01) != 0)
    }
    #[doc = "Bit 21"]
    #[inline(always)]
    pub fn timer(&self) -> TIMER_R {
        TIMER_R::new(((self.bits >> 21) & 0x01) != 0)
    }
    #[doc = "Bit 20"]
    #[inline(always)]
    pub fn tbman(&self) -> TBMAN_R {
        TBMAN_R::new(((self.bits >> 20) & 0x01) != 0)
    }
    #[doc = "Bit 19"]
    #[inline(always)]
    pub fn sysinfo(&self) -> SYSINFO_R {
        SYSINFO_R::new(((self.bits >> 19) & 0x01) != 0)
    }
    #[doc = "Bit 18"]
    #[inline(always)]
    pub fn syscfg(&self) -> SYSCFG_R {
        SYSCFG_R::new(((self.bits >> 18) & 0x01) != 0)
    }
    #[doc = "Bit 17"]
    #[inline(always)]
    pub fn spi1(&self) -> SPI1_R {
        SPI1_R::new(((self.bits >> 17) & 0x01) != 0)
    }
    #[doc = "Bit 16"]
    #[inline(always)]
    pub fn spi0(&self) -> SPI0_R {
        SPI0_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 15"]
    #[inline(always)]
    pub fn rtc(&self) -> RTC_R {
        RTC_R::new(((self.bits >> 15) & 0x01) != 0)
    }
    #[doc = "Bit 14"]
    #[inline(always)]
    pub fn pwm(&self) -> PWM_R {
        PWM_R::new(((self.bits >> 14) & 0x01) != 0)
    }
    #[doc = "Bit 13"]
    #[inline(always)]
    pub fn pll_usb(&self) -> PLL_USB_R {
        PLL_USB_R::new(((self.bits >> 13) & 0x01) != 0)
    }
    #[doc = "Bit 12"]
    #[inline(always)]
    pub fn pll_sys(&self) -> PLL_SYS_R {
        PLL_SYS_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bit 11"]
    #[inline(always)]
    pub fn pio1(&self) -> PIO1_R {
        PIO1_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bit 10"]
    #[inline(always)]
    pub fn pio0(&self) -> PIO0_R {
        PIO0_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 9"]
    #[inline(always)]
    pub fn pads_qspi(&self) -> PADS_QSPI_R {
        PADS_QSPI_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 8"]
    #[inline(always)]
    pub fn pads_bank0(&self) -> PADS_BANK0_R {
        PADS_BANK0_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 7"]
    #[inline(always)]
    pub fn jtag(&self) -> JTAG_R {
        JTAG_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 6"]
    #[inline(always)]
    pub fn io_qspi(&self) -> IO_QSPI_R {
        IO_QSPI_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 5"]
    #[inline(always)]
    pub fn io_bank0(&self) -> IO_BANK0_R {
        IO_BANK0_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 4"]
    #[inline(always)]
    pub fn i2c1(&self) -> I2C1_R {
        I2C1_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 3"]
    #[inline(always)]
    pub fn i2c0(&self) -> I2C0_R {
        I2C0_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 2"]
    #[inline(always)]
    pub fn dma(&self) -> DMA_R {
        DMA_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 1"]
    #[inline(always)]
    pub fn busctrl(&self) -> BUSCTRL_R {
        BUSCTRL_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 0"]
    #[inline(always)]
    pub fn adc(&self) -> ADC_R {
        ADC_R::new((self.bits & 0x01) != 0)
    }
}
// Copyright (C) 2022 Subspace Labs, Inc.
// SPDX-License-Identifier: Apache-2.0

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// GRANDPA verification is mostly taken from Parity's bridges https://github.com/paritytech/parity-bridges-common/tree/master/primitives/header-chain

use codec::{Decode, Encode};
use finality_grandpa::voter_set::VoterSet;
use scale_info::TypeInfo;
#[cfg(feature = "std")]
use serde::{Deserialize, Serialize};
use sp_consensus_grandpa::{
    AuthorityId, AuthorityList, AuthoritySignature, ConsensusLog, SetId, GRANDPA_ENGINE_ID,
};
use sp_runtime::traits::Header as HeaderT;
use sp_std::collections::btree_map::BTreeMap;
use sp_std::collections::btree_set::BTreeSet;
use sp_std::prelude::*;
use sp_std::vec::Vec;

/// A GRANDPA Justification is a proof that a given header was finalized
/// at a certain height and with a certain set of authorities.
///
/// This particular proof is used to prove that headers on a bridged chain
/// (so not our chain) have been finalized correctly.
#[derive(Encode, Decode, Clone, PartialEq, Eq, TypeInfo)]
pub struct GrandpaJustification<Header: HeaderT> {
    /// The round (voting period) this justification is valid for.
    pub round: u64,
    /// The set of votes for the chain which is to be finalized.
    pub commit:
        finality_grandpa::Commit<Header::Hash, Header::Number, AuthoritySignature, AuthorityId>,
    /// A proof that the chain of blocks in the commit are related to each other.
    pub votes_ancestries: Vec<Header>,
}

/// A GRANDPA Authority List and ID.
#[derive(Debug, Default, Encode, Decode, Eq, PartialEq, Clone, TypeInfo)]
#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
pub struct AuthoritySet {
    /// List of GRANDPA authorities for the current round.
    pub authorities: AuthorityList,
    /// Monotonic identifier of the current GRANDPA authority set.
    pub set_id: SetId,
}

/// Votes ancestries with useful methods.
#[derive(Debug)]
struct AncestryChain<Header: HeaderT> {
    /// Header hash => parent header hash mapping.
    parents: BTreeMap<Header::Hash, Header::Hash>,
    /// Hashes of headers that were not visited by `is_ancestor` method.
    unvisited: BTreeSet<Header::Hash>,
}

impl<Header: HeaderT> AncestryChain<Header> {
    /// Create new ancestry chain.
    fn new(ancestry: &[Header]) -> AncestryChain<Header> {
        let mut parents = BTreeMap::new();
        let mut unvisited = BTreeSet::new();
        for ancestor in ancestry {
            let hash = ancestor.hash();
            let parent_hash = *ancestor.parent_hash();
            parents.insert(hash, parent_hash);
            unvisited.insert(hash);
        }
        AncestryChain { parents, unvisited }
    }

    /// Returns `Ok(_)` if `precommit_target` is a descendant of the `commit_target` block and
    /// `Err(_)` otherwise.
    ///
    /// Removes every hash it walks through from `unvisited`, so that after
    /// all precommits are checked, any hash still in `unvisited` is an extra
    /// (unused) header in `votes_ancestries`.
    fn ensure_descendant(
        mut self,
        commit_target: &Header::Hash,
        precommit_target: &Header::Hash,
    ) -> Result<Self, Error> {
        let mut current_hash = *precommit_target;
        while current_hash != *commit_target {
            let is_visited_before = !self.unvisited.remove(&current_hash);
            current_hash = match self.parents.get(&current_hash) {
                Some(parent_hash) => {
                    if is_visited_before {
                        // `Some(parent_hash)` means that the `current_hash` is in the `parents`
                        // container. `is_visited_before` means that it has been visited before in
                        // some of previous calls => since we assume that the previous call has
                        // finished with `Ok(_)`, this walk will also reach the commit target, so
                        // we can return `Ok(_)` early
                        return Ok(self);
                    }
                    *parent_hash
                }
                None => return Err(Error::PrecommitIsNotCommitDescendant),
            };
        }
        Ok(self)
    }
}

/// Justification verification error.
#[derive(Debug, Eq, PartialEq)]
pub enum Error {
    /// Justification is finalizing unexpected header.
    InvalidJustificationTarget,
    /// The authority has provided an invalid signature.
    InvalidAuthoritySignature,
    /// The justification contains precommit for header that is not a descendant of the commit
    /// header.
    PrecommitIsNotCommitDescendant,
    /// The cumulative weight of all votes in the justification is not enough to justify commit
    /// header finalization.
    TooLowCumulativeWeight,
    /// The justification contains extra (unused) headers in its `votes_ancestries` field.
    ExtraHeadersInVotesAncestries,
    /// Arithmetic overflow while summing vote weights.
    /// (Variant name retains its original spelling for API compatibility.)
    ArithematicOverflow,
}

/// Verify that justification, that is generated by given authority set, finalizes given header.
pub(crate) fn verify_justification<Header: HeaderT>(
    finalized_target: (Header::Hash, Header::Number),
    authorities_set_id: SetId,
    authorities_set: &VoterSet<AuthorityId>,
    justification: &GrandpaJustification<Header>,
) -> Result<(), Error>
where
    Header::Number: finality_grandpa::BlockNumberOps,
{
    // always ensure the justification belongs to either current target or its descendant
    let (_finalized_hash, finalized_number) = finalized_target;
    if justification.commit.target_number < finalized_number {
        return Err(Error::InvalidJustificationTarget);
    }

    let mut chain = AncestryChain::new(&justification.votes_ancestries);
    let mut signature_buffer = Vec::new();
    let mut votes = BTreeSet::new();
    let mut cumulative_weight = 0u64;
    for signed in &justification.commit.precommits {
        // authority must be in the set
        let authority_info = match authorities_set.get(&signed.id) {
            Some(authority_info) => authority_info,
            None => {
                // just ignore precommit from unknown authority as
                // `finality_grandpa::import_precommit` does
                continue;
            }
        };

        // check if authority has already voted in the same round.
        //
        // there's a lot of code in `validate_commit` and `import_precommit` functions inside
        // `finality-grandpa` crate (mostly related to reporting equivocations). But the only thing
        // that we care about is that only first vote from the authority is accepted
        if !votes.insert(signed.id.clone()) {
            continue;
        }

        // everything below this line can't just `continue`, because state is already altered

        // precommits aren't allowed for block lower than the target
        if signed.precommit.target_number < justification.commit.target_number {
            return Err(Error::PrecommitIsNotCommitDescendant);
        }
        // all precommits must be descendants of target block
        chain = chain.ensure_descendant(
            &justification.commit.target_hash,
            &signed.precommit.target_hash,
        )?;
        // since we know now that the precommit target is the descendant of the justification
        // target, we may increase 'weight' of the justification target
        //
        // there's a lot of code in the `VoteGraph::insert` method inside `finality-grandpa` crate,
        // but in the end it is only used to find GHOST, which we don't care about. The only thing
        // that we care about is that the justification target has enough weight
        cumulative_weight = cumulative_weight
            .checked_add(authority_info.weight().0.into())
            .ok_or(Error::ArithematicOverflow)?;
        // verify authority signature
        if !sp_consensus_grandpa::check_message_signature_with_buffer(
            &finality_grandpa::Message::Precommit(signed.precommit.clone()),
            &signed.id,
            &signed.signature,
            justification.round,
            authorities_set_id,
            &mut signature_buffer,
        ) {
            return Err(Error::InvalidAuthoritySignature);
        }
    }

    // check that there are no extra headers in the justification
    if !chain.unvisited.is_empty() {
        return Err(Error::ExtraHeadersInVotesAncestries);
    }

    // check that the cumulative weight of validators voted for the justification target (or one
    // of its descendants) is larger than required threshold.
    let threshold = authorities_set.threshold().0.into();
    if cumulative_weight >= threshold {
        Ok(())
    } else {
        Err(Error::TooLowCumulativeWeight)
    }
}

/// Checks the given header for a consensus digest signaling a scheduled authority-set change and
/// extracts it.
pub(crate) fn find_scheduled_change<H: HeaderT>(
    header: &H,
) -> Option<sp_consensus_grandpa::ScheduledChange<H::Number>> {
    use sp_runtime::generic::OpaqueDigestItemId;

    let id = OpaqueDigestItemId::Consensus(&GRANDPA_ENGINE_ID);

    let filter_log = |log: ConsensusLog<H::Number>| match log {
        ConsensusLog::ScheduledChange(change) => Some(change),
        _ => None,
    };

    // find the first consensus digest with the right ID which converts to
    // the right kind of consensus log.
    header
        .digest()
        .convert_first(|l| l.try_to(id).and_then(filter_log))
}

/// Checks the given header for a consensus digest signaling a **forced** scheduled change and
/// extracts it.
pub(crate) fn find_forced_change<H: HeaderT>(
    header: &H,
) -> Option<(H::Number, sp_consensus_grandpa::ScheduledChange<H::Number>)> {
    use sp_runtime::generic::OpaqueDigestItemId;

    let id = OpaqueDigestItemId::Consensus(&GRANDPA_ENGINE_ID);

    let filter_log = |log: ConsensusLog<H::Number>| match log {
        ConsensusLog::ForcedChange(delay, change) => Some((delay, change)),
        _ => None,
    };

    // find the first consensus digest with the right ID which converts to
    // the right kind of consensus log.
    header
        .digest()
        .convert_first(|l| l.try_to(id).and_then(filter_log))
}
use std::fmt;
use std::cmp;

// Errata for the finals (rhyme) table of the official "Hanyu Pinyin" scheme:
// - The final `un` is really `uen`; it is only written `un` when an initial precedes it.
// - The finals `ie` and `üe` are really `iê` and `üê`; `ê` is written `e` whenever it
//   does not stand alone.
//
// The finals `-i` (front) and `-i` (back) are treated as whole-syllable readings in the
// scheme, so they are not given independent final status here:
// - `-i` (front) only combines with z/c/s, e.g. 紫, 字, 子
// - `-i` (back) only combines with zh/ch/sh/r, e.g. 之, 指, 支持, 吃食, 史诗
//
// Every final is stored as a fixed `[char; 4]`, right-padded with ' '.

// ["a", "o", "e", "ê", "er", "ai", "ei", "ao", "ou", "an", "en", "ang", "eng", "ong"]
pub const RHYME_TABLE_COLUMN_A: [[char; 4]; 14] = [
    ['a', ' ', ' ', ' '],
    ['o', ' ', ' ', ' '],
    ['e', ' ', ' ', ' '],
    ['ê', ' ', ' ', ' '],
    ['e', 'r', ' ', ' '],
    ['a', 'i', ' ', ' '],
    ['e', 'i', ' ', ' '],
    ['a', 'o', ' ', ' '],
    ['o', 'u', ' ', ' '],
    ['a', 'n', ' ', ' '],
    ['e', 'n', ' ', ' '],
    ['a', 'n', 'g', ' '],
    ['e', 'n', 'g', ' '],
    ['o', 'n', 'g', ' '],
];

// ["i", "ia", "ie", "iao", "iou", "ian", "in", "iang", "ing", "iong"]
pub const RHYME_TABLE_COLUMN_I: [[char; 4]; 10] = [
    ['i', ' ', ' ', ' '],
    ['i', 'a', ' ', ' '],
    ['i', 'e', ' ', ' '],
    ['i', 'a', 'o', ' '],
    ['i', 'o', 'u', ' '],
    ['i', 'a', 'n', ' '],
    ['i', 'n', ' ', ' '],
    ['i', 'a', 'n', 'g'],
    ['i', 'n', 'g', ' '],
    ['i', 'o', 'n', 'g'],
];

// ["u", "ua", "uo", "uai", "uei", "uan", "uen", "uang", "ueng"]
pub const RHYME_TABLE_COLUMN_U: [[char; 4]; 9] = [
    ['u', ' ', ' ', ' '],
    ['u', 'a', ' ', ' '],
    ['u', 'o', ' ', ' '],
    ['u', 'a', 'i', ' '],
    ['u', 'e', 'i', ' '],
    ['u', 'a', 'n', ' '],
    ['u', 'e', 'n', ' '],
    ['u', 'a', 'n', 'g'],
    ['u', 'e', 'n', 'g'],
];

// ["ü", "üe", "üan", "ün"]
pub const RHYME_TABLE_COLUMN_YU: [[char; 4]; 4] = [
    ['ü', ' ', ' ', ' '],
    ['ü', 'e', ' ', ' '],
    ['ü', 'a', 'n', ' '],
    ['ü', 'n', ' ', ' '],
];

// NOTE: for visual consistency this table is NOT sorted — do not binary-search it.
// In the official scheme `un` is listed as a final; the full form `uen` is used here.
// "a", "o", "e", "ê", "er", "ai", "ei", "ao", "ou", "an", "en", "ang", "eng", "ong",
// "i", "ia", "ie", "iao", "iou", "ian", "in", "iang", "ing", "iong",
// "u", "ua", "uo", "uai", "uei", "uan", "uen", "uang", "ueng",
// "ü", "üe", "üan", "ün",
pub const RHYME_TABLE: [[char; 4]; 37] = [
    ['a', ' ', ' ', ' '],
    ['o', ' ', ' ', ' '],
    ['e', ' ', ' ', ' '],
    ['ê', ' ', ' ', ' '],
    ['e', 'r', ' ', ' '],
    ['a', 'i', ' ', ' '],
    ['e', 'i', ' ', ' '],
    ['a', 'o', ' ', ' '],
    ['o', 'u', ' ', ' '],
    ['a', 'n', ' ', ' '],
    ['e', 'n', ' ', ' '],
    ['a', 'n', 'g', ' '],
    ['e', 'n', 'g', ' '],
    ['o', 'n', 'g', ' '],
    ['i', ' ', ' ', ' '],
    ['i', 'a', ' ', ' '],
    ['i', 'e', ' ', ' '],
    ['i', 'a', 'o', ' '],
    ['i', 'o', 'u', ' '],
    ['i', 'a', 'n', ' '],
    ['i', 'n', ' ', ' '],
    ['i', 'a', 'n', 'g'],
    ['i', 'n', 'g', ' '],
    ['i', 'o', 'n', 'g'],
    ['u', ' ', ' ', ' '],
    ['u', 'a', ' ', ' '],
    ['u', 'o', ' ', ' '],
    ['u', 'a', 'i', ' '],
    ['u', 'e', 'i', ' '],
    ['u', 'a', 'n', ' '],
    ['u', 'e', 'n', ' '],
    ['u', 'a', 'n', 'g'],
    ['u', 'e', 'n', 'g'],
    ['ü', ' ', ' ', ' '],
    ['ü', 'e', ' ', ' '],
    ['ü', 'a', 'n', ' '],
    ['ü', 'n', ' ', ' '],
];

// Simple (single-vowel) finals:
// a, o, e, ê, er, i, ia, ie, u, ua, uo, ü, üe
pub const SIMPLE_VOWEL_RHYME_TABLE: [[char; 4]; 13] = [
    ['a', ' ', ' ', ' '],
    ['o', ' ', ' ', ' '],
    ['e', ' ', ' ', ' '],
    ['ê', ' ', ' ', ' '],
    ['e', 'r', ' ', ' '],
    ['i', ' ', ' ', ' '],
    ['i', 'a', ' ', ' '],
    ['i', 'e', ' ', ' '],
    ['u', ' ', ' ', ' '],
    ['u', 'a', ' ', ' '],
    ['u', 'o', ' ', ' '],
    ['ü', ' ', ' ', ' '],
    ['ü', 'e', ' ', ' '],
];

// Compound (diphthong/triphthong) finals:
// ai, ao, ei, iao, iou, ou, uai, uei
pub const COMPOUND_VOWEL_RHYME_TABLE: [[char; 4]; 8] = [
    ['a', 'i', ' ', ' '],
    ['a', 'o', ' ', ' '],
    ['e', 'i', ' ', ' '],
    ['i', 'a', 'o', ' '],
    ['i', 'o', 'u', ' '],
    ['o', 'u', ' ', ' '],
    ['u', 'a', 'i', ' '],
    ['u', 'e', 'i', ' '],
];

/// A pinyin final (rhyme), stored as a space-padded `[char; 4]` validated
/// against `RHYME_TABLE`.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub struct Rhyme([char; 4]);

impl Rhyme {
    /// Builds a `Rhyme` from a padded char array; `Err(())` when `chars` is not
    /// one of the 37 finals in `RHYME_TABLE`.
    pub fn new(chars: [char; 4]) -> Result<Self, ()> {
        if RHYME_TABLE.contains(&chars) {
            Ok(Rhyme(chars))
        } else {
            Err(())
        }
    }

    // https://zh.wikipedia.org/wiki/%E6%B1%89%E8%AF%AD%E6%8B%BC%E9%9F%B3#%E5%A3%B0%E8%B0%83
    /// Tone-mark placement rule: returns the vowel letter that should carry the
    /// tone diacritic, or `Err(())` for a malformed final.
    ///
    /// Priority: `a` first; then `o` or `e` (which never co-occur in a valid
    /// final); then `ü` (which never co-occurs with `i`/`u`); when `i` and `u`
    /// appear together, the later of the two takes the mark.
    pub fn tone_mark_rule(rhyme: &[char; 4]) -> Result<char, ()> {
        if rhyme.contains(&'a') {
            return Ok('a');
        }

        let has_o = rhyme.contains(&'o');
        let has_e = rhyme.contains(&'e');
        if has_o && has_e {
            // NOTE: these two letters never appear in the same valid final
            return Err(());
        }
        if has_o {
            return Ok('o');
        }
        if has_e {
            return Ok('e');
        }

        let pos_i = rhyme.iter().position(|&c| c == 'i');
        let pos_u = rhyme.iter().position(|&c| c == 'u');

        if rhyme.contains(&'ü') {
            if pos_i.is_some() || pos_u.is_some() {
                // `ü` never co-occurs with `i` or `u`
                return Err(());
            }
            return Ok('ü');
        }

        match (pos_i, pos_u) {
            // `i` and `u` together: the mark goes on whichever comes second
            (Some(i), Some(u)) => Ok(if cmp::max(i, u) == u { 'u' } else { 'i' }),
            (Some(_), None) => Ok('i'),
            (None, Some(_)) => Ok('u'),
            // NOTE: final with no markable vowel — out of spec
            (None, None) => Err(()),
        }
    }

    /// The vowel letter that carries the tone diacritic for this final.
    /// Panics when the placement rule fails (cannot happen for table-validated finals).
    pub fn vowel(&self) -> char {
        match Rhyme::tone_mark_rule(&self.0) {
            Ok(c) => c,
            Err(_) => {
                println!("[DEBUG] 在对 `{}` 计算声调位置时,出现未预料的错误。", self);
                panic!("声调位置计算出现未预料的错误!");
            }
        }
    }

    /// Whether this is a simple (single-vowel) final.
    pub fn is_simple(&self) -> bool {
        SIMPLE_VOWEL_RHYME_TABLE.contains(&self.0)
    }

    /// Whether this is a compound (multi-vowel) final.
    pub fn is_compound(&self) -> bool {
        COMPOUND_VOWEL_RHYME_TABLE.contains(&self.0)
    }

    /// Whether this final ends in a nasal (`n` or `ng`):
    /// an, ian, uan, üan, en, uen, in, ün, ang, iang, uang, eng, ing, ueng, ong, iong
    pub fn is_nasal(&self) -> bool {
        // BUG FIX: find the last real letter by scanning backwards over the
        // trailing ' ' padding. The previous loop decremented while the char
        // was NOT a space, so it stopped at the padding for short finals
        // (reporting e.g. "an" as non-nasal) and underflowed the index for
        // unpadded 4-letter finals such as "iang".
        matches!(
            self.0.iter().rev().find(|&&c| c != ' '),
            Some(&'n') | Some(&'g')
        )
    }
}

impl fmt::Display for Rhyme {
    /// Writes the final without its trailing padding spaces.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for elem in self.0.iter().take_while(|&&c| c != ' ') {
            // propagate formatter errors (the old code silently dropped them)
            write!(f, "{}", elem)?;
        }
        Ok(())
    }
}
use tensorflow_rust_sys;
use std::path::Path;
use std::ffi::CString;
use libc;
use friday_logging;

// Platform-dependent aliases for the C ABI types used by the TensorFlow C library:
// `char` signedness and `size_t` width differ between x86_64 and 32-bit ARM.
#[cfg(target_arch="x86_64")]
pub type Char = i8;
#[cfg(target_arch="x86_64")]
pub type UInt = u64;
#[cfg(target_arch="arm")]
pub type Char = u8;
#[cfg(target_arch="arm")]
pub type UInt = u32;

// Returns true when `status` carries TF_OK; otherwise logs the TF error message
// and returns false.
fn is_status_ok(status: *const tensorflow_rust_sys::TF_Status) -> bool {
    unsafe {
        if tensorflow_rust_sys::TF_GetCode(status) != tensorflow_rust_sys::TF_Code_TF_OK {
            // NOTE(review): `CString::from_raw` takes *ownership* of the buffer, but the
            // string returned by `TF_Message` is owned by the status object — dropping
            // `message` frees memory Rust does not own (double free / UB). A borrowed
            // `CStr::from_ptr` would be the sound choice; confirm before changing.
            let message = CString::from_raw(tensorflow_rust_sys::TF_Message(status) as *mut Char);
            let rust_message = message.to_str().expect("Failed to make string");
            friday_logging::fatal!("(tensorflow-models): Error {}", rust_message);
            return false;
        }
        return true;
    }
}

// A single input/output tensor bound to a named op in a loaded graph.
pub struct Tensor {
    // backing TF tensor; null until `set_data` allocates one
    tensor: *mut tensorflow_rust_sys::TF_Tensor,
    // graph op + output index this wrapper binds to
    op: tensorflow_rust_sys::TF_Output,
    // shape fetched from the graph (rank 1 only, see `new`)
    pub dims: Vec<i64>,
    // element dtype reported by the graph
    data_type: tensorflow_rust_sys::TF_DataType,
    // never assigned a non-null value anywhere in this file
    data: *mut libc::c_void,
    // true while `tensor` holds no data
    is_empty: bool,
    // guards against use after invalidation
    is_valid: bool
}

impl Tensor {
    // Looks up `op_name` in the model's graph and captures its shape and dtype.
    // Panics when the op is missing, a TF call fails, or the tensor is not rank 1.
    pub fn new(model: &Model, op_name: &CString) -> Tensor {
        friday_logging::info!("Loading tensor {}",
            op_name.to_str().expect("Failed to convert op_name to string"));

        let op = tensorflow_rust_sys::TF_Output {
            oper: unsafe {
                tensorflow_rust_sys::TF_GraphOperationByName(model.graph, op_name.as_ptr())
            },
            index: 0
        };

        if op.oper.is_null() {
            panic!("(tensorflow-models): No operation named {}",
                op_name.to_str().expect("Failed to convert op_name to string"));
        }

        // Get number of dimensions
        let n_dims = unsafe {
            tensorflow_rust_sys::TF_GraphGetTensorNumDims(model.graph, op, model.status)
        };

        if !is_status_ok(model.status) {
            panic!("(tensorflow-models): Failed to get dims of op {}",
                op_name.to_str().expect("Failed to convert op_name to string"))
        }

        let data_type = unsafe { tensorflow_rust_sys::TF_OperationOutputType(op) };

        if n_dims != 1 {
            panic!("(tensorflow-models): Only vector tensors are currently supported");
        }

        let mut dims: Vec<i64> = Vec::new();
        // NOTE(review): this malloc'd scratch buffer is never freed — it leaks once
        // per constructed Tensor.
        let c_dims = unsafe {libc::malloc(std::mem::size_of::<i64>() * n_dims as usize) as *mut i64};
        unsafe {
            tensorflow_rust_sys::TF_GraphGetTensorShape(model.graph, op, c_dims, n_dims, model.status)
        };

        if ! is_status_ok(model.status) {
            panic!("(tensorflow-models): Failed to get shape of tensor for op {}",
                op_name.to_str().expect("Failed to convert op_name to string"))
        }

        // copy the C shape buffer into an owned Vec
        for i in 0..n_dims {
            dims.push(unsafe { *c_dims.add(i as usize) });
        }

        friday_logging::info!("Successfully loaded Tensor");
        return Tensor{
            tensor: std::ptr::null::<tensorflow_rust_sys::TF_Tensor>() as *mut tensorflow_rust_sys::TF_Tensor,
            op,
            dims,
            data_type,
            data: std::ptr::null::<libc::c_void>() as *mut libc::c_void,
            is_empty: true,
            is_valid: true
        };
    }

    // Copies `data` into a freshly allocated TF tensor, replacing any previous
    // contents. Ownership of the copy passes to TensorFlow via a `free`-based
    // deallocator.
    pub fn set_data(&mut self, data: &Vec<i16>) {
        if !self.is_empty {
            // drop the previous tensor before allocating a new one
            unsafe {tensorflow_rust_sys::TF_DeleteTensor(self.tensor) };
            self.is_empty = true
        }
        if !self.is_valid {
            panic!("(tensorflow-models): Trying to set data on an invalid tensor");
        }

        let data_sz = std::mem::size_of::<i16>() * data.len();
        let c_data = unsafe {libc::malloc(data_sz)};
        unsafe { libc::memcpy(c_data, data.as_ptr() as *const libc::c_void, data_sz) };

        // Called by TensorFlow when it is done with the buffer allocated above.
        unsafe extern "C" fn deallocator(
            data: *mut libc::c_void,
            _: tensorflow_rust_sys::size_t,
            _: *mut libc::c_void,
        ) {
            libc::free(data);
        }

        self.tensor = unsafe {
            tensorflow_rust_sys::TF_NewTensor(
                /*TF_DataType=*/self.data_type,
                /*dims=*/self.dims.as_ptr(),
                /*num_dims=*/self.dims.len() as i32,
                /*data=*/c_data,
                /*length=*/data_sz as UInt,
                /*deallocator=*/Some(deallocator),
                /*deallocator_args=*/std::ptr::null::<libc::c_void>() as *mut libc::c_void)
        };

        if self.tensor.is_null() {
            panic!("(tensorflow-models): Failed to allocate new tensor when setting data");
        }

        self.is_empty = false;
    }

    // Reads the tensor contents back as a Vec<T>, with the element count taken
    // from dims[0].
    // NOTE(review): `T` is chosen by the caller — there is no check that `T`
    // matches the tensor's dtype or that byte sizes agree; confirm callers
    // always pass the matching element type.
    pub fn get_data<T>(&mut self) -> Vec<T> where T: Clone {
        if self.is_empty {
            panic!("(tensorflow-models): Trying to get data from empty tensor");
        }
        if !self.is_valid {
            panic!("(tensorflow-models): Trying to get data from an invalid tensor");
        }

        let data = unsafe { tensorflow_rust_sys::TF_TensorData(self.tensor) };
        if data.is_null() {
            panic!("(tensorflow-models): Tensor data is empty");
        }

        let data_size = self.dims.first().expect("(tensorflow-models): Failed to unwrap dims").clone() as usize;
        return unsafe { std::slice::from_raw_parts(data as *const T, data_size).to_vec()};
    }

    // Releases the underlying TF tensor (if any) and marks this wrapper empty.
    fn free_tensor(&mut self) {
        if !self.is_empty {
            unsafe {tensorflow_rust_sys::TF_DeleteTensor(self.tensor) };
        }
        self.is_empty = true;
        self.data = std::ptr::null::<libc::c_void>() as *mut libc::c_void;
    }
}

// A loaded SavedModel: graph + session + a reusable status object shared by
// every TF call made through this wrapper.
pub struct Model {
    graph: *mut tensorflow_rust_sys::TF_Graph,
    session: *mut tensorflow_rust_sys::TF_Session,
    status: *mut tensorflow_rust_sys::TF_Status
}

impl Model {
    // Tears down the graph, the session and — only when session deletion
    // succeeded — the status object. Returns false when deleting the session
    // failed (the status object is then left allocated).
    pub fn free_tensorflow_resources(&mut self) -> bool {
        unsafe {
            tensorflow_rust_sys::TF_DeleteGraph(self.graph);
            tensorflow_rust_sys::TF_DeleteSession(self.session, self.status);
            if is_status_ok(self.status) {
                tensorflow_rust_sys::TF_DeleteStatus(self.status);
                return true;
            }
            return false;
        };
    }

    // Loads a SavedModel (tag "serve") from `export_dir`.
    // Returns None when any TF call reports an error.
    pub fn new(export_dir: &Path) -> Option<Model> {
        friday_logging::info!("(tensorflow-models): Loading {}",
            export_dir.to_str().expect("Failed to convert path to String"));

        let status = unsafe { tensorflow_rust_sys::TF_NewStatus() };
        let graph = unsafe { tensorflow_rust_sys::TF_NewGraph() };
        let session_options = unsafe { tensorflow_rust_sys::TF_NewSessionOptions() };

        // This is a encoded proto that tells tensorflow to only use one thread in inference (to
        // save resources)
        let magic_string = CString::new("\x10\x01(\x01").expect("Failed to create magic str");
        let magic_void : *mut libc::c_void = magic_string.as_ptr() as *mut libc::c_void;

        unsafe {tensorflow_rust_sys::TF_SetConfig(
            /*options=*/session_options,
            /*proto=*/magic_void,
            /*proto_len=*/4,
            /*status=*/status)
        };

        if ! is_status_ok(status) {
            return None;
        }

        let null_ptr: *mut tensorflow_rust_sys::TF_Buffer =
            std::ptr::null::<tensorflow_rust_sys::TF_Buffer>() as *mut tensorflow_rust_sys::TF_Buffer;

        // shadow the &Path argument with its C-string form
        let export_dir: CString = CString::new(export_dir.to_str()
            .expect("Failed to convert path to String"))
            .expect("Failed to create CString");

        let tags = CString::new("serve").expect("Failed to create CString");

        let session = unsafe {
            tensorflow_rust_sys::TF_LoadSessionFromSavedModel(
                /*session_options=*/session_options,
                /*run_options=*/null_ptr,
                /*export_dir=*/export_dir.as_ptr(),
                /*tags=*/&tags.as_ptr(),
                /*ntags=*/1,
                /*graph=*/graph,
                null_ptr,
                /*status=*/status)
        };

        // Can delete sess opts here already since we wont use em
        unsafe { tensorflow_rust_sys::TF_DeleteSessionOptions(session_options) };

        if is_status_ok(status) {
            friday_logging::info!("(tensorflow-models): Successfully loaded model");
            return Some(Model {
                graph,
                session,
                status
            });
        }

        return None
    }

    // Runs the session with exactly one input and one output tensor.
    // On success the input tensor is freed and `output` takes ownership of the
    // tensor produced by TF_SessionRun; on failure this panics.
    pub fn run(&mut self, input: &mut Tensor, output: &mut Tensor) {
        if input.is_empty {
            panic!("(tensorflow-models): Trying to run with empty input tensor");
        }
        if !output.is_valid {
            panic!("(tensorflow-models): Trying to run with invalid output tensor");
        }

        let null_ptr: *mut tensorflow_rust_sys::TF_Buffer =
            std::ptr::null::<tensorflow_rust_sys::TF_Buffer>() as *mut tensorflow_rust_sys::TF_Buffer;

        // Clear output
        output.free_tensor();

        unsafe {
            tensorflow_rust_sys::TF_SessionRun(
                /*session=*/self.session,
                /*run_opts=*/null_ptr,
                /*input_ops=*/&input.op,
                /*input_values=*/&input.tensor,
                /*num_inputs=*/1,
                /*output_ops=*/&output.op,
                /*output_values=*/&mut output.tensor,
                /*num_outputs=*/1,
                /*target_operations=*/&(null_ptr as *const tensorflow_rust_sys::TF_Operation),
                /*num_targets=*/0,
                /*run_metadata=*/null_ptr,
                /*status=*/self.status)
        };

        if is_status_ok(self.status) {
            output.is_empty = false;
            input.free_tensor();
        } else {
            panic!("(tensorflow-models): Failed to execute tensorflow session");
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // End-to-end smoke test; needs the SavedModel fixture under test-resources/.
    #[test]
    fn test_load_model_and_tensors() {
        let model_path = Path::new("test-resources/1603634879");
        let mut model = Model::new(&model_path).expect("Failed to create model");

        let input_op_name = CString::new("input").expect("Failed to create input name");
        let output_op_name = CString::new("output").expect("Failed to create output name");

        let mut input_tensor = Tensor::new(&model, &input_op_name);
        let mut output_tensor = Tensor::new(&model, &output_op_name);

        let v: Vec<i16> = vec![1; 16000];
        friday_logging::info!("Tensor size {}", v.len());
        friday_logging::info!("datatype {}", input_tensor.data_type);
        input_tensor.set_data(&v);
        let u: Vec<i16> = input_tensor.get_data();
        assert_eq!(v, u);

        model.run(&mut input_tensor, &mut output_tensor);
        friday_logging::info!("output datatype: {}", output_tensor.data_type);
        friday_logging::info!("{:?}", output_tensor.get_data::<f32>());
        friday_logging::info!("Loading went fine!");
        model.free_tensorflow_resources();
        friday_logging::info!("Freeing went fine Woo!");
    }
}
// svd2rust-style accessors for register NVIC_IPR1: four 2-bit priority fields
// (IP_4..IP_7) sitting in the top two bits of each byte lane — bit offsets
// 6, 14, 22 and 30 respectively, each masked with 0x03.
#[doc = "Reader of register NVIC_IPR1"]
pub type R = crate::R<u32, super::NVIC_IPR1>;
#[doc = "Writer for register NVIC_IPR1"]
pub type W = crate::W<u32, super::NVIC_IPR1>;
#[doc = "Register NVIC_IPR1 `reset()`'s with value 0"]
impl crate::ResetValue for super::NVIC_IPR1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `IP_7`"]
pub type IP_7_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `IP_7`"]
pub struct IP_7_W<'a> {
    w: &'a mut W,
}
impl<'a> IP_7_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bits [31:30]
        self.w.bits = (self.w.bits & !(0x03 << 30)) | (((value as u32) & 0x03) << 30);
        self.w
    }
}
#[doc = "Reader of field `IP_6`"]
pub type IP_6_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `IP_6`"]
pub struct IP_6_W<'a> {
    w: &'a mut W,
}
impl<'a> IP_6_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bits [23:22]
        self.w.bits = (self.w.bits & !(0x03 << 22)) | (((value as u32) & 0x03) << 22);
        self.w
    }
}
#[doc = "Reader of field `IP_5`"]
pub type IP_5_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `IP_5`"]
pub struct IP_5_W<'a> {
    w: &'a mut W,
}
impl<'a> IP_5_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bits [15:14]
        self.w.bits = (self.w.bits & !(0x03 << 14)) | (((value as u32) & 0x03) << 14);
        self.w
    }
}
#[doc = "Reader of field `IP_4`"]
pub type IP_4_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `IP_4`"]
pub struct IP_4_W<'a> {
    w: &'a mut W,
}
impl<'a> IP_4_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bits [7:6]
        self.w.bits = (self.w.bits & !(0x03 << 6)) | (((value as u32) & 0x03) << 6);
        self.w
    }
}
impl R {
    #[doc = "Bits 30:31 - Priority of interrupt 7"]
    #[inline(always)]
    pub fn ip_7(&self) -> IP_7_R {
        IP_7_R::new(((self.bits >> 30) & 0x03) as u8)
    }
    #[doc = "Bits 22:23 - Priority of interrupt 6"]
    #[inline(always)]
    pub fn ip_6(&self) -> IP_6_R {
        IP_6_R::new(((self.bits >> 22) & 0x03) as u8)
    }
    #[doc = "Bits 14:15 - Priority of interrupt 5"]
    #[inline(always)]
    pub fn ip_5(&self) -> IP_5_R {
        IP_5_R::new(((self.bits >> 14) & 0x03) as u8)
    }
    #[doc = "Bits 6:7 - Priority of interrupt 4"]
    #[inline(always)]
    pub fn ip_4(&self) -> IP_4_R {
        IP_4_R::new(((self.bits >> 6) & 0x03) as u8)
    }
}
impl W {
    #[doc = "Bits 30:31 - Priority of interrupt 7"]
    #[inline(always)]
    pub fn ip_7(&mut self) -> IP_7_W {
        IP_7_W { w: self }
    }
    #[doc = "Bits 22:23 - Priority of interrupt 6"]
    #[inline(always)]
    pub fn ip_6(&mut self) -> IP_6_W {
        IP_6_W { w: self }
    }
    #[doc = "Bits 14:15 - Priority of interrupt 5"]
    #[inline(always)]
    pub fn ip_5(&mut self) -> IP_5_W {
        IP_5_W { w: self }
    }
    #[doc = "Bits 6:7 - Priority of interrupt 4"]
    #[inline(always)]
    pub fn ip_4(&mut self) -> IP_4_W {
        IP_4_W { w: self }
    }
}
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};

// Machine-generated operation-level error rollup: one transparent variant per
// API operation exposed by this client module, each wrapping that operation's
// own error type via `#[from]`.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    Incidents_List(#[from] incidents::list::Error),
    #[error(transparent)]
    Incidents_Get(#[from] incidents::get::Error),
    #[error(transparent)]
    Incidents_CreateOrUpdate(#[from] incidents::create_or_update::Error),
    #[error(transparent)]
    Incidents_Delete(#[from] incidents::delete::Error),
    #[error(transparent)]
    Incidents_ListOfAlerts(#[from] incidents::list_of_alerts::Error),
    #[error(transparent)]
    Incidents_ListOfBookmarks(#[from] incidents::list_of_bookmarks::Error),
    #[error(transparent)]
    IncidentComments_ListByIncident(#[from] incident_comments::list_by_incident::Error),
    #[error(transparent)]
    IncidentComments_Get(#[from] incident_comments::get::Error),
    #[error(transparent)]
    IncidentComments_CreateComment(#[from] incident_comments::create_comment::Error),
    #[error(transparent)]
    IncidentComments_DeleteComment(#[from] incident_comments::delete_comment::Error),
    #[error(transparent)]
    Incidents_ListOfEntities(#[from] incidents::list_of_entities::Error),
    #[error(transparent)]
    IncidentRelations_List(#[from] incident_relations::list::Error),
    #[error(transparent)]
    IncidentRelations_GetRelation(#[from] incident_relations::get_relation::Error),
    #[error(transparent)]
    IncidentRelations_CreateOrUpdateRelation(#[from] incident_relations::create_or_update_relation::Error),
    #[error(transparent)]
    IncidentRelations_DeleteRelation(#[from] incident_relations::delete_relation::Error),
    #[error(transparent)]
    Operations_List(#[from] operations::list::Error),
    #[error(transparent)]
    Watchlists_List(#[from] watchlists::list::Error),
    #[error(transparent)]
    Watchlists_Get(#[from] watchlists::get::Error),
    #[error(transparent)]
    Watchlists_CreateOrUpdate(#[from] watchlists::create_or_update::Error),
    #[error(transparent)]
    Watchlists_Delete(#[from] watchlists::delete::Error),
    #[error(transparent)]
    WatchlistItems_List(#[from] watchlist_items::list::Error),
    #[error(transparent)]
    WatchlistItems_Get(#[from] watchlist_items::get::Error),
    #[error(transparent)]
    WatchlistItems_CreateOrUpdate(#[from] watchlist_items::create_or_update::Error),
    #[error(transparent)]
    WatchlistItems_Delete(#[from] watchlist_items::delete::Error),
    #[error(transparent)]
    ThreatIntelligenceIndicator_CreateIndicator(#[from] threat_intelligence_indicator::create_indicator::Error),
    #[error(transparent)]
    ThreatIntelligenceIndicators_List(#[from] threat_intelligence_indicators::list::Error),
    #[error(transparent)]
    ThreatIntelligenceIndicator_Get(#[from] threat_intelligence_indicator::get::Error),
    #[error(transparent)]
    ThreatIntelligenceIndicator_Create(#[from] threat_intelligence_indicator::create::Error),
    #[error(transparent)]
    ThreatIntelligenceIndicator_Delete(#[from] threat_intelligence_indicator::delete::Error),
    #[error(transparent)]
    ThreatIntelligenceIndicator_QueryIndicators(#[from] threat_intelligence_indicator::query_indicators::Error),
    #[error(transparent)]
    ThreatIntelligenceIndicatorMetrics_List(#[from] threat_intelligence_indicator_metrics::list::Error),
    #[error(transparent)]
    ThreatIntelligenceIndicator_AppendTags(#[from] threat_intelligence_indicator::append_tags::Error),
    #[error(transparent)]
    ThreatIntelligenceIndicator_ReplaceTags(#[from] threat_intelligence_indicator::replace_tags::Error),
}
pub mod incidents {
    use super::{models, API_VERSION};
    // GET .../Microsoft.SecurityInsights/incidents — lists incidents in a workspace,
    // with optional OData $filter/$orderby/$top/$skipToken paging parameters.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        workspace_name: &str,
        filter: Option<&str>,
        orderby: Option<&str>,
        top: Option<i32>,
        skip_token: Option<&str>,
    ) -> std::result::Result<models::IncidentList, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = & format !
("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/incidents" , operation_config . base_path () , subscription_id , resource_group_name , workspace_name) ; let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip_token) = skip_token { url.query_pairs_mut().append_pair("$skipToken", skip_token); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::IncidentList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list { use 
super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, incident_id: &str, ) -> std::result::Result<models::Incident, get::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}" , operation_config . 
base_path () , subscription_id , resource_group_name , workspace_name , incident_id) ; let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::Incident = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), 
#[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, incident_id: &str, incident: &models::Incident, ) -> std::result::Result<create_or_update::Response, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}" , operation_config . base_path () , subscription_id , resource_group_name , workspace_name , incident_id) ; let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(incident).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::Incident = serde_json::from_slice(rsp_body) .map_err(|source| 
create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: models::Incident = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Created201(rsp_value)) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod create_or_update { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::Incident), Created201(models::Incident), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, incident_id: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}" , operation_config . 
base_path () , subscription_id , resource_group_name , workspace_name , incident_id) ; let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod delete { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed 
to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_of_alerts( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, incident_id: &str, ) -> std::result::Result<models::IncidentAlertList, list_of_alerts::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/alerts" , operation_config . base_path () , subscription_id , resource_group_name , workspace_name , incident_id) ; let mut url = url::Url::parse(url_str).map_err(list_of_alerts::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_of_alerts::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_of_alerts::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_of_alerts::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::IncidentAlertList = serde_json::from_slice(rsp_body).map_err(|source| list_of_alerts::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { 
let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| list_of_alerts::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_of_alerts::Error::DefaultResponse { status_code, value: rsp_value, }) } } } /* Generated Error type for `list_of_alerts`. */ pub mod list_of_alerts { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* POSTs (empty body, Content-Length: 0) to .../incidents/{incident_id}/bookmarks and deserializes the 200 response as IncidentBookmarkList. */ pub async fn list_of_bookmarks( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, incident_id: &str, ) -> std::result::Result<models::IncidentBookmarkList, list_of_bookmarks::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/bookmarks" , operation_config . 
base_path () , subscription_id , resource_group_name , workspace_name , incident_id) ; let mut url = url::Url::parse(url_str).map_err(list_of_bookmarks::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_of_bookmarks::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_of_bookmarks::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_of_bookmarks::Error::ExecuteRequestError)?; /* 200 => IncidentBookmarkList; anything else => CloudError wrapped in DefaultResponse. */ match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::IncidentBookmarkList = serde_json::from_slice(rsp_body) .map_err(|source| list_of_bookmarks::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body) .map_err(|source| list_of_bookmarks::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_of_bookmarks::Error::DefaultResponse { status_code, value: rsp_value, }) } } } /* Generated Error type for `list_of_bookmarks`. */ pub mod list_of_bookmarks { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] 
BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* POSTs (empty body, Content-Length: 0) to .../incidents/{incident_id}/entities and deserializes the 200 response as IncidentEntitiesResponse. */ pub async fn list_of_entities( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, incident_id: &str, ) -> std::result::Result<models::IncidentEntitiesResponse, list_of_entities::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/entities" , operation_config . base_path () , subscription_id , resource_group_name , workspace_name , incident_id) ; let mut url = url::Url::parse(url_str).map_err(list_of_entities::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_of_entities::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_of_entities::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_of_entities::Error::ExecuteRequestError)?; match rsp.status() { 
http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::IncidentEntitiesResponse = serde_json::from_slice(rsp_body) .map_err(|source| list_of_entities::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body) .map_err(|source| list_of_entities::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_of_entities::Error::DefaultResponse { status_code, value: rsp_value, }) } } } /* Generated Error type for `list_of_entities`. */ pub mod list_of_entities { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } /* Operation group for incident comments (final `}` above closes the enclosing operation module opened before this chunk). */ pub mod incident_comments { use super::{models, API_VERSION}; /* GETs .../incidents/{incident_id}/comments with optional OData $filter/$orderby/$top/$skipToken query parameters; 200 => IncidentCommentList. */ pub async fn list_by_incident( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, incident_id: &str, filter: Option<&str>, orderby: Option<&str>, top: Option<i32>, skip_token: Option<&str>, ) -> std::result::Result<models::IncidentCommentList, list_by_incident::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/comments" , operation_config . 
base_path () , subscription_id , resource_group_name , workspace_name , incident_id) ; let mut url = url::Url::parse(url_str).map_err(list_by_incident::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_incident::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); /* Optional OData query parameters are appended only when supplied. */ if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip_token) = skip_token { url.query_pairs_mut().append_pair("$skipToken", skip_token); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_by_incident::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_incident::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::IncidentCommentList = serde_json::from_slice(rsp_body) .map_err(|source| list_by_incident::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body) .map_err(|source| list_by_incident::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_by_incident::Error::DefaultResponse { status_code, value: rsp_value, }) } } } /* Generated Error type for `list_by_incident`. */ pub mod list_by_incident { use super::{models, API_VERSION}; 
#[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* GETs a single incident comment at .../incidents/{incident_id}/comments/{incident_comment_id}; 200 => IncidentComment. */ pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, incident_id: &str, incident_comment_id: &str, ) -> std::result::Result<models::IncidentComment, get::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/comments/{}" , operation_config . 
base_path () , subscription_id , resource_group_name , workspace_name , incident_id , incident_comment_id) ; let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::IncidentComment = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } } /* Generated Error type for `get`. */ pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] 
SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* PUTs `incident_comment` as JSON to .../incidents/{incident_id}/comments/{incident_comment_id}; 200 and 201 both return the resulting IncidentComment (distinct Response variants). */ pub async fn create_comment( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, incident_id: &str, incident_comment_id: &str, incident_comment: &models::IncidentComment, ) -> std::result::Result<create_comment::Response, create_comment::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/comments/{}" , operation_config . base_path () , subscription_id , resource_group_name , workspace_name , incident_id , incident_comment_id) ; let mut url = url::Url::parse(url_str).map_err(create_comment::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_comment::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(incident_comment).map_err(create_comment::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_comment::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_comment::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let 
rsp_value: models::IncidentComment = serde_json::from_slice(rsp_body).map_err(|source| create_comment::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_comment::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: models::IncidentComment = serde_json::from_slice(rsp_body).map_err(|source| create_comment::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_comment::Response::Created201(rsp_value)) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| create_comment::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_comment::Error::DefaultResponse { status_code, value: rsp_value, }) } } } /* Generated Response/Error types for `create_comment`. */ pub mod create_comment { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::IncidentComment), Created201(models::IncidentComment), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* DELETEs .../incidents/{incident_id}/comments/{incident_comment_id}; 200/204 map to unit-like Response variants. */ pub async fn delete_comment( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, incident_id: &str, incident_comment_id: &str, ) -> std::result::Result<delete_comment::Response, delete_comment::Error> { let http_client = operation_config.http_client(); let url_str = & format ! 
("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/comments/{}" , operation_config . base_path () , subscription_id , resource_group_name , workspace_name , incident_id , incident_comment_id) ; let mut url = url::Url::parse(url_str).map_err(delete_comment::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete_comment::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete_comment::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(delete_comment::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete_comment::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete_comment::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| delete_comment::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete_comment::Error::DefaultResponse { status_code, value: rsp_value, }) } } } /* Generated Response/Error types for `delete_comment`. */ pub mod delete_comment { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] 
ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } /* Operation group for incident relations (final `}` above closes `incident_comments`). */ pub mod incident_relations { use super::{models, API_VERSION}; /* GETs .../incidents/{incident_id}/relations with optional OData $filter/$orderby/$top/$skipToken query parameters; 200 => RelationList. */ pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, incident_id: &str, filter: Option<&str>, orderby: Option<&str>, top: Option<i32>, skip_token: Option<&str>, ) -> std::result::Result<models::RelationList, list::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/relations" , operation_config . 
base_path () , subscription_id , resource_group_name , workspace_name , incident_id) ; let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); /* Optional OData query parameters are appended only when supplied. */ if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip_token) = skip_token { url.query_pairs_mut().append_pair("$skipToken", skip_token); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::RelationList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } } /* Generated Error type for `list`. */ pub mod list { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { 
status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* GETs a single relation at .../incidents/{incident_id}/relations/{relation_name}; 200 => Relation. */ pub async fn get_relation( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, incident_id: &str, relation_name: &str, ) -> std::result::Result<models::Relation, get_relation::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/relations/{}" , operation_config . 
base_path () , subscription_id , resource_group_name , workspace_name , incident_id , relation_name) ; let mut url = url::Url::parse(url_str).map_err(get_relation::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_relation::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_relation::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_relation::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::Relation = serde_json::from_slice(rsp_body).map_err(|source| get_relation::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| get_relation::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_relation::Error::DefaultResponse { status_code, value: rsp_value, }) } } } /* Generated Error type for `get_relation`. */ pub mod get_relation { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), 
#[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* PUTs `relation` as JSON to .../incidents/{incident_id}/relations/{relation_name}; 200 and 201 both return the resulting Relation (distinct Response variants). */ pub async fn create_or_update_relation( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, incident_id: &str, relation_name: &str, relation: &models::Relation, ) -> std::result::Result<create_or_update_relation::Response, create_or_update_relation::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/relations/{}" , operation_config . base_path () , subscription_id , resource_group_name , workspace_name , incident_id , relation_name) ; let mut url = url::Url::parse(url_str).map_err(create_or_update_relation::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update_relation::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(relation).map_err(create_or_update_relation::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(create_or_update_relation::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await 
.map_err(create_or_update_relation::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::Relation = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update_relation::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update_relation::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: models::Relation = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update_relation::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update_relation::Response::Created201(rsp_value)) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update_relation::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update_relation::Error::DefaultResponse { status_code, value: rsp_value, }) } } } /* Generated Response/Error types for `create_or_update_relation`. */ pub mod create_or_update_relation { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::Relation), Created201(models::Relation), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* DELETEs .../incidents/{incident_id}/relations/{relation_name}; 200/204 map to unit-like Response variants. */ pub async fn delete_relation( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, incident_id: &str, relation_name: 
&str, ) -> std::result::Result<delete_relation::Response, delete_relation::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/relations/{}" , operation_config . base_path () , subscription_id , resource_group_name , workspace_name , incident_id , relation_name) ; let mut url = url::Url::parse(url_str).map_err(delete_relation::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete_relation::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete_relation::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(delete_relation::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete_relation::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete_relation::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body) .map_err(|source| delete_relation::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete_relation::Error::DefaultResponse { status_code, value: rsp_value, }) } } } /* Generated Response/Error types for `delete_relation`. */ pub mod delete_relation { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status 
code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } /* Operation group for the provider's operations listing (final `}` above closes `incident_relations`). */ pub mod operations { use super::{models, API_VERSION}; /* GETs the tenant-wide /providers/Microsoft.SecurityInsights/operations endpoint; 200 => OperationsList. */ pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::OperationsList, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/providers/Microsoft.SecurityInsights/operations", operation_config.base_path(),); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::OperationsList = serde_json::from_slice(rsp_body).map_err(|source| 
list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
    // Error type for operations::list (standard generated error set).
    pub mod list { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
// Operations on Sentinel watchlists within a Log Analytics workspace.
pub mod watchlists { use super::{models, API_VERSION};
    // GET all watchlists in the workspace; `skip_token` (if Some) is forwarded
    // as the `$skipToken` query parameter for continuation paging.
    pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, skip_token: Option<&str>, ) -> std::result::Result<models::WatchlistList, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name ); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(skip_token) = skip_token { url.query_pairs_mut().append_pair("$skipToken", skip_token); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::WatchlistList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
    // Error type for watchlists::list (standard generated error set).
    pub mod list { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
    // GET a single watchlist by alias (continues on the next chunk line).
    pub async fn get( operation_config: &crate::OperationConfig,
subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, watchlist_alias: &str, ) -> std::result::Result<models::Watchlist, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, watchlist_alias ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::Watchlist = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
    // Error type for watchlists::get (standard generated error set).
    pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
    // PUT to create or replace a watchlist; body is the JSON-serialized
    // `watchlist` model, content-type application/json.
    pub async fn create_or_update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, watchlist_alias: &str, watchlist: &models::Watchlist, ) -> std::result::Result<create_or_update::Response, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, watchlist_alias ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body =
azure_core::to_json(watchlist).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?;
    // 200 = updated existing, 201 = created new; both carry a Watchlist body.
    match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::Watchlist = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: models::Watchlist = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Created201(rsp_value)) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
    // Response/error types for watchlists::create_or_update.
    pub mod create_or_update { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::Watchlist), Created201(models::Watchlist), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
    // DELETE a watchlist by alias; 200 or 204 are both success (204 = already gone).
    pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, watchlist_alias: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, watchlist_alias ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
    // Response/error types for watchlists::delete.
    pub mod delete { use super::{models, API_VERSION}; #[derive(Debug)] pub
enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
// Operations on the items stored inside a given watchlist.
pub mod watchlist_items { use super::{models, API_VERSION};
    // GET all items of a watchlist; `skip_token` (if Some) becomes `$skipToken`
    // for continuation paging.
    pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, watchlist_alias: &str, skip_token: Option<&str>, ) -> std::result::Result<models::WatchlistItemList, list::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}/watchlistItems" , operation_config . base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name , watchlist_alias) ; let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(skip_token) = skip_token { url.query_pairs_mut().append_pair("$skipToken", skip_token); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::WatchlistItemList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
    // Error type for watchlist_items::list (standard generated error set).
    pub mod list { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
    // GET a single watchlist item by id.
    pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, watchlist_alias: &str, watchlist_item_id: &str, ) -> std::result::Result<models::WatchlistItem, get::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}/watchlistItems/{}" , operation_config . base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name , watchlist_alias , watchlist_item_id) ; let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::WatchlistItem =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
    // Error type for watchlist_items::get (standard generated error set).
    pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
    // PUT to create or replace a watchlist item; JSON body from `watchlist_item`.
    pub async fn create_or_update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, watchlist_alias: &str, watchlist_item_id: &str, watchlist_item: &models::WatchlistItem, ) -> std::result::Result<create_or_update::Response, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}/watchlistItems/{}" , operation_config . base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name , watchlist_alias , watchlist_item_id) ; let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(watchlist_item).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?;
    // 200 = updated, 201 = created; both return the stored WatchlistItem.
    match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::WatchlistItem = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: models::WatchlistItem = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Created201(rsp_value)) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
    // Response/error types for watchlist_items::create_or_update.
    pub mod create_or_update { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::WatchlistItem), Created201(models::WatchlistItem), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
    // DELETE a watchlist item by id; 200 or 204 are both success.
    pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, watchlist_alias: &str, watchlist_item_id: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}/watchlistItems/{}" , operation_config .
base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name , watchlist_alias , watchlist_item_id) ; let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
    // Response/error types for watchlist_items::delete.
    pub mod delete { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
// Operations on threat-intelligence indicators (the "main" TI collection).
pub mod threat_intelligence_indicator { use super::{models, API_VERSION};
    // POST .../threatIntelligence/main/createIndicator — server assigns the
    // indicator name; body is the JSON-serialized request model.
    pub async fn create_indicator( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, threat_intelligence_properties: &models::ThreatIntelligenceIndicatorModelForRequestBody, ) -> std::result::Result<create_indicator::Response, create_indicator::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/createIndicator" , operation_config . base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name) ; let mut url = url::Url::parse(url_str).map_err(create_indicator::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_indicator::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(threat_intelligence_properties).map_err(create_indicator::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_indicator::Error::BuildRequestError)?; let rsp = http_client
.execute_request(req) .await .map_err(create_indicator::Error::ExecuteRequestError)?;
    // 200 and 201 both return the created/updated indicator model.
    match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ThreatIntelligenceInformation = serde_json::from_slice(rsp_body) .map_err(|source| create_indicator::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_indicator::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: models::ThreatIntelligenceInformation = serde_json::from_slice(rsp_body) .map_err(|source| create_indicator::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_indicator::Response::Created201(rsp_value)) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body) .map_err(|source| create_indicator::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_indicator::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
    // Response/error types for create_indicator.
    pub mod create_indicator { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::ThreatIntelligenceInformation), Created201(models::ThreatIntelligenceInformation), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
    // GET a threat-intelligence indicator by name.
    pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, name: &str, ) -> std::result::Result<models::ThreatIntelligenceInformation, get::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/indicators/{}" , operation_config . base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name , name) ; let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ThreatIntelligenceInformation = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
    // Error type for threat_intelligence_indicator::get.
    pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
    // PUT to create or replace a named threat-intelligence indicator.
    pub async fn create( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, name: &str, threat_intelligence_properties: &models::ThreatIntelligenceIndicatorModelForRequestBody, ) -> std::result::Result<create::Response, create::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/indicators/{}" , operation_config .
base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name , name) ; let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(threat_intelligence_properties).map_err(create::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
    // 200 = replaced existing indicator, 201 = created new one.
    match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ThreatIntelligenceInformation = serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: models::ThreatIntelligenceInformation = serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create::Response::Created201(rsp_value)) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?; Err(create::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
    // Response/error types for threat_intelligence_indicator::create.
    pub mod create { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::ThreatIntelligenceInformation), Created201(models::ThreatIntelligenceInformation), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
    // DELETE an indicator by name; 200 or 204 are both success.
    pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, name: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/indicators/{}" , operation_config . base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name , name) ; let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
    // Response/error types for threat_intelligence_indicator::delete
    // (enum continues past the end of this chunk).
    pub mod delete { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn query_indicators( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, threat_intelligence_filtering_criteria: &models::ThreatIntelligenceFilteringCriteria, ) -> std::result::Result<models::ThreatIntelligenceInformationList, query_indicators::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/queryIndicators" , operation_config . base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name) ; let mut url = url::Url::parse(url_str).map_err(query_indicators::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(query_indicators::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(threat_intelligence_filtering_criteria).map_err(query_indicators::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(query_indicators::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await 
.map_err(query_indicators::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ThreatIntelligenceInformationList = serde_json::from_slice(rsp_body) .map_err(|source| query_indicators::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body) .map_err(|source| query_indicators::Error::DeserializeError(source, rsp_body.clone()))?; Err(query_indicators::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod query_indicators { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn append_tags( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, name: &str, threat_intelligence_append_tags: &models::ThreatIntelligenceAppendTags, ) -> std::result::Result<(), append_tags::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/indicators/{}/appendTags" , operation_config . 
base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name , name) ; let mut url = url::Url::parse(url_str).map_err(append_tags::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(append_tags::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(threat_intelligence_append_tags).map_err(append_tags::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(append_tags::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(append_tags::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| append_tags::Error::DeserializeError(source, rsp_body.clone()))?; Err(append_tags::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod append_tags { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] 
SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn replace_tags( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, name: &str, threat_intelligence_replace_tags: &models::ThreatIntelligenceIndicatorModelForRequestBody, ) -> std::result::Result<models::ThreatIntelligenceInformation, replace_tags::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/indicators/{}/replaceTags" , operation_config . base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name , name) ; let mut url = url::Url::parse(url_str).map_err(replace_tags::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(replace_tags::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(threat_intelligence_replace_tags).map_err(replace_tags::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(replace_tags::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(replace_tags::Error::ExecuteRequestError)?; 
match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ThreatIntelligenceInformation = serde_json::from_slice(rsp_body).map_err(|source| replace_tags::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| replace_tags::Error::DeserializeError(source, rsp_body.clone()))?; Err(replace_tags::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod replace_tags { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod threat_intelligence_indicators { use super::{models, API_VERSION}; pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, filter: Option<&str>, top: Option<i32>, skip_token: Option<&str>, orderby: Option<&str>, ) -> std::result::Result<models::ThreatIntelligenceInformationList, list::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/indicators" , operation_config . 
base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name) ; let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip_token) = skip_token { url.query_pairs_mut().append_pair("$skipToken", skip_token); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ThreatIntelligenceInformationList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP 
status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod threat_intelligence_indicator_metrics { use super::{models, API_VERSION}; pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, ) -> std::result::Result<models::ThreatIntelligenceMetricsList, list::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/metrics" , operation_config . 
base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name) ; let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ThreatIntelligenceMetricsList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::CloudError = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request 
body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
use std::io;

/// Returns `x + 1`.
fn inc(x: i32) -> i32 {
    x + 1
}

/// Returns `x - 1`.
fn dec(x: i32) -> i32 {
    x - 1
}

/// Interactive counter: "+" increments, "-" decrements, anything else quits.
fn main() {
    // `x` must be mutable so the updated value survives across iterations.
    // The original shadowed it with `let x = inc(x)` inside the branch, which
    // discarded the new value as soon as the branch ended (the old TODO).
    let mut x = 5;
    const MAX_POINT: u32 = 100000;
    let mut op = String::new();
    println!("The value of x is: {}", x); // translated from Japanese: "the value of x is {}"
    println!("The value of x is: {}", MAX_POINT);
    loop {
        // read_line APPENDS to the buffer, so it must be cleared each pass —
        // otherwise the comparisons below could never match after the first read.
        op.clear();
        io::stdin().read_line(&mut op).expect("error");
        // read_line keeps the trailing newline; trim before comparing.
        match op.trim() {
            "+" => {
                x = inc(x);
                println!("value of x is: {}", x);
            }
            "-" => {
                x = dec(x);
                println!("value of x is: {}", x);
            }
            _ => break,
        }
    }
}
use core::*;
use graphics::*;
use math::*;
use num::*;
use std::ops::*;
use std::sync::*;
use std::iter::*;
use std::cmp::*;
use std::hash::*;
use std::marker::*;

/// Zero-sized ECS system over `Transform<T>` components; `PhantomData` carries
/// the component type parameter without storing any runtime data.
pub struct TransformSystem<T>(PhantomData<T>);

impl<T: 'static> System for TransformSystem<T> {
    // Enable/disable lifecycle hooks are not implemented for this system yet.
    #[inline]
    fn disable(&mut self) {
        unimplemented!()
    }

    #[inline]
    fn enable(&mut self) {
        unimplemented!()
    }

    /// Constructs the (stateless) system.
    #[inline]
    fn new() -> TransformSystem<T> {
        TransformSystem(PhantomData)
    }

    /// Updates the transform hierarchy (removes parents if they point to the same entities or create cycles) and
    /// recalculates dirty model and world matrices.
    ///
    /// NOTE(review): `delta_time` is currently unused, and the per-entity loop
    /// body is an empty placeholder (see the TODO) — only the dirty list is
    /// cleared. Confirm before relying on the doc comment's described behavior.
    fn update(&mut self, delta_time: f64, world: &mut World) {
        // get the component collection of transforms and acquire a write lock
        match world.get_components::<Transform<T>>() {
            Some(locked) => {
                let mut transforms = locked.write().unwrap();
                // get a mutable reference to all dirty transforms
                let dirty = transforms.get_dirty_mut();
                // update all dirty entities
                for entity in dirty.iter() {
                    // TODO::implement update of transforms (recalculate local/model matrices)
                }
                // Dirty flags are dropped even though no recalculation happened yet.
                dirty.clear();
            },
            None => {
                println!("Transform components of type Transform<T> are missing. No transform was updated..");
            }
        }
    }
}
use anyhow::Result;
use data_encoding::HEXLOWER;
use ring::digest::{Context, SHA256};
use std::fs;
use std::io::Read;
use std::path::Path;

#[cfg(test)]
mod tests;

/// Lowercase-hex SHA-256 digest of `data`.
pub fn digest(data: &[u8]) -> Result<String> {
    let mut ctx = Context::new(&SHA256);
    ctx.update(data);
    Ok(HEXLOWER.encode(ctx.finish().as_ref()))
}

/// Copies the file at `src` to `dst`, logging the operation at debug level.
pub fn copy(src: &Path, dst: &Path) -> Result<()> {
    log::debug!("Copy: {:?} to {:?}", src, dst);
    fs::copy(src, dst)?;
    Ok(())
}

/// Reads the whole file at `path` into a `String`.
pub fn read_string(path: &Path) -> Result<String> {
    let mut contents = String::new();
    fs::File::open(path)?.read_to_string(&mut contents)?;
    Ok(contents)
}

/// Creates `path` together with any missing parent directories.
pub fn create_dirs(path: &Path) -> Result<()> {
    fs::create_dir_all(path)?;
    Ok(())
}
// NOTE(review): svd2rust-generated register accessors — logic left untouched,
// comments only. Bit 4 is skipped (ports are A, B, C, D, F); presumably there
// is no port E on this device family — confirm against the reference manual.
#[doc = "Register `IOPSMENR` reader"]
pub type R = crate::R<IOPSMENR_SPEC>;
#[doc = "Register `IOPSMENR` writer"]
pub type W = crate::W<IOPSMENR_SPEC>;
#[doc = "Field `IOPASMEN` reader - I/O port A clock enable during Sleep mode"]
pub type IOPASMEN_R = crate::BitReader;
#[doc = "Field `IOPASMEN` writer - I/O port A clock enable during Sleep mode"]
pub type IOPASMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IOPBSMEN` reader - I/O port B clock enable during Sleep mode"]
pub type IOPBSMEN_R = crate::BitReader;
#[doc = "Field `IOPBSMEN` writer - I/O port B clock enable during Sleep mode"]
pub type IOPBSMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IOPCSMEN` reader - I/O port C clock enable during Sleep mode"]
pub type IOPCSMEN_R = crate::BitReader;
#[doc = "Field `IOPCSMEN` writer - I/O port C clock enable during Sleep mode"]
pub type IOPCSMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IOPDSMEN` reader - I/O port D clock enable during Sleep mode"]
pub type IOPDSMEN_R = crate::BitReader;
#[doc = "Field `IOPDSMEN` writer - I/O port D clock enable during Sleep mode"]
pub type IOPDSMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IOPFSMEN` reader - I/O port F clock enable during Sleep mode"]
pub type IOPFSMEN_R = crate::BitReader;
#[doc = "Field `IOPFSMEN` writer - I/O port F clock enable during Sleep mode"]
pub type IOPFSMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    #[doc = "Bit 0 - I/O port A clock enable during Sleep mode"]
    #[inline(always)]
    pub fn iopasmen(&self) -> IOPASMEN_R {
        IOPASMEN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - I/O port B clock enable during Sleep mode"]
    #[inline(always)]
    pub fn iopbsmen(&self) -> IOPBSMEN_R {
        IOPBSMEN_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - I/O port C clock enable during Sleep mode"]
    #[inline(always)]
    pub fn iopcsmen(&self) -> IOPCSMEN_R {
        IOPCSMEN_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - I/O port D clock enable during Sleep mode"]
    #[inline(always)]
    pub fn iopdsmen(&self) -> IOPDSMEN_R {
        IOPDSMEN_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 5 - I/O port F clock enable during Sleep mode"]
    #[inline(always)]
    pub fn iopfsmen(&self) -> IOPFSMEN_R {
        IOPFSMEN_R::new(((self.bits >> 5) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - I/O port A clock enable during Sleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn iopasmen(&mut self) -> IOPASMEN_W<IOPSMENR_SPEC, 0> {
        IOPASMEN_W::new(self)
    }
    #[doc = "Bit 1 - I/O port B clock enable during Sleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn iopbsmen(&mut self) -> IOPBSMEN_W<IOPSMENR_SPEC, 1> {
        IOPBSMEN_W::new(self)
    }
    #[doc = "Bit 2 - I/O port C clock enable during Sleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn iopcsmen(&mut self) -> IOPCSMEN_W<IOPSMENR_SPEC, 2> {
        IOPCSMEN_W::new(self)
    }
    #[doc = "Bit 3 - I/O port D clock enable during Sleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn iopdsmen(&mut self) -> IOPDSMEN_W<IOPSMENR_SPEC, 3> {
        IOPDSMEN_W::new(self)
    }
    #[doc = "Bit 5 - I/O port F clock enable during Sleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn iopfsmen(&mut self) -> IOPFSMEN_W<IOPSMENR_SPEC, 5> {
        IOPFSMEN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "GPIO in Sleep mode clock enable register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`iopsmenr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`iopsmenr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct IOPSMENR_SPEC;
impl crate::RegisterSpec for IOPSMENR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`iopsmenr::R`](R) reader structure"]
impl crate::Readable for IOPSMENR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`iopsmenr::W`](W) writer structure"]
impl crate::Writable for IOPSMENR_SPEC {
    // Zero bitmaps: no fields on this register use write-0/write-1-to-modify semantics.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets IOPSMENR to value 0"]
impl crate::Resettable for IOPSMENR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
mod antipodal_circle;
mod corridor;
mod empty_scenario;
mod scenario;

use crate::agents::Agents;
use crate::navmesh::Navmesh;
use serde::Deserialize;

pub use antipodal_circle::AntipodalCircleScenario;
pub use corridor::CorridorScenario;
pub use empty_scenario::EmptyScenario;
pub use scenario::Scenario;

/// Closed set of concrete scenarios. Internally-tagged serde enum: the JSON
/// field `"scenario"` selects the variant by name.
#[derive(Clone, Copy, Debug, PartialEq, Deserialize)]
#[serde(tag = "scenario")]
enum Scenarii {
    AntipodalCircle(AntipodalCircleScenario),
    Corridor(CorridorScenario),
    Empty(EmptyScenario),
}

impl Scenario for Scenarii {
    // Dispatches generation to the wrapped concrete scenario.
    fn generate(&self) -> (Agents, Navmesh) {
        match self {
            Scenarii::Corridor(s) => s.generate(),
            Scenarii::AntipodalCircle(s) => s.generate(),
            Scenarii::Empty(s) => s.generate(),
        }
    }
}

/// Parses `data` (JSON) into a concrete scenario.
///
/// NOTE(review): any parse failure (invalid JSON, unknown tag, wrong field
/// types) silently falls back to the default `EmptyScenario`; consider
/// surfacing or logging the error.
fn load_concrete_scenario(data: &str) -> Scenarii {
    serde_json::from_str::<Scenarii>(data)
        .unwrap_or_else(|_| Scenarii::Empty(EmptyScenario::default()))
}

/// Public entry point: loads a scenario from its JSON description.
pub fn load_scenario(data: &str) -> impl Scenario {
    load_concrete_scenario(data)
}

#[cfg(test)]
mod tests {
    use super::*;

    // Missing/unknown tag falls back to the empty scenario.
    #[test]
    fn test_load_scenario_empty() {
        match load_concrete_scenario("{}") {
            Scenarii::Empty(s) => assert_eq!(s, EmptyScenario::new()),
            _ => panic!("Expecting an EmptyScenario."),
        }
    }

    #[test]
    fn test_load_scenario_antipodal_circle() {
        match load_concrete_scenario(
            "{ \"scenario\": \"AntipodalCircle\", \"agents_count\": 3, \"radius\": 4.0 }",
        ) {
            Scenarii::AntipodalCircle(s) => assert_eq!(
                s,
                AntipodalCircleScenario {
                    agents_count: 3,
                    radius: 4.0,
                }
            ),
            _ => panic!("Expecting an AntipodalCircleScenario."),
        }
    }

    // Note: `"length": 10` (integer) deserializes into the f64 field as 10.0.
    #[test]
    fn test_load_scenario_corridor() {
        match load_concrete_scenario(
            "{ \"scenario\": \"Corridor\", \"agents_per_side_count\": 3, \"length\": 10, \"width\": 3.0 }",
        ) {
            Scenarii::Corridor(s) => assert_eq!(
                s,
                CorridorScenario {
                    agents_per_side_count: 3,
                    length: 10.0,
                    width: 3.0,
                }
            ),
            _ => panic!("Expecting an CorridorScenario."),
        }
    }

    // Omitted fields take the variant's serde defaults.
    #[test]
    fn test_load_scenario_corridor_default() {
        match load_concrete_scenario(
            "{ \"scenario\": \"Corridor\" }",
        ) {
            Scenarii::Corridor(s) => assert_eq!(
                s,
                CorridorScenario {
                    agents_per_side_count: 1,
                    length: 10.0,
                    width: 1.0,
                }
            ),
            _ => panic!("Expecting an CorridorScenario."),
        }
    }
}
// NOTE(review): svd2rust-generated, read-only register accessor — logic left
// untouched, comments only. No writer type is emitted because the register is
// not writable.
#[doc = "Register `AXIMC_PERIPH_ID_5` reader"]
pub type R = crate::R<AXIMC_PERIPH_ID_5_SPEC>;
#[doc = "Field `PERIPH_ID_5` reader - PERIPH_ID_5"]
pub type PERIPH_ID_5_R = crate::FieldReader;
impl R {
    #[doc = "Bits 0:7 - PERIPH_ID_5"]
    #[inline(always)]
    pub fn periph_id_5(&self) -> PERIPH_ID_5_R {
        // Only the low byte carries the ID value.
        PERIPH_ID_5_R::new((self.bits & 0xff) as u8)
    }
}
#[doc = "AXIMC peripheral ID5 register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`aximc_periph_id_5::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct AXIMC_PERIPH_ID_5_SPEC;
impl crate::RegisterSpec for AXIMC_PERIPH_ID_5_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`aximc_periph_id_5::R`](R) reader structure"]
impl crate::Readable for AXIMC_PERIPH_ID_5_SPEC {}
#[doc = "`reset()` method sets AXIMC_PERIPH_ID_5 to value 0"]
impl crate::Resettable for AXIMC_PERIPH_ID_5_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use std::io;
use std::collections::HashMap;

/// Per-letter "case balance" of `s`: each lowercase char counts +1, every
/// other char (uppercase, digits, punctuation) counts -1, keyed by the char
/// folded with `to_ascii_lowercase()` (non-ASCII chars are left unchanged by
/// that fold).
fn letter_balance(s: &str) -> HashMap<char, i8> {
    let mut balance = HashMap::new();
    for letter in s.chars() {
        let delta: i8 = if letter.is_lowercase() { 1 } else { -1 };
        // Entry API: one lookup instead of the original get-then-insert pair.
        // NOTE(review): counts can overflow i8 for >127 occurrences of one
        // letter; the original width is kept to preserve the printed type.
        *balance.entry(letter.to_ascii_lowercase()).or_insert(0) += delta;
    }
    balance
}

/// Reads lines until a short line, then prints each line's letter balance.
fn main() {
    let mut strings = Vec::new();
    loop {
        let mut input: String = String::new();
        io::stdin().read_line(&mut input)
            .expect("failed to read line");
        let input = input.trim();
        // NOTE(review): `len() > 1` also terminates on a single-character
        // line, not just an empty one — preserved, but confirm it's intended.
        if input.len() > 1 {
            strings.push(input.to_owned());
        } else {
            break;
        }
    }
    let output: Vec<HashMap<char, i8>> =
        strings.iter().map(|s| letter_balance(s)).collect();
    println!("{:?}", output);
}
use nia_protocol_rust::DefineModifierRequest; use nia_protocol_rust::ModifierDescription; use crate::error::NiaServerError; use crate::error::NiaServerResult; use crate::protocol::NiaModifierDescription; use crate::protocol::Serializable; #[derive(Debug, Clone, PartialEq, Eq)] pub struct NiaDefineModifierRequest { modifier: NiaModifierDescription, } impl NiaDefineModifierRequest { pub fn new(modifier: NiaModifierDescription) -> NiaDefineModifierRequest { NiaDefineModifierRequest { modifier } } pub fn take_modifier(self) -> NiaModifierDescription { self.modifier } } impl Serializable< NiaDefineModifierRequest, nia_protocol_rust::DefineModifierRequest, > for NiaDefineModifierRequest { fn to_pb(&self) -> nia_protocol_rust::DefineModifierRequest { let modifier_pb = self.modifier.to_pb(); let mut define_modifier_request_pb = nia_protocol_rust::DefineModifierRequest::new(); define_modifier_request_pb.set_modifier(modifier_pb); define_modifier_request_pb } fn from_pb( object_pb: nia_protocol_rust::DefineModifierRequest, ) -> NiaServerResult<NiaDefineModifierRequest> { let mut object_pb = object_pb; let modifier_pb = object_pb.take_modifier(); let modifier = NiaModifierDescription::from_pb(modifier_pb)?; let mut define_modifier_request = NiaDefineModifierRequest::new(modifier); Ok(define_modifier_request) } } #[cfg(test)] mod tests { #[allow(unused_imports)] use super::*; use crate::protocol::NiaKey; #[test] fn serializes_and_deserializes() { let key = NiaKey::Key2(2, 3); let alias = String::from("test"); let expected = NiaDefineModifierRequest::new( NiaModifierDescription::new(key, alias), ); let bytes = expected.to_bytes().unwrap(); let result = NiaDefineModifierRequest::from_bytes(bytes).unwrap(); assert_eq!(expected, result); } }
use std::ops::{
    Add, AddAssign, Div, DivAssign, Mul, MulAssign, Sub, SubAssign,
};

use crate::vulkan::draw_system::Vertex;

/// A 2D point (or vector) with `f32` coordinates.
#[derive(Default, Debug, Clone, Copy, PartialEq, PartialOrd)]
pub struct Point {
    pub x: f32,
    pub y: f32,
}

/// An axis-aligned rectangle stored as its minimum and maximum corners.
/// Constructors normalize the corners, so `min <= max` holds per axis
/// except for the deliberately inverted [`Rect::nowhere`].
#[derive(Default, Debug, Clone, Copy, PartialEq, PartialOrd)]
pub struct Rect {
    min: Point,
    max: Point,
}

impl Point {
    /// The origin, (0, 0).
    pub const ZERO: Self = Point { x: 0.0, y: 0.0 };

    /// Builds a point from any pair of values convertible to `f32`.
    #[inline]
    pub fn new<T: Into<f32>>(x: T, y: T) -> Self {
        let x = x.into();
        let y = y.into();
        Self { x, y }
    }

    /// Euclidean length of the vector from the origin to this point.
    #[inline]
    pub fn length(&self) -> f32 {
        self.x.hypot(self.y)
    }

    /// Unit vector between `self` and `other`.
    ///
    /// NOTE(review): `diff = *self - other`, so the result points from
    /// `other` toward `self` — the opposite of what the name suggests —
    /// and is NaN/NaN when the two points coincide (0 / 0). Confirm the
    /// intended direction against callers before changing.
    pub fn toward(&self, other: Point) -> Point {
        let diff = *self - other;
        diff / diff.length()
    }

    /// Euclidean distance to `other`.
    #[inline]
    pub fn dist(&self, other: Point) -> f32 {
        let x_diff = (self.x - other.x).abs();
        let y_diff = (self.y - other.y).abs();
        x_diff.hypot(y_diff)
    }

    /// Squared Euclidean distance to `other` (avoids the square root;
    /// useful for comparisons).
    #[inline]
    pub fn dist_sqr(&self, other: Point) -> f32 {
        let x_diff = (self.x - other.x).abs();
        let y_diff = (self.y - other.y).abs();
        x_diff.powi(2) + y_diff.powi(2)
    }

    /// Converts this point into a draw-system [`Vertex`] position.
    #[inline]
    pub fn vertex(&self) -> Vertex {
        Vertex {
            position: [self.x, self.y],
        }
    }
}

impl From<(f32, f32)> for Point {
    #[inline]
    fn from((x, y): (f32, f32)) -> Point {
        Point { x, y }
    }
}

impl From<(f64, f64)> for Point {
    // Narrowing f64 -> f32 cast; precision loss is accepted here.
    #[inline]
    fn from((x, y): (f64, f64)) -> Point {
        let x = x as f32;
        let y = y as f32;
        Point { x, y }
    }
}

impl From<(i32, i32)> for Point {
    #[inline]
    fn from((x, y): (i32, i32)) -> Point {
        let x = x as f32;
        let y = y as f32;
        Point { x, y }
    }
}

impl From<egui::Pos2> for Point {
    #[inline]
    fn from(pos: egui::Pos2) -> Self {
        Self { x: pos.x, y: pos.y }
    }
}

impl From<egui::Vec2> for Point {
    #[inline]
    fn from(pos: egui::Vec2) -> Self {
        Self { x: pos.x, y: pos.y }
    }
}

// NOTE(review): implementing `From<Point> for egui::Pos2` would be the
// idiomatic direction, but egui's orphan rules permit this `Into` form.
impl Into<egui::Pos2> for Point {
    #[inline]
    fn into(self) -> egui::Pos2 {
        egui::Pos2 {
            x: self.x,
            y: self.y,
        }
    }
}

impl Into<egui::Vec2> for Point {
    #[inline]
    fn into(self) -> egui::Vec2 {
        egui::Vec2 {
            x: self.x,
            y: self.y,
        }
    }
}

impl Rect {
    /// Builds a rectangle from two arbitrary corners, normalizing them
    /// into per-axis min/max so the corners may be given in any order.
    #[inline]
    pub fn new<T: Into<Point>>(p0: T, p1: T) -> Self {
        let p0 = p0.into();
        let p1 = p1.into();

        let min = Point {
            x: p0.x.min(p1.x),
            y: p0.y.min(p1.y),
        };

        let max = Point {
            x: p0.x.max(p1.x),
            y: p0.y.max(p1.y),
        };

        Self { min, max }
    }

    /// The rectangle covering the entire representable plane.
    #[inline]
    pub fn everywhere() -> Self {
        let min = Point {
            x: std::f32::MIN,
            y: std::f32::MIN,
        };

        let max = Point {
            x: std::f32::MAX,
            y: std::f32::MAX,
        };

        Self { min, max }
    }

    /// A deliberately inverted (min > max) rectangle containing nothing;
    /// acts as the identity element for [`Rect::union`].
    #[inline]
    pub fn nowhere() -> Self {
        let min = Point {
            x: std::f32::MAX,
            y: std::f32::MAX,
        };

        let max = Point {
            x: std::f32::MIN,
            y: std::f32::MIN,
        };

        Self { min, max }
    }

    /// Minimum (top-left, in screen coordinates) corner.
    #[inline]
    pub fn min(&self) -> Point {
        self.min
    }

    /// Maximum (bottom-right, in screen coordinates) corner.
    #[inline]
    pub fn max(&self) -> Point {
        self.max
    }

    /// Horizontal extent.
    #[inline]
    pub fn width(&self) -> f32 {
        self.max.x - self.min.x
    }

    /// Vertical extent.
    #[inline]
    pub fn height(&self) -> f32 {
        self.max.y - self.min.y
    }

    /// Center point of the rectangle.
    #[inline]
    pub fn center(&self) -> Point {
        let w = self.width();
        let h = self.height();

        let x = (w / 2.0) + self.min.x;
        let y = (h / 2.0) + self.min.y;

        Point::new(x, y)
    }

    /// Whether `p` lies inside the rectangle (boundary inclusive).
    #[inline]
    pub fn contains(&self, p: Point) -> bool {
        self.min.x <= p.x
            && self.max.x >= p.x
            && self.min.y <= p.y
            && self.max.y >= p.y
    }

    /// Smallest rectangle containing both `self` and `other`.
    #[inline]
    pub fn union(&self, other: Self) -> Self {
        let min = Point {
            x: self.min.x.min(other.min.x),
            y: self.min.y.min(other.min.y),
        };

        let max = Point {
            x: self.max.x.max(other.max.x),
            y: self.max.y.max(other.max.y),
        };

        Self { min, max }
    }

    /// Overlapping region of `self` and `other`.
    ///
    /// NOTE(review): when the rectangles are disjoint, `Self::new`'s
    /// corner normalization still produces a non-empty rect between
    /// them — check `intersects` first if that matters.
    pub fn intersection(&self, other: Self) -> Self {
        let min_x = self.min.x.max(other.min.x);
        let min_y = self.min.y.max(other.min.y);

        let max_x = self.max.x.min(other.max.x);
        let max_y = self.max.y.min(other.max.y);

        Self::new(Point::new(min_x, min_y), Point::new(max_x, max_y))
    }

    /// Whether the two rectangles overlap (boundary-touching counts).
    pub fn intersects(&self, other: Self) -> bool {
        self.min.x <= other.max.x
            && other.min.x <= self.max.x
            && self.min.y <= other.max.y
            && other.min.y <= self.max.y
    }

    /// Scales the rectangle's width and height by `factor`, keeping the
    /// center fixed.
    #[inline]
    pub fn resize(&self, factor: f32) -> Self {
        let center = self.center();

        let new_width = self.width() * factor;
        let new_height = self.height() * factor;

        let left = center.x - (new_width / 2.0);
        let right = center.x + (new_width / 2.0);

        let top = center.y - (new_height / 2.0);
        let bottom = center.y + (new_height / 2.0);

        Self {
            min: Point::new(left, top),
            max: Point::new(right, bottom),
        }
    }
}

impl From<(Point, Point)> for Rect {
    #[inline]
    fn from((p0, p1): (Point, Point)) -> Self {
        Self::new(p0, p1)
    }
}

impl From<egui::Rect> for Rect {
    #[inline]
    fn from(rect: egui::Rect) -> Self {
        Self::new(rect.min, rect.max)
    }
}

impl Into<egui::Rect> for Rect {
    #[inline]
    fn into(self) -> egui::Rect {
        egui::Rect::from_min_max(self.min.into(), self.max.into())
    }
}

// Implements an assign operator (e.g. `AddAssign`) for `Point` in terms
// of the corresponding binary operator `$opfn`. The two-arm form lets
// callers omit `Rhs = Point` for the point-with-point case.
macro_rules! impl_assign_binop {
    ($trait:ident, Rhs = $rhs:ty, $opfn:ident, $opassfn:ident) => {
        impl $trait<$rhs> for Point {
            #[inline]
            fn $opassfn(&mut self, other: $rhs) {
                *self = self.$opfn(other);
            }
        }
    };
    ($trait:ident, $opfn:ident, $opassfn:ident) => {
        impl_assign_binop!($trait, Rhs = Point, $opfn, $opassfn);
    };
}

// Implements a binary operator taking the RHS by reference, forwarding
// to the by-value impl via deref-copy.
macro_rules! impl_ref_binop {
    ($trait:ident, $rhs:ty, $opfn:ident) => {
        impl $trait<$rhs> for Point {
            type Output = Self;

            #[inline]
            fn $opfn(self, other: $rhs) -> Self {
                self.$opfn(*other)
            }
        }
    };
}

// Implements an assign operator taking the RHS by reference, forwarding
// to the by-value assign impl.
macro_rules! impl_ref_assign_binop {
    ($trait:ident, $rhs:ty, $opfn:ident) => {
        impl $trait<$rhs> for Point {
            #[inline]
            fn $opfn(&mut self, other: $rhs) {
                self.$opfn(*other)
            }
        }
    };
}

// Implements a component-wise Point-with-Point operator plus its assign
// and by-reference variants in one go.
macro_rules! impl_point_ops {
    ($trait:ident, $traitass:ident, $opfn:ident, $opassfn:ident) => {
        impl $trait for Point {
            type Output = Self;

            #[inline]
            fn $opfn(self, other: Self) -> Self {
                Self {
                    x: f32::$opfn(self.x, other.x),
                    y: f32::$opfn(self.y, other.y),
                }
            }
        }

        impl_assign_binop!($traitass, $opfn, $opassfn);
        impl_ref_binop!($trait, &Point, $opfn);
        impl_ref_assign_binop!($traitass, &Point, $opassfn);
    };
}

impl_point_ops!(Add, AddAssign, add, add_assign);
impl_point_ops!(Sub, SubAssign, sub, sub_assign);

// Scalar multiplication: scales both components by `other`.
impl Mul<f32> for Point {
    type Output = Self;

    #[inline]
    fn mul(self, other: f32) -> Self {
        Self {
            x: self.x * other,
            y: self.y * other,
        }
    }
}

impl_assign_binop!(MulAssign, Rhs = f32, mul, mul_assign);
impl_ref_binop!(Mul, &f32, mul);
impl_ref_assign_binop!(MulAssign, &f32, mul_assign);

// Scalar division: divides both components by `other`; division by zero
// follows IEEE-754 (yields infinities/NaN rather than panicking).
impl Div<f32> for Point {
    type Output = Self;

    #[inline]
    fn div(self, other: f32) -> Self {
        Self {
            x: self.x / other,
            y: self.y / other,
        }
    }
}

impl_assign_binop!(DivAssign, Rhs = f32, div, div_assign);
impl_ref_binop!(Div, &f32, div);
impl_ref_assign_binop!(DivAssign, &f32, div_assign);
// Copyright 2020 ChainSafe Systems // SPDX-License-Identifier: Apache-2.0, MIT use forest_blocks::{Block, BlockHeader, FullTipset}; use forest_cid::Cid; use forest_encoding::{ de::{self, Deserialize, Deserializer}, ser::{self, Serialize, Serializer}, }; use forest_message::{SignedMessage, UnsignedMessage}; use std::convert::TryFrom; /// Blocksync request options pub const BLOCKS: u64 = 1; pub const MESSAGES: u64 = 2; /// The payload that gets sent to another node to request for blocks and messages. It get DagCBOR serialized before sending over the wire. #[derive(Clone, Debug, PartialEq)] pub struct BlockSyncRequest { /// The tipset to start sync from pub start: Vec<Cid>, /// The amount of epochs to sync by pub request_len: u64, /// 1 = Block only, 2 = Messages only, 3 = Blocks and Messages pub options: u64, } impl Serialize for BlockSyncRequest { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { (&self.start, &self.request_len, &self.options).serialize(serializer) } } impl<'de> Deserialize<'de> for BlockSyncRequest { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let (start, request_len, options) = Deserialize::deserialize(deserializer)?; Ok(BlockSyncRequest { start, request_len, options, }) } } /// The response to a BlockSync request. 
#[derive(Clone, Debug, PartialEq)]
pub struct BlockSyncResponse {
    /// The tipsets requested
    pub chain: Vec<TipsetBundle>,
    /// Error code
    // 0 indicates success; any other value is a failure (see `into_result`).
    pub status: u64,
    /// Status message indicating failure reason
    // TODO not included in blocksync spec, revisit if it will be removed in future
    pub message: String,
}

impl BlockSyncResponse {
    /// Converts the response into full tipsets, or a failure message when
    /// the peer reported a non-zero status.
    pub fn into_result(self) -> Result<Vec<FullTipset>, String> {
        if self.status != 0 {
            // TODO implement a better error type than string if needed to be handled differently
            return Err(format!("Status {}: {}", self.status, self.message));
        }

        // Short-circuits on the first bundle that fails conversion.
        self.chain
            .into_iter()
            .map(FullTipset::try_from)
            .collect::<Result<_, _>>()
    }
}

impl Serialize for BlockSyncResponse {
    // Encoded as the 3-tuple (chain, status, message); order defines the
    // wire format and must match `Deserialize` below.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        (&self.chain, &self.status, &self.message).serialize(serializer)
    }
}

impl<'de> Deserialize<'de> for BlockSyncResponse {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let (chain, status, message) = Deserialize::deserialize(deserializer)?;
        Ok(BlockSyncResponse {
            chain,
            status,
            message,
        })
    }
}

/// Contains the blocks and messages in a particular tipset
// Messages are stored once and shared between blocks via the
// `*_msg_includes` index lists: entry `i` lists, for block `i`, the
// indices into the corresponding message vector.
#[derive(Clone, Debug, PartialEq)]
pub struct TipsetBundle {
    /// The blocks in the tipset
    pub blocks: Vec<BlockHeader>,
    /// Signed bls messages
    pub bls_msgs: Vec<UnsignedMessage>,
    /// Describes which block each message belongs to
    pub bls_msg_includes: Vec<Vec<u64>>,

    /// Unsigned secp messages
    pub secp_msgs: Vec<SignedMessage>,
    /// Describes which block each message belongs to
    pub secp_msg_includes: Vec<Vec<u64>>,
}

impl TryFrom<TipsetBundle> for FullTipset {
    type Error = String;

    /// Reassembles a `FullTipset` by attaching each block's messages
    /// (resolved through the include index lists) to its header.
    ///
    /// Fails if an include list count does not match the block count, if
    /// any message index is out of range, or if `FullTipset::new` rejects
    /// the assembled blocks.
    fn try_from(tsb: TipsetBundle) -> Result<FullTipset, Self::Error> {
        // TODO: we may already want to check this on construction of the bundle
        if tsb.blocks.len() != tsb.bls_msg_includes.len()
            || tsb.blocks.len() != tsb.secp_msg_includes.len()
        {
            return Err(
                "Invalid formed Tipset bundle, lengths of includes does not match blocks"
                    .to_string(),
            );
        }

        // Resolves a list of indices into owned copies of the referenced
        // values; errors on any out-of-range index.
        fn values_from_indexes<T: Clone>(indexes: &[u64], values: &[T]) -> Result<Vec<T>, String> {
            let mut msgs = Vec::with_capacity(indexes.len());
            for idx in indexes.iter() {
                msgs.push(
                    values
                        .get(*idx as usize)
                        .cloned()
                        .ok_or_else(|| "Invalid message index".to_string())?,
                );
            }
            Ok(msgs)
        }

        let mut blocks: Vec<Block> = Vec::with_capacity(tsb.blocks.len());

        // Indexing `*_msg_includes[i]` is safe: lengths were checked
        // against `blocks.len()` above.
        for (i, header) in tsb.blocks.into_iter().enumerate() {
            let bls_messages = values_from_indexes(&tsb.bls_msg_includes[i], &tsb.bls_msgs)?;
            let secp_messages = values_from_indexes(&tsb.secp_msg_includes[i], &tsb.secp_msgs)?;

            blocks.push(Block {
                header,
                secp_messages,
                bls_messages,
            });
        }

        // NOTE(review): the `Ok(...?)` wrap is redundant (clippy
        // needless_question_mark); `FullTipset::new(blocks).map_err(...)`
        // alone would suffice.
        Ok(FullTipset::new(blocks).map_err(|e| e.to_string())?)
    }
}

impl ser::Serialize for TipsetBundle {
    // Encoded as a 5-tuple in field-declaration order; must match
    // `de::Deserialize` below.
    fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error>
    where
        S: Serializer,
    {
        (
            &self.blocks,
            &self.bls_msgs,
            &self.bls_msg_includes,
            &self.secp_msgs,
            &self.secp_msg_includes,
        )
            .serialize(serializer)
    }
}

impl<'de> de::Deserialize<'de> for TipsetBundle {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let (blocks, bls_msgs, bls_msg_includes, secp_msgs, secp_msg_includes) =
            Deserialize::deserialize(deserializer)?;
        Ok(TipsetBundle {
            blocks,
            bls_msgs,
            bls_msg_includes,
            secp_msgs,
            secp_msg_includes,
        })
    }
}
#![doc = "generated by AutoRust 0.1.0"] #![allow(unused_mut)] #![allow(unused_variables)] #![allow(unused_imports)] use super::{models, API_VERSION}; #[non_exhaustive] #[derive(Debug, thiserror :: Error)] #[allow(non_camel_case_types)] pub enum Error { #[error(transparent)] Metrics_Get(#[from] metrics::get::Error), #[error(transparent)] Metrics_GetMultiple(#[from] metrics::get_multiple::Error), #[error(transparent)] Metrics_GetMetadata(#[from] metrics::get_metadata::Error), #[error(transparent)] Events_GetByType(#[from] events::get_by_type::Error), #[error(transparent)] Events_Get(#[from] events::get::Error), #[error(transparent)] Events_GetOdataMetadata(#[from] events::get_odata_metadata::Error), #[error(transparent)] Query_Get(#[from] query::get::Error), #[error(transparent)] Query_Execute(#[from] query::execute::Error), #[error(transparent)] Metadata_Get(#[from] metadata::get::Error), #[error(transparent)] Metadata_Post(#[from] metadata::post::Error), } pub mod metrics { use super::{models, API_VERSION}; pub async fn get( operation_config: &crate::OperationConfig, app_id: &str, metric_id: &str, timespan: Option<&str>, interval: Option<&str>, aggregation: &[&str], segment: &[&str], top: Option<i32>, orderby: Option<&str>, filter: Option<&str>, ) -> std::result::Result<models::MetricsResult, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/apps/{}/metrics/{}", operation_config.base_path(), app_id, metric_id); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } if let 
Some(timespan) = timespan { url.query_pairs_mut().append_pair("timespan", timespan); } if let Some(interval) = interval { url.query_pairs_mut().append_pair("interval", interval); } if let Some(top) = top { url.query_pairs_mut().append_pair("top", top.to_string().as_str()); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("orderby", orderby); } if let Some(filter) = filter { url.query_pairs_mut().append_pair("filter", filter); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::MetricsResult = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn 
get_multiple( operation_config: &crate::OperationConfig, app_id: &str, body: &models::MetricsPostBody, ) -> std::result::Result<models::MetricsResults, get_multiple::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/apps/{}/metrics", operation_config.base_path(), app_id); let mut url = url::Url::parse(url_str).map_err(get_multiple::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_multiple::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(body).map_err(get_multiple::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_multiple::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_multiple::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::MetricsResults = serde_json::from_slice(rsp_body).map_err(|source| get_multiple::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| get_multiple::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_multiple::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get_multiple { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, 
#[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get_metadata( operation_config: &crate::OperationConfig, app_id: &str, ) -> std::result::Result<serde_json::Value, get_metadata::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/apps/{}/metrics/metadata", operation_config.base_path(), app_id); let mut url = url::Url::parse(url_str).map_err(get_metadata::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_metadata::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_metadata::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_metadata::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: serde_json::Value = serde_json::from_slice(rsp_body).map_err(|source| get_metadata::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = 
serde_json::from_slice(rsp_body).map_err(|source| get_metadata::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_metadata::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get_metadata { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod events { use super::{models, API_VERSION}; pub async fn get_by_type( operation_config: &crate::OperationConfig, app_id: &str, event_type: &str, timespan: Option<&str>, filter: Option<&str>, search: Option<&str>, orderby: Option<&str>, select: Option<&str>, skip: Option<i32>, top: Option<i32>, format: Option<&str>, count: Option<bool>, apply: Option<&str>, ) -> std::result::Result<models::EventsResults, get_by_type::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/apps/{}/events/{}", operation_config.base_path(), app_id, event_type); let mut url = url::Url::parse(url_str).map_err(get_by_type::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_by_type::Error::GetTokenError)?; req_builder = 
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } if let Some(timespan) = timespan { url.query_pairs_mut().append_pair("timespan", timespan); } if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(search) = search { url.query_pairs_mut().append_pair("$search", search); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(select) = select { url.query_pairs_mut().append_pair("$select", select); } if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(format) = format { url.query_pairs_mut().append_pair("$format", format); } if let Some(count) = count { url.query_pairs_mut().append_pair("$count", count.to_string().as_str()); } if let Some(apply) = apply { url.query_pairs_mut().append_pair("$apply", apply); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_by_type::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_by_type::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::EventsResults = serde_json::from_slice(rsp_body).map_err(|source| get_by_type::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| get_by_type::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_by_type::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get_by_type { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", 
status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get( operation_config: &crate::OperationConfig, app_id: &str, event_type: &str, timespan: Option<&str>, event_id: &str, ) -> std::result::Result<models::EventsResults, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/apps/{}/events/{}/{}", operation_config.base_path(), app_id, event_type, event_id ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } if let Some(timespan) = timespan { url.query_pairs_mut().append_pair("timespan", timespan); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::EventsResults = 
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get_odata_metadata( operation_config: &crate::OperationConfig, app_id: &str, ) -> std::result::Result<serde_json::Value, get_odata_metadata::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/apps/{}/events/$metadata", operation_config.base_path(), app_id); let mut url = url::Url::parse(url_str).map_err(get_odata_metadata::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_odata_metadata::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } let req_body = 
bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_odata_metadata::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_odata_metadata::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: serde_json::Value = serde_json::from_slice(rsp_body) .map_err(|source| get_odata_metadata::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| get_odata_metadata::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_odata_metadata::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get_odata_metadata { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod query { use super::{models, API_VERSION}; pub async fn get( operation_config: &crate::OperationConfig, app_id: &str, query: &str, timespan: Option<&str>, ) -> std::result::Result<models::QueryResults, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/apps/{}/query", operation_config.base_path(), app_id); let mut url = 
url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("query", query); if let Some(timespan) = timespan { url.query_pairs_mut().append_pair("timespan", timespan); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::QueryResults = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to 
deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn execute( operation_config: &crate::OperationConfig, app_id: &str, body: &models::QueryBody, ) -> std::result::Result<models::QueryResults, execute::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/apps/{}/query", operation_config.base_path(), app_id); let mut url = url::Url::parse(url_str).map_err(execute::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(execute::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(body).map_err(execute::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(execute::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(execute::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::QueryResults = serde_json::from_slice(rsp_body).map_err(|source| execute::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| execute::Error::DeserializeError(source, rsp_body.clone()))?; Err(execute::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod execute { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { 
#[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod metadata { use super::{models, API_VERSION}; pub async fn get(operation_config: &crate::OperationConfig, app_id: &str) -> std::result::Result<models::MetadataResults, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/apps/{}/metadata", operation_config.base_path(), app_id); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::MetadataResults = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } 
status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn post( operation_config: &crate::OperationConfig, app_id: &str, ) -> std::result::Result<models::MetadataResults, post::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/apps/{}/metadata", operation_config.base_path(), app_id); let mut url = url::Url::parse(url_str).map_err(post::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(post::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = 
req_builder.body(req_body).map_err(post::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(post::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::MetadataResults = serde_json::from_slice(rsp_body).map_err(|source| post::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| post::Error::DeserializeError(source, rsp_body.clone()))?; Err(post::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod post { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
use x86::io::outw;

/// Exit codes recognised by the emulator's debug-exit device.
#[repr(u32)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SemihostingExitStatus {
    Success = 0x10,
    Failure = 0x11,
}

/// Writes `status` to I/O port 0x501 to request that the host stop the
/// virtual machine (presumably QEMU's isa-debug-exit device — confirm
/// against the machine configuration).
pub fn semihosting_halt(status: SemihostingExitStatus) {
    const EXIT_PORT: u16 = 0x501;
    let code = status as u16;
    // SAFETY: raw port I/O; writing the exit code to the debug-exit
    // port has no memory-safety impact on this program.
    unsafe {
        outw(EXIT_PORT, code);
    }
}
use dungeons_and_rust::dnd_server;

/// Binary entry point: hands the address string "127.0.0.1:8000" to the
/// game server's `start_server` (address semantics are defined by
/// `dnd_server` — presumably the bind address; confirm there).
fn main() {
    dnd_server::start_server("127.0.0.1:8000");
}
use clap::Clap; use leetcode_picker::*; use question::{Answer, Question}; use log::*; fn print_code_snippet(arg: &Option<String>, qq: &Quiz) -> Result<(), String> { if let Some(ll) = arg { match qq.code_snippet(ll) { Some(cs) => println!("Code Snippet:\n{}", cs), None => return Err(format!("Cannot found {} code snippet", ll)), } } Ok(()) } fn main() -> Result<(), String> { env_logger::init(); let commandline_args = cli_args::Args::parse(); //dbg!(&commandline_args); // set token with command line token set_token(commandline_args.token()); match commandline_args.if_random() { true => { let qq = if commandline_args.if_interact() { loop { let qq = Quiz::get_randomly(commandline_args.level())?; info!("this quiz's description: {}", qq.quiz_description()?); println!( "{}", qq.use_fmt_temp( commandline_args.template(), commandline_args.if_show_code_snippet() )? ); // ask let a = Question::new("Is this good? (yes/no/y/n)") .yes_no() .until_acceptable() .ask() .unwrap(); if Answer::YES == a { break qq; } } } else { let qq = Quiz::get_randomly(commandline_args.level())?; info!("this quiz's description: {}", qq.quiz_description()?); println!( "{}", qq.use_fmt_temp( commandline_args.template(), commandline_args.if_show_code_snippet() )? ); qq }; // show code snippet print_code_snippet(commandline_args.if_show_code_snippet(), &qq)?; } false => { // try id first if let Some(ref id) = commandline_args.quiz_id() { let qq = Quiz::get_by_id(*id)?; info!("this quiz's description: {}", qq.quiz_description()?); println!( "{}", qq.use_fmt_temp( commandline_args.template(), commandline_args.if_show_code_snippet() )? ); // show code snippet print_code_snippet(commandline_args.if_show_code_snippet(), &qq)?; return Ok(()); } // try name then if let Some(ref name) = commandline_args.name() { let qq = Quiz::get_by_name(name)?; info!("this quiz's description: {}", qq.quiz_description()?); println!( "{}", qq.use_fmt_temp( commandline_args.template(), commandline_args.if_show_code_snippet() )? 
); // show code snippet print_code_snippet(commandline_args.if_show_code_snippet(), &qq)?; return Ok(()); } println!("If it is not random, need more info. Check -h") } } Ok(()) }
use rustc_serialize::json; use super::characters::player::Player; use super::characters::Direction; use super::locations::Coordinates; use std::io::prelude::*; use std::net::{Shutdown, TcpListener, TcpStream}; use std::thread; #[derive(RustcDecodable, RustcEncodable)] struct NetworkPlayer { coordinates: Coordinates, direction: u8, username: String, } #[derive(RustcDecodable, RustcEncodable)] struct NetworkData { ip: String, player: NetworkPlayer, } type PlayerList = Vec<NetworkPlayer>; pub fn start_server() { // Use a thread for non-blocking. thread::spawn(move || { let listener: TcpListener = match TcpListener::bind(super::CLIENT_IP) { Ok(listener) => listener, Err(_) => panic!("Couldn't prepare for receiving data from server"), }; // Handle each request individually. This should not be non-blocking. for stream in listener.incoming() { match stream { Ok(mut stream) => { // Read the contents from the incoming stream. let mut data: String = String::new(); let _ = stream.read_to_string(&mut data); // Decode the stream JSON into a NetworkPlayer if possible. let players: PlayerList = match json::decode(&data[..]) { Ok(val) => val, Err(why) => { println!("Error decoding server req: {}", why); continue; }, }; // Create a new array of other players by defaulting it to // an array of 255 sets of 3 i32's. let mut new_array: [[i32; 3]; 255] = [[-1i32; 3]; 255]; let mut done: usize = 0; for player in &players { let x: i32 = player.coordinates.x as i32; let y: i32 = player.coordinates.y as i32; let d: i32 = player.direction as i32; new_array[done] = [x.clone(), y.clone(), d.clone()]; done += 1; } unsafe { super::OTHER_PLAYERS = new_array; } }, Err(e) => panic!("Err: {}", e), } } }); } pub fn update(player: &Player) { // Start a stream to the server. If one can't be started, then enter in // "offline mode". 
let mut stream: TcpStream = match TcpStream::connect(super::SERVER_IP) { Ok(stream) => stream, Err(_) => { println!("Couldn't connect to server: {}", super::SERVER_IP); return; }, }; let player_send: NetworkData = NetworkData { ip: String::from(super::CLIENT_IP), player: NetworkPlayer { coordinates: c![player.coordinates.x, player.coordinates.y], direction: match player.direction { Direction::South => 0, Direction::West => 1, Direction::North => 2, Direction::East => 3, }, username: String::from("player2"), }, }; let encoded: String = json::encode(&player_send).unwrap(); let _ = stream.write_all(encoded.as_bytes()); // Shutdown the stream once the data has been sent. let _ = stream.shutdown(Shutdown::Both); }
use std::ffi::CString;
use std::os::raw::{c_char, c_int};

/// Returns a freshly allocated, NUL-terminated C string "hello".
///
/// # Safety
///
/// The returned pointer owns a heap allocation: the caller must pass it
/// to [`free_hello`] exactly once. (Previously no release function was
/// exported, so callers had no sound way to reclaim the allocation.)
#[no_mangle]
pub unsafe extern "C" fn say_hello() -> *const c_char {
    CString::new("hello").unwrap().into_raw()
}

/// Releases a string previously obtained from [`say_hello`].
///
/// # Safety
///
/// `ptr` must be null, or a pointer returned by [`say_hello`] that has
/// not already been freed.
#[no_mangle]
pub unsafe extern "C" fn free_hello(ptr: *mut c_char) {
    if !ptr.is_null() {
        // Reconstitute the CString so its Drop reclaims the allocation.
        drop(CString::from_raw(ptr));
    }
}

/// Trivial FFI smoke-test export; always returns 1.
#[no_mangle]
pub unsafe extern "C" fn one() -> c_int {
    1
}
use super::super::module_a;
#[test] fn cli_tests() { let t = trycmd::TestCases::new(); t.default_bin_name("maturin"); t.case("tests/cmd/*.toml"); #[cfg(not(feature = "upload"))] { t.skip("tests/cmd/upload.toml"); t.skip("tests/cmd/publish.toml"); } #[cfg(not(feature = "zig"))] { t.skip("tests/cmd/build.toml"); } #[cfg(not(feature = "scaffolding"))] { t.skip("tests/cmd/new.toml"); t.skip("tests/cmd/init.toml"); t.skip("tests/cmd/generate-ci.toml"); } #[cfg(not(all(feature = "upload", feature = "zig", feature = "scaffolding")))] { t.skip("tests/cmd/maturin.toml"); } }
use super::*; #[test] fn TYA_test() { let mut registers = Registers::new(); let opeland = 0x10; registers.Y = opeland; Calculator::TYA(&mut registers); assert_eq!(registers.A, opeland); assert_eq!(registers.P.negative, false); assert_eq!(registers.P.zero, false); } #[test] fn TYA_update_zero_test() { let mut registers = Registers::new(); let opeland = 0x00; // Zero operand registers.Y = opeland; Calculator::TYA(&mut registers); assert_eq!(registers.A, opeland); assert_eq!(registers.P.negative, false); assert_eq!(registers.P.zero, true); } #[test] fn TYA_update_negative_test() { let mut registers = Registers::new(); let opeland = 0x90; // Nagative opeland(over than 0x80) registers.Y = opeland; Calculator::TYA(&mut registers); assert_eq!(registers.A, opeland); assert_eq!(registers.P.negative, true); assert_eq!(registers.P.zero, false); }
use enums::networks::Network;
use std::sync::Mutex;

lazy_static! {
    /// Process-wide currently selected network; defaults to mainnet.
    static ref NETWORK: Mutex<Network> = { Mutex::new(Network::Mainnet) };
}

/// Replaces the globally selected network.
pub fn set(network: Network) {
    *NETWORK.lock().unwrap() = network;
}

/// Returns a copy of the globally selected network.
pub fn get() -> Network {
    (*NETWORK.lock().unwrap()).clone()
}

#[cfg(test)]
mod tests {
    use super::*;

    /// Default, set and restore are exercised in ONE sequential test:
    /// the previous `get_network`/`set_network` pair raced on the shared
    /// global when the harness ran them on parallel threads, so
    /// `get_network` could intermittently observe `Devnet`.
    #[test]
    fn network_roundtrip() {
        assert_eq!(get(), Network::Mainnet);
        set(Network::Devnet);
        assert_eq!(get(), Network::Devnet);
        set(Network::Mainnet);
        assert_eq!(get(), Network::Mainnet);
    }
}
/// An enum to represent all characters in the CyrillicExtendedB block. #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum CyrillicExtendedB { /// \u{a640}: 'Ꙁ' CyrillicCapitalLetterZemlya, /// \u{a641}: 'ꙁ' CyrillicSmallLetterZemlya, /// \u{a642}: 'Ꙃ' CyrillicCapitalLetterDzelo, /// \u{a643}: 'ꙃ' CyrillicSmallLetterDzelo, /// \u{a644}: 'Ꙅ' CyrillicCapitalLetterReversedDze, /// \u{a645}: 'ꙅ' CyrillicSmallLetterReversedDze, /// \u{a646}: 'Ꙇ' CyrillicCapitalLetterIota, /// \u{a647}: 'ꙇ' CyrillicSmallLetterIota, /// \u{a648}: 'Ꙉ' CyrillicCapitalLetterDjerv, /// \u{a649}: 'ꙉ' CyrillicSmallLetterDjerv, /// \u{a64a}: 'Ꙋ' CyrillicCapitalLetterMonographUk, /// \u{a64b}: 'ꙋ' CyrillicSmallLetterMonographUk, /// \u{a64c}: 'Ꙍ' CyrillicCapitalLetterBroadOmega, /// \u{a64d}: 'ꙍ' CyrillicSmallLetterBroadOmega, /// \u{a64e}: 'Ꙏ' CyrillicCapitalLetterNeutralYer, /// \u{a64f}: 'ꙏ' CyrillicSmallLetterNeutralYer, /// \u{a650}: 'Ꙑ' CyrillicCapitalLetterYeruWithBackYer, /// \u{a651}: 'ꙑ' CyrillicSmallLetterYeruWithBackYer, /// \u{a652}: 'Ꙓ' CyrillicCapitalLetterIotifiedYat, /// \u{a653}: 'ꙓ' CyrillicSmallLetterIotifiedYat, /// \u{a654}: 'Ꙕ' CyrillicCapitalLetterReversedYu, /// \u{a655}: 'ꙕ' CyrillicSmallLetterReversedYu, /// \u{a656}: 'Ꙗ' CyrillicCapitalLetterIotifiedA, /// \u{a657}: 'ꙗ' CyrillicSmallLetterIotifiedA, /// \u{a658}: 'Ꙙ' CyrillicCapitalLetterClosedLittleYus, /// \u{a659}: 'ꙙ' CyrillicSmallLetterClosedLittleYus, /// \u{a65a}: 'Ꙛ' CyrillicCapitalLetterBlendedYus, /// \u{a65b}: 'ꙛ' CyrillicSmallLetterBlendedYus, /// \u{a65c}: 'Ꙝ' CyrillicCapitalLetterIotifiedClosedLittleYus, /// \u{a65d}: 'ꙝ' CyrillicSmallLetterIotifiedClosedLittleYus, /// \u{a65e}: 'Ꙟ' CyrillicCapitalLetterYn, /// \u{a65f}: 'ꙟ' CyrillicSmallLetterYn, /// \u{a660}: 'Ꙡ' CyrillicCapitalLetterReversedTse, /// \u{a661}: 'ꙡ' CyrillicSmallLetterReversedTse, /// \u{a662}: 'Ꙣ' CyrillicCapitalLetterSoftDe, /// \u{a663}: 'ꙣ' CyrillicSmallLetterSoftDe, /// \u{a664}: 'Ꙥ' CyrillicCapitalLetterSoftEl, /// 
\u{a665}: 'ꙥ' CyrillicSmallLetterSoftEl, /// \u{a666}: 'Ꙧ' CyrillicCapitalLetterSoftEm, /// \u{a667}: 'ꙧ' CyrillicSmallLetterSoftEm, /// \u{a668}: 'Ꙩ' CyrillicCapitalLetterMonocularO, /// \u{a669}: 'ꙩ' CyrillicSmallLetterMonocularO, /// \u{a66a}: 'Ꙫ' CyrillicCapitalLetterBinocularO, /// \u{a66b}: 'ꙫ' CyrillicSmallLetterBinocularO, /// \u{a66c}: 'Ꙭ' CyrillicCapitalLetterDoubleMonocularO, /// \u{a66d}: 'ꙭ' CyrillicSmallLetterDoubleMonocularO, /// \u{a66e}: 'ꙮ' CyrillicLetterMultiocularO, /// \u{a66f}: '꙯' CombiningCyrillicVzmet, /// \u{a670}: '꙰' CombiningCyrillicTenMillionsSign, /// \u{a671}: '꙱' CombiningCyrillicHundredMillionsSign, /// \u{a672}: '꙲' CombiningCyrillicThousandMillionsSign, /// \u{a673}: '꙳' SlavonicAsterisk, /// \u{a674}: 'ꙴ' CombiningCyrillicLetterUkrainianIe, /// \u{a675}: 'ꙵ' CombiningCyrillicLetterI, /// \u{a676}: 'ꙶ' CombiningCyrillicLetterYi, /// \u{a677}: 'ꙷ' CombiningCyrillicLetterU, /// \u{a678}: 'ꙸ' CombiningCyrillicLetterHardSign, /// \u{a679}: 'ꙹ' CombiningCyrillicLetterYeru, /// \u{a67a}: 'ꙺ' CombiningCyrillicLetterSoftSign, /// \u{a67b}: 'ꙻ' CombiningCyrillicLetterOmega, /// \u{a67c}: '꙼' CombiningCyrillicKavyka, /// \u{a67d}: '꙽' CombiningCyrillicPayerok, /// \u{a67e}: '꙾' CyrillicKavyka, /// \u{a67f}: 'ꙿ' CyrillicPayerok, /// \u{a680}: 'Ꚁ' CyrillicCapitalLetterDwe, /// \u{a681}: 'ꚁ' CyrillicSmallLetterDwe, /// \u{a682}: 'Ꚃ' CyrillicCapitalLetterDzwe, /// \u{a683}: 'ꚃ' CyrillicSmallLetterDzwe, /// \u{a684}: 'Ꚅ' CyrillicCapitalLetterZhwe, /// \u{a685}: 'ꚅ' CyrillicSmallLetterZhwe, /// \u{a686}: 'Ꚇ' CyrillicCapitalLetterCche, /// \u{a687}: 'ꚇ' CyrillicSmallLetterCche, /// \u{a688}: 'Ꚉ' CyrillicCapitalLetterDzze, /// \u{a689}: 'ꚉ' CyrillicSmallLetterDzze, /// \u{a68a}: 'Ꚋ' CyrillicCapitalLetterTeWithMiddleHook, /// \u{a68b}: 'ꚋ' CyrillicSmallLetterTeWithMiddleHook, /// \u{a68c}: 'Ꚍ' CyrillicCapitalLetterTwe, /// \u{a68d}: 'ꚍ' CyrillicSmallLetterTwe, /// \u{a68e}: 'Ꚏ' CyrillicCapitalLetterTswe, /// \u{a68f}: 'ꚏ' CyrillicSmallLetterTswe, 
/// \u{a690}: 'Ꚑ' CyrillicCapitalLetterTsse, /// \u{a691}: 'ꚑ' CyrillicSmallLetterTsse, /// \u{a692}: 'Ꚓ' CyrillicCapitalLetterTche, /// \u{a693}: 'ꚓ' CyrillicSmallLetterTche, /// \u{a694}: 'Ꚕ' CyrillicCapitalLetterHwe, /// \u{a695}: 'ꚕ' CyrillicSmallLetterHwe, /// \u{a696}: 'Ꚗ' CyrillicCapitalLetterShwe, /// \u{a697}: 'ꚗ' CyrillicSmallLetterShwe, /// \u{a698}: 'Ꚙ' CyrillicCapitalLetterDoubleO, /// \u{a699}: 'ꚙ' CyrillicSmallLetterDoubleO, /// \u{a69a}: 'Ꚛ' CyrillicCapitalLetterCrossedO, /// \u{a69b}: 'ꚛ' CyrillicSmallLetterCrossedO, /// \u{a69c}: 'ꚜ' ModifierLetterCyrillicHardSign, /// \u{a69d}: 'ꚝ' ModifierLetterCyrillicSoftSign, /// \u{a69e}: 'ꚞ' CombiningCyrillicLetterEf, } impl Into<char> for CyrillicExtendedB { fn into(self) -> char { match self { CyrillicExtendedB::CyrillicCapitalLetterZemlya => 'Ꙁ', CyrillicExtendedB::CyrillicSmallLetterZemlya => 'ꙁ', CyrillicExtendedB::CyrillicCapitalLetterDzelo => 'Ꙃ', CyrillicExtendedB::CyrillicSmallLetterDzelo => 'ꙃ', CyrillicExtendedB::CyrillicCapitalLetterReversedDze => 'Ꙅ', CyrillicExtendedB::CyrillicSmallLetterReversedDze => 'ꙅ', CyrillicExtendedB::CyrillicCapitalLetterIota => 'Ꙇ', CyrillicExtendedB::CyrillicSmallLetterIota => 'ꙇ', CyrillicExtendedB::CyrillicCapitalLetterDjerv => 'Ꙉ', CyrillicExtendedB::CyrillicSmallLetterDjerv => 'ꙉ', CyrillicExtendedB::CyrillicCapitalLetterMonographUk => 'Ꙋ', CyrillicExtendedB::CyrillicSmallLetterMonographUk => 'ꙋ', CyrillicExtendedB::CyrillicCapitalLetterBroadOmega => 'Ꙍ', CyrillicExtendedB::CyrillicSmallLetterBroadOmega => 'ꙍ', CyrillicExtendedB::CyrillicCapitalLetterNeutralYer => 'Ꙏ', CyrillicExtendedB::CyrillicSmallLetterNeutralYer => 'ꙏ', CyrillicExtendedB::CyrillicCapitalLetterYeruWithBackYer => 'Ꙑ', CyrillicExtendedB::CyrillicSmallLetterYeruWithBackYer => 'ꙑ', CyrillicExtendedB::CyrillicCapitalLetterIotifiedYat => 'Ꙓ', CyrillicExtendedB::CyrillicSmallLetterIotifiedYat => 'ꙓ', CyrillicExtendedB::CyrillicCapitalLetterReversedYu => 'Ꙕ', 
CyrillicExtendedB::CyrillicSmallLetterReversedYu => 'ꙕ', CyrillicExtendedB::CyrillicCapitalLetterIotifiedA => 'Ꙗ', CyrillicExtendedB::CyrillicSmallLetterIotifiedA => 'ꙗ', CyrillicExtendedB::CyrillicCapitalLetterClosedLittleYus => 'Ꙙ', CyrillicExtendedB::CyrillicSmallLetterClosedLittleYus => 'ꙙ', CyrillicExtendedB::CyrillicCapitalLetterBlendedYus => 'Ꙛ', CyrillicExtendedB::CyrillicSmallLetterBlendedYus => 'ꙛ', CyrillicExtendedB::CyrillicCapitalLetterIotifiedClosedLittleYus => 'Ꙝ', CyrillicExtendedB::CyrillicSmallLetterIotifiedClosedLittleYus => 'ꙝ', CyrillicExtendedB::CyrillicCapitalLetterYn => 'Ꙟ', CyrillicExtendedB::CyrillicSmallLetterYn => 'ꙟ', CyrillicExtendedB::CyrillicCapitalLetterReversedTse => 'Ꙡ', CyrillicExtendedB::CyrillicSmallLetterReversedTse => 'ꙡ', CyrillicExtendedB::CyrillicCapitalLetterSoftDe => 'Ꙣ', CyrillicExtendedB::CyrillicSmallLetterSoftDe => 'ꙣ', CyrillicExtendedB::CyrillicCapitalLetterSoftEl => 'Ꙥ', CyrillicExtendedB::CyrillicSmallLetterSoftEl => 'ꙥ', CyrillicExtendedB::CyrillicCapitalLetterSoftEm => 'Ꙧ', CyrillicExtendedB::CyrillicSmallLetterSoftEm => 'ꙧ', CyrillicExtendedB::CyrillicCapitalLetterMonocularO => 'Ꙩ', CyrillicExtendedB::CyrillicSmallLetterMonocularO => 'ꙩ', CyrillicExtendedB::CyrillicCapitalLetterBinocularO => 'Ꙫ', CyrillicExtendedB::CyrillicSmallLetterBinocularO => 'ꙫ', CyrillicExtendedB::CyrillicCapitalLetterDoubleMonocularO => 'Ꙭ', CyrillicExtendedB::CyrillicSmallLetterDoubleMonocularO => 'ꙭ', CyrillicExtendedB::CyrillicLetterMultiocularO => 'ꙮ', CyrillicExtendedB::CombiningCyrillicVzmet => '꙯', CyrillicExtendedB::CombiningCyrillicTenMillionsSign => '꙰', CyrillicExtendedB::CombiningCyrillicHundredMillionsSign => '꙱', CyrillicExtendedB::CombiningCyrillicThousandMillionsSign => '꙲', CyrillicExtendedB::SlavonicAsterisk => '꙳', CyrillicExtendedB::CombiningCyrillicLetterUkrainianIe => 'ꙴ', CyrillicExtendedB::CombiningCyrillicLetterI => 'ꙵ', CyrillicExtendedB::CombiningCyrillicLetterYi => 'ꙶ', 
CyrillicExtendedB::CombiningCyrillicLetterU => 'ꙷ', CyrillicExtendedB::CombiningCyrillicLetterHardSign => 'ꙸ', CyrillicExtendedB::CombiningCyrillicLetterYeru => 'ꙹ', CyrillicExtendedB::CombiningCyrillicLetterSoftSign => 'ꙺ', CyrillicExtendedB::CombiningCyrillicLetterOmega => 'ꙻ', CyrillicExtendedB::CombiningCyrillicKavyka => '꙼', CyrillicExtendedB::CombiningCyrillicPayerok => '꙽', CyrillicExtendedB::CyrillicKavyka => '꙾', CyrillicExtendedB::CyrillicPayerok => 'ꙿ', CyrillicExtendedB::CyrillicCapitalLetterDwe => 'Ꚁ', CyrillicExtendedB::CyrillicSmallLetterDwe => 'ꚁ', CyrillicExtendedB::CyrillicCapitalLetterDzwe => 'Ꚃ', CyrillicExtendedB::CyrillicSmallLetterDzwe => 'ꚃ', CyrillicExtendedB::CyrillicCapitalLetterZhwe => 'Ꚅ', CyrillicExtendedB::CyrillicSmallLetterZhwe => 'ꚅ', CyrillicExtendedB::CyrillicCapitalLetterCche => 'Ꚇ', CyrillicExtendedB::CyrillicSmallLetterCche => 'ꚇ', CyrillicExtendedB::CyrillicCapitalLetterDzze => 'Ꚉ', CyrillicExtendedB::CyrillicSmallLetterDzze => 'ꚉ', CyrillicExtendedB::CyrillicCapitalLetterTeWithMiddleHook => 'Ꚋ', CyrillicExtendedB::CyrillicSmallLetterTeWithMiddleHook => 'ꚋ', CyrillicExtendedB::CyrillicCapitalLetterTwe => 'Ꚍ', CyrillicExtendedB::CyrillicSmallLetterTwe => 'ꚍ', CyrillicExtendedB::CyrillicCapitalLetterTswe => 'Ꚏ', CyrillicExtendedB::CyrillicSmallLetterTswe => 'ꚏ', CyrillicExtendedB::CyrillicCapitalLetterTsse => 'Ꚑ', CyrillicExtendedB::CyrillicSmallLetterTsse => 'ꚑ', CyrillicExtendedB::CyrillicCapitalLetterTche => 'Ꚓ', CyrillicExtendedB::CyrillicSmallLetterTche => 'ꚓ', CyrillicExtendedB::CyrillicCapitalLetterHwe => 'Ꚕ', CyrillicExtendedB::CyrillicSmallLetterHwe => 'ꚕ', CyrillicExtendedB::CyrillicCapitalLetterShwe => 'Ꚗ', CyrillicExtendedB::CyrillicSmallLetterShwe => 'ꚗ', CyrillicExtendedB::CyrillicCapitalLetterDoubleO => 'Ꚙ', CyrillicExtendedB::CyrillicSmallLetterDoubleO => 'ꚙ', CyrillicExtendedB::CyrillicCapitalLetterCrossedO => 'Ꚛ', CyrillicExtendedB::CyrillicSmallLetterCrossedO => 'ꚛ', 
CyrillicExtendedB::ModifierLetterCyrillicHardSign => 'ꚜ', CyrillicExtendedB::ModifierLetterCyrillicSoftSign => 'ꚝ', CyrillicExtendedB::CombiningCyrillicLetterEf => 'ꚞ', } } } impl std::convert::TryFrom<char> for CyrillicExtendedB { type Error = (); fn try_from(c: char) -> Result<Self, Self::Error> { match c { 'Ꙁ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterZemlya), 'ꙁ' => Ok(CyrillicExtendedB::CyrillicSmallLetterZemlya), 'Ꙃ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterDzelo), 'ꙃ' => Ok(CyrillicExtendedB::CyrillicSmallLetterDzelo), 'Ꙅ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterReversedDze), 'ꙅ' => Ok(CyrillicExtendedB::CyrillicSmallLetterReversedDze), 'Ꙇ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterIota), 'ꙇ' => Ok(CyrillicExtendedB::CyrillicSmallLetterIota), 'Ꙉ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterDjerv), 'ꙉ' => Ok(CyrillicExtendedB::CyrillicSmallLetterDjerv), 'Ꙋ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterMonographUk), 'ꙋ' => Ok(CyrillicExtendedB::CyrillicSmallLetterMonographUk), 'Ꙍ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterBroadOmega), 'ꙍ' => Ok(CyrillicExtendedB::CyrillicSmallLetterBroadOmega), 'Ꙏ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterNeutralYer), 'ꙏ' => Ok(CyrillicExtendedB::CyrillicSmallLetterNeutralYer), 'Ꙑ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterYeruWithBackYer), 'ꙑ' => Ok(CyrillicExtendedB::CyrillicSmallLetterYeruWithBackYer), 'Ꙓ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterIotifiedYat), 'ꙓ' => Ok(CyrillicExtendedB::CyrillicSmallLetterIotifiedYat), 'Ꙕ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterReversedYu), 'ꙕ' => Ok(CyrillicExtendedB::CyrillicSmallLetterReversedYu), 'Ꙗ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterIotifiedA), 'ꙗ' => Ok(CyrillicExtendedB::CyrillicSmallLetterIotifiedA), 'Ꙙ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterClosedLittleYus), 'ꙙ' => Ok(CyrillicExtendedB::CyrillicSmallLetterClosedLittleYus), 'Ꙛ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterBlendedYus), 'ꙛ' => 
Ok(CyrillicExtendedB::CyrillicSmallLetterBlendedYus), 'Ꙝ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterIotifiedClosedLittleYus), 'ꙝ' => Ok(CyrillicExtendedB::CyrillicSmallLetterIotifiedClosedLittleYus), 'Ꙟ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterYn), 'ꙟ' => Ok(CyrillicExtendedB::CyrillicSmallLetterYn), 'Ꙡ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterReversedTse), 'ꙡ' => Ok(CyrillicExtendedB::CyrillicSmallLetterReversedTse), 'Ꙣ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterSoftDe), 'ꙣ' => Ok(CyrillicExtendedB::CyrillicSmallLetterSoftDe), 'Ꙥ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterSoftEl), 'ꙥ' => Ok(CyrillicExtendedB::CyrillicSmallLetterSoftEl), 'Ꙧ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterSoftEm), 'ꙧ' => Ok(CyrillicExtendedB::CyrillicSmallLetterSoftEm), 'Ꙩ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterMonocularO), 'ꙩ' => Ok(CyrillicExtendedB::CyrillicSmallLetterMonocularO), 'Ꙫ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterBinocularO), 'ꙫ' => Ok(CyrillicExtendedB::CyrillicSmallLetterBinocularO), 'Ꙭ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterDoubleMonocularO), 'ꙭ' => Ok(CyrillicExtendedB::CyrillicSmallLetterDoubleMonocularO), 'ꙮ' => Ok(CyrillicExtendedB::CyrillicLetterMultiocularO), '꙯' => Ok(CyrillicExtendedB::CombiningCyrillicVzmet), '꙰' => Ok(CyrillicExtendedB::CombiningCyrillicTenMillionsSign), '꙱' => Ok(CyrillicExtendedB::CombiningCyrillicHundredMillionsSign), '꙲' => Ok(CyrillicExtendedB::CombiningCyrillicThousandMillionsSign), '꙳' => Ok(CyrillicExtendedB::SlavonicAsterisk), 'ꙴ' => Ok(CyrillicExtendedB::CombiningCyrillicLetterUkrainianIe), 'ꙵ' => Ok(CyrillicExtendedB::CombiningCyrillicLetterI), 'ꙶ' => Ok(CyrillicExtendedB::CombiningCyrillicLetterYi), 'ꙷ' => Ok(CyrillicExtendedB::CombiningCyrillicLetterU), 'ꙸ' => Ok(CyrillicExtendedB::CombiningCyrillicLetterHardSign), 'ꙹ' => Ok(CyrillicExtendedB::CombiningCyrillicLetterYeru), 'ꙺ' => Ok(CyrillicExtendedB::CombiningCyrillicLetterSoftSign), 'ꙻ' => 
Ok(CyrillicExtendedB::CombiningCyrillicLetterOmega), '꙼' => Ok(CyrillicExtendedB::CombiningCyrillicKavyka), '꙽' => Ok(CyrillicExtendedB::CombiningCyrillicPayerok), '꙾' => Ok(CyrillicExtendedB::CyrillicKavyka), 'ꙿ' => Ok(CyrillicExtendedB::CyrillicPayerok), 'Ꚁ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterDwe), 'ꚁ' => Ok(CyrillicExtendedB::CyrillicSmallLetterDwe), 'Ꚃ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterDzwe), 'ꚃ' => Ok(CyrillicExtendedB::CyrillicSmallLetterDzwe), 'Ꚅ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterZhwe), 'ꚅ' => Ok(CyrillicExtendedB::CyrillicSmallLetterZhwe), 'Ꚇ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterCche), 'ꚇ' => Ok(CyrillicExtendedB::CyrillicSmallLetterCche), 'Ꚉ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterDzze), 'ꚉ' => Ok(CyrillicExtendedB::CyrillicSmallLetterDzze), 'Ꚋ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterTeWithMiddleHook), 'ꚋ' => Ok(CyrillicExtendedB::CyrillicSmallLetterTeWithMiddleHook), 'Ꚍ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterTwe), 'ꚍ' => Ok(CyrillicExtendedB::CyrillicSmallLetterTwe), 'Ꚏ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterTswe), 'ꚏ' => Ok(CyrillicExtendedB::CyrillicSmallLetterTswe), 'Ꚑ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterTsse), 'ꚑ' => Ok(CyrillicExtendedB::CyrillicSmallLetterTsse), 'Ꚓ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterTche), 'ꚓ' => Ok(CyrillicExtendedB::CyrillicSmallLetterTche), 'Ꚕ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterHwe), 'ꚕ' => Ok(CyrillicExtendedB::CyrillicSmallLetterHwe), 'Ꚗ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterShwe), 'ꚗ' => Ok(CyrillicExtendedB::CyrillicSmallLetterShwe), 'Ꚙ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterDoubleO), 'ꚙ' => Ok(CyrillicExtendedB::CyrillicSmallLetterDoubleO), 'Ꚛ' => Ok(CyrillicExtendedB::CyrillicCapitalLetterCrossedO), 'ꚛ' => Ok(CyrillicExtendedB::CyrillicSmallLetterCrossedO), 'ꚜ' => Ok(CyrillicExtendedB::ModifierLetterCyrillicHardSign), 'ꚝ' => Ok(CyrillicExtendedB::ModifierLetterCyrillicSoftSign), 'ꚞ' => 
Ok(CyrillicExtendedB::CombiningCyrillicLetterEf), _ => Err(()), } } } impl Into<u32> for CyrillicExtendedB { fn into(self) -> u32 { let c: char = self.into(); let hex = c .escape_unicode() .to_string() .replace("\\u{", "") .replace("}", ""); u32::from_str_radix(&hex, 16).unwrap() } } impl std::convert::TryFrom<u32> for CyrillicExtendedB { type Error = (); fn try_from(u: u32) -> Result<Self, Self::Error> { if let Ok(c) = char::try_from(u) { Self::try_from(c) } else { Err(()) } } } impl Iterator for CyrillicExtendedB { type Item = Self; fn next(&mut self) -> Option<Self> { let index: u32 = (*self).into(); use std::convert::TryFrom; Self::try_from(index + 1).ok() } } impl CyrillicExtendedB { /// The character with the lowest index in this unicode block pub fn new() -> Self { CyrillicExtendedB::CyrillicCapitalLetterZemlya } /// The character's name, in sentence case pub fn name(&self) -> String { let s = std::format!("CyrillicExtendedB{:#?}", self); string_morph::to_sentence_case(&s) } }
use std::fs::File;
use std::io::Read;
use std::path::Path;

use rust_crypto;
use rust_crypto::mac::Mac;
use rust_crypto::symmetriccipher::BlockDecryptor;

use ::misc::*;
use ::zbackup::data::*;
use ::zbackup::disk_format::*;

/// This implements the decryption and verification of a ZBackup encryption key,
/// from the `EncryptionKeyInfo` and the password file. This will normally be
/// called automatically when constructing a `Repository`, but it is made public
/// because it may be useful in some cases.
///
/// Returns `Ok (Some (key))` on success, `Ok (None)` when the password does not
/// verify against the stored HMAC check, and `Err` for I/O failures.
#[ inline ]
pub fn decrypt_key <
	PasswordFilePath: AsRef <Path>,
> (
	password_file_path: PasswordFilePath,
	encryption_key: DiskEncryptionKeyInfoRef,
) -> Result <Option <EncryptionKey>, String> {

	decrypt_key_impl (
		password_file_path.as_ref (),
		encryption_key,
	)

}

// Monomorphisation-friendly inner implementation: the generic wrapper above
// only converts its path argument, so all real work lives here.
fn decrypt_key_impl (
	password_file_path: & Path,
	encryption_key_info: DiskEncryptionKeyInfoRef,
) -> Result <Option <EncryptionKey>, String> {

	// read password from file

	let mut password_file =
		io_result (
			File::open (
				password_file_path),
		) ?;

	let mut password_string =
		String::new ();

	io_result (
		password_file.read_to_string (
			& mut password_string),
	) ?;

	// remove trailing newline — only a single final "\n" is stripped, so a
	// password that legitimately ends in whitespace is otherwise preserved

	if password_string.ends_with ("\n") {

		let password_length =
			password_string.len ();

		password_string.truncate (
			password_length - 1);

	}

	let password_bytes =
		password_string.as_bytes ();

	// derive password key from password, via PBKDF2 with HMAC-SHA1 using the
	// salt and iteration count stored alongside the encrypted key

	let mut password_hmac =
		rust_crypto::hmac::Hmac::new (
			rust_crypto::sha1::Sha1::new (),
			password_bytes);

	let mut password_result =
		[0u8; KEY_SIZE];

	rust_crypto::pbkdf2::pbkdf2 (
		& mut password_hmac,
		encryption_key_info.salt (),
		encryption_key_info.rounds (),
		& mut password_result);

	// decrypt actual key using password key (single AES-128 block decrypt;
	// KEY_SIZE is assumed to equal the AES block size — see constants)

	let key_decryptor =
		rust_crypto::aessafe::AesSafe128Decryptor::new (
			& password_result);

	let mut key_result =
		[0u8; KEY_SIZE];

	key_decryptor.decrypt_block (
		& encryption_key_info.encrypted_key (),
		& mut key_result);

	// derive check result to verify password: HMAC-SHA1 over the stored check
	// input, keyed with the just-decrypted key

	let mut check_hmac =
		rust_crypto::hmac::Hmac::new (
			rust_crypto::sha1::Sha1::new (),
			& key_result);

	check_hmac.input (
		encryption_key_info.key_check_input ());

	let mut check_result =
		[0u8; HMAC_SIZE];

	check_hmac.raw_result (
		& mut check_result);

	// return the key only when the computed HMAC matches the stored one;
	// a mismatch means the password was wrong, which is not an error here

	if check_result == encryption_key_info.key_check_hmac () {

		Ok (Some (
			key_result
		))

	} else {

		Ok (None)

	}

}

// ex: noet ts=4 filetype=rust
extern crate clap;
extern crate external_sort;
extern crate fine_grained;
extern crate rand;

use std::fs::File;
use std::io::{Write, BufWriter};

use clap::{Arg, App};
use rand::{Rng, SeedableRng, XorShiftRng};
use rand::os::OsRng;
use rand::distributions::{IndependentSample, Range};

use external_sort::conversions::{to_usize_saturating, to_u64_panicking};

/// Every generated record is 127 random letters plus a trailing newline,
/// giving fixed-width 128-byte lines.
const LINE_LENGTH_WITH_NEWLINE : usize = 128;
const MEGABYTE_SIZE : u64 = 1024 * 1024;

/// Parses CLI options and streams the generated data to a file or stdout.
fn main() {
    let matches = App::new("generate")
        .version("0.1.0")
        .author("Drake Tetreault <drakeat@amazon.com>")
        .about("Generates test files for CodeDeploy reading group project 1")
        .arg(Arg::with_name("size")
            .short("s")
            .long("size")
            .value_name("SIZE")
            .help("Size of the output to generate in megabytes; defaults to 1024 if not provided")
            .takes_value(true))
        .arg(Arg::with_name("destination")
            .short("d")
            .long("destination")
            .value_name("OUTPUT_PATH")
            .help("Path to the output file; if not present then the generated file will be written to standard output")
            .takes_value(true))
        .get_matches();

    let size_in_megabytes : u64 = matches.value_of("size").map_or(1024u64, |size| {
        size.parse::<u64>().expect("Invalid size value")
    });

    if let Some(destination) = matches.value_of("destination") {
        let mut file = File::create(destination).expect("Unable to create destination file");
        generate(size_in_megabytes, &mut file);
    } else {
        let stdout = ::std::io::stdout();
        let mut stdout_lock = stdout.lock();
        generate(size_in_megabytes, &mut stdout_lock);
    }
}

/// Writes `size_in_megabytes` MiB of fixed-width random lines to `writer`,
/// reporting elapsed time on stderr when done.
fn generate<TWrite : Write>(size_in_megabytes: u64, writer: &mut TWrite) {
    let mut writer = BufWriter::with_capacity(to_usize_saturating(MEGABYTE_SIZE), writer);
    let mut rng = create_prng();
    let mut line = create_blank_line_with_newline();
    // Half-open byte range covering ASCII 'a' through 'z' inclusive.
    let letter_range = Range::new('a' as u8, 'z' as u8 + 1);

    let stopwatch = fine_grained::Stopwatch::start_new();

    let lines_needed = size_in_megabytes * MEGABYTE_SIZE / to_u64_panicking(LINE_LENGTH_WITH_NEWLINE);
    for _ in 0..lines_needed {
        // Randomize everything except the trailing newline byte.
        fill_line(&mut line[0..LINE_LENGTH_WITH_NEWLINE - 1], &mut rng, &letter_range);
        // `write` may legally perform a short write without erroring;
        // `write_all` retries until the whole record is out (or fails).
        writer.write_all(&line).expect("Failed to write line");
    }

    // BufWriter's Drop flushes but silently discards errors; flush
    // explicitly so a failure is actually reported.
    writer.flush().expect("Failed to flush output");

    writeln!(std::io::stderr(), "Elapsed time: {duration}", duration = stopwatch).unwrap();
}

/// Seeds a fast, non-cryptographic PRNG from OS-provided entropy.
fn create_prng() -> XorShiftRng {
    let mut seeding_rng = OsRng::new().expect("Failed to get initial random entropy from OS source");

    let seed = [
        seeding_rng.next_u32(),
        seeding_rng.next_u32(),
        seeding_rng.next_u32(),
        seeding_rng.next_u32(),
    ];

    XorShiftRng::from_seed(seed)
}

/// Builds a line of '!' placeholder bytes terminated by a newline.
fn create_blank_line_with_newline() -> [u8; LINE_LENGTH_WITH_NEWLINE] {
    let mut line = ['!' as u8; LINE_LENGTH_WITH_NEWLINE];
    line[LINE_LENGTH_WITH_NEWLINE - 1] = '\n' as u8;
    line
}

/// Overwrites every byte of `line` with a uniformly random byte drawn
/// from `range`.
fn fill_line<TRng: Rng>(line: &mut [u8], rng: &mut TRng, range: &Range<u8>) {
    for entry in line.iter_mut() {
        *entry = range.ind_sample(rng);
    }
}
// NOTE(review): svd2rust-style generated accessors for the RCC_BR_RSTSCLRR
// reset-status register. Each field repeats the same generated pattern: an
// enumerated value type (`*_A`), a read proxy (`*_R`) and a write proxy
// (`*_W`) that masks its single bit into the register word at the field's
// bit position. Generated code — kept byte-identical; only this header
// comment was added.
#[doc = "Reader of register RCC_BR_RSTSCLRR"] pub type R = crate::R<u32, super::RCC_BR_RSTSCLRR>; #[doc = "Writer for register RCC_BR_RSTSCLRR"] pub type W = crate::W<u32, super::RCC_BR_RSTSCLRR>; #[doc = "Register RCC_BR_RSTSCLRR `reset()`'s with value 0x15"] impl crate::ResetValue for super::RCC_BR_RSTSCLRR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x15 } } #[doc = "PORRSTF\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PORRSTF_A { #[doc = "0: Writing has no effect, reading means\r\n that no POR/PDR reset occurred"] B_0X0 = 0, #[doc = "1: Writing clears the PORRSTF flag,\r\n reading means that a POR/PDR reset occurred\r\n (default after por_rst reset)"] B_0X1 = 1, } impl From<PORRSTF_A> for bool { #[inline(always)] fn from(variant: PORRSTF_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `PORRSTF`"] pub type PORRSTF_R = crate::R<bool, PORRSTF_A>; impl PORRSTF_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> PORRSTF_A { match self.bits { false => PORRSTF_A::B_0X0, true => PORRSTF_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == PORRSTF_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == PORRSTF_A::B_0X1 } } #[doc = "Write proxy for field `PORRSTF`"] pub struct PORRSTF_W<'a> { w: &'a mut W, } impl<'a> PORRSTF_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PORRSTF_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Writing has no effect, reading means that no POR/PDR reset occurred"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(PORRSTF_A::B_0X0) } #[doc = "Writing clears the PORRSTF flag, reading means that a POR/PDR reset occurred (default after por_rst reset)"] #[inline(always)] pub fn b_0x1(self) ->
self.variant(PORRSTF_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "BORRSTF\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum BORRSTF_A { #[doc = "0: Writing has no effect, reading means\r\n that no BOR reset occurred (default after por_rst\r\n reset)"] B_0X0 = 0, #[doc = "1: Writing clears the BORRSTF flag,\r\n reading means that a BOR reset\r\n occurred"] B_0X1 = 1, } impl From<BORRSTF_A> for bool { #[inline(always)] fn from(variant: BORRSTF_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `BORRSTF`"] pub type BORRSTF_R = crate::R<bool, BORRSTF_A>; impl BORRSTF_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> BORRSTF_A { match self.bits { false => BORRSTF_A::B_0X0, true => BORRSTF_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == BORRSTF_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == BORRSTF_A::B_0X1 } } #[doc = "Write proxy for field `BORRSTF`"] pub struct BORRSTF_W<'a> { w: &'a mut W, } impl<'a> BORRSTF_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: BORRSTF_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Writing has no effect, reading means that no BOR reset occurred (default after por_rst reset)"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(BORRSTF_A::B_0X0) } #[doc = "Writing clears the BORRSTF flag, reading means that a BOR reset occurred"] #[inline(always)] pub fn b_0x1(self) ->
self.variant(BORRSTF_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "PADRSTF\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PADRSTF_A { #[doc = "0: Writing has no effect, reading means\r\n that no PAD reset occurred (default after por_rst\r\n reset)"] B_0X0 = 0, #[doc = "1: Writing clears the PADRSTF flag,\r\n reading means that a PAD reset\r\n occurred"] B_0X1 = 1, } impl From<PADRSTF_A> for bool { #[inline(always)] fn from(variant: PADRSTF_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `PADRSTF`"] pub type PADRSTF_R = crate::R<bool, PADRSTF_A>; impl PADRSTF_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> PADRSTF_A { match self.bits { false => PADRSTF_A::B_0X0, true => PADRSTF_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == PADRSTF_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == PADRSTF_A::B_0X1 } } #[doc = "Write proxy for field `PADRSTF`"] pub struct PADRSTF_W<'a> { w: &'a mut W, } impl<'a> PADRSTF_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PADRSTF_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Writing has no effect, reading means that no PAD reset occurred (default after por_rst reset)"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(PADRSTF_A::B_0X0) } #[doc = "Writing clears the PADRSTF flag, reading means that a PAD reset occurred"] #[inline(always)] pub fn b_0x1(self) ->
&'a mut W { self.variant(PADRSTF_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "HCSSRSTF\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum HCSSRSTF_A { #[doc = "0: Writing has no effect, reading means\r\n that no HSE CSS reset occurred (default after\r\n por_rst reset)"] B_0X0 = 0, #[doc = "1: Writing clears the HCSSRSTF flag,\r\n reading means that a HSE CSS reset\r\n occurred"] B_0X1 = 1, } impl From<HCSSRSTF_A> for bool { #[inline(always)] fn from(variant: HCSSRSTF_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `HCSSRSTF`"] pub type HCSSRSTF_R = crate::R<bool, HCSSRSTF_A>; impl HCSSRSTF_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> HCSSRSTF_A { match self.bits { false => HCSSRSTF_A::B_0X0, true => HCSSRSTF_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == HCSSRSTF_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == HCSSRSTF_A::B_0X1 } } #[doc = "Write proxy for field `HCSSRSTF`"] pub struct HCSSRSTF_W<'a> { w: &'a mut W, } impl<'a> HCSSRSTF_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: HCSSRSTF_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Writing has no effect, reading means that no HSE CSS reset occurred (default after por_rst reset)"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(HCSSRSTF_A::B_0X0) } #[doc = "Writing clears the HCSSRSTF flag, reading means that a HSE CSS reset occurred"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(HCSSRSTF_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "VCORERSTF\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum VCORERSTF_A { #[doc = "0: Writing has no effect, reading means\r\n that VDD_CORE is not the origin of the\r\n reset"] B_0X0 = 0, #[doc = "1: Writing clears the VCORERSTF flag,\r\n reading means that VDD_CORE is the origin of the\r\n reset (default after por_rst reset)"] B_0X1 = 1, } impl From<VCORERSTF_A> for bool { #[inline(always)] fn from(variant: VCORERSTF_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `VCORERSTF`"] pub type VCORERSTF_R = crate::R<bool, VCORERSTF_A>; impl VCORERSTF_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> VCORERSTF_A { match self.bits { false => VCORERSTF_A::B_0X0, true => VCORERSTF_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == VCORERSTF_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == VCORERSTF_A::B_0X1 } } #[doc = "Write proxy for field `VCORERSTF`"] pub struct VCORERSTF_W<'a> { w: &'a mut W, } impl<'a> VCORERSTF_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: VCORERSTF_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Writing has no effect, reading means that VDD_CORE is not the origin of the reset"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(VCORERSTF_A::B_0X0) }
#[doc = "Writing clears the VCORERSTF flag, reading means that VDD_CORE is the origin of the reset (default after por_rst reset)"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(VCORERSTF_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "MPSYSRSTF\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MPSYSRSTF_A { #[doc = "0: Writing has no effect, reading means\r\n that no system reset generated by the MPU\r\n occurred (default after por_rst\r\n reset)"] B_0X0 = 0, #[doc = "1: Writing clears the MCURSTF flag,\r\n reading means that a system reset generated by\r\n the MPU occurred"] B_0X1 = 1, } impl From<MPSYSRSTF_A> for bool { #[inline(always)] fn from(variant: MPSYSRSTF_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `MPSYSRSTF`"] pub type MPSYSRSTF_R = crate::R<bool, MPSYSRSTF_A>; impl MPSYSRSTF_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> MPSYSRSTF_A { match self.bits { false => MPSYSRSTF_A::B_0X0, true => MPSYSRSTF_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == MPSYSRSTF_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == MPSYSRSTF_A::B_0X1 } } #[doc = "Write proxy for field `MPSYSRSTF`"] pub struct MPSYSRSTF_W<'a> { w: &'a mut W, } impl<'a> MPSYSRSTF_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: MPSYSRSTF_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Writing has no effect, reading means that no system reset generated by the MPU occurred (default after por_rst reset)"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(MPSYSRSTF_A::B_0X0) } #[doc = "Writing clears the MCURSTF flag, reading means that a system reset generated by the MPU occurred"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(MPSYSRSTF_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } #[doc = "MCSYSRSTF\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MCSYSRSTF_A { #[doc = "0: Writing has no effect, reading means\r\n that no system reset generated by the MCU\r\n occurred (default after por_rst\r\n reset)"] B_0X0 = 0, #[doc = "1: Writing clears the MCURSTF flag,\r\n reading means that a system reset generated by\r\n the MCU occurred"] B_0X1 = 1, } impl From<MCSYSRSTF_A> for bool { #[inline(always)] fn from(variant: MCSYSRSTF_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `MCSYSRSTF`"] pub type MCSYSRSTF_R = crate::R<bool, MCSYSRSTF_A>; impl MCSYSRSTF_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> MCSYSRSTF_A { match self.bits { false => MCSYSRSTF_A::B_0X0, true => MCSYSRSTF_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == MCSYSRSTF_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == MCSYSRSTF_A::B_0X1 } } #[doc = "Write proxy for field `MCSYSRSTF`"] pub struct MCSYSRSTF_W<'a> { w: &'a mut W, } impl<'a> MCSYSRSTF_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: MCSYSRSTF_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Writing has no effect, reading means that no system reset generated by the MCU occurred (default after por_rst reset)"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(MCSYSRSTF_A::B_0X0) } #[doc = "Writing clears the MCURSTF flag, reading means that a system reset generated by the MCU occurred"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(MCSYSRSTF_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "IWDG1RSTF\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum IWDG1RSTF_A { #[doc = "0: Writing has no effect, reading means\r\n that no IWDG1 reset occurred (default after\r\n por_rst reset)"] B_0X0 = 0, #[doc = "1: Writing clears the IWDG1RSTF flag,\r\n reading means that a IWDG1 reset\r\n occurred"] B_0X1 = 1, } impl From<IWDG1RSTF_A> for bool { #[inline(always)] fn from(variant: IWDG1RSTF_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `IWDG1RSTF`"] pub type IWDG1RSTF_R = crate::R<bool, IWDG1RSTF_A>; impl IWDG1RSTF_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> IWDG1RSTF_A { match self.bits { false => IWDG1RSTF_A::B_0X0, true => IWDG1RSTF_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == IWDG1RSTF_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == IWDG1RSTF_A::B_0X1 } } #[doc = "Write proxy for field `IWDG1RSTF`"] pub
struct IWDG1RSTF_W<'a> { w: &'a mut W, } impl<'a> IWDG1RSTF_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: IWDG1RSTF_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Writing has no effect, reading means that no IWDG1 reset occurred (default after por_rst reset)"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(IWDG1RSTF_A::B_0X0) } #[doc = "Writing clears the IWDG1RSTF flag, reading means that a IWDG1 reset occurred"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(IWDG1RSTF_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8); self.w } } #[doc = "IWDG2RSTF\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum IWDG2RSTF_A { #[doc = "0: Writing has no effect, reading means\r\n that no IWDG2 reset occurred (default after\r\n por_rst reset)"] B_0X0 = 0, #[doc = "1: Writing clears the IWDG2RSTF flag,\r\n reading means that a IWDG2 reset\r\n occurred"] B_0X1 = 1, } impl From<IWDG2RSTF_A> for bool { #[inline(always)] fn from(variant: IWDG2RSTF_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `IWDG2RSTF`"] pub type IWDG2RSTF_R = crate::R<bool, IWDG2RSTF_A>; impl IWDG2RSTF_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> IWDG2RSTF_A { match self.bits { false => IWDG2RSTF_A::B_0X0, true => IWDG2RSTF_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == IWDG2RSTF_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self ==
IWDG2RSTF_A::B_0X1 } } #[doc = "Write proxy for field `IWDG2RSTF`"] pub struct IWDG2RSTF_W<'a> { w: &'a mut W, } impl<'a> IWDG2RSTF_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: IWDG2RSTF_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Writing has no effect, reading means that no IWDG2 reset occurred (default after por_rst reset)"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(IWDG2RSTF_A::B_0X0) } #[doc = "Writing clears the IWDG2RSTF flag, reading means that a IWDG2 reset occurred"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(IWDG2RSTF_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9); self.w } } #[doc = "WWDG1RSTF\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum WWDG1RSTF_A { #[doc = "0: Writing has no effect, reading means\r\n that no WWDG1 reset occurred (default after\r\n por_rst reset)"] B_0X0 = 0, #[doc = "1: Writing clears the WWDG1RSTF flag,\r\n reading means that a WWDG1 reset\r\n occurred"] B_0X1 = 1, } impl From<WWDG1RSTF_A> for bool { #[inline(always)] fn from(variant: WWDG1RSTF_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `WWDG1RSTF`"] pub type WWDG1RSTF_R = crate::R<bool, WWDG1RSTF_A>; impl WWDG1RSTF_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> WWDG1RSTF_A { match self.bits { false => WWDG1RSTF_A::B_0X0, true => WWDG1RSTF_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == WWDG1RSTF_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == WWDG1RSTF_A::B_0X1 } } #[doc = "Write proxy for field `WWDG1RSTF`"] pub struct WWDG1RSTF_W<'a> { w: &'a mut W, } impl<'a> WWDG1RSTF_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: WWDG1RSTF_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Writing has no effect, reading means that no WWDG1 reset occurred (default after por_rst reset)"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(WWDG1RSTF_A::B_0X0) } #[doc = "Writing clears the WWDG1RSTF flag, reading means that a WWDG1 reset occurred"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(WWDG1RSTF_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10); self.w } } impl R { #[doc = "Bit 0 - PORRSTF"] #[inline(always)] pub fn porrstf(&self) -> PORRSTF_R { PORRSTF_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - BORRSTF"] #[inline(always)] pub fn borrstf(&self) -> BORRSTF_R { BORRSTF_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - PADRSTF"] #[inline(always)] pub fn padrstf(&self) -> PADRSTF_R { PADRSTF_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - HCSSRSTF"] #[inline(always)] pub fn hcssrstf(&self) -> HCSSRSTF_R { HCSSRSTF_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - VCORERSTF"] #[inline(always)] pub fn vcorerstf(&self) -> VCORERSTF_R { VCORERSTF_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 6 - MPSYSRSTF"] #[inline(always)] pub fn mpsysrstf(&self) -> MPSYSRSTF_R { MPSYSRSTF_R::new(((self.bits >> 6) & 0x01) != 0) } #[doc = "Bit 7 - MCSYSRSTF"] #[inline(always)] pub fn mcsysrstf(&self)
-> MCSYSRSTF_R { MCSYSRSTF_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bit 8 - IWDG1RSTF"] #[inline(always)] pub fn iwdg1rstf(&self) -> IWDG1RSTF_R { IWDG1RSTF_R::new(((self.bits >> 8) & 0x01) != 0) } #[doc = "Bit 9 - IWDG2RSTF"] #[inline(always)] pub fn iwdg2rstf(&self) -> IWDG2RSTF_R { IWDG2RSTF_R::new(((self.bits >> 9) & 0x01) != 0) } #[doc = "Bit 10 - WWDG1RSTF"] #[inline(always)] pub fn wwdg1rstf(&self) -> WWDG1RSTF_R { WWDG1RSTF_R::new(((self.bits >> 10) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - PORRSTF"] #[inline(always)] pub fn porrstf(&mut self) -> PORRSTF_W { PORRSTF_W { w: self } } #[doc = "Bit 1 - BORRSTF"] #[inline(always)] pub fn borrstf(&mut self) -> BORRSTF_W { BORRSTF_W { w: self } } #[doc = "Bit 2 - PADRSTF"] #[inline(always)] pub fn padrstf(&mut self) -> PADRSTF_W { PADRSTF_W { w: self } } #[doc = "Bit 3 - HCSSRSTF"] #[inline(always)] pub fn hcssrstf(&mut self) -> HCSSRSTF_W { HCSSRSTF_W { w: self } } #[doc = "Bit 4 - VCORERSTF"] #[inline(always)] pub fn vcorerstf(&mut self) -> VCORERSTF_W { VCORERSTF_W { w: self } } #[doc = "Bit 6 - MPSYSRSTF"] #[inline(always)] pub fn mpsysrstf(&mut self) -> MPSYSRSTF_W { MPSYSRSTF_W { w: self } } #[doc = "Bit 7 - MCSYSRSTF"] #[inline(always)] pub fn mcsysrstf(&mut self) -> MCSYSRSTF_W { MCSYSRSTF_W { w: self } } #[doc = "Bit 8 - IWDG1RSTF"] #[inline(always)] pub fn iwdg1rstf(&mut self) -> IWDG1RSTF_W { IWDG1RSTF_W { w: self } } #[doc = "Bit 9 - IWDG2RSTF"] #[inline(always)] pub fn iwdg2rstf(&mut self) -> IWDG2RSTF_W { IWDG2RSTF_W { w: self } } #[doc = "Bit 10 - WWDG1RSTF"] #[inline(always)] pub fn wwdg1rstf(&mut self) -> WWDG1RSTF_W { WWDG1RSTF_W { w: self } } }
use crate::error::{from_protobuf_error, NiaServerError, NiaServerResult}; use crate::protocol::Serializable; use protobuf::Message; #[derive(Clone, Debug, PartialEq, Eq)] pub struct ActionMouseRelativeMove { dx: i32, dy: i32, } impl ActionMouseRelativeMove { pub fn new(dx: i32, dy: i32) -> ActionMouseRelativeMove { ActionMouseRelativeMove { dx, dy } } pub fn get_dx(&self) -> i32 { self.dx } pub fn get_dy(&self) -> i32 { self.dy } } impl Serializable< ActionMouseRelativeMove, nia_protocol_rust::ActionMouseRelativeMove, > for ActionMouseRelativeMove { fn to_pb(&self) -> nia_protocol_rust::ActionMouseRelativeMove { let mut action_mouse_absolute_move_pb = nia_protocol_rust::ActionMouseRelativeMove::new(); action_mouse_absolute_move_pb.set_dx(self.dx); action_mouse_absolute_move_pb.set_dy(self.dy); action_mouse_absolute_move_pb } fn from_pb( object_pb: nia_protocol_rust::ActionMouseRelativeMove, ) -> NiaServerResult<ActionMouseRelativeMove> { let action_mouse_absolute_move = ActionMouseRelativeMove::new( object_pb.get_dx(), object_pb.get_dy(), ); Ok(action_mouse_absolute_move) } } #[cfg(test)] mod tests { #[allow(unused_imports)] use super::*; #[test] fn serializable_and_deserializable() { let dx_expected = 100; let dy_expected = 200; let action_mouse_relative_move = ActionMouseRelativeMove::new(dx_expected, dy_expected); let bytes = action_mouse_relative_move.to_bytes().unwrap(); let action_mouse_relative_move = ActionMouseRelativeMove::from_bytes(bytes).unwrap(); let dx_actual = action_mouse_relative_move.dx; let dy_actual = action_mouse_relative_move.dy; assert_eq!(dx_expected, dx_actual); assert_eq!(dy_expected, dy_actual); } }
use async_trait::async_trait;
use tokio::sync::oneshot::Receiver;

use crate::event::Event;
use crate::result::Result;

/// Tally describing the outcome of publishing one or more events.
#[derive(Default)]
pub struct PublicationResult {
    pub published_events: u32,
    pub ok_handlers: u32,
    pub err_handlers: u32,
}

impl PublicationResult {
    /// How many events were published.
    pub fn published_events(&self) -> u32 {
        self.published_events
    }

    /// How many handlers completed successfully.
    pub fn ok_handlers(&self) -> u32 {
        self.ok_handlers
    }

    /// How many handlers finished with an error.
    pub fn err_handlers(&self) -> u32 {
        self.err_handlers
    }

    /// Total number of handlers that ran, successful or not.
    pub fn activated_handlers(&self) -> u32 {
        self.ok_handlers() + self.err_handlers()
    }
}

/// Asynchronous publisher of [`Event`]s; the returned channel eventually
/// delivers the [`PublicationResult`] tally for the publication.
#[async_trait]
pub trait EventPublisher: Sync + Send {
    /// Publishes a single event.
    async fn publish(&self, event: Event) -> Result<Receiver<PublicationResult>>;

    /// Publishes a batch of events.
    async fn publish_all(&self, events: Vec<Event>) -> Result<Receiver<PublicationResult>>;
}
extern crate rand;

use std::io;
use std::cmp::Ordering;
use rand::Rng;

/// Classic guessing game: picks a secret in 1..=100 and loops until the
/// player names it, complaining loudly about bad input along the way.
fn main() {
    println!("I'm thinking of a goddamned number between 1 and 100 damnit.");

    let secret = rand::thread_rng().gen_range(1, 101);

    loop {
        println!("What's your damn guess?");

        let mut input = String::new();
        io::stdin().read_line(&mut input)
            .expect("Couldn't read your damn number.");

        // Anything that isn't an unsigned number sends us straight back
        // to the prompt.
        let guess: u32 = if let Ok(number) = input.trim().parse() {
            number
        } else {
            println!("I said type a damn number!");
            continue;
        };

        match guess.cmp(&secret) {
            Ordering::Equal => {
                println!("Damn right.");
                break;
            },
            Ordering::Less => println!("Too damn low."),
            Ordering::Greater => println!("Too damn high."),
        }
    }
}
use totsu_core::solver::SolverParam;

use num_traits::Float;
use num_traits::Num;

/// Reads the environment variable `e` and parses it as a base-10 number.
///
/// Returns `None` when the variable is unset or does not parse; on success
/// the value is logged at info level and returned.
pub fn num_by_env<N: Num + std::fmt::Display>(e: &str) -> Option<N>
{
    // `?` bails out for "unset" and "unparsable" alike, replacing the
    // former `if let Some(v) = … { …; Some(v) } else { None }` shape.
    let v = std::env::var(e).ok()
        .and_then(|s| N::from_str_radix(&s, 10).ok())?;

    log::info!("{}: {}", e, v);
    Some(v)
}

/// Overrides solver parameters from environment variables, keeping each
/// existing value whenever its variable is absent or invalid.
pub fn set_par_by_env<F: Float + std::fmt::Display>(p: &mut SolverParam<F>)
{
    // `max_iter` is itself an `Option`, so a present env value replaces it
    // wholesale; the remaining fields are plain values with fallbacks.
    p.max_iter = num_by_env("MAX_ITER").or(p.max_iter);
    p.eps_acc = num_by_env("EPS_ACC").unwrap_or(p.eps_acc);
    p.eps_inf = num_by_env("EPS_INF").unwrap_or(p.eps_inf);
    p.eps_zero = num_by_env("EPS_ZERO").unwrap_or(p.eps_zero);
    p.log_period = num_by_env("LOG_PERIOD").unwrap_or(p.log_period);
}
/// An enum to represent all characters in the MahjongTiles block. #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum MahjongTiles { /// \u{1f000}: '🀀' MahjongTileEastWind, /// \u{1f001}: '🀁' MahjongTileSouthWind, /// \u{1f002}: '🀂' MahjongTileWestWind, /// \u{1f003}: '🀃' MahjongTileNorthWind, /// \u{1f004}: '🀄' MahjongTileRedDragon, /// \u{1f005}: '🀅' MahjongTileGreenDragon, /// \u{1f006}: '🀆' MahjongTileWhiteDragon, /// \u{1f007}: '🀇' MahjongTileOneOfCharacters, /// \u{1f008}: '🀈' MahjongTileTwoOfCharacters, /// \u{1f009}: '🀉' MahjongTileThreeOfCharacters, /// \u{1f00a}: '🀊' MahjongTileFourOfCharacters, /// \u{1f00b}: '🀋' MahjongTileFiveOfCharacters, /// \u{1f00c}: '🀌' MahjongTileSixOfCharacters, /// \u{1f00d}: '🀍' MahjongTileSevenOfCharacters, /// \u{1f00e}: '🀎' MahjongTileEightOfCharacters, /// \u{1f00f}: '🀏' MahjongTileNineOfCharacters, /// \u{1f010}: '🀐' MahjongTileOneOfBamboos, /// \u{1f011}: '🀑' MahjongTileTwoOfBamboos, /// \u{1f012}: '🀒' MahjongTileThreeOfBamboos, /// \u{1f013}: '🀓' MahjongTileFourOfBamboos, /// \u{1f014}: '🀔' MahjongTileFiveOfBamboos, /// \u{1f015}: '🀕' MahjongTileSixOfBamboos, /// \u{1f016}: '🀖' MahjongTileSevenOfBamboos, /// \u{1f017}: '🀗' MahjongTileEightOfBamboos, /// \u{1f018}: '🀘' MahjongTileNineOfBamboos, /// \u{1f019}: '🀙' MahjongTileOneOfCircles, /// \u{1f01a}: '🀚' MahjongTileTwoOfCircles, /// \u{1f01b}: '🀛' MahjongTileThreeOfCircles, /// \u{1f01c}: '🀜' MahjongTileFourOfCircles, /// \u{1f01d}: '🀝' MahjongTileFiveOfCircles, /// \u{1f01e}: '🀞' MahjongTileSixOfCircles, /// \u{1f01f}: '🀟' MahjongTileSevenOfCircles, /// \u{1f020}: '🀠' MahjongTileEightOfCircles, /// \u{1f021}: '🀡' MahjongTileNineOfCircles, /// \u{1f022}: '🀢' MahjongTilePlum, /// \u{1f023}: '🀣' MahjongTileOrchid, /// \u{1f024}: '🀤' MahjongTileBamboo, /// \u{1f025}: '🀥' MahjongTileChrysanthemum, /// \u{1f026}: '🀦' MahjongTileSpring, /// \u{1f027}: '🀧' MahjongTileSummer, /// \u{1f028}: '🀨' MahjongTileAutumn, /// \u{1f029}: '🀩' MahjongTileWinter, /// \u{1f02a}: 
'🀪' MahjongTileJoker, /// \u{1f02b}: '🀫' MahjongTileBack, } impl Into<char> for MahjongTiles { fn into(self) -> char { match self { MahjongTiles::MahjongTileEastWind => '🀀', MahjongTiles::MahjongTileSouthWind => '🀁', MahjongTiles::MahjongTileWestWind => '🀂', MahjongTiles::MahjongTileNorthWind => '🀃', MahjongTiles::MahjongTileRedDragon => '🀄', MahjongTiles::MahjongTileGreenDragon => '🀅', MahjongTiles::MahjongTileWhiteDragon => '🀆', MahjongTiles::MahjongTileOneOfCharacters => '🀇', MahjongTiles::MahjongTileTwoOfCharacters => '🀈', MahjongTiles::MahjongTileThreeOfCharacters => '🀉', MahjongTiles::MahjongTileFourOfCharacters => '🀊', MahjongTiles::MahjongTileFiveOfCharacters => '🀋', MahjongTiles::MahjongTileSixOfCharacters => '🀌', MahjongTiles::MahjongTileSevenOfCharacters => '🀍', MahjongTiles::MahjongTileEightOfCharacters => '🀎', MahjongTiles::MahjongTileNineOfCharacters => '🀏', MahjongTiles::MahjongTileOneOfBamboos => '🀐', MahjongTiles::MahjongTileTwoOfBamboos => '🀑', MahjongTiles::MahjongTileThreeOfBamboos => '🀒', MahjongTiles::MahjongTileFourOfBamboos => '🀓', MahjongTiles::MahjongTileFiveOfBamboos => '🀔', MahjongTiles::MahjongTileSixOfBamboos => '🀕', MahjongTiles::MahjongTileSevenOfBamboos => '🀖', MahjongTiles::MahjongTileEightOfBamboos => '🀗', MahjongTiles::MahjongTileNineOfBamboos => '🀘', MahjongTiles::MahjongTileOneOfCircles => '🀙', MahjongTiles::MahjongTileTwoOfCircles => '🀚', MahjongTiles::MahjongTileThreeOfCircles => '🀛', MahjongTiles::MahjongTileFourOfCircles => '🀜', MahjongTiles::MahjongTileFiveOfCircles => '🀝', MahjongTiles::MahjongTileSixOfCircles => '🀞', MahjongTiles::MahjongTileSevenOfCircles => '🀟', MahjongTiles::MahjongTileEightOfCircles => '🀠', MahjongTiles::MahjongTileNineOfCircles => '🀡', MahjongTiles::MahjongTilePlum => '🀢', MahjongTiles::MahjongTileOrchid => '🀣', MahjongTiles::MahjongTileBamboo => '🀤', MahjongTiles::MahjongTileChrysanthemum => '🀥', MahjongTiles::MahjongTileSpring => '🀦', MahjongTiles::MahjongTileSummer => '🀧', 
MahjongTiles::MahjongTileAutumn => '🀨', MahjongTiles::MahjongTileWinter => '🀩', MahjongTiles::MahjongTileJoker => '🀪', MahjongTiles::MahjongTileBack => '🀫', } } } impl std::convert::TryFrom<char> for MahjongTiles { type Error = (); fn try_from(c: char) -> Result<Self, Self::Error> { match c { '🀀' => Ok(MahjongTiles::MahjongTileEastWind), '🀁' => Ok(MahjongTiles::MahjongTileSouthWind), '🀂' => Ok(MahjongTiles::MahjongTileWestWind), '🀃' => Ok(MahjongTiles::MahjongTileNorthWind), '🀄' => Ok(MahjongTiles::MahjongTileRedDragon), '🀅' => Ok(MahjongTiles::MahjongTileGreenDragon), '🀆' => Ok(MahjongTiles::MahjongTileWhiteDragon), '🀇' => Ok(MahjongTiles::MahjongTileOneOfCharacters), '🀈' => Ok(MahjongTiles::MahjongTileTwoOfCharacters), '🀉' => Ok(MahjongTiles::MahjongTileThreeOfCharacters), '🀊' => Ok(MahjongTiles::MahjongTileFourOfCharacters), '🀋' => Ok(MahjongTiles::MahjongTileFiveOfCharacters), '🀌' => Ok(MahjongTiles::MahjongTileSixOfCharacters), '🀍' => Ok(MahjongTiles::MahjongTileSevenOfCharacters), '🀎' => Ok(MahjongTiles::MahjongTileEightOfCharacters), '🀏' => Ok(MahjongTiles::MahjongTileNineOfCharacters), '🀐' => Ok(MahjongTiles::MahjongTileOneOfBamboos), '🀑' => Ok(MahjongTiles::MahjongTileTwoOfBamboos), '🀒' => Ok(MahjongTiles::MahjongTileThreeOfBamboos), '🀓' => Ok(MahjongTiles::MahjongTileFourOfBamboos), '🀔' => Ok(MahjongTiles::MahjongTileFiveOfBamboos), '🀕' => Ok(MahjongTiles::MahjongTileSixOfBamboos), '🀖' => Ok(MahjongTiles::MahjongTileSevenOfBamboos), '🀗' => Ok(MahjongTiles::MahjongTileEightOfBamboos), '🀘' => Ok(MahjongTiles::MahjongTileNineOfBamboos), '🀙' => Ok(MahjongTiles::MahjongTileOneOfCircles), '🀚' => Ok(MahjongTiles::MahjongTileTwoOfCircles), '🀛' => Ok(MahjongTiles::MahjongTileThreeOfCircles), '🀜' => Ok(MahjongTiles::MahjongTileFourOfCircles), '🀝' => Ok(MahjongTiles::MahjongTileFiveOfCircles), '🀞' => Ok(MahjongTiles::MahjongTileSixOfCircles), '🀟' => Ok(MahjongTiles::MahjongTileSevenOfCircles), '🀠' => Ok(MahjongTiles::MahjongTileEightOfCircles), '🀡' => 
Ok(MahjongTiles::MahjongTileNineOfCircles), '🀢' => Ok(MahjongTiles::MahjongTilePlum), '🀣' => Ok(MahjongTiles::MahjongTileOrchid), '🀤' => Ok(MahjongTiles::MahjongTileBamboo), '🀥' => Ok(MahjongTiles::MahjongTileChrysanthemum), '🀦' => Ok(MahjongTiles::MahjongTileSpring), '🀧' => Ok(MahjongTiles::MahjongTileSummer), '🀨' => Ok(MahjongTiles::MahjongTileAutumn), '🀩' => Ok(MahjongTiles::MahjongTileWinter), '🀪' => Ok(MahjongTiles::MahjongTileJoker), '🀫' => Ok(MahjongTiles::MahjongTileBack), _ => Err(()), } } } impl Into<u32> for MahjongTiles { fn into(self) -> u32 { let c: char = self.into(); let hex = c .escape_unicode() .to_string() .replace("\\u{", "") .replace("}", ""); u32::from_str_radix(&hex, 16).unwrap() } } impl std::convert::TryFrom<u32> for MahjongTiles { type Error = (); fn try_from(u: u32) -> Result<Self, Self::Error> { if let Ok(c) = char::try_from(u) { Self::try_from(c) } else { Err(()) } } } impl Iterator for MahjongTiles { type Item = Self; fn next(&mut self) -> Option<Self> { let index: u32 = (*self).into(); use std::convert::TryFrom; Self::try_from(index + 1).ok() } } impl MahjongTiles { /// The character with the lowest index in this unicode block pub fn new() -> Self { MahjongTiles::MahjongTileEastWind } /// The character's name, in sentence case pub fn name(&self) -> String { let s = std::format!("MahjongTiles{:#?}", self); string_morph::to_sentence_case(&s) } }
use eyre::ensure;

/// A fixed-layout NES ROM image holding 32 KiB of PRG data.
#[derive(Debug)]
pub struct Rom {
    pub prg: [u8; 0x8000],
}

impl Rom {
    /// Parses an iNES image, validating the exact file size (16-byte header
    /// plus two 32 KiB sections) and the magic bytes before copying out the
    /// PRG section that follows the header.
    pub fn from_ines(ines: impl AsRef<[u8]>) -> eyre::Result<Self> {
        use std::convert::TryInto;

        const INES_MAGIC: &[u8] = b"NES\x1A";

        let image = ines.as_ref();
        ensure!(image.len() == 16 + 0x8000 + 0x8000, "iNES ROM size mismatch");
        ensure!(image.starts_with(INES_MAGIC), "iNES magic not found");

        // PRG begins immediately after the 16-byte header.
        let prg: [u8; 0x8000] = image[16..16 + 0x8000].try_into().unwrap();
        Ok(Self { prg })
    }
}
use ckb_chain_spec::consensus::Consensus;
use ckb_core::block::Block;
use ckb_core::extras::{BlockExt, EpochExt};
use ckb_core::header::{BlockNumber, Header};
use ckb_core::transaction::{ProposalShortId, Transaction};
use ckb_core::uncle::UncleBlock;
use ckb_script::ScriptConfig;
use ckb_store::ChainStore;
use numext_fixed_hash::H256;
use std::sync::Arc;

/// Read-only view of chain data backed by a [`ChainStore`].
///
/// Lookups are keyed by block hash (or height) and return `None` when the
/// item is not known to the underlying store.
pub trait ChainProvider: Sync + Send {
    /// Concrete storage backend used by this provider.
    type Store: ChainStore;

    /// Handle to the underlying store.
    fn store(&self) -> &Arc<Self::Store>;

    /// Script (VM) configuration used for transaction verification.
    fn script_config(&self) -> &ScriptConfig;

    /// Transactions of the block with `hash`, if its body is stored.
    fn block_body(&self, hash: &H256) -> Option<Vec<Transaction>>;

    /// Header of the block with `hash`.
    fn block_header(&self, hash: &H256) -> Option<Header>;

    /// Proposal short-ids carried by the block with `hash`.
    fn block_proposal_txs_ids(&self, hash: &H256) -> Option<Vec<ProposalShortId>>;

    /// Uncle blocks of the block with `hash`.
    fn uncles(&self, hash: &H256) -> Option<Vec<UncleBlock>>;

    /// Hash of the block at `number` on the canonical chain.
    fn block_hash(&self, number: BlockNumber) -> Option<H256>;

    /// Extra per-block bookkeeping (see `BlockExt`) for the block with `hash`.
    fn block_ext(&self, hash: &H256) -> Option<BlockExt>;

    /// Height of the block with `hash`.
    fn block_number(&self, hash: &H256) -> Option<BlockNumber>;

    /// The complete block with `hash`.
    fn block(&self, hash: &H256) -> Option<Block>;

    /// Hash of the genesis block.
    fn genesis_hash(&self) -> &H256;

    /// A transaction together with the hash of the block containing it.
    fn get_transaction(&self, hash: &H256) -> Option<(Transaction, H256)>;

    /// Ancestor of `base` at height `number`.
    fn get_ancestor(&self, base: &H256, number: BlockNumber) -> Option<Header>;

    /// Epoch information for the block with `hash`.
    fn get_block_epoch(&self, hash: &H256) -> Option<EpochExt>;

    /// The epoch following `last_epoch`, computed from `header`, if any.
    fn next_epoch_ext(&self, last_epoch: &EpochExt, header: &Header) -> Option<EpochExt>;

    /// Consensus parameters of this chain.
    fn consensus(&self) -> &Consensus;
}
use std::env;
use std::io::BufRead;
use std::fs;

pub mod intcode;

/// First command-line argument, panicking when it is missing.
fn get_filename() -> String {
    env::args().nth(1).expect("no filename given")
}

/// Whole input file (named by the first CLI argument) as a single string.
#[allow(dead_code)]
pub fn get_content() -> String {
    fs::read_to_string(get_filename()).expect("could not read file")
}

/// Input file split into lines, with line terminators stripped.
#[allow(dead_code)]
pub fn get_lines() -> Vec<String> {
    let handle = std::fs::File::open(get_filename()).expect("could not open file");
    std::io::BufReader::new(handle)
        .lines()
        .map(|line| line.expect("could not read line"))
        .collect()
}
use std::fs::File; use std::io::prelude::*; use std::path::Path; use crate::util::fill_vec; //读取文本内容 pub fn read_file(filename: &str) -> Result<String, std::io::Error> { let path = Path::new(&filename); // Open the path in read-only mode, returns `io::Result<File>` match File::open(&path) { // The `description` method of `io::Error` returns a string that // describes the error Err(why) => Err(why), Ok(mut file) => { // Read the file contents into a string, returns `io::Result<usize>` let mut s: String = String::new(); file.read_to_string(&mut s)?; Ok(s) } } } pub fn read_acm(filename: String) -> Result<(usize, Vec<u8>), std::io::Error> { let path = Path::new(&filename); // Open the path in read-only mode, returns `io::Result<File>` match File::open(&path) { // The `description` method of `io::Error` returns a string that // describes the error Err(why) => Err(why), Ok(mut file) => { //读取当前游标 let mut hbuf = [0u8; 8]; match file.read_exact(&mut hbuf) { Err(e) => Err(e), Ok(_) => { //读取key长度 let mut hbuf = [0u8; 8]; match file.read_exact(&mut hbuf) { Err(e) => Err(e), Ok(_) => { let size = unsafe { std::mem::transmute::<[u8; 8], usize>(hbuf) }; let mut buf: Vec<u8> = Vec::with_capacity(size); match file.read_to_end(&mut buf) { Err(why) => Err(why), Ok(_) => Ok((size, buf)), } } } } } } } } pub fn read(filename: &String) -> Result<Vec<u8>, std::io::Error> { let path = Path::new(&filename); // Open the path in read-only mode, returns `io::Result<File>` match File::open(&path) { // The `description` method of `io::Error` returns a string that // describes the error Err(why) => Err(why), Ok(mut file) => { //读取数据头 let mut hbuf = [0u8; 8]; match file.read_exact(&mut hbuf) { Err(e) => Err(e), Ok(_) => { let size = unsafe { std::mem::transmute::<[u8; 8], usize>(hbuf) }; let mut buf: Vec<u8> = Vec::with_capacity(size); match file.read_to_end(&mut buf) { Err(why) => Err(why), Ok(_) => Ok(buf), } } } } } } pub fn read_at(filename: &String, offset: u64) -> Result<Vec<u8>, 
std::io::Error> { let path = Path::new(&filename); // Open the path in read-only mode, returns `io::Result<File>` match std::fs::OpenOptions::new().read(true).open(&path) { // The `description` method of `io::Error` returns a string that // describes the error Err(why) => Err(why), Ok(mut file) => { let mut hbuf = [0u8; 4]; match file.read_exact(&mut hbuf) { Err(e) => Err(e), Ok(_) => { if offset != 0 { let cursor = file.seek(std::io::SeekFrom::Current(offset as i64))?; println!("curosr:{}", cursor); } let size = unsafe { std::mem::transmute::<[u8; 4], u32>(hbuf) }; println!("buf len {}", size); let mut buf: Vec<u8> = Vec::with_capacity(size as usize); fill_vec(&mut buf, size as usize); let cursor = file.seek(std::io::SeekFrom::Current(0))?; println!("curosr:{}", cursor); match file.read_exact(&mut buf) { Err(why) => Err(why), Ok(_) => Ok(buf), } } } } } } pub fn read_len(filename: String) -> Result<usize, std::io::Error> { let path = Path::new(&filename); match std::fs::metadata(path) { Err(e) => Err(e), Ok(metadata) => Ok(metadata.len() as usize), } }
#![feature(async_closure)]

mod cmds;
mod server;
mod utils;

use cmds::parse_cmd;

/// Entry point: parse the command line into a runnable command object and
/// execute it.
fn main() {
    let mut runnable = parse_cmd();
    // NOTE(review): a failing run() aborts the process via unwrap — no
    // user-friendly error reporting happens here.
    runnable.as_mut().run().unwrap();
}
// svd2rust-generated read-only register description for the MDMA channel
// error status register (ESR). Only the code tokens below are hand-checked;
// the field semantics come from the device's SVD description.

#[doc = "Register `ESR` reader"]
pub type R = crate::R<ESR_SPEC>;
#[doc = "Field `TEA` reader - Transfer Error Address. These bits are set and cleared by HW on an MDMA data transfer error and are used in conjunction with TED. The field holds the 7 LSBits of the address which generated the transfer/access error; SW may retrieve the failing address by adding this value (truncated to the buffer transfer length size) to the current SAR/DAR value. Note: the SAR/DAR current value doesn't reflect this last address due to the FIFO management system — SAR/DAR are only updated at the end of a (buffer) transfer (of TLEN+1 bytes). Not set in case of a link data error."]
pub type TEA_R = crate::FieldReader;
#[doc = "Field `TED` reader - Transfer Error Direction. This bit is set and cleared by HW on an MDMA data transfer error."]
pub type TED_R = crate::BitReader;
#[doc = "Field `TELD` reader - Transfer Error Link Data. This bit is set by HW on a transfer error while reading the block link data structure; it is cleared by software writing 1 to the CTEIFx bit in the DMA_IFCRy register."]
pub type TELD_R = crate::BitReader;
#[doc = "Field `TEMD` reader - Transfer Error Mask Data. This bit is set by HW on a transfer error while writing the Mask Data; it is cleared by software writing 1 to the CTEIFx bit in the DMA_IFCRy register."]
pub type TEMD_R = crate::BitReader;
#[doc = "Field `ASE` reader - Address/Size Error. This bit is set by HW when the programmed address is not aligned with the data size; TED indicates whether the source or destination is at fault. It is cleared by software writing 1 to the CTEIFx bit in the DMA_IFCRy register."]
pub type ASE_R = crate::BitReader;
#[doc = "Field `BSE` reader - Block Size Error. This bit is set by HW when the block size is not an integer multiple of the data size for either source or destination; TED indicates which. It is cleared by software writing 1 to the CTEIFx bit in the DMA_IFCRy register."]
pub type BSE_R = crate::BitReader;
impl R {
    #[doc = "Bits 0:6 - Transfer Error Address: 7 LSBits of the address that generated the transfer/access error (see the `TEA` field description above)."]
    #[inline(always)]
    pub fn tea(&self) -> TEA_R {
        // Low 7 bits of the register value.
        TEA_R::new((self.bits & 0x7f) as u8)
    }
    #[doc = "Bit 7 - Transfer Error Direction. Set and cleared by HW on an MDMA data transfer error."]
    #[inline(always)]
    pub fn ted(&self) -> TED_R {
        TED_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - Transfer Error Link Data. Set by HW on a transfer error while reading the block link data structure; cleared by writing 1 to CTEIFx in DMA_IFCRy."]
    #[inline(always)]
    pub fn teld(&self) -> TELD_R {
        TELD_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - Transfer Error Mask Data. Set by HW on a transfer error while writing the Mask Data; cleared by writing 1 to CTEIFx in DMA_IFCRy."]
    #[inline(always)]
    pub fn temd(&self) -> TEMD_R {
        TEMD_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - Address/Size Error. Set by HW when the programmed address is not aligned with the data size (TED indicates source or destination); cleared by writing 1 to CTEIFx in DMA_IFCRy."]
    #[inline(always)]
    pub fn ase(&self) -> ASE_R {
        ASE_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - Block Size Error. Set by HW when the block size is not an integer multiple of the data size for source or destination (TED indicates which); cleared by writing 1 to CTEIFx in DMA_IFCRy."]
    #[inline(always)]
    pub fn bse(&self) -> BSE_R {
        BSE_R::new(((self.bits >> 11) & 1) != 0)
    }
}
#[doc = "MDMA Channel x error status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`esr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ESR_SPEC;
impl crate::RegisterSpec for ESR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`esr::R`](R) reader structure"]
impl crate::Readable for ESR_SPEC {}
#[doc = "`reset()` method sets ESR to value 0"]
impl crate::Resettable for ESR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use super::blueprint::BluePrint; use super::file; use super::makefile::MakeFile; use super::utils; use super::zip; pub trait BuildSystem { fn generate(&self) -> i32; } #[derive(Debug, Clone)] pub struct BuildSystemBase { /// The path of the apk input: String, /// Name of the apk name: String, /// Override architectures default_architectures: Vec<String>, /// Has default architectures overwritten has_default_architecture: bool, /// OS version (Unused remove?) os: String, /// To enable or disable pre-optimization preopt_dex: (bool, bool), /// Make an app privileged (priv-app) privileged: bool, /// JNI libs found on apk libraries: Vec<String>, /// Architectures found on apk architectures: Vec<String>, /// Extract JNI libs if any, /// Only available for makefiles extract_so: bool, /// Full logs debug: bool, /// Output a Android.bp file blueprint: bool, /// Output a Android.mk file makefile: bool, /// Output a bazel BUILD file (NOT IMPLEMENTED) bazel: bool, } impl BuildSystemBase { pub fn new<I, N, D, O>( input: I, name: N, default: D, has_default: bool, os: O, preopt_dex: (bool, bool), privileged: bool, extract_so: bool, debug: bool, bp: bool, mk: bool, bz: bool, ) -> Self where I: Into<String>, N: Into<String>, D: Into<String>, O: Into<String>, { let default_architectures = utils::input_to_abi_vec(&default.into(), bp); let mut base = BuildSystemBase { input: input.into(), name: name.into(), default_architectures: default_architectures, has_default_architecture: has_default, os: os.into(), preopt_dex: preopt_dex, privileged: privileged, libraries: Vec::new(), architectures: Vec::new(), extract_so: extract_so, debug: debug, blueprint: bp, makefile: mk, bazel: bz, }; base.init(); base } /// Handles initialization of some variable /// Reads apk files, extracts .so files and adds them as list /// Reads architectures, extracts them as list pub fn init(&mut self) { self.parse_ndk_libs(); let architectures: Vec<String> = self.get_architectures(); // If there's only one 
architecture and we haven't specified a default one.. // then autochose what we have if architectures.len() == 1 && !self.has_default_architecture { let arch = architectures[0].clone(); let msg = format!("Only one architecture, autochoosing {}", arch); self.log(msg); self.set_default_architectures(architectures); } // If there's no name passed, override name with apk name // If there's one passed, respect that let input = self.get_input(); let name = self.get_name(); let default_name = if name.is_empty() { let name_buf = file::file_name(&input); name_buf } else { name }; self.set_name(default_name); } pub fn parse_ndk_libs(&mut self) { let zip_files = zip::run(&self.input); let (arch, so): (Vec<String>, Vec<String>) = file::get_ndk_libs(zip_files, self.blueprint); self.set_libraries(so); self.set_architectures(arch); } pub fn get_name(&self) -> String { self.name.clone() } pub fn set_name<N>(&mut self, name: N) -> &mut Self where N: Into<String>, { self.name = name.into(); self } pub fn get_input(&self) -> String { self.input.clone() } pub fn set_libraries(&mut self, libraries: Vec<String>) { self.libraries = libraries } pub fn set_architectures(&mut self, architectures: Vec<String>) { self.architectures = architectures } pub fn get_architectures(&self) -> Vec<String> { self.architectures.clone() } pub fn get_default_architectures(&self) -> Vec<String> { self.default_architectures.clone() } pub fn set_default_architectures(&mut self, default_architectures: Vec<String>) { self.default_architectures = default_architectures; } pub fn get_preopt_dex(&self) -> (bool, bool) { self.preopt_dex } pub fn set_preopt_dex(&mut self, dex: (bool, bool)) { self.preopt_dex.0 = dex.0; self.preopt_dex.1 = dex.1; } pub fn extract_so(&self) -> bool { self.extract_so } pub fn set_extract_so(&mut self, should_extract_so: bool) { self.extract_so = should_extract_so; } pub fn privileged(&self) -> bool { self.privileged } pub fn set_privileged(&mut self, priv_app: bool) { self.privileged = 
priv_app } pub fn set_has_default_architecture(&mut self, has_def: bool) { self.has_default_architecture = has_def; } pub fn has_default_architecture(&self) -> bool { self.has_default_architecture } pub fn get_libraries(&self) -> Vec<String> { self.libraries.clone() } pub fn is_debug(&self) -> bool { self.debug } pub fn set_debug(&mut self, debug: bool) { self.debug = debug; } fn new_mk(&self) -> MakeFile { MakeFile { build_system: self.clone(), } } fn new_bp(&self) -> BluePrint { BluePrint { build_system: self.clone(), } } fn is_blueprint(&self) -> bool { self.blueprint } fn set_blueprint(&mut self, is_blueprint: bool) { self.blueprint = is_blueprint; } fn is_make_file(&self) -> bool { self.makefile } pub fn log<S>(&self, msg: S) where S: Into<String>, { if self.debug { println!("{:?}", msg.into()); } } pub fn generate(&self) -> i32 { if self.is_make_file() { let make = self.new_mk(); self.gen(make) } else if self.is_blueprint() { let bp = self.new_bp(); self.gen(bp) } else { panic!("Bazel not implemented!"); } } fn gen<T>(&self, system: T) -> i32 where T: BuildSystem, { system.generate() } } impl Default for BuildSystemBase { fn default() -> BuildSystemBase { BuildSystemBase { input: "tests/data/multipleArch.apk".into(), // input name: "multipleArch".into(), // name default_architectures: vec!["arm64-v8a".into()], // default_architecture has_default_architecture: false, // has default architecture os: "6.0".into(), // (un-used) os version preopt_dex: (false, false), // pre-optimize dex files privileged: false, // priviledged libraries: Vec::new(), // JNI libraries found within APK architectures: Vec::new(), // architectures found within APK extract_so: false, // extract_so debug: true, // debug flag blueprint: false, // generate blueprint Android.bp makefile: true, // generate makefile Android.mk bazel: false, // generate bazel BUILD } } } pub struct BuildSystemBaseBuilder { base: BuildSystemBase, } impl BuildSystemBaseBuilder { pub fn new() -> Self { 
BuildSystemBaseBuilder { base: BuildSystemBase::default(), } } pub fn set_input<N>(&mut self, input: N) -> &mut Self where N: Into<String>, { self.base.input = input.into(); self } pub fn set_name<N>(&mut self, name: N) -> &mut Self where N: Into<String>, { self.base.name = name.into(); self } pub fn set_make_file(&mut self, is_make_file: bool) -> &mut Self { self.base.makefile = is_make_file; self } pub fn set_blueprint(&mut self, bp: bool) -> &mut Self { self.base.set_blueprint(bp); self } pub fn set_extract_so(&mut self, so: bool) -> &mut Self { self.base.extract_so = so; self } pub fn set_default_architectures(&mut self, default_architectures: Vec<String>) { self.base.default_architectures = default_architectures; } pub fn get_default_architectures(&self) -> Vec<String> { self.base.get_default_architectures() } pub fn set_has_default_architecture(&mut self, has_def: bool) { self.base.has_default_architecture = has_def; } pub fn has_default_architecture(&self) -> bool { self.base.has_default_architecture } // The way input receives it, as a string pub fn override_arch(&mut self, arch: String) { let default_architectures = utils::input_to_abi_vec(&arch, self.build().blueprint); self.set_default_architectures(default_architectures); self.set_has_default_architecture(true); } pub fn build(&mut self) -> BuildSystemBase { self.base.init(); self.base.clone() } }
extern crate rand; pub use xoroshiro128plus::Xoroshiro128Plus; mod xoroshiro128plus;
use std::cmp::{Eq, PartialEq}; use std::convert::{TryFrom, TryInto}; use std::ffi::{CStr, CString}; use std::fmt; use std::hash::{Hash, Hasher}; use z3_sys::*; use crate::z3::Context; use crate::z3::Pattern; use crate::z3::Sort; use crate::z3::Symbol; use crate::z3::Z3_MUTEX; #[cfg(feature = "arbitrary-size-numeral")] use num::bigint::BigInt; #[cfg(feature = "arbitrary-size-numeral")] use num::rational::BigRational; /// [`Ast`](trait.Ast.html) node representing a boolean value. pub struct Bool<'ctx> { pub(crate) ctx: &'ctx Context, pub(crate) z3_ast: Z3_ast, } /// [`Ast`](trait.Ast.html) node representing an integer value. pub struct Int<'ctx> { pub(crate) ctx: &'ctx Context, pub(crate) z3_ast: Z3_ast, } /// [`Ast`](trait.Ast.html) node representing a real value. pub struct Real<'ctx> { pub(crate) ctx: &'ctx Context, pub(crate) z3_ast: Z3_ast, } /// [`Ast`](trait.Ast.html) node representing a bitvector value. pub struct BV<'ctx> { pub(crate) ctx: &'ctx Context, pub(crate) z3_ast: Z3_ast, } /// [`Ast`](trait.Ast.html) node representing an array value. /// An array in Z3 is a mapping from indices to values. pub struct Array<'ctx> { pub(crate) ctx: &'ctx Context, pub(crate) z3_ast: Z3_ast, } /// [`Ast`](trait.Ast.html) node representing a set value. pub struct Set<'ctx> { pub(crate) ctx: &'ctx Context, pub(crate) z3_ast: Z3_ast, } /// [`Ast`](trait.Ast.html) node representing a datatype or enumeration value. pub struct Datatype<'ctx> { pub(crate) ctx: &'ctx Context, pub(crate) z3_ast: Z3_ast, } /// A dynamically typed [`Ast`](trait.Ast.html) node. pub struct Dynamic<'ctx> { pub(crate) ctx: &'ctx Context, pub(crate) z3_ast: Z3_ast, } macro_rules! unop { ( $f:ident, $z3fn:ident, $retty:ty ) => { pub fn $f(&self) -> $retty { <$retty>::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); $z3fn(self.ctx.z3_ctx, self.z3_ast) }) } }; } macro_rules! 
binop { ( $f:ident, $z3fn:ident, $retty:ty ) => { pub fn $f(&self, other: &Self) -> $retty { <$retty>::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); $z3fn(self.ctx.z3_ctx, self.z3_ast, other.z3_ast) }) } }; } /* We aren't currently using the trinop! macro for any of our trinops macro_rules! trinop { ( $f:ident, $z3fn:ident, $retty:ty ) => { pub fn $f(&self, a: &Self, b: &Self) -> $retty { <$retty>::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); $z3fn(self.ctx.z3_ctx, self.z3_ast, a.z3_ast, b.z3_ast) }) } }; } */ macro_rules! varop { ( $f:ident, $z3fn:ident, $retty:ty ) => { pub fn $f(&self, other: &[&Self]) -> $retty { <$retty>::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); let mut tmp = vec![self.z3_ast]; for a in other { tmp.push(a.z3_ast) } assert!(tmp.len() <= 0xffff_ffff); $z3fn(self.ctx.z3_ctx, tmp.len() as u32, tmp.as_ptr()) }) } }; } /// Abstract syntax tree (AST) nodes represent terms, constants, or expressions. /// The `Ast` trait contains methods common to all AST subtypes. pub trait Ast<'ctx>: Sized + fmt::Debug { fn get_ctx(&self) -> &'ctx Context; fn get_z3_ast(&self) -> Z3_ast; // This would be great, but gives error E0071 "expected struct, variant or union type, found Self" // so I don't think we can write a generic constructor like this. // Instead we just require the method, and use the new_ast! macro defined below to implement it // on each Ast subtype. /* fn new(ctx: &'ctx Context, ast: Z3_ast) -> Self { assert!(!ast.is_null()); Self { ctx, z3_ast: unsafe { debug!("new ast {:p}", ast); let guard = Z3_MUTEX.lock().unwrap(); Z3_inc_ref(ctx.z3_ctx, ast); ast }, } } */ fn new(ctx: &'ctx Context, ast: Z3_ast) -> Self; /// Compare this `Ast` with another `Ast`, and get a [`Bool`](struct.Bool.html) /// representing the result. /// /// This operation works with all possible `Ast`s (int, real, BV, etc), but the two /// `Ast`s being compared must be the same type. // // Note that we can't use the binop! 
macro because of the `pub` keyword on it fn _eq(&self, other: &Self) -> Bool<'ctx> { Bool::new(self.get_ctx(), unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_eq(self.get_ctx().z3_ctx, self.get_z3_ast(), other.get_z3_ast()) }) } /// Compare this `Ast` with a list of other `Ast`s, and get a [`Bool`](struct.Bool.html) /// which is true only if all arguments (including Self) are pairwise distinct. /// /// This operation works with all possible `Ast`s (int, real, BV, etc), but the /// `Ast`s being compared must all be the same type. // // Note that we can't use the varop! macro because of the `pub` keyword on it fn distinct(&self, other: &[&Self]) -> Bool<'ctx> { Bool::new(self.get_ctx(), unsafe { let guard = Z3_MUTEX.lock().unwrap(); let mut tmp = vec![self.get_z3_ast()]; for a in other { tmp.push(a.get_z3_ast()) } assert!(tmp.len() <= 0xffff_ffff); Z3_mk_distinct(self.get_ctx().z3_ctx, tmp.len() as u32, tmp.as_ptr()) }) } /// Get the [`Sort`](../struct.Sort.html) of the `Ast` fn get_sort(&self) -> Sort<'ctx> { Sort::new(self.get_ctx(), unsafe { Z3_get_sort(self.get_ctx().z3_ctx, self.get_z3_ast()) }) } /// Simplify the `Ast`. Returns a new `Ast` which is equivalent, /// but simplified using algebraic simplification rules, such as /// constant propagation. fn simplify(&self) -> Self { Self::new(self.get_ctx(), unsafe { Z3_simplify(self.get_ctx().z3_ctx, self.get_z3_ast()) }) } /// Performs substitution on the `Ast`. The slice `substitutions` contains a /// list of pairs with a "from" `Ast` that will be substituted by a "to" `Ast`. 
fn substitute<T: Ast<'ctx>>(&self, substitutions: &[(&T, &T)]) -> Self { Self::new(self.get_ctx(), unsafe { let guard = Z3_MUTEX.lock().unwrap(); let this_ast = self.get_z3_ast(); let num_exprs = substitutions.len() as ::std::os::raw::c_uint; let mut froms: Vec<_> = vec![]; let mut tos: Vec<_> = vec![]; for (from_ast, to_ast) in substitutions { froms.push(from_ast.get_z3_ast()); tos.push(to_ast.get_z3_ast()); } Z3_substitute( self.get_ctx().z3_ctx, this_ast, num_exprs, froms.as_ptr(), tos.as_ptr(), ) }) } } macro_rules! impl_ast { ($ast:ident) => { impl<'ctx> Ast<'ctx> for $ast<'ctx> { fn new(ctx: &'ctx Context, ast: Z3_ast) -> Self { assert!(!ast.is_null()); Self { ctx, z3_ast: unsafe { debug!("new ast {:p}", ast); let guard = Z3_MUTEX.lock().unwrap(); Z3_inc_ref(ctx.z3_ctx, ast); ast }, } } fn get_ctx(&self) -> &'ctx Context { self.ctx } fn get_z3_ast(&self) -> Z3_ast { self.z3_ast } } impl<'ctx> From<$ast<'ctx>> for Z3_ast { fn from(ast: $ast<'ctx>) -> Self { ast.z3_ast } } impl<'ctx> PartialEq for $ast<'ctx> { fn eq(&self, other: &$ast<'ctx>) -> bool { assert_eq!(self.ctx, other.ctx); unsafe { Z3_is_eq_ast(self.ctx.z3_ctx, self.z3_ast, other.z3_ast) } } } impl<'ctx> Eq for $ast<'ctx> {} impl<'ctx> Clone for $ast<'ctx> { fn clone(&self) -> Self { debug!("clone ast {:p}", self.z3_ast); Self::new(self.ctx, self.z3_ast) } } impl<'ctx> Drop for $ast<'ctx> { fn drop(&mut self) { debug!("drop ast {:p}", self.z3_ast); unsafe { Z3_dec_ref(self.ctx.z3_ctx, self.z3_ast); } } } impl<'ctx> Hash for $ast<'ctx> { fn hash<H: Hasher>(&self, state: &mut H) { unsafe { let u = Z3_get_ast_hash(self.ctx.z3_ctx, self.z3_ast); u.hash(state); } } } impl<'ctx> fmt::Debug for $ast<'ctx> { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { let p = unsafe { Z3_ast_to_string(self.ctx.z3_ctx, self.z3_ast) }; if p.is_null() { return Result::Err(fmt::Error); } match unsafe { CStr::from_ptr(p) }.to_str() { Ok(s) => write!(f, "{}", s), Err(_) => Result::Err(fmt::Error), } } } 
impl<'ctx> fmt::Display for $ast<'ctx> { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { <Self as fmt::Debug>::fmt(self, f) } } }; } macro_rules! impl_from_try_into_dynamic { ($ast:ident, $as_ast:ident) => { impl<'ctx> From<$ast<'ctx>> for Dynamic<'ctx> { fn from(ast: $ast<'ctx>) -> Self { Dynamic::new(ast.ctx, ast.z3_ast) } } impl<'ctx> TryFrom<Dynamic<'ctx>> for $ast<'ctx> { type Error = String; fn try_from(ast: Dynamic<'ctx>) -> Result<Self, String> { ast.$as_ast() .ok_or_else(|| format!("Dynamic is not of requested type: {:?}", ast)) } } }; } impl_ast!(Bool); impl_from_try_into_dynamic!(Bool, as_bool); impl_ast!(Int); impl_from_try_into_dynamic!(Int, as_int); impl_ast!(Real); impl_from_try_into_dynamic!(Real, as_real); impl_ast!(BV); impl_from_try_into_dynamic!(BV, as_bv); impl_ast!(Array); impl_from_try_into_dynamic!(Array, as_array); impl_ast!(Set); // Dynamic::as_set does not exist, so just implement one direction here impl<'ctx> From<Set<'ctx>> for Dynamic<'ctx> { fn from(ast: Set<'ctx>) -> Self { Dynamic::new(ast.ctx, ast.z3_ast) } } impl<'ctx> Int<'ctx> { #[cfg(feature = "arbitrary-size-numeral")] pub fn from_big_int(ctx: &'ctx Context, value: &BigInt) -> Int<'ctx> { Int::from_str(ctx, &value.to_str_radix(10)).unwrap() } pub fn from_str(ctx: &'ctx Context, value: &str) -> Option<Int<'ctx>> { let sort = Sort::int(ctx); let ast = unsafe { let guard = Z3_MUTEX.lock().unwrap(); let int_cstring = CString::new(value).unwrap(); let numeral_ptr = Z3_mk_numeral(ctx.z3_ctx, int_cstring.as_ptr(), sort.z3_sort); if numeral_ptr.is_null() { return None; } numeral_ptr }; Some(Int::new(ctx, ast)) } } impl<'ctx> Real<'ctx> { #[cfg(feature = "arbitrary-size-numeral")] pub fn from_big_rational(ctx: &'ctx Context, value: &BigRational) -> Real<'ctx> { let num = value.numer(); let den = value.denom(); Real::from_real_str(ctx, &num.to_str_radix(10), &den.to_str_radix(10)).unwrap() } pub fn from_real_str(ctx: &'ctx Context, num: &str, den: &str) -> 
Option<Real<'ctx>> { let sort = Sort::real(ctx); let ast = unsafe { let guard = Z3_MUTEX.lock().unwrap(); let fraction_cstring = CString::new(format!("{:} / {:}", num, den)).unwrap(); let numeral_ptr = Z3_mk_numeral(ctx.z3_ctx, fraction_cstring.as_ptr(), sort.z3_sort); if numeral_ptr.is_null() { return None; } numeral_ptr }; Some(Real::new(ctx, ast)) } } impl_ast!(Datatype); impl_from_try_into_dynamic!(Datatype, as_datatype); impl_ast!(Dynamic); impl<'ctx> Bool<'ctx> { pub fn new_const<S: Into<Symbol>>(ctx: &'ctx Context, name: S) -> Bool<'ctx> { let sort = Sort::bool(ctx); Self::new(ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_const(ctx.z3_ctx, name.into().as_z3_symbol(ctx), sort.z3_sort) }) } pub fn fresh_const(ctx: &'ctx Context, prefix: &str) -> Bool<'ctx> { let sort = Sort::bool(ctx); Self::new(ctx, unsafe { let pp = CString::new(prefix).unwrap(); let p = pp.as_ptr(); let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_fresh_const(ctx.z3_ctx, p, sort.z3_sort) }) } pub fn from_bool(ctx: &'ctx Context, b: bool) -> Bool<'ctx> { Self::new(ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); if b { Z3_mk_true(ctx.z3_ctx) } else { Z3_mk_false(ctx.z3_ctx) } }) } pub fn as_bool(&self) -> Option<bool> { unsafe { let guard = Z3_MUTEX.lock().unwrap(); match Z3_get_bool_value(self.ctx.z3_ctx, self.z3_ast) { Z3_L_TRUE => Some(true), Z3_L_FALSE => Some(false), _ => None, } } } // TODO: this should be on the Ast trait, but I don't know how to return Self<'dest_ctx>. // When I try, it gives the error E0109 "lifetime arguments are not allowed for this type". pub fn translate<'dest_ctx>(&self, dest: &'dest_ctx Context) -> Bool<'dest_ctx> { Bool::new(dest, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_translate(self.ctx.z3_ctx, self.z3_ast, dest.z3_ctx) }) } // This doesn't quite fit the trinop! 
macro because of the generic argty pub fn ite<T>(&self, a: &T, b: &T) -> T where T: Ast<'ctx>, { T::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_ite(self.ctx.z3_ctx, self.z3_ast, a.get_z3_ast(), b.get_z3_ast()) }) } varop!(and, Z3_mk_and, Self); varop!(or, Z3_mk_or, Self); binop!(xor, Z3_mk_xor, Self); unop!(not, Z3_mk_not, Self); binop!(iff, Z3_mk_iff, Self); binop!(implies, Z3_mk_implies, Self); pub fn pb_le(&self, other: &[&Bool<'ctx>], coeffs: Vec<i32>, k: i32) -> Bool<'ctx> { Bool::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); let mut tmp = vec![self.z3_ast]; for a in other { tmp.push(a.z3_ast) } assert!(tmp.len() <= 0xffffffff); let mut tmp_coeffs = coeffs.clone(); Z3_mk_pble( self.ctx.z3_ctx, tmp.len() as u32, tmp.as_ptr(), tmp_coeffs.as_mut_ptr(), k, ) }) } pub fn pb_ge(&self, other: &[&Bool<'ctx>], coeffs: Vec<i32>, k: i32) -> Bool<'ctx> { Bool::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); let mut tmp = vec![self.z3_ast]; for a in other { tmp.push(a.z3_ast) } assert!(tmp.len() <= 0xffffffff); let mut tmp_coeffs = coeffs.clone(); Z3_mk_pbge( self.ctx.z3_ctx, tmp.len() as u32, tmp.as_ptr(), tmp_coeffs.as_mut_ptr(), k, ) }) } pub fn pb_eq(&self, other: &[&Bool<'ctx>], coeffs: Vec<i32>, k: i32) -> Bool<'ctx> { Bool::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); let mut tmp = vec![self.z3_ast]; for a in other { tmp.push(a.z3_ast) } assert!(tmp.len() <= 0xffffffff); let mut tmp_coeffs = coeffs.clone(); Z3_mk_pbeq( self.ctx.z3_ctx, tmp.len() as u32, tmp.as_ptr(), tmp_coeffs.as_mut_ptr(), k, ) }) } } impl<'ctx> Int<'ctx> { pub fn new_const<S: Into<Symbol>>(ctx: &'ctx Context, name: S) -> Int<'ctx> { let sort = Sort::int(ctx); Self::new(ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_const(ctx.z3_ctx, name.into().as_z3_symbol(ctx), sort.z3_sort) }) } pub fn fresh_const(ctx: &'ctx Context, prefix: &str) -> Int<'ctx> { let sort = Sort::int(ctx); Self::new(ctx, unsafe { let pp = 
CString::new(prefix).unwrap(); let p = pp.as_ptr(); let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_fresh_const(ctx.z3_ctx, p, sort.z3_sort) }) } pub fn from_i64(ctx: &'ctx Context, i: i64) -> Int<'ctx> { let sort = Sort::int(ctx); Self::new(ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_int64(ctx.z3_ctx, i, sort.z3_sort) }) } pub fn from_u64(ctx: &'ctx Context, u: u64) -> Int<'ctx> { let sort = Sort::int(ctx); Self::new(ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_unsigned_int64(ctx.z3_ctx, u, sort.z3_sort) }) } pub fn as_i64(&self) -> Option<i64> { unsafe { let guard = Z3_MUTEX.lock().unwrap(); let mut tmp: ::std::os::raw::c_longlong = 0; if Z3_get_numeral_int64(self.ctx.z3_ctx, self.z3_ast, &mut tmp) { Some(tmp) } else { None } } } pub fn as_u64(&self) -> Option<u64> { unsafe { let guard = Z3_MUTEX.lock().unwrap(); let mut tmp: ::std::os::raw::c_ulonglong = 0; if Z3_get_numeral_uint64(self.ctx.z3_ctx, self.z3_ast, &mut tmp) { Some(tmp) } else { None } } } pub fn from_real(ast: &Real<'ctx>) -> Int<'ctx> { Self::new(ast.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_real2int(ast.ctx.z3_ctx, ast.z3_ast) }) } /// Create a real from an integer. /// This is just a convenience wrapper around /// [`Real::from_int`](struct.Real.html#method.from_int); see notes there pub fn to_real(&self) -> Real<'ctx> { Real::from_int(self) } /// Create an integer from a bitvector. /// /// Signed and unsigned version. 
/// /// # Examples /// ``` /// # use z3::{ast, Config, Context, SatResult, Solver}; /// # use z3::ast::Ast; /// # let cfg = Config::new(); /// # let ctx = Context::new(&cfg); /// # let solver = Solver::new(&ctx); /// let bv = ast::BV::new_const(&ctx, "x", 32); /// solver.assert(&bv._eq(&ast::BV::from_i64(&ctx, -3, 32))); /// /// let x = ast::Int::from_bv(&bv, true); /// /// assert_eq!(solver.check(), SatResult::Sat); /// let model = solver.get_model(); /// /// assert_eq!(-3, model.eval(&x).unwrap().as_i64().unwrap()); /// ``` pub fn from_bv(ast: &BV<'ctx>, signed: bool) -> Int<'ctx> { Self::new(ast.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_bv2int(ast.ctx.z3_ctx, ast.z3_ast, signed) }) } /// Create a bitvector from an integer. /// This is just a convenience wrapper around /// [`BV::from_int`](struct.BV.html#method.from_int); see notes there pub fn to_ast(&self, sz: u32) -> BV<'ctx> { BV::from_int(self, sz) } // TODO: this should be on the Ast trait, but I don't know how to return Self<'dest_ctx>. // When I try, it gives the error E0109 "lifetime arguments are not allowed for this type". pub fn translate<'dest_ctx>(&self, dest: &'dest_ctx Context) -> Int<'dest_ctx> { Int::new(dest, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_translate(self.ctx.z3_ctx, self.z3_ast, dest.z3_ctx) }) } varop!(add, Z3_mk_add, Self); varop!(sub, Z3_mk_sub, Self); varop!(mul, Z3_mk_mul, Self); binop!(div, Z3_mk_div, Self); binop!(rem, Z3_mk_rem, Self); binop!(modulo, Z3_mk_mod, Self); binop!(power, Z3_mk_power, Self); unop!(unary_minus, Z3_mk_unary_minus, Self); binop!(lt, Z3_mk_lt, Bool<'ctx>); binop!(le, Z3_mk_le, Bool<'ctx>); binop!(gt, Z3_mk_gt, Bool<'ctx>); binop!(ge, Z3_mk_ge, Bool<'ctx>); // Z3 does support mixing ints and reals in add(), sub(), mul(), div(), and power() // (but not rem(), modulo(), lt(), le(), gt(), or ge()). // TODO: we could consider expressing this by having a Numeric trait with these methods. 
// Int and Real would have the Numeric trait, but not the other Asts. // For example: // fn add(&self, other: &impl Numeric<'ctx>) -> Dynamic<'ctx> { ... } // Note the return type would have to be Dynamic I think (?), as the exact result type // depends on the particular types of the inputs. // Alternately, we could just have // Int::add_real(&self, other: &Real<'ctx>) -> Real<'ctx> // and // Real::add_int(&self, other: &Int<'ctx>) -> Real<'ctx> // This might be cleaner because we know exactly what the output type will be for these methods. } impl<'ctx> Real<'ctx> { pub fn new_const<S: Into<Symbol>>(ctx: &'ctx Context, name: S) -> Real<'ctx> { let sort = Sort::real(ctx); Self::new(ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_const(ctx.z3_ctx, name.into().as_z3_symbol(ctx), sort.z3_sort) }) } pub fn fresh_const(ctx: &'ctx Context, prefix: &str) -> Real<'ctx> { let sort = Sort::real(ctx); Self::new(ctx, unsafe { let pp = CString::new(prefix).unwrap(); let p = pp.as_ptr(); let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_fresh_const(ctx.z3_ctx, p, sort.z3_sort) }) } pub fn from_real(ctx: &'ctx Context, num: i32, den: i32) -> Real<'ctx> { Self::new(ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_real( ctx.z3_ctx, num as ::std::os::raw::c_int, den as ::std::os::raw::c_int, ) }) } pub fn as_real(&self) -> Option<(i64, i64)> { unsafe { let guard = Z3_MUTEX.lock().unwrap(); let mut num: i64 = 0; let mut den: i64 = 0; if Z3_get_numeral_small(self.ctx.z3_ctx, self.z3_ast, &mut num, &mut den) { Some((num, den)) } else { None } } } pub fn from_int(ast: &Int<'ctx>) -> Real<'ctx> { Self::new(ast.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_int2real(ast.ctx.z3_ctx, ast.z3_ast) }) } /// Create an integer from a real. 
/// This is just a convenience wrapper around /// [`Int::from_real`](struct.Int.html#method.from_real); see notes there pub fn to_int(&self) -> Int<'ctx> { Int::from_real(self) } unop!(is_int, Z3_mk_is_int, Bool<'ctx>); // TODO: this should be on the Ast trait, but I don't know how to return Self<'dest_ctx>. // When I try, it gives the error E0109 "lifetime arguments are not allowed for this type". pub fn translate<'dest_ctx>(&self, dest: &'dest_ctx Context) -> Real<'dest_ctx> { Real::new(dest, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_translate(self.ctx.z3_ctx, self.z3_ast, dest.z3_ctx) }) } varop!(add, Z3_mk_add, Self); varop!(sub, Z3_mk_sub, Self); varop!(mul, Z3_mk_mul, Self); binop!(div, Z3_mk_div, Self); binop!(power, Z3_mk_power, Self); unop!(unary_minus, Z3_mk_unary_minus, Self); binop!(lt, Z3_mk_lt, Bool<'ctx>); binop!(le, Z3_mk_le, Bool<'ctx>); binop!(gt, Z3_mk_gt, Bool<'ctx>); binop!(ge, Z3_mk_ge, Bool<'ctx>); } macro_rules! bv_overflow_check_signed { ( $f:ident, $z3fn:ident) => { pub fn $f(&self, other: &BV<'ctx>, b: bool) -> Bool<'ctx> { Ast::new(self.ctx, unsafe { $z3fn(self.ctx.z3_ctx, self.z3_ast, other.z3_ast, b) }) } }; } impl<'ctx> BV<'ctx> { pub fn new_const<S: Into<Symbol>>(ctx: &'ctx Context, name: S, sz: u32) -> BV<'ctx> { let sort = Sort::bitvector(ctx, sz); Self::new(ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_const(ctx.z3_ctx, name.into().as_z3_symbol(ctx), sort.z3_sort) }) } pub fn fresh_const(ctx: &'ctx Context, prefix: &str, sz: u32) -> BV<'ctx> { let sort = Sort::bitvector(ctx, sz); Self::new(ctx, unsafe { let pp = CString::new(prefix).unwrap(); let p = pp.as_ptr(); let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_fresh_const(ctx.z3_ctx, p, sort.z3_sort) }) } pub fn from_i64(ctx: &'ctx Context, i: i64, sz: u32) -> BV<'ctx> { let sort = Sort::bitvector(ctx, sz); Self::new(ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_int64(ctx.z3_ctx, i, sort.z3_sort) }) } pub fn from_u64(ctx: &'ctx Context, u: u64, sz: u32) 
-> BV<'ctx> { let sort = Sort::bitvector(ctx, sz); Self::new(ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_unsigned_int64(ctx.z3_ctx, u, sort.z3_sort) }) } pub fn as_i64(&self) -> Option<i64> { unsafe { let guard = Z3_MUTEX.lock().unwrap(); let mut tmp: ::std::os::raw::c_longlong = 0; if Z3_get_numeral_int64(self.ctx.z3_ctx, self.z3_ast, &mut tmp) { Some(tmp) } else { None } } } pub fn as_u64(&self) -> Option<u64> { unsafe { let guard = Z3_MUTEX.lock().unwrap(); let mut tmp: ::std::os::raw::c_ulonglong = 0; if Z3_get_numeral_uint64(self.ctx.z3_ctx, self.z3_ast, &mut tmp) { Some(tmp) } else { None } } } /// Create a bit vector from an integer. /// /// The bit vector will have width `sz`. /// /// # Examples /// ``` /// # use z3::{ast, Config, Context, SatResult, Solver}; /// # use z3::ast::Ast; /// # let cfg = Config::new(); /// # let ctx = Context::new(&cfg); /// # let solver = Solver::new(&ctx); /// let i = ast::Int::new_const(&ctx, "x"); /// solver.assert(&i._eq(&ast::Int::from_i64(&ctx, -3))); /// /// let x = ast::BV::from_int(&i, 64); /// assert_eq!(64, x.get_size()); /// /// assert_eq!(solver.check(), SatResult::Sat); /// let model = solver.get_model(); /// /// assert_eq!(-3, model.eval(&x.to_int(true)).unwrap().as_i64().expect("as_i64() shouldn't fail")); /// ``` pub fn from_int(ast: &Int<'ctx>, sz: u32) -> BV<'ctx> { Self::new(ast.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_int2bv(ast.ctx.z3_ctx, sz, ast.z3_ast) }) } /// Create an integer from a bitvector. /// This is just a convenience wrapper around /// [`Int::from_bv`](struct.Int.html#method.from_bv); see notes there pub fn to_int(&self, signed: bool) -> Int<'ctx> { Int::from_bv(self, signed) } /// Get the size of the bitvector (in bits) pub fn get_size(&self) -> u32 { let sort = self.get_sort(); unsafe { Z3_get_bv_sort_size(self.ctx.z3_ctx, sort.z3_sort) } } // TODO: this should be on the Ast trait, but I don't know how to return Self<'dest_ctx>. 
// When I try, it gives the error E0109 "lifetime arguments are not allowed for this type". pub fn translate<'dest_ctx>(&self, dest: &'dest_ctx Context) -> BV<'dest_ctx> { BV::new(dest, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_translate(self.ctx.z3_ctx, self.z3_ast, dest.z3_ctx) }) } // Bitwise ops /// Bitwise negation unop!(bvnot, Z3_mk_bvnot, Self); /// Two's complement negation unop!(bvneg, Z3_mk_bvneg, Self); /// Bitwise and binop!(bvand, Z3_mk_bvand, Self); /// Bitwise or binop!(bvor, Z3_mk_bvor, Self); /// Bitwise exclusive-or binop!(bvxor, Z3_mk_bvxor, Self); /// Bitwise nand binop!(bvnand, Z3_mk_bvnand, Self); /// Bitwise nor binop!(bvnor, Z3_mk_bvnor, Self); /// Bitwise xnor binop!(bvxnor, Z3_mk_bvxnor, Self); /// Conjunction of all the bits in the vector. Returns a BV with size (bitwidth) 1. unop!(bvredand, Z3_mk_bvredand, Self); /// Disjunction of all the bits in the vector. Returns a BV with size (bitwidth) 1. unop!(bvredor, Z3_mk_bvredor, Self); // Arithmetic ops /// Addition binop!(bvadd, Z3_mk_bvadd, Self); /// Subtraction binop!(bvsub, Z3_mk_bvsub, Self); /// Multiplication binop!(bvmul, Z3_mk_bvmul, Self); /// Unsigned division binop!(bvudiv, Z3_mk_bvudiv, Self); /// Signed division binop!(bvsdiv, Z3_mk_bvsdiv, Self); /// Unsigned remainder binop!(bvurem, Z3_mk_bvurem, Self); /// Signed remainder (sign follows dividend) binop!(bvsrem, Z3_mk_bvsrem, Self); /// Signed remainder (sign follows divisor) binop!(bvsmod, Z3_mk_bvsmod, Self); // Comparison ops /// Unsigned less than binop!(bvult, Z3_mk_bvult, Bool<'ctx>); /// Signed less than binop!(bvslt, Z3_mk_bvslt, Bool<'ctx>); /// Unsigned less than or equal binop!(bvule, Z3_mk_bvule, Bool<'ctx>); /// Signed less than or equal binop!(bvsle, Z3_mk_bvsle, Bool<'ctx>); /// Unsigned greater or equal binop!(bvuge, Z3_mk_bvuge, Bool<'ctx>); /// Signed greater or equal binop!(bvsge, Z3_mk_bvsge, Bool<'ctx>); /// Unsigned greater than binop!(bvugt, Z3_mk_bvugt, Bool<'ctx>); /// Signed greater than 
binop!(bvsgt, Z3_mk_bvsgt, Bool<'ctx>); // Shift ops /// Shift left binop!(bvshl, Z3_mk_bvshl, Self); /// Logical shift right (add zeroes in the high bits) binop!(bvlshr, Z3_mk_bvlshr, Self); /// Arithmetic shift right (sign-extend in the high bits) binop!(bvashr, Z3_mk_bvashr, Self); /// Rotate left binop!(bvrotl, Z3_mk_ext_rotate_left, Self); /// Rotate right binop!(bvrotr, Z3_mk_ext_rotate_left, Self); /// Concatenate two bitvectors binop!(concat, Z3_mk_concat, Self); // overflow checks /// Check if addition overflows bv_overflow_check_signed!(bvadd_no_overflow, Z3_mk_bvadd_no_overflow); /// Check if addition underflows binop!(bvadd_no_underflow, Z3_mk_bvadd_no_underflow, Bool<'ctx>); /// Check if subtraction overflows binop!(bvsub_no_overflow, Z3_mk_bvsub_no_overflow, Bool<'ctx>); /// Check if subtraction underflows bv_overflow_check_signed!(bvsub_no_underflow, Z3_mk_bvsub_no_underflow); /// Check if signed division overflows binop!(bvsdiv_no_overflow, Z3_mk_bvsdiv_no_overflow, Bool<'ctx>); /// Check if negation overflows unop!(bvneg_no_overflow, Z3_mk_bvneg_no_overflow, Bool<'ctx>); /// Check if multiplication overflows bv_overflow_check_signed!(bvmul_no_overflow, Z3_mk_bvmul_no_overflow); /// Check if multiplication underflows binop!(bvmul_no_underflow, Z3_mk_bvmul_no_underflow, Bool<'ctx>); /// Extract the bits `high` down to `low` from the bitvector. /// Returns a bitvector of size `n`, where `n = high - low + 1`. pub fn extract(&self, high: u32, low: u32) -> Self { Self::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_extract(self.ctx.z3_ctx, high, low, self.z3_ast) }) } /// Sign-extend the bitvector to size `m+i`, where `m` is the original size of the bitvector. /// That is, `i` bits will be added. 
pub fn sign_ext(&self, i: u32) -> Self { Self::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_sign_ext(self.ctx.z3_ctx, i, self.z3_ast) }) } /// Zero-extend the bitvector to size `m+i`, where `m` is the original size of the bitvector. /// That is, `i` bits will be added. pub fn zero_ext(&self, i: u32) -> Self { Self::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_zero_ext(self.ctx.z3_ctx, i, self.z3_ast) }) } } impl<'ctx> Array<'ctx> { /// Create an `Array` which maps from indices of the `domain` `Sort` to /// values of the `range` `Sort`. /// /// All values in the `Array` will be unconstrained. pub fn new_const<S: Into<Symbol>>( ctx: &'ctx Context, name: S, domain: &Sort<'ctx>, range: &Sort<'ctx>, ) -> Array<'ctx> { let sort = Sort::array(ctx, domain, range); Self::new(ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_const(ctx.z3_ctx, name.into().as_z3_symbol(ctx), sort.z3_sort) }) } pub fn fresh_const( ctx: &'ctx Context, prefix: &str, domain: &Sort<'ctx>, range: &Sort<'ctx>, ) -> Array<'ctx> { let sort = Sort::array(ctx, domain, range); Self::new(ctx, unsafe { let pp = CString::new(prefix).unwrap(); let p = pp.as_ptr(); let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_fresh_const(ctx.z3_ctx, p, sort.z3_sort) }) } /// Create a "constant array", that is, an `Array` initialized so that all of the /// indices in the `domain` map to the given value `val` pub fn const_array( ctx: &'ctx Context, domain: &Sort<'ctx>, val: &Dynamic<'ctx>, ) -> Array<'ctx> { Self::new(ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_const_array(ctx.z3_ctx, domain.z3_sort, val.z3_ast) }) } // TODO: this should be on the Ast trait, but I don't know how to return Self<'dest_ctx>. // When I try, it gives the error E0109 "lifetime arguments are not allowed for this type". 
pub fn translate<'dest_ctx>(&self, dest: &'dest_ctx Context) -> Array<'dest_ctx> { Array::new(dest, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_translate(self.ctx.z3_ctx, self.z3_ast, dest.z3_ctx) }) } /// Get the value at a given index in the array. /// /// Note that the `index` _must be_ of the array's `domain` sort. /// The return type will be of the array's `range` sort. // // We avoid the binop! macro because the argument has a non-Self type pub fn select(&self, index: &Dynamic<'ctx>) -> Dynamic<'ctx> { // TODO: We could validate here that the index is of the correct type. // This would require us either to keep around the original `domain` argument // from when the Array was constructed, or to do an additional Z3 query // to find the domain sort first. // But if we did this check ourselves, we'd just panic, so it doesn't seem // like a huge advantage over just letting Z3 panic itself when it discovers the // problem. // This way we also avoid the redundant check every time this method is called. Dynamic::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_select(self.ctx.z3_ctx, self.z3_ast, index.get_z3_ast()) }) } /// Update the value at a given index in the array. /// /// Note that the `index` _must be_ of the array's `domain` sort, /// and the `value` _must be_ of the array's `range` sort. // // We avoid the trinop! 
macro because the arguments have non-Self types pub fn store(&self, index: &Dynamic<'ctx>, value: &Dynamic<'ctx>) -> Self { Self::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_store( self.ctx.z3_ctx, self.z3_ast, index.get_z3_ast(), value.get_z3_ast(), ) }) } } impl<'ctx> Set<'ctx> { pub fn new_const<S: Into<Symbol>>( ctx: &'ctx Context, name: S, eltype: &Sort<'ctx>, ) -> Set<'ctx> { let sort = Sort::set(ctx, eltype); Self::new(ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_const(ctx.z3_ctx, name.into().as_z3_symbol(ctx), sort.z3_sort) }) } pub fn fresh_const(ctx: &'ctx Context, prefix: &str, eltype: &Sort<'ctx>) -> Set<'ctx> { let sort = Sort::set(ctx, eltype); Self::new(ctx, unsafe { let pp = CString::new(prefix).unwrap(); let p = pp.as_ptr(); let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_fresh_const(ctx.z3_ctx, p, sort.z3_sort) }) } // TODO: this should be on the Ast trait, but I don't know how to return Self<'dest_ctx>. // When I try, it gives the error E0109 "lifetime arguments are not allowed for this type". pub fn translate<'dest_ctx>(&self, dest: &'dest_ctx Context) -> Set<'dest_ctx> { Set::new(dest, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_translate(self.ctx.z3_ctx, self.z3_ast, dest.z3_ctx) }) } /// Add an element to the set. /// /// Note that the `element` _must be_ of the `Set`'s `eltype` sort. // // We avoid the binop! macro because the argument has a non-Self type pub fn add(&self, element: &Dynamic<'ctx>) -> Set<'ctx> { Set::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_set_add(self.ctx.z3_ctx, self.z3_ast, element.get_z3_ast()) }) } /// Remove an element from the set. /// /// Note that the `element` _must be_ of the `Set`'s `eltype` sort. // // We avoid the binop! 
macro because the argument has a non-Self type pub fn del(&self, element: &Dynamic<'ctx>) -> Set<'ctx> { Set::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_set_add(self.ctx.z3_ctx, self.z3_ast, element.get_z3_ast()) }) } /// Check if an item is a member of the set. /// /// Note that the `element` _must be_ of the `Set`'s `eltype` sort. // // We avoid the binop! macro because the argument has a non-Self type pub fn member(&self, element: &Dynamic<'ctx>) -> Bool<'ctx> { Bool::new(self.ctx, unsafe { let guard = Z3_MUTEX.lock().unwrap(); Z3_mk_set_add(self.ctx.z3_ctx, self.z3_ast, element.get_z3_ast()) }) } /// Take the intersection of a list of sets. varop!(intersect, Z3_mk_set_intersect, Self); /// Take the union of a list of sets. varop!(set_union, Z3_mk_set_union, Self); /// Check if the set is a subset of another set. binop!(set_subset, Z3_mk_set_subset, Bool<'ctx>); /// Take the complement of the set. unop!(complement, Z3_mk_set_complement, Self); /// Take the set difference between two sets. 
binop!(difference, Z3_mk_set_difference, Self); } impl<'ctx> Dynamic<'ctx> { pub fn from_ast(ast: &impl Ast<'ctx>) -> Self { Self::new(ast.get_ctx(), ast.get_z3_ast()) } fn sort_kind(&self) -> SortKind { unsafe { Z3_get_sort_kind(self.ctx.z3_ctx, Z3_get_sort(self.ctx.z3_ctx, self.z3_ast)) } } /// Returns `None` if the `Dynamic` is not actually a `Bool` pub fn as_bool(&self) -> Option<Bool<'ctx>> { match self.sort_kind() { SortKind::Bool => Some(Bool::new(self.ctx, self.z3_ast)), _ => None, } } /// Returns `None` if the `Dynamic` is not actually an `Int` pub fn as_int(&self) -> Option<Int<'ctx>> { match self.sort_kind() { SortKind::Int => Some(Int::new(self.ctx, self.z3_ast)), _ => None, } } /// Returns `None` if the `Dynamic` is not actually a `Real` pub fn as_real(&self) -> Option<Real<'ctx>> { match self.sort_kind() { SortKind::Real => Some(Real::new(self.ctx, self.z3_ast)), _ => None, } } /// Returns `None` if the `Dynamic` is not actually a `BV` pub fn as_bv(&self) -> Option<BV<'ctx>> { match self.sort_kind() { SortKind::BV => Some(BV::new(self.ctx, self.z3_ast)), _ => None, } } /// Returns `None` if the `Dynamic` is not actually an `Array` pub fn as_array(&self) -> Option<Array<'ctx>> { match self.sort_kind() { SortKind::Array => Some(Array::new(self.ctx, self.z3_ast)), _ => None, } } pub fn as_datatype(&self) -> Option<Datatype<'ctx>> { match self.sort_kind() { SortKind::Datatype => Some(Datatype::new(self.ctx, self.z3_ast)), _ => None, } } // TODO as_set. 
SortKind::Set does not exist } impl<'ctx> Datatype<'ctx> { pub fn new_const<S: Into<Symbol>>(ctx: &'ctx Context, name: S, sort: &Sort<'ctx>) -> Self { assert_eq!(ctx, sort.ctx); assert_eq!(sort.kind(), SortKind::Datatype); Self::new(ctx, unsafe { Z3_mk_const(ctx.z3_ctx, name.into().as_z3_symbol(ctx), sort.z3_sort) }) } pub fn fresh_const(ctx: &'ctx Context, prefix: &str, sort: &Sort<'ctx>) -> Self { assert_eq!(ctx, sort.ctx); assert_eq!(sort.kind(), SortKind::Datatype); Self::new(ctx, unsafe { let pp = CString::new(prefix).unwrap(); let p = pp.as_ptr(); Z3_mk_fresh_const(ctx.z3_ctx, p, sort.z3_sort) }) } // TODO: this should be on the Ast trait, but I don't know how to return Self<'dest_ctx>. // When I try, it gives the error E0109 "lifetime arguments are not allowed for this type". pub fn translate<'dest_ctx>(&self, dest: &'dest_ctx Context) -> Datatype<'dest_ctx> { Datatype::new(dest, unsafe { Z3_translate(self.ctx.z3_ctx, self.z3_ast, dest.z3_ctx) }) } } /// Create a universal quantifier. 
/// /// # Examples /// ``` /// # use z3::{ast, Config, Context, FuncDecl, Pattern, SatResult, Solver, Sort, Symbol}; /// # use z3::ast::Ast; /// # use std::convert::TryInto; /// # let cfg = Config::new(); /// # let ctx = Context::new(&cfg); /// # let solver = Solver::new(&ctx); /// let f = FuncDecl::new(&ctx, "f", &[&Sort::int(&ctx)], &Sort::int(&ctx)); /// /// let x = ast::Int::new_const(&ctx, "x"); /// let f_x: ast::Int = f.apply(&[&x.clone().into()]).try_into().unwrap(); /// let f_x_pattern: Pattern = Pattern::new(&ctx, &[ &f_x.clone().into() ]); /// let forall: ast::Bool = ast::forall_const( /// &ctx, /// &[&x.clone().into()], /// &[&f_x_pattern], /// &x._eq(&f_x).into() /// ).try_into().unwrap(); /// solver.assert(&forall); /// /// assert_eq!(solver.check(), SatResult::Sat); /// let model = solver.get_model(); /// /// let f_f_3: ast::Int = f.apply(&[&f.apply(&[&ast::Int::from_u64(&ctx, 3).into()])]).try_into().unwrap(); /// assert_eq!(3, model.eval(&f_f_3).unwrap().as_u64().unwrap()); /// ``` pub fn forall_const<'ctx>( ctx: &'ctx Context, bounds: &[&Dynamic<'ctx>], patterns: &[&Pattern<'ctx>], body: &Dynamic<'ctx>, ) -> Dynamic<'ctx> { assert!(bounds.iter().all(|a| a.get_ctx() == ctx)); assert!(patterns.iter().all(|p| p.ctx == ctx)); assert_eq!(ctx, body.get_ctx()); if bounds.is_empty() { return body.clone(); } let bounds: Vec<_> = bounds.iter().map(|a| a.get_z3_ast()).collect(); let patterns: Vec<_> = patterns.iter().map(|p| p.z3_pattern).collect(); Ast::new(ctx, unsafe { Z3_mk_forall_const( ctx.z3_ctx, 0, bounds.len().try_into().unwrap(), bounds.as_ptr() as *const Z3_app, patterns.len().try_into().unwrap(), patterns.as_ptr() as *const Z3_pattern, body.get_z3_ast(), ) }) } /// Create an existential quantifier. 
/// /// # Examples /// ``` /// # use z3::{ast, Config, Context, FuncDecl, SatResult, Solver, Sort, Symbol, Pattern}; /// # use z3::ast::Ast; /// # use std::convert::TryInto; /// # let cfg = Config::new(); /// # let ctx = Context::new(&cfg); /// # let solver = Solver::new(&ctx); /// let f = FuncDecl::new(&ctx, "f", &[&Sort::int(&ctx)], &Sort::int(&ctx)); /// /// let x = ast::Int::new_const(&ctx, "x"); /// let f_x: ast::Int = f.apply(&[&x.clone().into()]).try_into().unwrap(); /// let f_x_pattern: Pattern = Pattern::new(&ctx, &[ &f_x.clone().into() ]); /// let exists: ast::Bool = ast::exists_const( /// &ctx, /// &[&x.clone().into()], /// &[&f_x_pattern], /// &x._eq(&f_x).not().into() /// ).try_into().unwrap(); /// solver.assert(&exists.not()); /// /// assert_eq!(solver.check(), SatResult::Sat); /// let model = solver.get_model(); /// /// let f_f_3: ast::Int = f.apply(&[&f.apply(&[&ast::Int::from_u64(&ctx, 3).into()])]).try_into().unwrap(); /// assert_eq!(3, model.eval(&f_f_3).unwrap().as_u64().unwrap()); /// ``` pub fn exists_const<'ctx>( ctx: &'ctx Context, bounds: &[&Dynamic<'ctx>], patterns: &[&Pattern<'ctx>], body: &Dynamic<'ctx>, ) -> Dynamic<'ctx> { assert!(bounds.iter().all(|a| a.get_ctx() == ctx)); assert!(patterns.iter().all(|p| p.ctx == ctx)); assert_eq!(ctx, body.get_ctx()); let bounds: Vec<_> = bounds.iter().map(|a| a.get_z3_ast()).collect(); let patterns: Vec<_> = patterns.iter().map(|p| p.z3_pattern).collect(); Ast::new(ctx, unsafe { Z3_mk_exists_const( ctx.z3_ctx, 0, bounds.len().try_into().unwrap(), bounds.as_ptr() as *const Z3_app, patterns.len().try_into().unwrap(), patterns.as_ptr() as *const Z3_pattern, body.get_z3_ast(), ) }) }
use std::fs::File;
use std::io::BufReader;
use std::io::prelude::*;

/// Advent of Code 2018 day 5, part 2: for each unit type, remove every
/// occurrence of that type (both polarities) from the polymer, fully react
/// what remains, and report the shortest resulting polymer.
fn main() -> std::io::Result<()> {
    // Read the raw polymer bytes (ASCII letters; case encodes polarity).
    let mut polymer = vec![];
    BufReader::new(File::open("src/bin/day05.txt")?).read_to_end(&mut polymer)?;
    assert_eq!(polymer.len() % 2, 0);

    let mut shortest = std::usize::MAX;
    // 65..91 is 'A'..='Z'; lowercase partner is uppercase + 32.
    for unit_type in 65u8..91 {
        // Strip both polarities of the candidate unit type.
        let filtered: Vec<u8> = polymer
            .iter()
            .copied()
            .filter(|&u| u != unit_type && u != unit_type + 32)
            .collect();
        assert_eq!(filtered.len() % 2, 0);

        // React via a stack: adjacent opposite-polarity pairs annihilate.
        let mut reacted: Vec<u8> = vec![];
        for unit in filtered {
            match reacted.last() {
                Some(&top) if (top as i32 - unit as i32).abs() == 32 => {
                    reacted.pop();
                }
                _ => reacted.push(unit),
            }
        }

        println!(
            "Reaction Without {}{}, length: {}",
            unit_type as char,
            (unit_type + 32u8) as char,
            reacted.len()
        );
        if reacted.len() < shortest {
            shortest = reacted.len();
        }
    }
    // Regression pin for this puzzle input.
    assert_eq!(shortest, 6946);
    Ok(())
}
use sdl2::keyboard::Keycode;
use std::collections::HashSet;
use std::mem::swap;
pub use super::*;

/// ECS system driving the upgrade-selection screen.
///
/// Keeps the previous and current frame's pressed-key sets so UI actions
/// are edge-triggered (fire once per press, not while held).
#[derive(Debug, Default)]
pub struct UpgradeControlSystem {
    // Keys that were down on the previous frame.
    prev_keys: HashSet<Keycode>,
    // Keys that are down on the current frame.
    new_keys: HashSet<Keycode>,
}

impl<'a> System<'a> for UpgradeControlSystem {
    type SystemData = (
        Write<'a, Vec<Keycode>>,
        Write<'a, UI>,
        Write<'a, SpawnedUpgrades>,
        WriteExpect<'a, UIState>,
        Read<'a, AvaliableUpgrades>,
        WriteExpect<'a, Vec<UpgradeType>>,
        Write<'a, AppState>,
    );

    fn run(&mut self, data: Self::SystemData) {
        let (
            mut key_codes,
            mut ui,
            mut spawned_upgrades,
            mut ui_state,
            avaliable_upgrades,
            mut upgrade_types,
            mut app_state
        ) = data;
        // The pair of upgrade indices currently offered, if any.
        // NOTE(review): the value is moved by `if let` inside the loop below,
        // so the element type is presumably a small Copy pair — confirm
        // against the `SpawnedUpgrades` definition.
        let upgrades = spawned_upgrades.last().map(|x| x.clone());
        let widget_ids = [Widgets::Upgrade1, Widgets::Upgrade2];
        let widget_selector = Widgets::UpgradeSelector as usize;
        // Rotate key sets: this frame's keys become "previous", then the
        // drained key queue repopulates `new_keys`.
        swap(&mut self.prev_keys, &mut self.new_keys);
        self.new_keys.clear();
        for key in key_codes.drain(..) {
            self.new_keys.insert(key);
            // do something here
        }
        // Edge detection: keys pressed this frame that weren't pressed last frame.
        let new_pressed = &self.new_keys - &self.prev_keys;
        for key in new_pressed.iter() {
            match key {
                // Left/Right toggles the highlighted choice between the two
                // offered upgrade widgets, updating both the UI selection and
                // the provisionally chosen upgrade.
                Keycode::Left | Keycode::Right => {
                    if let Some(upgrades) = upgrades {
                        if ui.selected(widget_selector, widget_ids[0] as usize)
                            || ui.free_selector(widget_selector)
                        {
                            ui_state.choosed_upgrade = Some(upgrades[1]);
                            ui.select(widget_selector, widget_ids[1] as usize)
                        } else if ui.selected(widget_selector, widget_ids[1] as usize) {
                            ui_state.choosed_upgrade = Some(upgrades[0]);
                            ui.select(widget_selector, widget_ids[0] as usize)
                        }
                    }
                }
                // Space confirms the highlighted upgrade (consuming the
                // current offer); with nothing highlighted it resumes play.
                Keycode::Space => {
                    if let Some(upgrade) = ui_state.choosed_upgrade {
                        ui_state.choosed_upgrade = None;
                        spawned_upgrades.pop();
                        upgrade_types.push(avaliable_upgrades[upgrade].upgrade_type);
                    } else {
                        *app_state = AppState::Play(PlayState::Action);
                    }
                }
                // Keycode::Right => {
                //     dbg!("sd");
                // }
                _ => ()
            }
        }
    }
}
#![recursion_limit = "1024"]
use wasm_bindgen::prelude::*;
extern crate failure;
extern crate yew;
use anyhow::Error;
extern crate console_error_panic_hook;
use std::panic;
use http::{Request, Response};
use stdweb::js;
use yew::format::Json;
use yew::html::ComponentLink;
use yew::prelude::*;
use yew::services::fetch;
use yew::services::ConsoleService;
use serde_json::json;

/// Root yew component: a "join game as viewer" form that posts the entered
/// game id / username to the server and redirects on success.
struct Model {
    link: ComponentLink<Model>,
    game_id: String, // text in our input box
    name: String,
    server_data: String, // data received from the server
    // Live fetch task; kept here so the in-flight request isn't dropped.
    task: Option<fetch::FetchTask>,
}

/// Messages driving the component's update loop.
enum Msg {
    GameIdInput(String), // text was input in the input box
    NameInput(String),   // text was input in the input box
    // Submit the join request to the server.
    SendReq,
    // Server response body (or an error description) arrived.
    Receieved(String),
}

impl Component for Model {
    type Message = Msg;
    type Properties = ();
    fn create(_: Self::Properties, link: ComponentLink<Self>) -> Self {
        Model {
            link,
            game_id: String::new(),
            name: String::new(),
            server_data: String::new(),
            task: None,
        }
    }
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            Msg::GameIdInput(e) => {
                self.game_id = e; // note input box value
                true
            }
            Msg::NameInput(e) => {
                self.name = e; // note input box value
                true
            }
            Msg::SendReq => {
                // Build the join payload; viewers join with an empty password.
                let json = json!({"username": self.name, "viewtype": "viewer", "game_id": self.game_id, "password": ""});
                let post_request = Request::post("/cookies")
                    .header("Content-Type", "application/json")
                    .body(Json(&json))
                    .unwrap();

                // On success the server replies "Success" and we trigger the
                // hidden redirect anchor via raw JS; otherwise the status is
                // surfaced in the UI through Msg::Receieved.
                let callback = self
                    .link
                    .callback(|response: Response<Result<String, Error>>| {
                        if response.status().is_success() {
                            ConsoleService::log("Sent Request and Received Response with code: ");
                            ConsoleService::log(response.status().as_str());
                            if response.body().as_ref().unwrap() == "Success" {
                                js! {
                                    document.getElementById("link").click();
                                }
                            }
                            Msg::Receieved(response.body().as_ref().unwrap().to_string())
                        } else {
                            ConsoleService::log("Failed to Send Request");
                            Msg::Receieved(format!("Failed to send request: {}", response.status()))
                        }
                    });
                let task = fetch::FetchService::fetch(
                    post_request,
                    callback,
                )
                .unwrap();
                // Keep the task alive for the duration of the request.
                self.task = Some(task);
                false
            }
            Msg::Receieved(data) => {
                self.server_data = data;
                true
            }
        }
    }
    fn view(&self) -> Html {
        let input_gameid = self.link.callback(|e: InputData| Msg::GameIdInput(e.value));
        let input_name = self.link.callback(|e: InputData| Msg::NameInput(e.value));
        let sendreq = self.link.callback(|_| Msg::SendReq);
        html! {
            <>
            <div class="container">
                <input id="game_id" type="text" oninput=input_gameid placeholder="Game ID" class="input" maxlength="6" autocomplete="off"/>
                <br/>
                <input id="name" oninput=input_name placeholder="Username" class="input" maxlength="20" autocomplete="off"/>
                <br/>
                <button id="enter" class="enter" onclick=sendreq>{"Enter"}</button>
                <p class="message">{self.server_data.clone()}</p>
            </div>
            <a id="link" class="link" href="/redirect"></a>
            </>
        }
    }
    fn change(&mut self, _: <Self as yew::Component>::Properties) -> bool {
        todo!()
    }
    // After the first render, wire up Enter-key submission on the username
    // field via raw JS (keyCode 13 clicks the Enter button).
    fn rendered(&mut self, first_render: bool) {
        if first_render {
            js! {
                document.getElementById("name").addEventListener("keyup", function(event) {
                    // Number 13 is the "Enter" key on the keyboard
                    if (event.keyCode === 13) {
                        // Cancel the default action, if needed
                        event.preventDefault();
                        // Trigger the button element with a click
                        document.getElementById("enter").click();
                    }
                });
            }
        }
    }
}

/// Wasm entry point: install the panic hook and mount the app.
#[wasm_bindgen(start)]
pub fn run_app() {
    panic::set_hook(Box::new(console_error_panic_hook::hook));
    App::<Model>::new().mount_to_body();
}
/// An enum to represent all characters in the CJKStrokes block. #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum CJKStrokes { /// \u{31c0}: '㇀' CjkStrokeT, /// \u{31c1}: '㇁' CjkStrokeWg, /// \u{31c2}: '㇂' CjkStrokeXg, /// \u{31c3}: '㇃' CjkStrokeBxg, /// \u{31c4}: '㇄' CjkStrokeSw, /// \u{31c5}: '㇅' CjkStrokeHzz, /// \u{31c6}: '㇆' CjkStrokeHzg, /// \u{31c7}: '㇇' CjkStrokeHp, /// \u{31c8}: '㇈' CjkStrokeHzwg, /// \u{31c9}: '㇉' CjkStrokeSzwg, /// \u{31ca}: '㇊' CjkStrokeHzt, /// \u{31cb}: '㇋' CjkStrokeHzzp, /// \u{31cc}: '㇌' CjkStrokeHpwg, /// \u{31cd}: '㇍' CjkStrokeHzw, /// \u{31ce}: '㇎' CjkStrokeHzzz, /// \u{31cf}: '㇏' CjkStrokeN, /// \u{31d0}: '㇐' CjkStrokeH, /// \u{31d1}: '㇑' CjkStrokeS, /// \u{31d2}: '㇒' CjkStrokeP, /// \u{31d3}: '㇓' CjkStrokeSp, /// \u{31d4}: '㇔' CjkStrokeD, /// \u{31d5}: '㇕' CjkStrokeHz, /// \u{31d6}: '㇖' CjkStrokeHg, /// \u{31d7}: '㇗' CjkStrokeSz, /// \u{31d8}: '㇘' CjkStrokeSwz, /// \u{31d9}: '㇙' CjkStrokeSt, /// \u{31da}: '㇚' CjkStrokeSg, /// \u{31db}: '㇛' CjkStrokePd, /// \u{31dc}: '㇜' CjkStrokePz, /// \u{31dd}: '㇝' CjkStrokeTn, /// \u{31de}: '㇞' CjkStrokeSzz, /// \u{31df}: '㇟' CjkStrokeSwg, /// \u{31e0}: '㇠' CjkStrokeHxwg, /// \u{31e1}: '㇡' CjkStrokeHzzzg, /// \u{31e2}: '㇢' CjkStrokePg, /// \u{31e3}: '㇣' CjkStrokeQ, } impl Into<char> for CJKStrokes { fn into(self) -> char { match self { CJKStrokes::CjkStrokeT => '㇀', CJKStrokes::CjkStrokeWg => '㇁', CJKStrokes::CjkStrokeXg => '㇂', CJKStrokes::CjkStrokeBxg => '㇃', CJKStrokes::CjkStrokeSw => '㇄', CJKStrokes::CjkStrokeHzz => '㇅', CJKStrokes::CjkStrokeHzg => '㇆', CJKStrokes::CjkStrokeHp => '㇇', CJKStrokes::CjkStrokeHzwg => '㇈', CJKStrokes::CjkStrokeSzwg => '㇉', CJKStrokes::CjkStrokeHzt => '㇊', CJKStrokes::CjkStrokeHzzp => '㇋', CJKStrokes::CjkStrokeHpwg => '㇌', CJKStrokes::CjkStrokeHzw => '㇍', CJKStrokes::CjkStrokeHzzz => '㇎', CJKStrokes::CjkStrokeN => '㇏', CJKStrokes::CjkStrokeH => '㇐', CJKStrokes::CjkStrokeS => '㇑', CJKStrokes::CjkStrokeP => '㇒', CJKStrokes::CjkStrokeSp => '㇓', 
CJKStrokes::CjkStrokeD => '㇔', CJKStrokes::CjkStrokeHz => '㇕', CJKStrokes::CjkStrokeHg => '㇖', CJKStrokes::CjkStrokeSz => '㇗', CJKStrokes::CjkStrokeSwz => '㇘', CJKStrokes::CjkStrokeSt => '㇙', CJKStrokes::CjkStrokeSg => '㇚', CJKStrokes::CjkStrokePd => '㇛', CJKStrokes::CjkStrokePz => '㇜', CJKStrokes::CjkStrokeTn => '㇝', CJKStrokes::CjkStrokeSzz => '㇞', CJKStrokes::CjkStrokeSwg => '㇟', CJKStrokes::CjkStrokeHxwg => '㇠', CJKStrokes::CjkStrokeHzzzg => '㇡', CJKStrokes::CjkStrokePg => '㇢', CJKStrokes::CjkStrokeQ => '㇣', } } } impl std::convert::TryFrom<char> for CJKStrokes { type Error = (); fn try_from(c: char) -> Result<Self, Self::Error> { match c { '㇀' => Ok(CJKStrokes::CjkStrokeT), '㇁' => Ok(CJKStrokes::CjkStrokeWg), '㇂' => Ok(CJKStrokes::CjkStrokeXg), '㇃' => Ok(CJKStrokes::CjkStrokeBxg), '㇄' => Ok(CJKStrokes::CjkStrokeSw), '㇅' => Ok(CJKStrokes::CjkStrokeHzz), '㇆' => Ok(CJKStrokes::CjkStrokeHzg), '㇇' => Ok(CJKStrokes::CjkStrokeHp), '㇈' => Ok(CJKStrokes::CjkStrokeHzwg), '㇉' => Ok(CJKStrokes::CjkStrokeSzwg), '㇊' => Ok(CJKStrokes::CjkStrokeHzt), '㇋' => Ok(CJKStrokes::CjkStrokeHzzp), '㇌' => Ok(CJKStrokes::CjkStrokeHpwg), '㇍' => Ok(CJKStrokes::CjkStrokeHzw), '㇎' => Ok(CJKStrokes::CjkStrokeHzzz), '㇏' => Ok(CJKStrokes::CjkStrokeN), '㇐' => Ok(CJKStrokes::CjkStrokeH), '㇑' => Ok(CJKStrokes::CjkStrokeS), '㇒' => Ok(CJKStrokes::CjkStrokeP), '㇓' => Ok(CJKStrokes::CjkStrokeSp), '㇔' => Ok(CJKStrokes::CjkStrokeD), '㇕' => Ok(CJKStrokes::CjkStrokeHz), '㇖' => Ok(CJKStrokes::CjkStrokeHg), '㇗' => Ok(CJKStrokes::CjkStrokeSz), '㇘' => Ok(CJKStrokes::CjkStrokeSwz), '㇙' => Ok(CJKStrokes::CjkStrokeSt), '㇚' => Ok(CJKStrokes::CjkStrokeSg), '㇛' => Ok(CJKStrokes::CjkStrokePd), '㇜' => Ok(CJKStrokes::CjkStrokePz), '㇝' => Ok(CJKStrokes::CjkStrokeTn), '㇞' => Ok(CJKStrokes::CjkStrokeSzz), '㇟' => Ok(CJKStrokes::CjkStrokeSwg), '㇠' => Ok(CJKStrokes::CjkStrokeHxwg), '㇡' => Ok(CJKStrokes::CjkStrokeHzzzg), '㇢' => Ok(CJKStrokes::CjkStrokePg), '㇣' => Ok(CJKStrokes::CjkStrokeQ), _ => Err(()), } } } impl Into<u32> 
for CJKStrokes { fn into(self) -> u32 { let c: char = self.into(); let hex = c .escape_unicode() .to_string() .replace("\\u{", "") .replace("}", ""); u32::from_str_radix(&hex, 16).unwrap() } } impl std::convert::TryFrom<u32> for CJKStrokes { type Error = (); fn try_from(u: u32) -> Result<Self, Self::Error> { if let Ok(c) = char::try_from(u) { Self::try_from(c) } else { Err(()) } } } impl Iterator for CJKStrokes { type Item = Self; fn next(&mut self) -> Option<Self> { let index: u32 = (*self).into(); use std::convert::TryFrom; Self::try_from(index + 1).ok() } } impl CJKStrokes { /// The character with the lowest index in this unicode block pub fn new() -> Self { CJKStrokes::CjkStrokeT } /// The character's name, in sentence case pub fn name(&self) -> String { let s = std::format!("CJKStrokes{:#?}", self); string_morph::to_sentence_case(&s) } }
use crate::arch::OpCode::*;
use crate::arch::RegMnem::*;
use crate::arch::*;
use std::io::{self, Write};
use std::process;

// Initial stack pointer; the stack grows downward from here (see `push`).
const SP_INIT: u16 = 0x8000;
// Memory-mapped I/O: storing to this address writes a character to stdout.
// NOTE(review): shares the value 0x8000 with SP_INIT — presumably intended,
// since the first push pre-decrements SP to 0x7FFF; confirm.
const CHAR_OUT_ADDR: u16 = 0x8000;
// Memory-mapped I/O: loading from this address reads a character from stdin.
const CHAR_IN_ADDR: u16 = 0x8001;
// Accessing this address terminates the process, using the register value
// as the exit code.
const END_PROG_ADDR: u16 = 0xFFFF;

/// A small 16-bit virtual machine executing a fetch/decode/execute loop
/// over two-word instructions.
#[derive(Debug)]
pub struct TeenyAT {
    mem: Memory,
    ins: Instruction,
    pc: Register,
    r1: Register,
    r2: Register,
    r3: Register,
    r4: Register,
    r5: Register,
    r6: Register,
    sp: Register,
    // Decoded fields of the current instruction.
    op_code: OpCode,
    ra: RegMnem,
    rb: RegMnem,
    imm: u16,
    addr: u16,
    pub debug_mode: bool,
}

impl TeenyAT {
    /// Builds a VM over `program`, with registers zeroed (PC at 0) and the
    /// stack pointer at `SP_INIT`.
    pub fn new(program: Memory) -> Self {
        let ins = Instruction::new(0, 0);
        let pc = Register::new(RegMnem::Pc);
        let r1 = Register::new(RegMnem::R1);
        let r2 = Register::new(RegMnem::R2);
        let r3 = Register::new(RegMnem::R3);
        let r4 = Register::new(RegMnem::R4);
        let r5 = Register::new(RegMnem::R5);
        let r6 = Register::new(RegMnem::R6);
        let mut sp = Register::new(RegMnem::Sp);
        sp.val = SP_INIT;
        Self {
            mem: program,
            ins,
            pc,
            r1,
            r2,
            r3,
            r4,
            r5,
            r6,
            sp,
            op_code: OpCode::Set,
            ra: RegMnem::default(),
            rb: RegMnem::default(),
            imm: 0,
            addr: 0,
            debug_mode: false,
        }
    }

    /// Runs the fetch/decode/execute loop until an error occurs or the
    /// program exits via `END_PROG_ADDR` (which calls `process::exit`).
    pub fn run(&mut self) -> Result<(), ArchError> {
        if self.debug_mode {
            self.mem.print_program();
        }
        loop {
            self.fetch()?;
            self.decode()?;
            self.execute()?;
        }
    }

    /// Reads the two words of the next instruction and advances PC by 2.
    fn fetch(&mut self) -> Result<(), ArchError> {
        let word1 = self.mem.read(self.pc.val)?;
        let word2 = self.mem.read(self.pc.val + 1)?;
        self.ins = Instruction::new(word1, word2);
        self.pc.val += 2;
        Ok(())
    }

    /// Extracts opcode, register operands (as required by the opcode), and
    /// the immediate word (also used as an address) from the instruction.
    fn decode(&mut self) -> Result<(), ArchError> {
        self.op_code = self.ins.get_op_code()?;
        let num_regs = self.op_code.num_regs();
        if num_regs >= 1 {
            self.ra = self.ins.get_ra()?;
        }
        if num_regs == 2 {
            self.rb = self.ins.get_rb()?;
        }
        self.imm = self.ins.word_imm;
        self.addr = self.imm;
        Ok(())
    }

    /// Dispatches the decoded opcode to its handler.
    fn execute(&mut self) -> Result<(), ArchError> {
        match self.op_code {
            Set => self.set(),
            Copy => self.copy(),
            Load => self.load()?,
            Stor => self.stor()?,
            PLoad => self.pload()?,
            PStor => self.pstor()?,
            Push => self.push()?,
            Pop => self.pop()?,
            Add => self.add(),
            Sub => self.sub(),
            Mult => self.mult(),
            Div => self.div(),
            Mod => self.divmod(),
            Neg => self.neg(),
            Inc => self.inc(),
            Dec => self.dec(),
            And => self.and(),
            Or => self.or(),
            Xor => self.xor(),
            Inv => self.inv(),
            Shl => self.shl(),
            Shr => self.shr(),
            Call => self.call()?,
            Jl => self.jl(),
            Jle => self.jle(),
            Je => self.je(),
            Jne => self.jne(),
            Jge => self.jge(),
            Jg => self.jg(),
        }
        Ok(())
    }

    /// Mutable reference to the register named by `ra`. `_R0` aliases PC.
    // NOTE(review): several handlers below repeat this match inline instead
    // of calling `get_ra`; that appears deliberate — a direct field match
    // lets the borrow checker split the borrow so `self.mem` stays usable
    // alongside the register reference, which a method call would not.
    fn get_ra(&mut self) -> &mut Register {
        match self.ra {
            _R0 | Pc => &mut self.pc,
            R1 | Ax => &mut self.r1,
            R2 | Bx => &mut self.r2,
            R3 | Cx => &mut self.r3,
            R4 | Dx => &mut self.r4,
            R5 | Ex => &mut self.r5,
            R6 | Fx => &mut self.r6,
            R7 | Sp => &mut self.sp,
        }
    }

    /// Current value of the register named by `rb`.
    fn rb_val(&self) -> u16 {
        match self.rb {
            _R0 | Pc => self.pc.val,
            R1 | Ax => self.r1.val,
            R2 | Bx => self.r2.val,
            R3 | Cx => self.r3.val,
            R4 | Dx => self.r4.val,
            R5 | Ex => self.r5.val,
            R6 | Fx => self.r6.val,
            R7 | Sp => self.sp.val,
        }
    }

    /// ra := immediate
    fn set(&mut self) {
        let imm = self.imm;
        let ra = self.get_ra();
        ra.val = imm;
    }

    /// ra := rb
    fn copy(&mut self) {
        let rb_val = match self.rb {
            _R0 | Pc => self.pc.val,
            R1 | Ax => self.r1.val,
            R2 | Bx => self.r2.val,
            R3 | Cx => self.r3.val,
            R4 | Dx => self.r4.val,
            R5 | Ex => self.r5.val,
            R6 | Fx => self.r6.val,
            R7 | Sp => self.sp.val,
        };
        let ra = self.get_ra();
        ra.val = rb_val;
    }

    /// ra := mem[addr], with the char-in and end-of-program addresses
    /// intercepted as memory-mapped I/O.
    fn load(&mut self) -> Result<(), ArchError> {
        let addr = self.addr;
        let ra = match self.ra {
            _R0 | Pc => &mut self.pc,
            R1 | Ax => &mut self.r1,
            R2 | Bx => &mut self.r2,
            R3 | Cx => &mut self.r3,
            R4 | Dx => &mut self.r4,
            R5 | Ex => &mut self.r5,
            R6 | Fx => &mut self.r6,
            R7 | Sp => &mut self.sp,
        };
        if addr == CHAR_IN_ADDR {
            ra.val = input_char();
        } else if addr == END_PROG_ADDR {
            process::exit(ra.val as i32);
        } else {
            ra.val = self.mem.read(addr)?;
        }
        Ok(())
    }

    /// mem[addr] := ra, with char-out and end-of-program intercepted.
    fn stor(&mut self) -> Result<(), ArchError> {
        let ra = match self.ra {
            _R0 | Pc => &mut self.pc,
            R1 | Ax => &mut self.r1,
            R2 | Bx => &mut self.r2,
            R3 | Cx => &mut self.r3,
            R4 | Dx => &mut self.r4,
            R5 | Ex => &mut self.r5,
            R6 | Fx => &mut self.r6,
            R7 | Sp => &mut self.sp,
        };
        if self.addr == CHAR_OUT_ADDR {
            output_char(ra.val);
        } else if self.addr == END_PROG_ADDR {
            process::exit(ra.val as i32);
        } else {
            self.mem.write(self.addr, ra.val)?;
        }
        Ok(())
    }

    /// ra := mem[rb] (pointer load), with memory-mapped I/O intercepted.
    fn pload(&mut self) -> Result<(), ArchError> {
        let rb = self.rb_val();
        let ra = match self.ra {
            _R0 | Pc => &mut self.pc,
            R1 | Ax => &mut self.r1,
            R2 | Bx => &mut self.r2,
            R3 | Cx => &mut self.r3,
            R4 | Dx => &mut self.r4,
            R5 | Ex => &mut self.r5,
            R6 | Fx => &mut self.r6,
            R7 | Sp => &mut self.sp,
        };
        if rb == CHAR_IN_ADDR {
            ra.val = input_char();
        } else if rb == END_PROG_ADDR {
            process::exit(ra.val as i32);
        } else {
            ra.val = self.mem.read(rb)?;
        }
        Ok(())
    }

    /// mem[ra] := rb (pointer store), with memory-mapped I/O intercepted.
    fn pstor(&mut self) -> Result<(), ArchError> {
        let rb = self.rb_val();
        let ra = match self.ra {
            _R0 | Pc => &mut self.pc,
            R1 | Ax => &mut self.r1,
            R2 | Bx => &mut self.r2,
            R3 | Cx => &mut self.r3,
            R4 | Dx => &mut self.r4,
            R5 | Ex => &mut self.r5,
            R6 | Fx => &mut self.r6,
            R7 | Sp => &mut self.sp,
        };
        if ra.val == CHAR_OUT_ADDR {
            output_char(rb);
        } else if ra.val == END_PROG_ADDR {
            // NOTE(review): exits with ra.val (0xFFFF), not the stored value
            // rb — confirm the intended exit-code semantics here.
            process::exit(ra.val as i32);
        } else {
            self.mem.write(ra.val, rb)?;
        }
        Ok(())
    }

    /// Pre-decrement SP, then mem[SP] := ra. Stack grows downward.
    fn push(&mut self) -> Result<(), ArchError> {
        let ra = self.get_ra();
        let temp = ra.val;
        self.sp.val -= 1;
        self.mem.write(self.sp.val, temp)?;
        Ok(())
    }

    /// ra := mem[SP], then post-increment SP.
    fn pop(&mut self) -> Result<(), ArchError> {
        let addr = self.sp.val;
        let ra = match self.ra {
            _R0 | Pc => &mut self.pc,
            R1 | Ax => &mut self.r1,
            R2 | Bx => &mut self.r2,
            R3 | Cx => &mut self.r3,
            R4 | Dx => &mut self.r4,
            R5 | Ex => &mut self.r5,
            R6 | Fx => &mut self.r6,
            R7 | Sp => &mut self.sp,
        };
        ra.val = self.mem.read(addr)?;
        self.sp.val += 1;
        Ok(())
    }

    // Arithmetic/logic handlers: ra := ra <op> rb (or imm for shifts).
    // NOTE(review): +=, -=, *= etc. on u16 panic on overflow in debug
    // builds; wrapping semantics may be intended for a 16-bit VM — confirm.
    fn add(&mut self) {
        let rb = self.rb_val();
        let ra = self.get_ra();
        ra.val += rb;
    }

    fn sub(&mut self) {
        let rb = self.rb_val();
        let ra = self.get_ra();
        ra.val -= rb;
    }

    fn mult(&mut self) {
        let rb = self.rb_val();
        let ra = self.get_ra();
        ra.val *= rb;
    }

    fn div(&mut self) {
        let rb = self.rb_val();
        let ra = self.get_ra();
        ra.val /= rb;
    }

    fn divmod(&mut self) {
        let rb = self.rb_val();
        let ra = self.get_ra();
        ra.val %= rb;
    }

    /// ra := two's-complement negation of ra.
    fn neg(&mut self) {
        let ra = self.get_ra();
        ra.val = -(ra.val as i16) as u16;
    }

    fn inc(&mut self) {
        let ra = self.get_ra();
        ra.val += 1;
    }

    fn dec(&mut self) {
        let ra = self.get_ra();
        ra.val -= 1;
    }

    fn and(&mut self) {
        let rb = self.rb_val();
        let ra = self.get_ra();
        ra.val &= rb;
    }

    fn or(&mut self) {
        let rb = self.rb_val();
        let ra = self.get_ra();
        ra.val |= rb;
    }

    fn xor(&mut self) {
        let rb = self.rb_val();
        let ra = self.get_ra();
        ra.val ^= rb;
    }

    fn inv(&mut self) {
        let ra = self.get_ra();
        ra.val = !ra.val;
    }

    fn shl(&mut self) {
        let imm = self.imm;
        let ra = self.get_ra();
        ra.val <<= imm;
    }

    fn shr(&mut self) {
        let imm = self.imm;
        let ra = self.get_ra();
        ra.val >>= imm;
    }

    /// Pushes ra (return linkage per the ISA's convention), then jumps to
    /// the instruction's address field.
    fn call(&mut self) -> Result<(), ArchError> {
        self.push()?;
        self.pc.val = self.addr;
        Ok(())
    }

    // Conditional jumps: signed comparison of ra against rb; on success PC
    // is set to the instruction's address field.
    fn jl(&mut self) {
        let rb = self.rb_val();
        let ra = self.get_ra();
        if (ra.val as i16) < (rb as i16) {
            self.pc.val = self.addr;
        }
    }

    fn jle(&mut self) {
        let rb = self.rb_val();
        let ra = self.get_ra();
        if (ra.val as i16) <= (rb as i16) {
            self.pc.val = self.addr;
        }
    }

    fn je(&mut self) {
        let rb = self.rb_val();
        let ra = self.get_ra();
        if (ra.val as i16) == (rb as i16) {
            self.pc.val = self.addr;
        }
    }

    fn jne(&mut self) {
        let rb = self.rb_val();
        let ra = self.get_ra();
        if (ra.val as i16) != (rb as i16) {
            self.pc.val = self.addr;
        }
    }

    fn jge(&mut self) {
        let rb = self.rb_val();
        let ra = self.get_ra();
        if (ra.val as i16) >= (rb as i16) {
            self.pc.val = self.addr;
        }
    }

    fn jg(&mut self) {
        let rb = self.rb_val();
        let ra = self.get_ra();
        if (ra.val as i16) > (rb as i16) {
            self.pc.val = self.addr;
        }
    }
}

/// Blocking read of one character from stdin.
// NOTE(review): `unwrap` panics if the line is empty (EOF); only the first
// byte of the line is consumed — confirm that's acceptable for this ISA.
fn input_char() -> u16 {
    let input = io::stdin();
    let mut buf: String = String::new();
    let _line = input.read_line(&mut buf);
    buf.bytes().next().unwrap() as u16
}

/// Writes the low byte of `chr` to stdout and flushes.
fn output_char(chr: u16) {
    let mut output = io::stdout();
    let buf: Vec<u8> = vec![chr as u8];
    output.write(&buf).unwrap();
    output.flush().unwrap();
}
//! The `tvu` module implements the Transaction Validation Unit, a
//! 3-stage transaction validation pipeline in software.
//!
//! ```text
//!      .------------------------------------------.
//!      | TVU                                      |
//!      |                                          |
//!      |                                          |  .------------.
//!      |  .------------------------>|  Validators |
//!      |  .-------.                 |             |  `------------`
//! .--------.  |  |      |  .----+---.    .-----------. |
//! | Leader |--------->| Blob  |  | Window |    | Replicate | |
//! `--------`  |  | Fetch |-->| Stage  |-->|   Stage   | |
//! .------------. |  | Stage |  |        |    |           | |
//! | Validators |----->|       |  `--------`    `----+------` |
//! `------------` |  `-------`                       |       |
//!      |                                            |       |
//!      |                                            |       |
//!      |                                            |       |
//!      `--------------------------------|---------`
//!                                       |
//!                                       v
//!                                    .------.
//!                                    | Bank |
//!                                    `------`
//! ```
//!
//! 1. Fetch Stage
//! - Incoming blobs are picked up from the replicate socket and repair socket.
//! 2. Window Stage
//! - Blobs are windowed until a contiguous chunk is available.  This stage also repairs and
//! retransmits blobs that are in the queue.
//! 3. Replicate Stage
//! - Transactions in blobs are processed and applied to the bank.
//! - TODO We need to verify the signatures in the blobs.

use bank::Bank;
use blob_fetch_stage::BlobFetchStage;
use crdt::Crdt;
use packet;
use replicate_stage::ReplicateStage;
use std::net::UdpSocket;
use std::sync::atomic::AtomicBool;
use std::sync::{Arc, RwLock};
use std::thread::JoinHandle;
use streamer;
use window_stage::WindowStage;

/// Handle to the running TVU pipeline: the join handles of every thread
/// spawned by its three stages.
pub struct Tvu {
    pub thread_hdls: Vec<JoinHandle<()>>,
}

impl Tvu {
    /// This service receives messages from a leader in the network and processes the transactions
    /// on the bank state.
    /// # Arguments
    /// * `bank` - The bank state.
    /// * `crdt` - The crdt state.
    /// * `window` - The window state.
    /// * `replicate_socket` - my replicate socket
    /// * `repair_socket` - my repair socket
    /// * `retransmit_socket` - my retransmit socket
    /// * `exit` - The exit signal.
    pub fn new(
        bank: Arc<Bank>,
        crdt: Arc<RwLock<Crdt>>,
        window: streamer::Window,
        replicate_socket: UdpSocket,
        repair_socket: UdpSocket,
        retransmit_socket: UdpSocket,
        exit: Arc<AtomicBool>,
    ) -> Self {
        let blob_recycler = packet::BlobRecycler::default();
        // Stage 1: pull blobs off the replicate and repair sockets.
        let fetch_stage = BlobFetchStage::new_multi_socket(
            vec![replicate_socket, repair_socket],
            exit.clone(),
            blob_recycler.clone(),
        );
        //TODO
        //the packets coming out of blob_receiver need to be sent to the GPU and verified
        //then sent to the window, which does the erasure coding reconstruction
        // Stage 2: order/repair/retransmit blobs; feeds off the fetch stage's
        // receiver channel.
        let window_stage = WindowStage::new(
            crdt,
            window,
            retransmit_socket,
            exit.clone(),
            blob_recycler.clone(),
            fetch_stage.blob_receiver,
        );
        // Stage 3: apply the windowed transactions to the bank.
        let replicate_stage =
            ReplicateStage::new(bank, exit, window_stage.blob_receiver, blob_recycler);
        // Collect every stage's thread handles so callers can join them all.
        let mut threads = vec![replicate_stage.thread_hdl];
        threads.extend(fetch_stage.thread_hdls.into_iter());
        threads.extend(window_stage.thread_hdls.into_iter());
        Tvu {
            thread_hdls: threads,
        }
    }
}

#[cfg(test)]
pub mod tests {
    use bank::Bank;
    use bincode::serialize;
    use crdt::{Crdt, TestNode};
    use entry::Entry;
    use hash::{hash, Hash};
    use logger;
    use mint::Mint;
    use ncp::Ncp;
    use packet::BlobRecycler;
    use result::Result;
    use signature::{KeyPair, KeyPairUtil};
    use std::collections::VecDeque;
    use std::net::UdpSocket;
    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::mpsc::channel;
    use std::sync::{Arc, RwLock};
    use std::time::Duration;
    use streamer;
    use transaction::Transaction;
    use tvu::Tvu;

    /// Spins up an Ncp (gossip) service around `crdt` listening on `listen`,
    /// returning it together with its shared window.
    fn new_ncp(
        crdt: Arc<RwLock<Crdt>>,
        listen: UdpSocket,
        exit: Arc<AtomicBool>,
    ) -> Result<(Ncp, streamer::Window)> {
        let window = streamer::default_window();
        let send_sock = UdpSocket::bind("0.0.0.0:0").expect("bind 0");
        let ncp = Ncp::new(crdt, window.clone(), listen, send_sock, exit)?;
        Ok((ncp, window))
    }

    /// Test that message sent from leader to target1 and replicated to target2
    #[test]
    fn test_replicate() {
        logger::setup();
        let leader = TestNode::new();
        let target1 = TestNode::new();
        let target2 = TestNode::new();
        let exit = Arc::new(AtomicBool::new(false));

        //start crdt_leader
        let mut crdt_l = Crdt::new(leader.data.clone());
        crdt_l.set_leader(leader.data.id);

        let cref_l = Arc::new(RwLock::new(crdt_l));
        let dr_l = new_ncp(cref_l, leader.sockets.gossip, exit.clone()).unwrap();

        //start crdt2
        let mut crdt2 = Crdt::new(target2.data.clone());
        crdt2.insert(&leader.data);
        crdt2.set_leader(leader.data.id);
        let leader_id = leader.data.id;
        let cref2 = Arc::new(RwLock::new(crdt2));
        let dr_2 = new_ncp(cref2, target2.sockets.gossip, exit.clone()).unwrap();

        // setup some blob services to send blobs into the socket
        // to simulate the source peer and get blobs out of the socket to
        // simulate target peer
        let recv_recycler = BlobRecycler::default();
        let resp_recycler = BlobRecycler::default();
        let (s_reader, r_reader) = channel();
        let t_receiver = streamer::blob_receiver(
            exit.clone(),
            recv_recycler.clone(),
            target2.sockets.replicate,
            s_reader,
        ).unwrap();

        // simulate leader sending messages
        let (s_responder, r_responder) = channel();
        let t_responder = streamer::responder(
            leader.sockets.requests,
            exit.clone(),
            resp_recycler.clone(),
            r_responder,
        );

        let starting_balance = 10_000;
        let mint = Mint::new(starting_balance);
        let replicate_addr = target1.data.replicate_addr;
        let bank = Arc::new(Bank::new(&mint));

        //start crdt1
        let mut crdt1 = Crdt::new(target1.data.clone());
        crdt1.insert(&leader.data);
        crdt1.set_leader(leader.data.id);
        let cref1 = Arc::new(RwLock::new(crdt1));
        let dr_1 = new_ncp(cref1.clone(), target1.sockets.gossip, exit.clone()).unwrap();

        let tvu = Tvu::new(
            bank.clone(),
            cref1,
            dr_1.1,
            target1.sockets.replicate,
            target1.sockets.repair,
            target1.sockets.retransmit,
            exit.clone(),
        );

        let mut alice_ref_balance = starting_balance;
        let mut msgs = VecDeque::new();
        let mut cur_hash = Hash::default();
        let num_blobs = 10;
        let transfer_amount = 501;
        let bob_keypair = KeyPair::new();
        // Build `num_blobs` blobs, each carrying one empty tick entry and one
        // entry with a single transfer from the mint to bob.
        for i in 0..num_blobs {
            let b = resp_recycler.allocate();
            let b_ = b.clone();
            let mut w = b.write().unwrap();
            w.set_index(i).unwrap();
            w.set_id(leader_id).unwrap();

            let entry0 = Entry::new(&cur_hash, i, vec![]);
            bank.register_entry_id(&cur_hash);
            cur_hash = hash(&cur_hash);

            let tx0 = Transaction::new(
                &mint.keypair(),
                bob_keypair.pubkey(),
                transfer_amount,
                cur_hash,
            );
            bank.register_entry_id(&cur_hash);
            cur_hash = hash(&cur_hash);
            let entry1 = Entry::new(&cur_hash, i + num_blobs, vec![tx0]);
            bank.register_entry_id(&cur_hash);
            cur_hash = hash(&cur_hash);

            alice_ref_balance -= transfer_amount;

            let serialized_entry = serialize(&vec![entry0, entry1]).unwrap();

            w.data_mut()[..serialized_entry.len()].copy_from_slice(&serialized_entry);
            w.set_size(serialized_entry.len());
            w.meta.set_addr(&replicate_addr);
            drop(w);
            msgs.push_back(b_);
        }

        // send the blobs into the socket
        s_responder.send(msgs).expect("send");

        // receive retransmitted messages
        let timer = Duration::new(1, 0);
        let mut msgs: Vec<_> = Vec::new();
        while let Ok(msg) = r_reader.recv_timeout(timer) {
            trace!("msg: {:?}", msg);
            msgs.push(msg);
        }

        // Balances must reflect every transfer having been applied.
        let alice_balance = bank.get_balance(&mint.keypair().pubkey()).unwrap();
        assert_eq!(alice_balance, alice_ref_balance);

        let bob_balance = bank.get_balance(&bob_keypair.pubkey()).unwrap();
        assert_eq!(bob_balance, starting_balance - alice_ref_balance);

        // Shut everything down and join all spawned threads.
        exit.store(true, Ordering::Relaxed);
        for t in tvu.thread_hdls {
            t.join().expect("join");
        }
        for t in dr_l.0.thread_hdls {
            t.join().expect("join");
        }
        for t in dr_2.0.thread_hdls {
            t.join().expect("join");
        }
        for t in dr_1.0.thread_hdls {
            t.join().expect("join");
        }
        t_receiver.join().expect("join");
        t_responder.join().expect("join");
    }
}
use crate::file::*;
use crate::raft::*;
use std::collections::BTreeMap;
use std::rc::Rc;

/// Used for when we are the leader and appending messages.
// NOTE(review): currently a stub — `open` below has an empty body and the
// struct is never constructed in this module.
#[allow(dead_code)]
pub(crate) struct MessageWriteFileAppend {
    file_storage_directory: String,
    file_prefix: String,
    path: String,
    file_id: u32,
    file_size: usize,
    start_message_id: u64,
    writer: MessageWriteAppend,
}

#[allow(dead_code)]
impl MessageWriteFileAppend {
    /// The file directory to open.
    /// # Arguments
    /// `file_storage_directory` - The file storage directory.
    /// `file_prefix` - The file prefix.
    /// `file_size` - The file size for the messages.
    // TODO(review): unimplemented stub — returns `()` rather than `Self`.
    pub(crate) fn open(file_storage_directory: String, file_prefix: String, file_id: u32, file_size: usize) {}
}

/// Used for random writes.
#[allow(dead_code)]
pub(crate) struct MessageWriteFileSeek {
    path: String,
    file_id: u32,
    writer: MessageFileStoreWrite,
}

/// The set of commit-log files on disk, keyed by file id.
#[allow(dead_code)]
pub(crate) struct MessageWriteCollection {
    /// The collection of files.
    files: BTreeMap<u32, Rc<MessageFileInfo>>,
    /// The storage directory of the files.
    file_storage_directory: String,
    /// The file prefix.
    file_prefix: String,
    /// The size of the message to create.
    file_size: usize,
}

impl MessageWriteCollection {
    /// Opens a directory and reads in the files containing the messages.
    /// # Arguments
    /// `file_storage_directory` - The directory containing the files.
    /// `file_prefix` - The file prefix.
    /// `file_size` - The size of the file being created.
    /// # Returns
    /// The message write collection.
    #[allow(dead_code)]
    pub(crate) fn open_dir(
        file_storage_directory: &str,
        file_prefix: &str,
        file_size: usize,
    ) -> crate::file::Result<Self> {
        let directory_path = Path::new(file_storage_directory);
        if !directory_path.is_dir() || !directory_path.exists() {
            create_dir(directory_path)?;
        }
        // Only files named "<prefix>.<commit postfix>..." belong to this log.
        let starts_with_commit = format!("{}.{}", file_prefix, COMMIT_FILE_POSTIX);
        let mut collection = BTreeMap::new();
        for entry in read_dir(directory_path)? {
            let file = entry?;
            let path: PathBuf = file.path();
            if path.is_file() {
                // Parse the numeric file id out of each matching file name
                // and index its metadata; non-matching entries are skipped.
                match path.file_name() {
                    Some(p) => match p.to_str() {
                        Some(p) => {
                            if p.starts_with(&starts_with_commit) {
                                match read_file_id(p) {
                                    Some(id) => {
                                        let message_file =
                                            get_message_file_info(path.to_str().unwrap())?;
                                        collection.insert(id, Rc::new(message_file));
                                    }
                                    _ => {}
                                }
                            }
                        }
                        _ => {}
                    },
                    _ => {}
                }
            }
        }
        Ok(MessageWriteCollection {
            files: collection,
            file_prefix: file_prefix.to_string(),
            file_storage_directory: file_storage_directory.to_string(),
            file_size,
        })
    }

    /// Used to get a file with the specified id.
    /// # Arguments
    /// `file_id` - The id of the file to get.
    /// # Returns
    /// The file message file info.
    #[allow(dead_code)]
    pub(crate) fn get_message_file<'a>(&'a self, file_id: &u32) -> Option<&'a Rc<MessageFileInfo>> {
        self.files.get(file_id)
    }

    /// Used to get the current append file.
    /// # Arguments
    /// `max_message_id` - The maximum message id to stop at.
    /// # Returns
    /// A tuple containing the file to write to and the message file information.
    #[allow(dead_code)]
    pub(crate) fn get_current_append<'a>(
        &'a mut self,
        max_message_id: u64,
    ) -> crate::file::Result<MessageWriteAppend> {
        if self.files.is_empty() {
            // No files yet: create file 1 and start appending there.
            let (file_info, writer) = new_message_file(
                &self.file_storage_directory,
                &self.file_prefix,
                1,
                max_message_id,
                self.file_size,
            )?;
            let file_info = Rc::new(file_info);
            self.files.insert(1, file_info.clone());
            let r = MessageWriteAppend::open(file_info, max_message_id, writer)?;
            match r {
                OpenFileResult::Opened(app) => Ok(app),
                _ => panic!("Creating a new file should have room!"),
            }
        } else {
            // Walk files newest-first for the first one whose starting
            // message id does not exceed `max_message_id`.
            let mut iter = self.files.iter().rev();
            loop {
                if let Some((_, file)) = iter.next() {
                    let file: &Rc<MessageFileInfo> = file;
                    if file.message_id_start <= max_message_id {
                        let writer = file.open_write(&self.file_size)?;
                        match MessageWriteAppend::open(file.clone(), max_message_id, writer)? {
                            OpenFileResult::Full => {
                                // That file is full: roll over to the next id,
                                // opening it if it exists or creating it fresh.
                                let file_id = file.file_id + 1;
                                if let Some(file) = self.files.get(&file_id) {
                                    let writer = file.open_write(&self.file_size)?;
                                    if let OpenFileResult::Opened(w) =
                                        MessageWriteAppend::open(file.clone(), max_message_id, writer)?
                                    {
                                        break Ok(w)
                                    } else {
                                        panic!("File is saying full when we should be at the top!");
                                    }
                                } else {
                                    let (file_info, writer) = new_message_file(
                                        &self.file_storage_directory,
                                        &self.file_prefix,
                                        file_id,
                                        max_message_id,
                                        self.file_size,
                                    )?;
                                    let file_info = Rc::new(file_info);
                                    self.files.insert(file_id, file_info.clone());
                                    if let OpenFileResult::Opened(w) =
                                        MessageWriteAppend::open(file_info, max_message_id, writer)?
                                    {
                                        break Ok(w)
                                    } else {
                                        panic!("Creating a new file should have room!")
                                    }
                                }
                            }
                            OpenFileResult::Opened(w) => break Ok(w),
                        }
                    }
                } else {
                    panic!("Can't find a starting point!");
                }
            }
        }
    }
}

/// Represents a message write append.
#[derive(Debug)]
pub(crate) struct MessageWriteAppend {
    last_commit_id: u64,
    writer: MessageFileStoreWrite,
    // Byte offset at which the next message will be written.
    next_pos: usize,
    message_file_info: Rc<MessageFileInfo>,
}

/// Outcome of opening a file for appending: either it still has room
/// (`Opened`) or the caller must roll to the next file (`Full`).
#[derive(Debug)]
enum OpenFileResult {
    Full,
    Opened(MessageWriteAppend),
}

impl MessageWriteAppend {
    /// Used to open file at the end of the position.
    /// # Arguments
    /// `path` - The path of the file to open.
    /// `last_committed_id` - The last committed id.
    /// `writer` - The writer associated with the file.
    /// # Returns
    /// Either the message append or full.  If its full need to go to the next file.
    #[allow(dead_code)]
    fn open(
        msg_file: Rc<MessageFileInfo>,
        last_commited_id: u64,
        writer: MessageFileStoreWrite,
    ) -> crate::file::Result<OpenFileResult> {
        let mut pos = 0;
        let buffer = unsafe { MessageFileStore::open_readonly(&msg_file.path)? };
        // Scan messages from the start until we pass the last committed id
        // (append point found), hit a full-file marker, or run out of data.
        loop {
            match buffer.read_new(pos) {
                Ok(msg) => {
                    if msg.message_id() >= last_commited_id {
                        pos = msg.next_pos();
                        if buffer.is_end(pos) {
                            break Ok(OpenFileResult::Full);
                        } else {
                            break Ok(OpenFileResult::Opened(Self {
                                writer,
                                last_commit_id: last_commited_id,
                                next_pos: pos,
                                message_file_info: msg_file,
                            }));
                        }
                    } else if msg.message_id() == std::u64::MAX {
                        // u64::MAX is presumably a sentinel marking the file
                        // as sealed/full — confirm against the file format.
                        break Ok(OpenFileResult::Full);
                    } else {
                        pos = msg.next_pos();
                    }
                }
                Err(e) => match e {
                    crate::file::Error::NoMessage => {
                        // Reached the zeroed tail: append from here.
                        break Ok(OpenFileResult::Opened(Self {
                            writer,
                            last_commit_id: last_commited_id,
                            next_pos: pos,
                            message_file_info: msg_file,
                        }))
                    }
                    crate::file::Error::PositionOutOfRange(_) => break Ok(OpenFileResult::Full),
                    _ => Err(e)?,
                },
            }
        }
    }

    /// Used to append a message to the current log.
    // TODO(review): unimplemented stub.
    pub(crate) fn append(&mut self, message_id: u64, msg_type_id: u32, bytes: &[u8]) {
    }
}

/// Used to crate a new message file.
/// # Arguments
fn new_message_file(
    file_storage_directory: &str,
    file_prefix: &str,
    file_id: u32,
    message_id: u64,
    file_size: usize,
) -> crate::file::Result<(MessageFileInfo, MessageFileStoreWrite)> {
    let path = create_commit_name(file_storage_directory, file_prefix, &file_id);
    let path_p = Path::new(&path);
    if path_p.exists() {
        // Check and see if we are able to read the file in.
        let (read, _) = unsafe { MessageFileStore::new(&path, file_size)? };
        match read.read(0, |_, _, _| {}) {
            Ok(_) => Err(crate::file::Error::AlreadyExists)?,
            _ => {
                // Not able to so delete the file so we can create it.
                remove_file(&path)?;
            }
        }
    }
    let (_, writer) = unsafe { MessageFileStore::new(&path, file_size)? };
    Ok((
        MessageFileInfo {
            path: path.to_string(),
            file_id,
            message_id_start: message_id,
        },
        writer,
    ))
}

/// Gets the information about the file.
/// # Arguments
/// `path` - The path of the file to get the information from.
fn get_message_file_info(path: &str) -> crate::file::Result<MessageFileInfo> {
    match read_file_id(&path) {
        Some(id) => {
            let (read, _) = unsafe { MessageFileStore::open(&path)? };
            let mut msg_id: u64 = 0;
            // Read the first message to learn this file's starting id.
            let result = read.read(0, |_, id, _| {
                if id > 0 {
                    msg_id = id;
                }
            })?;
            Ok(MessageFileInfo {
                path: path.to_owned(),
                file_id: id,
                message_id_start: msg_id,
            })
        }
        _ => Err(crate::file::Error::InvalidFile),
    }
}

#[cfg(test)]
mod test {
    use crate::raft::write_message::*;
    use serial_test::serial;
    use std::fs::*;
    use std::path::Path;

    const FILE_STORAGE_DIRECTORY: &str = "../../../cargo/tests/message_write_test";
    const FILE_PREFIX: &str = "write";

    /// Wipes and recreates the shared test directory (tests are #[serial]
    /// because they all share it).
    fn cleanup() {
        let path = Path::new(FILE_STORAGE_DIRECTORY);
        if path.exists() {
            remove_dir_all(&path).unwrap();
        }
        create_dir(&path).unwrap();
    }

    #[test]
    #[serial]
    pub fn new_file_test_none_exists() {
        cleanup();
        let (file_info, r) =
            new_message_file(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 1, 1, 32 * 1000).unwrap();
    }

    #[test]
    #[serial]
    pub fn new_file_test_exists_empty() {
        cleanup();
        // Recreating over an empty (unreadable) file succeeds.
        let (file_info, r) =
            new_message_file(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 1, 1, 32 * 1000).unwrap();
        let (file_info, r) =
            new_message_file(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 1, 1, 32 * 1000).unwrap();
    }

    #[test]
    #[serial]
    pub fn new_file_test_not_empty() {
        cleanup();
        let (file_info, r) =
            new_message_file(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 1, 1, 32 * 1000).unwrap();
        r.write(0, 1, 1, &[2; 50]).unwrap();
        // A file that already holds a readable message must not be recreated.
        match new_message_file(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 1, 1, 32 * 1000) {
            Ok(_) => assert!(false),
            Err(e) => match e {
                crate::file::Error::AlreadyExists => {
                    // Success
                }
                _ => {
                    assert!(false);
                }
            },
        }
    }

    #[test]
    #[serial]
    pub fn read_existing_file() {
        cleanup();
        {
            let (file_info, r) =
                new_message_file(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 1, 1, 32 * 1000).unwrap();
            r.write(0, 1, 1, &[2; 50]).unwrap();
            r.flush().unwrap();
        }
        let path = create_commit_name(FILE_STORAGE_DIRECTORY, FILE_PREFIX, &1);
        let file_info = get_message_file_info(&path).unwrap();
        assert_eq!(file_info.file_id, 1);
        assert_eq!(file_info.message_id_start, 1);
        assert_eq!(path, file_info.path);
    }

    #[test]
    #[serial]
    pub fn get_current_file_collection() {
        cleanup();
        {
            let (file_info, r) =
                new_message_file(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 1, 1, 32 * 1000).unwrap();
            r.write(0, 1, 1, &[2; 50]).unwrap();
            r.flush().unwrap();
            let (file_info, r) =
                new_message_file(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 2, 1, 32 * 1000).unwrap();
            r.write(0, 1, 2, &[2; 50]).unwrap();
            r.flush().unwrap();
        }
        let message_file =
            MessageWriteCollection::open_dir(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 32 * 1000)
                .unwrap();
        assert_eq!(message_file.files.len(), 2);
        let file = message_file.get_message_file(&1).unwrap();
        assert_eq!(file.file_id, 1);
        assert_eq!(file.message_id_start, 1);
        let file = message_file.get_message_file(&2).unwrap();
        assert_eq!(file.file_id, 2);
        assert_eq!(file.message_id_start, 2);
    }

    #[test]
    #[serial]
    pub fn get_current_file_empty() {
        // open_dir must create the directory when it is missing.
        remove_dir_all(FILE_STORAGE_DIRECTORY).unwrap();
        let message_file =
            MessageWriteCollection::open_dir(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 32 * 1000)
                .unwrap();
    }

    #[test]
    #[serial]
    pub fn new_file_test() {
        cleanup();
        let mut message_file =
            MessageWriteCollection::open_dir(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 32 * 1000)
                .unwrap();
        let writer = message_file.get_current_append(1).unwrap();
        assert_eq!(1, writer.message_file_info.file_id);
    }

    #[test]
    #[serial]
    pub fn existing_file_test() {
        cleanup();
        {
            let (file_info, r) =
                new_message_file(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 1, 1, 32 * 1000).unwrap();
            r.write(0, 1, 1, &[2; 50]).unwrap();
            r.flush().unwrap();
            let (file_info, r) =
                new_message_file(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 2, 1, 32 * 1000).unwrap();
            r.write(0, 1, 2, &[2; 50]).unwrap();
            r.flush().unwrap();
        }
        let mut message_file =
            MessageWriteCollection::open_dir(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 32 * 1000)
                .unwrap();
        let writer = message_file.get_current_append(1).unwrap();
        assert_eq!(1, writer.message_file_info.file_id);
    }

    #[test]
    #[serial]
    pub fn open_file_test() {
        cleanup();
        let (file_info, r) =
            new_message_file(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 1, 1, 32 * 1000).unwrap();
        r.write(0, 1, 1, &[2; 50]).unwrap();
        r.flush().unwrap();
        let writer = MessageWriteAppend::open(Rc::new(file_info), 1, r).unwrap();
        match writer {
            OpenFileResult::Full => assert!(false),
            OpenFileResult::Opened(append) => assert!(append.next_pos > 0),
        }
    }

    #[test]
    #[serial]
    pub fn open_file_full_test() {
        cleanup();
        let buffer = [2; 31960];
        let (file_info, r) =
            new_message_file(FILE_STORAGE_DIRECTORY, FILE_PREFIX, 1, 1, 32 * 100).unwrap();
        // Fill the file so that opening it reports Full.
        let next_pos = r.write(0, 1, 1, &[2; 3150]).unwrap();
        r.write(next_pos, 1, 2, &[2; 30]).unwrap_or_default();
        r.flush().unwrap();
        let writer = MessageWriteAppend::open(Rc::new(file_info), 1, r).unwrap();
        match writer {
            OpenFileResult::Full => {}
            OpenFileResult::Opened(a) => {
                assert_eq!(0, a.next_pos);
            }
        }
    }
}
use ::ftdi_library::ftdi::ftdi_context::ftdi_context; use ::ftdi_library::ftdi::ftdi_device_list::ftdi_device_list; use log::{debug, info, error}; use log4rs; use signal_hook; use std::sync::Arc; use std::{ {thread, time}, sync::{ atomic, atomic::{AtomicBool, Ordering} } }; use clap::{value_t, Arg, App}; use ftdi_library::ftdi::constants::{ftdi_interface, ftdi_stopbits_type, ftdi_bits_type, ftdi_parity_type}; use ftdi_library::ftdi::core::FtdiError; use snafu::{GenerateBacktrace}; #[cfg(target_os = "linux")] const PATH_TO_YAML_LOG_CONFIG:&'static str = "log4rs.yaml"; // string path to log config #[cfg(any(target_os = "windows", target_os = "macos"))] const PATH_TO_YAML_LOG_CONFIG:&'static str = "log4rs.yaml"; #[cfg(not(windows))] // not for windows ! fn main() -> Result<(), Box<dyn std::error::Error>> { // construction for command line parameters let matches = App::new("Simple serial test check read/write.") .version("v 0.1") .author("Blandger <blandger@gmail.com>") .about("FTDI serial read/write test") .arg(Arg::with_name("interface") .short("i") .long("interface") .value_name("INTERFACE") .help("INTERFACE_ANY | A | B | C | D, values: 0 - 4") .default_value("0") ) .arg(Arg::with_name("v") .short("v") .long("vendorId") .value_name("Vendor ID") .help("Vendor ID usb value, default is '0403' for FTDI") .default_value("0x0403")) .arg(Arg::with_name("p") .short("p") .long("productId") .value_name("Product ID") .help("Product ID usb value, usual FTDI values are :0x6001, 0x6010, 0x6011, 0x6014, 0x6015") .required(true)) .arg(Arg::with_name("b") .short("b") .long("baudrate") .value_name("Baud rate / speed") .help("Baudrate usb value, default is '115200'") .default_value("115200")) .arg(Arg::with_name("w") .short("w") .long("pattern") .value_name("pattern one byte value to write") .help("Write a pattern as one byte value")) .get_matches(); // try to load yaml logging config file match log4rs::init_file(PATH_TO_YAML_LOG_CONFIG, Default::default()) { Ok(_) => println!("log4rs 
config file is found - OK"), Err(error) => println!("Log config not found as \'{}\', error: \'{}\'", PATH_TO_YAML_LOG_CONFIG, error), } info!("booting up..."); // validate incoming command line parameters let interface = value_t!(matches.value_of("i"), ftdi_interface).unwrap_or(ftdi_interface::INTERFACE_ANY); // let vid = value_t!(matches.value_of("v"), u16).unwrap_or_else(|e| { println!("vid Error = {:?}", e); e.exit() } ); let mut vid= 0; if matches.is_present("v") { vid = ftdi_context::parse_number_str(matches.value_of("v").unwrap()).unwrap_or_default(); } // let pid = value_t!(matches.value_of("p"), u16).unwrap_or_else(|e| { println!("pid Error = {:?}", e); e.exit() } ); let mut pid = 0; if matches.is_present("p") { pid = ftdi_context::parse_number_str(matches.value_of("p").unwrap()).unwrap_or_default(); } let baudrate = value_t!(matches.value_of("b"), i32).unwrap_or(115200 ); // if tha is READ or WRITE operation ? let do_write = matches.is_present("w"); let pattern_to_write = value_t!(matches.value_of("w"), u8).unwrap_or(0xff); // setup to default 255 value if pattern_to_write > 0xff { let error = FtdiError::UsbCommonError { code: -80, message: "a pattern to write should be a valid byte (u8) value".to_string(), backtrace: GenerateBacktrace::generate() }; error!("{}", error); return Err(Box::new(error)); } println!("Usage with values: i='{:?}', vid:pid='{:?}:{:?}', b={:?}, write='{}', w='{}'\n", interface, vid, pid, baudrate, do_write, pattern_to_write); let mut buffer:Vec<u8> = Vec::with_capacity(1024); if do_write { buffer = (0..1024).map(|_| pattern_to_write).collect(); } let mut ftdi = ftdi_context::new_with_log_level(Some(4))?; info!("ftdi context in created - OK"); if vid != 0 && pid != 0 && interface != ftdi_interface::INTERFACE_ANY { ftdi.ftdi_set_interface(ftdi_interface::INTERFACE_ANY); info!("start find all usb device(s)..."); let mut ftdi_list = ftdi_device_list::new(&ftdi)?; let list = ftdi_list.ftdi_usb_find_all(&mut ftdi,0, 0)?; info!("Number of 
FTDI devices found: [{}] - OK", list.number_found_devices); info!("List of FTDI usb devices found: \'{:?}\' - OK", list.system_device_list); for (index, device) in list.system_device_list.iter().enumerate() { info!("Checking device: [{}]", index); let manufacturer_description = ftdi.ftdi_usb_get_strings(*device)?; info!("FTDI chip Manufacturer: {:?}, Description: {:?}, Serial: {:?}\n\n", manufacturer_description.0, manufacturer_description.1, manufacturer_description.2); } } else { ftdi.ftdi_set_interface(interface); // Open device ftdi.ftdi_usb_open(vid, pid)?; } // first to check if USB was really opened if ftdi.usb_dev.is_some() { ftdi.ftdi_set_baudrate(baudrate)?; ftdi.ftdi_set_line_property(ftdi_bits_type::BITS_8, ftdi_stopbits_type::STOP_BIT_1, ftdi_parity_type::NONE)?; } let term = Arc::new(AtomicBool::new(false)); signal_hook::flag::register(signal_hook::SIGTERM, Arc::clone(&term))?; let mut write_read_result = 0; while !term.load(Ordering::Relaxed) && ftdi.usb_dev.is_some() /* if USB opened */ { // Do some time-limited stuff here // (if this could block forever, then there's no guarantee the signal will have any // effect). 
atomic::spin_loop_hint(); if do_write { let size_to_write = if (baudrate / 512) > buffer.len() as i32 { buffer.len() as i32 } else { if (baudrate / 512) > 0 { baudrate / 512 as i32 } else { 1 as i32 } }; let write_result = ftdi.ftdi_write_data(&mut buffer, size_to_write as u32); match write_result { Err(err) => { error!("Write {:?}", err); write_read_result = 0; }, Ok(written_number) => { debug!("written bytes = {}", written_number); write_read_result = written_number; }, } } else { let size_to_read = buffer.len(); let read_result = ftdi.ftdi_read_data(&mut buffer, size_to_read); match read_result { Err(err) => { error!("{}", err); write_read_result = 0; }, Ok(read_number) => { debug!("read bytes = {}", read_number); write_read_result = read_number; }, } } /*if write_read_result == 0 { let sleep_millis = time::Duration::from_millis(1_000_000); thread::sleep(sleep_millis); } else */if write_read_result > 0 && !do_write { info!("read {} bytes", write_read_result); // fwrite(buf, f, 1, stdout); // fflush(stderr); // fflush(stdout); } } debug!("got signal to exit !"); Ok(()) }
//! Game collection related functions and structs use crate::database::get_database; use crate::utils; use std::collections::{HashMap, HashSet}; use chrono::Utc; use mongodb::{ bson::{self, doc, oid::ObjectId, Document}, Collection, }; use serde::{Deserialize, Serialize}; use thiserror::Error; const GAME_COLLECTION: &str = "thavalon_games"; const FRIEND_CODE_LENGTH: usize = 4; /// Contains errors related to database games. #[derive(PartialEq, Error, Debug)] pub enum DBGameError { #[error("The game could not be created.")] CreationError, #[error("An error occurred while updating the game in the database.")] UpdateError, #[error("Invalid state for the requested update.")] InvalidStateError, #[error("The display name is already in use.")] DuplicateDisplayName, } /// Enum representing the three possible states of a game in the DB. #[derive(PartialEq, Serialize, Deserialize, Debug, Clone)] pub enum DBGameStatus { Lobby, InProgress, Finished, } impl Drop for DatabaseGame { fn drop(&mut self) { if self.status == DBGameStatus::Lobby { log::info!("Deleting game {}.", self.friend_code); let friend_code = self.friend_code.clone(); let _id = self._id.clone(); tokio::spawn(async move { let collection = DatabaseGame::get_collection().await; let doc = doc! { "_id": bson::to_bson(&_id).unwrap(), }; if let Err(e) = collection.delete_one(doc, None).await { log::error!("Error while deleting game {}. {}.", friend_code, e); } }); } } } /// Struct representing a single database game. #[derive(Debug, Serialize, Deserialize)] pub struct DatabaseGame { _id: ObjectId, friend_code: String, players: HashSet<String>, display_names: HashSet<String>, players_to_display_names: HashMap<String, String>, status: DBGameStatus, created_time: i64, start_time: Option<i64>, end_time: Option<i64>, snapshot_id: Option<String>, } impl DatabaseGame { /// Creates a new DB game entry and returns a DatabaseGame /// /// # Returns /// /// * `DatabaseGame` on success. `GameError::CreationError` on failure. 
pub async fn new() -> Result<Self, DBGameError> { log::info!("Creating a new database game."); let collection = DatabaseGame::get_collection().await; let _id: ObjectId = match collection.insert_one(doc! {}, None).await { Ok(result) => { bson::from_bson(result.inserted_id).expect("Could not deserialize new game _id.") } Err(e) => { log::error!("ERROR: failed to create new game. {}.", e); return Err(DBGameError::CreationError); } }; let friend_code = utils::generate_letter_string(FRIEND_CODE_LENGTH); let game = DatabaseGame { friend_code, _id, players: HashSet::with_capacity(10), display_names: HashSet::with_capacity(10), players_to_display_names: HashMap::with_capacity(10), status: DBGameStatus::Lobby, created_time: Utc::now().timestamp(), start_time: None, end_time: None, snapshot_id: None, }; collection .replace_one( doc! {"_id": &game._id}, bson::to_document(&game).unwrap(), None, ) .await .unwrap(); log::info!("Successfully created DB entry for game {}.", game._id); Ok(game) } /// Starts the database game, updating the DB as needed. Once started, /// no players may be added or removed. /// /// # Returns /// /// Empty type on success, `DBGameError` on failure. pub async fn start_game(&mut self) -> Result<(), DBGameError> { log::info!("Starting DB game {}.", self._id); self.start_time = Some(Utc::now().timestamp()); self.status = DBGameStatus::InProgress; // TODO: update snapshot ID as well once we have snapshot code. let update_doc = doc! { "$set": { "start_time": bson::to_bson(&self.start_time).unwrap(), "status": bson::to_bson(&self.status).unwrap(), "snapshot_id": bson::to_bson(&self.snapshot_id).unwrap(), } }; self.update_db(update_doc).await } /// Ends the database game, updating the DB as needed. /// /// # Returns /// /// Empty type on success, `DBGameError` on failure. pub async fn end_game(&mut self) -> Result<(), DBGameError> { // Remove friend code, since games can only be looked up by friend // code while active. 
self.friend_code.clear(); self.end_time = Some(Utc::now().timestamp()); self.status = DBGameStatus::Finished; let update_doc = doc! { "$set": { "friend_code": bson::to_bson(&self.friend_code).unwrap(), "end_time": bson::to_bson(&self.end_time).unwrap(), "status": bson::to_bson(&self.status).unwrap() } }; self.update_db(update_doc).await } /// Adds a player to the DB game instance, updating the DB accordingly. /// Players can only be added if the game status is `Lobby`. /// /// # Arguments /// /// * `player_id` - The player ID to add to the game /// * `display_name` - The display name of the joining player /// /// # Returns /// /// * Empty type on success /// * `DBGameError::InvalidStateError` if the game state isn't `Lobby` /// * `DBGameError::UpdateError` if a DB update fails pub async fn add_player( &mut self, player_id: String, display_name: String, ) -> Result<(), DBGameError> { log::info!("Adding player {} to game {}.", player_id, self._id); if self.status != DBGameStatus::Lobby { log::error!( "Attempted to add player {} to game {} while in state {:?}. Players may only be added during the Lobby phase.", player_id, self._id, self.status ); return Err(DBGameError::InvalidStateError); } if self.display_names.contains(&display_name) { log::warn!( "Name {} is already in game {}. Display names must be unique.", display_name, self._id ); return Err(DBGameError::DuplicateDisplayName); } self.players.insert(player_id.clone()); self.display_names.insert(display_name.clone()); self.players_to_display_names .insert(player_id, display_name); let update_doc = doc! { "$set": { "players": bson::to_bson(&self.players).unwrap(), "display_names": bson::to_bson(&self.display_names).unwrap() } }; self.update_db(update_doc).await } /// Removes a player to the DB game instance, updating the DB accordingly. /// Players can only be removed if the game status is `Lobby`. 
/// /// # Arguments /// /// * `player_id` - The player ID to add to the game /// * `display_name` - The display name of the joining player /// /// # Returns /// /// * `String` - Player display name on success /// * `DBGameError::InvalidStateError` if the game state isn't `Lobby` /// * `DBGameError::UpdateError` if a DB update fails pub async fn remove_player( &mut self, player_id: &String, ) -> Result<Option<String>, DBGameError> { log::info!("Removing player {} from game {}.", player_id, self._id); if self.status != DBGameStatus::Lobby { log::error!( "ERROR: attempted to remove player {} to game {} while in state {:?}. Players may only be removed during the Lobby phase.", player_id, self._id, self.status ); return Err(DBGameError::InvalidStateError); } self.players.remove(player_id); let display_name = match self.players_to_display_names.remove(player_id) { Some(name) => name, None => { log::warn!( "Tried to remove nonexistant player {} from game {}.", player_id, self._id ); return Ok(None); } }; self.display_names.remove(&display_name); self.players_to_display_names.remove(player_id); let update_doc = doc! { "$set": { "players": bson::to_bson(&self.players).unwrap(), "display_names": bson::to_bson(&self.display_names).unwrap() } }; self.update_db(update_doc).await?; Ok(Some(display_name)) } /// Helper function to get a handle to the game collection. /// /// # Returns /// /// `Collection` of Thavalon DB games async fn get_collection() -> Collection { get_database().await.collection(GAME_COLLECTION) } /// Updates the game database using the provided update document. /// /// # Arguments /// /// * `update_doc` - The document with fields to update /// /// # Returns /// /// * Empty type on success, `DBGameError` on failure async fn update_db(&self, update_doc: Document) -> Result<(), DBGameError> { let collection = DatabaseGame::get_collection().await; if let Err(e) = collection .update_one(doc! 
{"_id": &self._id}, update_doc, None) .await { log::error!("ERROR: failed to update database game. {}.", e); return Err(DBGameError::UpdateError); } log::info!("DB game {} updated successfully.", self._id); Ok(()) } /// Getter for the friend_code field. /// /// # Returns /// /// A `string` representing the friend code pub fn get_friend_code(&self) -> &String { &self.friend_code } }
use crate::{
    internal_rpc::InternalRPCHandle,
    options::{BasicAckOptions, BasicNackOptions, BasicRejectOptions},
    types::{ChannelId, DeliveryTag},
    Promise, PromiseResolver, Result,
};

/// Handle for acknowledging, negatively acknowledging, or rejecting one
/// delivery, identified by its `(channel_id, delivery_tag)` pair.
///
/// Operations are forwarded to the connection's internal RPC handle.
/// A `Default`-constructed `Acker` carries no handle (`internal_rpc` is
/// `None`); see the note on `rpc` below for what that implies.
#[derive(Default, Debug, Clone)]
pub struct Acker {
    channel_id: ChannelId,     // channel the delivery arrived on
    delivery_tag: DeliveryTag, // broker-assigned tag for this delivery
    internal_rpc: Option<InternalRPCHandle>, // None for a Default-constructed Acker
}

impl Acker {
    /// Builds an acker for delivery `delivery_tag` received on `channel_id`.
    /// Crate-internal: consumers obtain ackers from received deliveries.
    pub(crate) fn new(
        channel_id: ChannelId,
        delivery_tag: DeliveryTag,
        internal_rpc: Option<InternalRPCHandle>,
    ) -> Self {
        Self {
            channel_id,
            delivery_tag,
            internal_rpc,
        }
    }

    /// Acknowledges this delivery (basic.ack) with the given options.
    pub async fn ack(&self, options: BasicAckOptions) -> Result<()> {
        self.rpc(|internal_rpc, resolver| {
            internal_rpc.basic_ack(self.channel_id, self.delivery_tag, options, resolver)
        })
        .await
    }

    /// Negatively acknowledges this delivery (basic.nack) with the given options.
    pub async fn nack(&self, options: BasicNackOptions) -> Result<()> {
        self.rpc(|internal_rpc, resolver| {
            internal_rpc.basic_nack(self.channel_id, self.delivery_tag, options, resolver)
        })
        .await
    }

    /// Rejects this delivery (basic.reject) with the given options.
    pub async fn reject(&self, options: BasicRejectOptions) -> Result<()> {
        self.rpc(|internal_rpc, resolver| {
            internal_rpc.basic_reject(self.channel_id, self.delivery_tag, options, resolver)
        })
        .await
    }

    /// Common plumbing: create a promise, hand its resolver (plus the RPC
    /// handle, if any) to `f`, then await the outcome.
    ///
    /// NOTE(review): when `internal_rpc` is `None`, `f` is never invoked and
    /// the resolver is dropped unused; whether `promise.await` then completes
    /// or pends forever depends on `Promise`'s drop semantics — confirm.
    async fn rpc<F: Fn(&InternalRPCHandle, PromiseResolver<()>)>(&self, f: F) -> Result<()> {
        let (promise, resolver) = Promise::new();
        if let Some(internal_rpc) = self.internal_rpc.as_ref() {
            f(internal_rpc, resolver);
        }
        promise.await
    }
}

/// Equality deliberately ignores the RPC handle: two ackers are equal when
/// they refer to the same delivery on the same channel.
impl PartialEq for Acker {
    fn eq(&self, other: &Acker) -> bool {
        self.channel_id == other.channel_id && self.delivery_tag == other.delivery_tag
    }
}
pub mod module; mod builder;
// This file is part of Substrate. // Copyright (C) 2019-2020 Parity Technologies (UK) Ltd. // SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. use crate::chain_spec::ChainSpec; use browser_utils::{ browser_configuration, init_console_log, set_console_error_panic_hook, Client, }; use log::info; use std::str::FromStr; use wasm_bindgen::prelude::*; /// Starts the client. 
#[wasm_bindgen]
pub async fn start_client(
    chain_spec: Option<String>,
    log_level: String,
) -> Result<Client, JsValue> {
    // JS-facing wrapper: stringify any internal error into a `JsValue` so it
    // surfaces as a rejected Promise on the JavaScript side.
    start_inner(chain_spec, log_level)
        .await
        .map_err(|err| JsValue::from_str(&err.to_string()))
}

/// Does the actual start-up work for the in-browser light client.
///
/// * `chain_spec` - optional JSON chain specification; when `None`, the
///   built-in development configuration is used.
/// * `log_level` - string parsed into a `log::Level` for console logging.
///
/// Returns the browser `Client` wrapper, or an error from spec/log-level
/// parsing, browser configuration, or light-client service construction.
async fn start_inner(
    chain_spec: Option<String>,
    log_level: String,
) -> Result<Client, Box<dyn std::error::Error>> {
    // Route panics to the browser console before anything can fail.
    set_console_error_panic_hook();
    init_console_log(log::Level::from_str(&log_level)?)?;

    // Parse the provided spec, or fall back to the dev chain.
    let chain_spec = match chain_spec {
        Some(chain_spec) => ChainSpec::from_json_bytes(chain_spec.as_bytes().to_vec())
            .map_err(|e| format!("{:?}", e))?,
        None => crate::chain_spec::development_config(),
    };

    let config = browser_configuration(chain_spec).await?;

    info!("Substrate browser node");
    info!("✌️ version {}", config.impl_version);
    info!("❤️ by Parity Technologies, 2017-2020");
    info!("📋 Chain specification: {}", config.chain_spec.name());
    info!("🏷 Node name: {}", config.network.node_name);
    info!("👤 Role: {:?}", config.role);

    // Create the service. This is the most heavy initialization step.
    // Only the task manager and RPC handlers are kept; the remaining tuple
    // elements of `new_light_base` are discarded.
    let (task_manager, rpc_handlers) = crate::service::new_light_base(config)
        .map(|(components, rpc_handlers, _, _, _)| (components, rpc_handlers))
        .map_err(|e| format!("{:?}", e))?;

    Ok(browser_utils::start_client(task_manager, rpc_handlers))
}
use crate::*;
use shoji::*;
use std::rc::Rc;
use std::cell::RefCell;

// TODO Why isn't Visual Root implementing CanDoLayoutStuff?

/// Owns the root of the element tree plus the shared layout engine (`shoji`)
/// and the layout node that represents the window area itself.
pub struct VisualRoot {
    element_tree: Option<Box<dyn Element>>, // None until set_root is called
    layout_tree: Rc<RefCell<shoji::Shoji>>, // layout engine shared with every attached element
    root_layout_node: shoji::NodeIndex,     // layout node covering the whole window
}

impl VisualRoot {
    /// Creates an empty visual root with a default-styled root layout node.
    pub fn new() -> Self {
        let mut shoji = Shoji::new();
        let root_node = shoji.new_node(
            LayoutStyle { ..Default::default() },
            vec![],
        );
        VisualRoot {
            element_tree: None,
            layout_tree: Rc::new(RefCell::new(shoji)),
            root_layout_node: root_node,
        }
    }

    /// Installs `node` as the root element and attaches it to this root's
    /// layout tree / root layout node.
    pub fn set_root(&mut self, node: impl Element + 'static) -> Result<(), &'static str> {
        let mut root_node = node;
        root_node.attach_layout(Some(self.layout_tree.clone()), Some(self.root_layout_node));
        self.element_tree = Some(Box::new(root_node));
        Ok(())
    }

    // Can Visual Root use CanDoLayoutStuff instead of this?
    /// Recomputes layout for the whole tree within `size`.
    ///
    /// NOTE(review): `size.width-1 as f64` parses as
    /// `size.width - (1 as f64)` because `as` binds tighter than `-`, i.e.
    /// this subtracts 1.0 from the (already f64) width rather than casting
    /// `size.width - 1`. Same for the height. Confirm that was intended.
    pub fn compute_layout(&self, size: Size) -> Result<(), &'static str> {
        self.layout_tree.borrow_mut().compute_layout(
            self.root_layout_node,
            LayoutSize::new(size.width - 1 as f64, size.height - 1 as f64),
        )?;
        Ok(())
    }

    /// Renders the element tree, if one has been set; a rootless visual root
    /// renders nothing.
    pub fn render(&self, renderer: &mut dyn Renderer) {
        if let Some(r) = &self.element_tree {
            r.render(renderer)
        }
    }

    /// Keyboard event hook — currently a no-op.
    pub fn on_keyboard(&self, key: u32, scancode: u32, action: u32, modifiers: u32) {
        //panic!("this should be implemented")
    }

    /// Character-input event hook — currently a no-op.
    pub fn on_character(&self, codepoint: u32) {
        //panic!("this should be implemented")
    }

    /// Mouse-move event hook — currently a no-op.
    pub fn on_mouse_move(&self, xpos: f64, ypos: f64) {
        // todo calculate if mouse is entering any elements and then send down the enter/exit event
    }

    /// Mouse-button event hook — currently a no-op.
    pub fn on_mouse_button(&self, button: i32, action: i32, mods: i32) {
        //panic!("this should be implemented")
    }

    /// Mouse-wheel event hook — currently a no-op.
    pub fn on_mouse_wheel(&self, xoffset: f64, yoffset: f64) {
        //panic!("this should be implemented")
    }
}
// machine.rs --- // // Filename: machine.rs // Author: Jules <archjules> // Created: Thu Mar 30 22:36:45 2017 (+0200) // Last-Updated: Thu Apr 6 00:53:32 2017 (+0200) // By: Jules <archjules> // use std::io::Read; use opcode::Opcode; use frame::Frame; use zstring::zstring_read; use instructions::INSTRUCTIONS; use traits::ReadMemory; use traits::WriteMemory; #[derive(Debug)] pub struct Machine { file: Vec<u8>, version: u8, base_hm: u16, base_sm: u16, frame: Frame, call_stack: Vec<CallElement>, stack: Vec<u16>, pos_gv: usize, pos_ob: usize, pos_ab: usize, } impl Machine { pub fn new_from_file<T: Read>(file: &mut T) -> Machine { let mut buffer: Vec<u8> = vec![]; if let Err(e) = file.read_to_end(&mut buffer) { panic!("Error while reading the file : {}", e) }; let version = buffer.read_byte(0x00); let base_hm = buffer.read_word(0x04); let base_sm = buffer.read_word(0x0e); let pc = buffer.read_word(0x06) as u32; let pos_gv = buffer.read_word(0x0c) as usize; let pos_ob = buffer.read_word(0x0a) as usize; let pos_ab = buffer.read_word(0x18) as usize; Machine { file: buffer, version: version, base_hm: base_hm, base_sm: base_sm, frame: Frame::new_with_pc(pc), call_stack: vec![], stack: vec![], pos_gv: pos_gv, pos_ob: pos_ob, pos_ab: pos_ab } } // Memory read pub fn read_byte(&self, address: usize) -> u8 { self.file.read_byte(address) } pub fn read_word(&self, address: usize) -> u16 { self.file.read_word(address) } pub fn next_byte(&mut self) -> u8 { let pc = self.frame.pc() as usize; let va = self.read_byte(pc); self.frame.set_pc((pc + 1) as u32); va } pub fn next_word(&mut self) -> u16 { let pc = self.frame.pc() as usize; let va = self.read_word(pc); self.frame.set_pc((pc + 2) as u32); va } // Memory write pub fn write_byte(&mut self, address: usize, value: u8) { if address < self.base_sm as usize { self.file.write_byte(address, value); } else { panic!("Trying to write to static memory"); } } pub fn write_word(&mut self, address: usize, value: u16) { if address < 
self.base_sm as usize { self.file.write_word(address, value); } else { // println!("{:?}", self); panic!("Trying to write to static memory : {:04x}", address); } } // Variables pub fn push(&mut self, value: u16) { self.stack.push(value); } pub fn pull(&mut self) -> u16 { if let Some(a) = self.stack.pop() { a } else { panic!("Popping from an empty stack"); } } pub fn set_variable(&mut self, n: usize, value: u16) { match n { 0x00 => { self.stack.push(value); }, 0x01...0x0f => { self.frame[n - 1] = value; }, 0x10...0xff => { let address = self.pos_gv + ((n as usize - 0x10) << 1); self.write_word(address, value); } _ => unreachable!(), } } pub fn get_variable(&mut self, n: usize) -> u16 { match n { 0x00 => { match self.stack.pop() { Some(t) => t, None => panic!("Popping from an empty stack"), } }, 0x01...0x0f => { self.frame[n - 1] }, 0x10...0xff => { let address = self.pos_gv + ((n as usize - 0x10) << 1); self.read_word(address) } _ => unimplemented!(), } } pub fn store(&mut self, value: u16) { let n = self.next_byte(); self.set_variable(n as usize, value); } // Objects pub fn get_object_address(&self, n: u8) -> usize { println!("{:04x}", self.frame.pc()); self.pos_ob + 62 + (((n.wrapping_sub(1)) as usize) * 9) } pub fn set_object_parent(&mut self, n: u8, v: u8) { let a = self.get_object_address(n) + 4; self.write_byte(a, v); } pub fn set_object_sibling(&mut self, n: u8, v: u8) { let a = self.get_object_address(n) + 5; self.write_byte(a, v); } pub fn set_object_child(&mut self, n: u8, v: u8) { let a = self.get_object_address(n) + 6; self.write_byte(a, v); } pub fn set_object_attribute(&mut self, n: u8, attr: u8, v: bool) { let a = attr / 8; let b = attr % 8; let address = self.get_object_address(n) + a as usize; let x = self.read_byte(address); if v { self.write_byte(address, x | (0x80 >> b)); } else { self.write_byte(address, x & !(0x80 >> b)); } } pub fn get_object_parent(&mut self, n: u8) -> u8 { let a = self.get_object_address(n) + 4; self.read_byte(a) } pub fn 
get_object_sibling(&mut self, n: u8) -> u8 { let a = self.get_object_address(n) + 5; self.read_byte(a) } pub fn get_object_child(&mut self, n: u8) -> u8 { let a = self.get_object_address(n) + 6; self.read_byte(a) } pub fn get_object_attribute(&mut self, n: u8, attr: u8) -> bool { let a = attr / 8; let b = attr % 8; let address = self.get_object_address(n) + a as usize; let x = self.read_byte(address); (x & (0x80 >> b)) != 0 } // Abbreviations pub fn get_abbreviation(&self, n: u16) -> String { let f_address = self.pos_ab + (n as usize * 2); let s_address = self.read_word(f_address) as usize; zstring_read(self, s_address * 2).0 } // Program stream pub fn pc(&self) -> u32 { self.frame.pc() } pub fn set_pc(&mut self, pc: u32) { self.frame.set_pc(pc) } fn set_up_frame(&mut self, address: u32) { self.frame = Frame::new_with_pc(address); let number = self.next_byte(); self.frame.create_locals(number as usize); for i in 0..number as usize { let value = self.next_word(); self.frame[i] = value; } } pub fn call_s(&mut self, address: u32, params: Vec<u16>) { let element = CallElement::Store(self.frame.clone()); self.call_stack.push(element); self.set_up_frame(address); for i in 0..params.len() { if i < self.frame.len() { self.frame[i] = params[i]; } } } pub fn branch(&mut self, condition: bool) { let first = self.next_byte(); let offset = if (first & 0x40) == 0 { let second = self.next_byte() as u32; ((first as u32 & 0x3f) << 8) | second } else { first as u32 & 0x3f }; if ((first & 0x80) != 0) == condition { self.jump(offset) } } pub fn jump(&mut self, offset: u32) { match offset { 0 => self.ret(0), 1 => unimplemented!(), _ => { let n_pc = self.frame.pc().wrapping_add(offset - 2); self.frame.set_pc(n_pc); } } } pub fn ret(&mut self, value: u16) { if let Some(element) = self.call_stack.pop() { match element { CallElement::Store(f) => { self.frame = f; let n = self.next_byte(); self.set_variable(n as usize, value); } } } else { panic!("Return from a non-routine"); } } pub fn 
step(&mut self) { // println!("{:04x}", self.frame.pc()); let op = Opcode::next_opcode(self); let fu = INSTRUCTIONS[op.opcode as usize]; fu(op, self); } } #[derive(Debug)] enum CallElement { Store(Frame), }
#[macro_use] mod macros; pub(crate) mod cli; pub(crate) mod common; pub(crate) mod cutils; pub(crate) mod defaults; pub(crate) mod env; pub(crate) mod exec; pub(crate) mod log; pub(crate) mod pam; pub(crate) mod sudoers; pub(crate) mod system; mod su; mod sudo; mod visudo; pub use su::main as su_main; pub use sudo::main as sudo_main; pub use visudo::main as visudo_main;
/// Demonstrates defining a closure (anonymous function), binding it to a
/// local variable, and invoking it through that binding.
pub fn run() {
    // Zero-argument closure; calling the binding executes its body.
    let print_demo_message =
        || println!("I am using anonymous function called closures");
    print_demo_message();
}
#[macro_use]
use crate::resources;
use crate::image_data::ImageData;
use crate::rect::Rect;
use std;
use std::any::Any;
use std::sync::Arc;
use std::fmt;

// Registers TextureResource with the resource system, generating the
// `get_texture_resource` accessor and the TEXTURE_RESOURCE_HANDLE constant.
// NOTE(review): exact expansion is defined by `uione_graphic_resource!` in
// the `resources` module — confirm there.
uione_graphic_resource!(TextureResource, get_texture_resource, TEXTURE_RESOURCE_HANDLE);

/// Backend-owned handle to a GPU texture. Must be shareable across threads
/// (`Send + Sync`) and debuggable.
pub trait TextureHandle: Send + Sync + std::fmt::Debug {
    /// Exposes the concrete backend type for downcasting via `Any`.
    fn as_any(&self) -> &Any;
    /// Transfers `image` into the `rect` region of this texture and reports
    /// whether the operation succeeded.
    /// NOTE(review): success/failure semantics inferred from the `bool`
    /// return — confirm against the backend implementations.
    fn blit<'image>(&self, image: &ImageData<'image>, rect: Rect<isize>) -> bool;
}

/// Resource wrapper around a backend texture factory; creation is delegated
/// to the boxed `TextureResourceImpl`.
pub struct TextureResource {
    resource_impl: Box<TextureResourceImpl>, // backend-specific factory
}

// Manual Debug impl because the boxed trait object is not itself Debug.
impl fmt::Debug for TextureResource {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "TextureResource")
    }
}

impl TextureResource {
    /// Wraps a backend texture factory as a resource.
    pub fn new(resource_impl: Box<TextureResourceImpl>) -> TextureResource {
        TextureResource {
            resource_impl,
        }
    }

    /// Creates a shared texture handle from raw image data by delegating to
    /// the backend implementation.
    pub fn make_texture(&self, image_data: &ImageData) -> Arc<TextureHandle> {
        self.resource_impl.make_texture(image_data)
    }
}

/// Backend contract: turn CPU-side image data into a shareable GPU texture
/// handle.
pub trait TextureResourceImpl {
    fn make_texture(&self, image_data: &ImageData) -> Arc<TextureHandle>;
}
#[macro_use] extern crate conrod;
#[macro_use] extern crate conrod_derive;
extern crate find_folder;

#[cfg(all(feature="winit", feature="glium"))] mod support;
pub mod paint;

use conrod::backend::glium::Renderer;
use conrod::glium;
use conrod::glium::Display;
use conrod::glium::texture::Texture2d;
use conrod::glium::glutin::{ContextBuilder, WindowBuilder, EventsLoop, VirtualKeyCode};
use conrod::image::Map;
use conrod::input::keyboard;
use conrod::backend::glium::glium::Surface;
use conrod::backend::winit::convert_event;
use support::EventLoop;
use paint::PaintWindow;
use paint::WindowAction;

/// Entry point: sets up the glium window and conrod `Ui`, then runs the
/// event/update/render loop until the user quits.
#[cfg(all(feature="winit", feature="glium"))]
fn main() {
    use conrod::{self, Sizeable, Widget};

    const WIDTH: u32 = 1000;
    const HEIGHT: u32 = 600;

    // Build the window.
    let mut events_loop = EventsLoop::new();
    let window = WindowBuilder::new()
        .with_dimensions(WIDTH, HEIGHT)
        .with_title("Rust Paint");
    let context = ContextBuilder::new()
        .with_vsync(true)
        .with_multisampling(4);
    let display = Display::new(window, context, &events_loop).unwrap();

    // construct our `Ui`.
    let mut ui = conrod::UiBuilder::new([WIDTH as f64, HEIGHT as f64]).build();

    // The `widget_ids` macro is an easy, safe way of generating a type for
    // producing `widget::Id`s. Here we only need one id, for the paint canvas.
    widget_ids! {
        struct Ids {
            paint,
        }
    }
    let ids = Ids::new(ui.widget_id_generator());

    // Add a `Font` to the `Ui`'s `font::Map` from file.
    let assets = find_folder::Search::KidsThenParents(3, 5).for_folder("assets").unwrap();
    let font_path = assets.join("fonts/NotoSans/NotoSans-Regular.ttf");
    // NOTE(review): the returned font id is never used afterwards; inserting
    // the font is what matters (it becomes the Ui's default font).
    let regular = ui.fonts.insert_from_file(font_path).unwrap();

    // A type used for converting `conrod::render::Primitives` into `Command`s that can be used
    // for drawing to the glium `Surface`.
    let mut renderer = Renderer::new(&display).unwrap();

    // The image map describing each of our widget->image mappings (in our case, none).
    let image_map = Map::<Texture2d>::new();

    let mut cmd_pressed = false; // TODO1 -- horribly hacky way of dealing with Mac cmd modifier

    'main: loop {
        // Drain pending window events first; may request quit or update the
        // tracked cmd-modifier state.
        let action = handle_events(&mut ui, &display, &mut events_loop, cmd_pressed);
        match action {
            WindowAction::Quit => { println!("QUIT"); break 'main },
            // TODO1 -- remove when possible
            WindowAction::CmdPress => { cmd_pressed = true; },
            WindowAction::CmdRelease => { cmd_pressed = false; },
            _ => ()
        }

        // Instantiate the widgets for this frame (scoped so the mutable
        // borrow of `ui` ends before rendering below).
        {
            let ui = &mut ui.set_widgets();

            for action in PaintWindow::new()
                .w_h(WIDTH as f64, HEIGHT as f64)
                .set(ids.paint, ui)
            {
                println!("Click! {:?}", action);
            }
        }

        render(&mut ui, &mut renderer, &display, &image_map);
    }
}

/// Polls one batch of winit events, feeds them to conrod, and translates
/// window-level happenings into a `WindowAction` for the main loop.
/// `cmd_pressed` is the caller-tracked Mac cmd-modifier state (TODO1 hack).
fn handle_events(ui: &mut conrod::Ui, display: &Display, mut events_loop: &mut EventsLoop, cmd_pressed: bool) -> WindowAction {
    // Handle all events.
    let mut event_loop = EventLoop::new();
    for event in event_loop.next(&mut events_loop) {
        // Use the `winit` backend feature to convert the winit event to a conrod one.
        if let Some(event) = convert_event(event.clone(), display) {
            ui.handle_event(event);
            event_loop.needs_update();
        }

        match event {
            glium::glutin::Event::WindowEvent { event, .. } => match event {
                // Window close button: quit.
                glium::glutin::WindowEvent::Closed => { return WindowAction::Quit },
                // Escape is deliberately swallowed (no action).
                glium::glutin::WindowEvent::KeyboardInput {
                    input: glium::glutin::KeyboardInput {
                        virtual_keycode: Some(VirtualKeyCode::Escape),
                        state: glium::glutin::ElementState::Pressed,
                        ..
                    },
                    ..
                } => (),
                // Ctrl+Q (or Cmd+Q via the tracked flag): quit.
                glium::glutin::WindowEvent::KeyboardInput {
                    input: glium::glutin::KeyboardInput {
                        virtual_keycode: Some(VirtualKeyCode::Q),
                        state: glium::glutin::ElementState::Pressed,
                        ..
                    },
                    ..
                } => {
                    println!("{:?}", ui.global_input().current.touch);
                    if ui.global_input().current.modifiers.contains(keyboard::CTRL) || cmd_pressed {
                        return WindowAction::Quit
                    }
                    ()
                },
                // TODO1 -- remove when possible: track LWin (Mac cmd) press/release
                // ourselves because the modifier state doesn't expose it.
                glium::glutin::WindowEvent::KeyboardInput {
                    input: glium::glutin::KeyboardInput {
                        virtual_keycode: Some(VirtualKeyCode::LWin),
                        state,
                        ..
                    },
                    ..
                } => {
                    if state == glium::glutin::ElementState::Pressed {
                        return WindowAction::CmdPress
                    } else {
                        return WindowAction::CmdRelease
                    }
                }
                _ => (),
            },
            _ => (),
        }
    }

    return WindowAction::None
}

/// Redraws the UI onto the glium surface, but only when conrod reports that
/// something actually changed since the last draw.
fn render(ui: &mut conrod::Ui, renderer: &mut Renderer, display: &Display, image_map: &Map<Texture2d>) {
    if let Some(primitives) = ui.draw_if_changed() {
        renderer.fill(&display, primitives, &image_map);
        let mut target = display.draw();
        target.clear_color(0.0, 0.0, 0.0, 1.0);
        renderer.draw(display, &mut target, image_map).unwrap();
        target.finish().unwrap()
    }
}

/// Fallback entry point when the required backend features are disabled.
#[cfg(not(all(feature="winit", feature="glium")))]
fn main() {
    println!("This example requires the `winit` and `glium` features. \
              Try running `cargo run --release --features=\"winit glium\"`");
}
use crate::name_resolution::TopLevelContext;
use crate::rustspec::*;
use core::iter::IntoIterator;
use core::slice::Iter;
use heck::TitleCase;
use itertools::Itertools;
use pretty::RcDoc;
use rustc_session::Session;
use rustc_span::DUMMY_SP;
use std::fs::File;
use std::io::Write;
use std::path;

use crate::name_resolution::{FnKey, FnValue};
use crate::rustspec_to_coq_base::*;

/// Emits a Coq state-monad read: `'(pat : typ) ← get pat_loc ;;`
/// (the type annotation is omitted when `typ` is `None`).
fn make_get_binding<'a>(pat: Pattern, typ: Option<RcDoc<'a, ()>>) -> RcDoc<'a, ()> {
    match typ {
        Some(typ) => RcDoc::as_string("'").append(make_paren(
            translate_pattern(pat.clone()).append(" : ").append(typ),
        )),
        None => translate_pattern(pat.clone()),
    }
    .append(RcDoc::space())
    .append(RcDoc::as_string("←"))
    .append(RcDoc::space())
    .append(RcDoc::as_string("get"))
    .append(RcDoc::space())
    .append(
        // Location names are derived from the pattern with a `_loc` suffix.
        translate_pattern(pat.clone())
            .append(RcDoc::as_string("_loc"))
            .group(),
    )
    .append(RcDoc::space())
    .append(RcDoc::as_string(";;"))
    .append(RcDoc::line())
}

/// Emits a Coq state-monad write: a let-binding of `expr` to `pat`
/// followed by `#put pat_loc := pat ;;`.
fn make_put_binding<'a>(pat: Pattern, typ: Option<BaseTyp>, expr: RcDoc<'a, ()>) -> RcDoc<'a, ()> {
    make_let_binding(pat.clone(), typ, expr.group(), true)
        .append(RcDoc::as_string("#put"))
        .append(RcDoc::space())
        .append(
            translate_pattern(pat.clone())
                .append(RcDoc::as_string("_loc"))
                .group(),
        )
        .append(RcDoc::space())
        .append(RcDoc::as_string(":= "))
        .group()
        .append(translate_pattern(pat.clone()))
        .nest(2)
        .append(RcDoc::space())
        .append(RcDoc::as_string(";;"))
        .append(RcDoc::hardline())
}

/// Emits either a monadic bind (`pat ← expr ;;`, when `do_bind`) or a plain
/// Coq `let pat := expr in`. Typed tuple patterns under `do_bind` are first
/// bound to a fresh `temp` name and then destructured by a trailing
/// non-binding `let` (see `typed_bound_tuple`).
fn make_let_binding<'a>(
    pat: Pattern,
    typ: Option<BaseTyp>,
    expr: RcDoc<'a, ()>,
    do_bind: bool,
) -> RcDoc<'a, ()> {
    // Set when a typed tuple pattern must go through an intermediate name.
    let mut typed_bound_tuple = None;
    RcDoc::as_string(if do_bind { "" } else { "let" })
        .append(RcDoc::space())
        .append(
            match typ.clone() {
                None => translate_pattern_tick(pat.clone()),
                Some(tau) => match pat.clone() {
                    // If the pattern is a tuple, expand it
                    Pattern::Tuple(v) if v.len() > 1 => {
                        if do_bind {
                            // Monadic bind cannot annotate a tuple pattern
                            // directly: bind to a fresh temp, destructure after.
                            let temp_name = translate_ident(Ident::Local(LocalIdent {
                                id: fresh_codegen_id(),
                                name: String::from("temp"),
                                mutable: false,
                            }));
                            typed_bound_tuple = Some(temp_name.clone());
                            temp_name
                        } else {
                            translate_pattern_tick(pat.clone())
                        }
                    }
                    _ => {
                        if do_bind {
                            // `'(pat : tau)` form for annotated monadic binds.
                            RcDoc::as_string("'").append(make_paren(
                                translate_pattern(pat.clone())
                                    .append(RcDoc::space())
                                    .append(RcDoc::as_string(":"))
                                    .append(RcDoc::space())
                                    .append(translate_base_typ(tau)),
                            ))
                        } else {
                            translate_pattern(pat.clone())
                                .append(RcDoc::space())
                                .append(RcDoc::as_string(":"))
                                .append(RcDoc::space())
                                .append(translate_base_typ(tau))
                        }
                    }
                },
            }
            .group(),
        )
        .append(RcDoc::space())
        .append(if do_bind {
            RcDoc::as_string("←")
        } else {
            RcDoc::as_string(":=")
        })
        .group()
        .append(RcDoc::line().append(make_paren(expr.group())))
        .nest(2)
        // For plain `let` on a tuple pattern, the type annotation goes after
        // the expression instead of inside the pattern.
        .append(if let Pattern::Tuple(_) = pat.clone() {
            if !do_bind {
                match typ.clone() {
                    None => RcDoc::nil(),
                    Some(tau) => RcDoc::space()
                        .append(RcDoc::as_string(":"))
                        .append(RcDoc::space())
                        .append(translate_base_typ(tau)),
                }
            } else {
                RcDoc::nil()
            }
        } else {
            RcDoc::nil()
        })
        .append(
            RcDoc::space()
                .append(if do_bind {
                    RcDoc::as_string(";;")
                } else {
                    RcDoc::as_string("in")
                })
                .append(RcDoc::hardline()),
        )
        // Destructure the temp introduced for typed tuple patterns, if any.
        .append(match typed_bound_tuple {
            None => RcDoc::nil(),
            Some(temp_name) => make_let_binding(pat, typ, temp_name, false),
        })
}

/// Emits `Program Definition name : typ := expr.`
pub(crate) fn make_equations<'a>(
    name: RcDoc<'a, ()>,
    typ: Option<RcDoc<'a, ()>>,
    expr: RcDoc<'a, ()>,
) -> RcDoc<'a, ()> {
    RcDoc::as_string("Program Definition")
        .append(RcDoc::space())
        .append(make_definition_inner(name.clone(), typ, expr))
}

/// Shared tail of `Definition`/`Program Definition`:
/// `name : typ := expr.` (type annotation optional).
pub(crate) fn make_definition_inner<'a>(
    name: RcDoc<'a, ()>,
    typ: Option<RcDoc<'a, ()>>,
    expr: RcDoc<'a, ()>,
) -> RcDoc<'a, ()> {
    name.clone()
        .append(RcDoc::space())
        .append(
            match typ.clone() {
                None => RcDoc::nil(),
                Some(tau) => RcDoc::as_string(":").append(RcDoc::space()).append(tau),
            }
            .group(),
        )
        .append(RcDoc::space())
        .append(RcDoc::as_string(":="))
        .group()
        .append(RcDoc::line().append(expr.group()))
        .nest(2)
        .append(RcDoc::as_string("."))
}

/// Emits `Definition name : typ := expr.`
pub(crate) fn make_definition<'a>(
    name: RcDoc<'a, ()>,
    typ: Option<RcDoc<'a, ()>>,
    expr: RcDoc<'a, ()>,
) -> RcDoc<'a, ()> {
    RcDoc::as_string("Definition")
        .append(RcDoc::space())
        .append(make_definition_inner(name, typ, expr))
}

/// Wraps `expr` in an SSProve code-block ascription:
/// `({ code expr } : code location_vars interface result_typ)`,
/// with `_` standing in for any component left unspecified.
fn code_block_wrap<'a>(
    expr: RcDoc<'a, ()>,
    location_vars: Option<RcDoc<'a, ()>>,
    interface: Option<RcDoc<'a, ()>>,
    result_typ: Option<RcDoc<'a, ()>>,
) -> RcDoc<'a, ()> {
    make_paren(
        RcDoc::as_string("{ code ")
            .append(expr)
            .append(RcDoc::as_string(" } :"))
            .append(RcDoc::space())
            .append(RcDoc::as_string("code "))
            .append(match location_vars {
                Some(a) => a,
                None => RcDoc::as_string("_"),
            })
            .append(RcDoc::space())
            .append(match interface {
                Some(a) => a,
                None => RcDoc::as_string("_"),
            })
            .append(RcDoc::space())
            .append(match result_typ {
                Some(a) => a,
                None => RcDoc::as_string("_"),
            }),
    )
}

/// Emits a `bnd`/`bndm` monadic bind for early-return carriers
/// (`Result`/`Option`): `bnd(monad-instance , typ , _ , locs) pat ⇠ (expr) in body`.
/// `mutable` selects the `bndm` variant.
fn bind_code<'a>(
    expr: RcDoc<'a, ()>,
    early_return_typ: Option<CarrierTyp>,
    typ: Option<BaseTyp>,
    mutable: bool,
    fun_pat: Pattern,
    fun_body: RcDoc<'a, ()>,
    smv_total: ScopeMutableVars,
) -> RcDoc<'a, ()> {
    let local_vars_total = fset_from_scope(smv_total.clone());
    RcDoc::as_string("bnd")
        .append(if mutable {
            RcDoc::as_string("m")
        } else {
            RcDoc::nil()
        })
        .append(make_paren(
            // Pick the monad instance matching the carrier type.
            match early_return_typ.clone() {
                Some(CarrierTyp::Result(_, (c, _))) => {
                    RcDoc::as_string("ChoiceEqualityMonad.result_bind_code ")
                        .append(translate_base_typ(c))
                }
                Some(CarrierTyp::Option(_)) => {
                    RcDoc::as_string("ChoiceEqualityMonad.option_bind_code")
                }
                None => RcDoc::as_string("_"),
            }
            .append(RcDoc::as_string(" , "))
            .append(match typ {
                Some(typ) => translate_base_typ(typ),
                None => RcDoc::as_string("_"),
            })
            .append(RcDoc::as_string(" , "))
            // NOTE(review): this match always yields `_` — looks like a
            // placeholder for a future carrier-dependent argument.
            .append(match early_return_typ.clone() {
                _ => RcDoc::as_string("_"),
            })
            .append(RcDoc::as_string(" , "))
            .append(local_vars_total.clone()),
        ))
        .append(RcDoc::space())
        .append(translate_pattern_tick(fun_pat))
        .append(RcDoc::space())
        .append(RcDoc::as_string("⇠"))
        .append(RcDoc::line())
        .append(make_paren(expr))
        .append(RcDoc::space())
        .append(RcDoc::as_string("in"))
        .append(RcDoc::line())
        .append(fun_body)
}

/// Maps a constructor name to its Coq counterpart: `Ok`/`Err` become the sum
/// injections `inl`/`inr`; anything else is kept verbatim.
fn translate_constructor<'a>(enum_name: TopLevelIdent) -> RcDoc<'a> {
    RcDoc::as_string(match enum_name.string.as_str() {
        "Ok" => String::from("inl"),
        "Err" => String::from("inr"),
        _ => enum_name.string,
    })
}

/// Translates an enum's type name (plain top-level identifier translation).
fn translate_enum_name<'a>(enum_name: TopLevelIdent) -> RcDoc<'a> {
    translate_toplevel_ident(enum_name)
}

/// Translates `Enum::Case` into Coq, optionally with an explicit `@` and the
/// instantiated type arguments (`explicit`). For non-`Option`/`Result` enums
/// the parenthesized enum name is appended to disambiguate the constructor.
fn translate_enum_case_name<'a>(
    enum_name: BaseTyp,
    case_name: TopLevelIdent,
    explicit: bool,
) -> RcDoc<'a> {
    match enum_name {
        BaseTyp::Named(name, opts) => match opts {
            None => translate_constructor(case_name),
            Some(tyvec) => if explicit && tyvec.len() != 0 {
                RcDoc::as_string("@")
            } else {
                RcDoc::nil()
            }
            .append(translate_constructor(case_name))
            .append(
                if (name.0).string == "Option" || (name.0).string == "Result" {
                    RcDoc::nil()
                } else {
                    make_paren(translate_toplevel_ident(name.0.clone()))
                },
            )
            .append(if explicit && tyvec.len() != 0 {
                RcDoc::space().append(RcDoc::intersperse(
                    tyvec
                        .into_iter()
                        .map(|(x, _)| make_paren(translate_base_typ(x))),
                    RcDoc::space(),
                ))
            } else {
                RcDoc::nil()
            }),
        },
        // Only named types can have enum cases by construction.
        _ => panic!("should not happen"),
    }
}

/// Translates a hacspec base type into its Coq (ChoiceEquality) spelling.
/// Note: signed and unsigned ints share a spelling (`int8` … `int128`).
pub(crate) fn translate_base_typ<'a>(tau: BaseTyp) -> RcDoc<'a, ()> {
    match tau {
        BaseTyp::Bool => RcDoc::as_string("bool_ChoiceEquality"),
        BaseTyp::UInt8 => RcDoc::as_string("int8"),
        BaseTyp::Int8 => RcDoc::as_string("int8"),
        BaseTyp::UInt16 => RcDoc::as_string("int16"),
        BaseTyp::Int16 => RcDoc::as_string("int16"),
        BaseTyp::UInt32 => RcDoc::as_string("int32"),
        BaseTyp::Int32 => RcDoc::as_string("int32"),
        BaseTyp::UInt64 => RcDoc::as_string("int64"),
        BaseTyp::Int64 => RcDoc::as_string("int64"),
        BaseTyp::UInt128 => RcDoc::as_string("int128"),
        BaseTyp::Int128 => RcDoc::as_string("int128"),
        BaseTyp::Usize => RcDoc::as_string("uint_size"),
        BaseTyp::Isize => RcDoc::as_string("int_size"),
        BaseTyp::Str => RcDoc::as_string("string"),
        BaseTyp::Seq(tau) => {
            let tau: BaseTyp = tau.0;
            RcDoc::as_string("seq")
                .append(RcDoc::space())
                .append(translate_base_typ(tau))
                .group()
        }
        BaseTyp::Enum(_cases, _type_args) => {
            unimplemented!()
        }
        BaseTyp::Array(size, tau) => {
            let tau = tau.0;
            RcDoc::as_string("nseq")
                .append(RcDoc::space())
                .append(translate_base_typ(tau))
                .append(RcDoc::space())
                .append(RcDoc::as_string(match &size.0 {
                    ArraySize::Ident(id) => format!("{}", id),
                    ArraySize::Integer(i) => format!("{}", i),
                }))
                .group()
        }
        BaseTyp::Named((ident, _span), None) => translate_ident(Ident::TopLevel(ident)),
        // `Result` reverses its type arguments: hacspec `Result<T, E>` maps to
        // the Coq type with arguments in the opposite order.
        BaseTyp::Named((ident, _span), Some(args)) if ident.string == "Result" => make_paren(
            translate_ident(Ident::TopLevel(ident))
                .append(RcDoc::space())
                .append(RcDoc::intersperse(
                    args.iter()
                        .rev()
                        .map(|arg| make_paren(translate_base_typ(arg.0.clone()))),
                    RcDoc::space(),
                )),
        ),
        BaseTyp::Named((ident, _span), Some(args)) => make_paren(
            translate_ident(Ident::TopLevel(ident))
                .append(RcDoc::space())
                .append(RcDoc::intersperse(
                    args.iter()
                        .map(|arg| make_paren(translate_base_typ(arg.0.clone()))),
                    RcDoc::space(),
                )),
        ),
        BaseTyp::Variable(id) => RcDoc::as_string(format!("t{}", id.0)),
        BaseTyp::Tuple(args) => {
            if args.len() == 0 {
                RcDoc::as_string("unit_ChoiceEquality")
            } else {
                make_typ_tuple(args.into_iter().map(|(arg, _)| translate_base_typ(arg)))
            }
        }
        BaseTyp::NaturalInteger(_secrecy, modulo, _bits) => RcDoc::as_string("nat_mod")
            .append(RcDoc::space())
            .append(RcDoc::as_string(format!("0x{}", &modulo.0)))
            .append(RcDoc::hardline()),
        BaseTyp::Placeholder => panic!("Got unexpected type `Placeholder`: this should have been filled by during the typechecking phase."),
    }
}

/// Translates a full hacspec type by dropping its borrowing/mutability info
/// and translating the base type.
pub(crate) fn translate_typ<'a>((_, (tau, _)): Typ) -> RcDoc<'a, ()> {
    translate_base_typ(tau)
}

/// Translates a literal into Coq. Machine integers use `@repr` with the
/// corresponding word size; note that signed and unsigned variants share the
/// same unsigned representation (`U128`/`U64`/…).
fn translate_literal<'a>(lit: Literal) -> RcDoc<'a, ()> {
    match lit {
        Literal::Unit => RcDoc::as_string("(tt : unit_ChoiceEquality)"),
        Literal::Bool(true) => RcDoc::as_string("(true : bool_ChoiceEquality)"),
        Literal::Bool(false) => RcDoc::as_string("(false : bool_ChoiceEquality)"),
        Literal::Int128(x) => RcDoc::as_string(format!("@repr U128 {}", x)),
        Literal::UInt128(x) => RcDoc::as_string(format!("@repr U128 {}", x)),
        Literal::Int64(x) => RcDoc::as_string(format!("@repr U64 {}", x)),
        Literal::UInt64(x) => RcDoc::as_string(format!("@repr U64 {}", x)),
        Literal::Int32(x) => RcDoc::as_string(format!("@repr U32 {}", x)),
        Literal::UInt32(x) => RcDoc::as_string(format!("@repr U32 {}", x)),
        Literal::Int16(x) => RcDoc::as_string(format!("@repr U16 {}", x)),
        Literal::UInt16(x) => RcDoc::as_string(format!("@repr U16 {}", x)),
        Literal::Int8(x) => RcDoc::as_string(format!("@repr U8 {}", x)),
        Literal::UInt8(x) => RcDoc::as_string(format!("@repr U8 {}", x)),
        Literal::Isize(x) => RcDoc::as_string(format!("isize {}", x)),
        Literal::Usize(x) => RcDoc::as_string(format!("usize {}", x)),
        Literal::UnspecifiedInt(_) => panic!("Got a `UnspecifiedInt` literal: those should have been resolved into concrete types during the typechecking phase"),
        Literal::Str(msg) => RcDoc::as_string(format!("\"{}\"", msg)),
    }
}

/// Like `translate_pattern`, but prefixes tuple patterns with Coq's `'`
/// destructuring marker.
fn translate_pattern_tick<'a>(p: Pattern) -> RcDoc<'a, ()> {
    match p {
        // If the pattern is a tuple, expand it
        Pattern::Tuple(_) => RcDoc::as_string("'").append(translate_pattern(p)),
        _ => translate_pattern(p),
    }
}

/// Translates a hacspec pattern into the corresponding Coq pattern.
fn translate_pattern<'a>(p: Pattern) -> RcDoc<'a, ()> {
    match p {
        Pattern::EnumCase(ty_name, name, None) => {
            translate_enum_case_name(ty_name, name.0.clone(), false)
        }
        Pattern::EnumCase(ty_name, name, Some(inner_pat)) => {
            translate_enum_case_name(ty_name, name.0.clone(), false)
                .append(RcDoc::space())
                .append(make_paren(translate_pattern(inner_pat.0)))
        }
        Pattern::IdentPat(x, _) => translate_ident(x.clone()),
        Pattern::LiteralPat(x) => translate_literal(x.clone()),
        Pattern::WildCard => RcDoc::as_string("_"),
        Pattern::Tuple(pats) => make_tuple(pats.into_iter().map(|(pat, _)| translate_pattern(pat))),
    }
}

/// Returns the func name, as well as additional arguments to add when calling
/// the function in Coq.
///
/// Returns `(coq_name, implicit/extra args, inferred result type,
/// (pre-call let-bindings, possibly rewritten call args))`. For prefixed
/// (method-style) calls this injects module-specific extra arguments
/// (nat_mod modulus, encoding lengths, seq defaults, array sizes) and
/// coerces array arguments to seqs where the Coq API expects a seq.
pub fn translate_func_name<'a>(
    prefix: Option<Spanned<BaseTyp>>,
    name: Ident,
    top_ctx: &'a TopLevelContext,
    mut args: Vec<RcDoc<'a, ()>>,
    args_ty: Vec<BaseTyp>,
    inline: bool,
) -> (
    RcDoc<'a, ()>,
    Vec<RcDoc<'a, ()>>,
    Option<BaseTyp>,
    (Vec<RcDoc<'a, ()>>, Vec<RcDoc<'a, ()>>),
) {
    // Default result type: the declared return type of a local function, if any.
    let mut result_typ = match name.clone() {
        Ident::TopLevel(n) => match top_ctx.functions.get(&FnKey::Independent(n.clone())) {
            Some(FnValue::Local(sig)) => Some(sig.ret.0.clone()),
            Some(FnValue::External(_)) => None,
            Some(FnValue::ExternalNotInHacspec(_)) | None => None,
        },
        _ => None,
    };
    match prefix.clone() {
        None => {
            let name = translate_ident(name.clone());
            match format!("{}", name.pretty(0)).as_str() {
                // In this case, we're trying to apply a secret
                // int constructor. The value it is applied to is
                // a public integer of the same kind. So in Coq, that
                // will amount to a classification operation
                // TODO: may need to add type annotation here
                x @ ("uint128" | "uint64" | "uint32" | "uint16" | "uint8" | "int128"
                | "int64" | "int32" | "int16" | "int8") => (
                    RcDoc::as_string("secret"),
                    vec![],
                    Some(match x {
                        "uint128" => BaseTyp::UInt128,
                        "uint64" => BaseTyp::UInt64,
                        "uint32" => BaseTyp::UInt32,
                        "uint16" => BaseTyp::UInt16,
                        "uint8" => BaseTyp::UInt8,
                        "int128" => BaseTyp::Int128,
                        "int64" => BaseTyp::Int64,
                        "int32" => BaseTyp::Int32,
                        "int16" => BaseTyp::Int16,
                        "int8" => BaseTyp::Int8,
                        _ => panic!("Should not happen"),
                    }),
                    (vec![], args),
                ),
                // Any other free function: keep the name, but hard-code the
                // result type for the known byte-conversion builtins.
                x => (
                    name,
                    vec![],
                    match x {
                        "uint128_from_le_bytes" | "uint128_from_be_bytes" => Some(BaseTyp::UInt128),
                        "uint64_from_le_bytes" | "uint64_from_be_bytes" => Some(BaseTyp::UInt64),
                        "uint32_from_le_bytes" | "uint32_from_be_bytes" => Some(BaseTyp::UInt32),
                        "uint16_from_le_bytes" | "uint16_from_be_bytes" => Some(BaseTyp::UInt16),
                        "uint8_from_le_bytes" | "uint8_from_be_bytes" => Some(BaseTyp::UInt8),
                        "uint128_to_le_bytes" | "uint128_to_be_bytes" => Some(BaseTyp::Named(
                            (
                                TopLevelIdent {
                                    string: String::from("U128Word"),
                                    kind: TopLevelIdentKind::Type,
                                },
                                DUMMY_SP.into(),
                            ),
                            None,
                        )),
                        "uint64_to_le_bytes" | "uint64_to_be_bytes" => Some(BaseTyp::Named(
                            (
                                TopLevelIdent {
                                    string: String::from("U64Word"),
                                    kind: TopLevelIdentKind::Type,
                                },
                                DUMMY_SP.into(),
                            ),
                            None,
                        )),
                        "uint32_to_le_bytes" | "uint32_to_be_bytes" => Some(BaseTyp::Named(
                            (
                                TopLevelIdent {
                                    string: String::from("U32Word"),
                                    kind: TopLevelIdentKind::Type,
                                },
                                DUMMY_SP.into(),
                            ),
                            None,
                        )),
                        "uint16_to_le_bytes" | "uint16_to_be_bytes" => Some(BaseTyp::Named(
                            (
                                TopLevelIdent {
                                    string: String::from("U16Word"),
                                    kind: TopLevelIdentKind::Type,
                                },
                                DUMMY_SP.into(),
                            ),
                            None,
                        )),
                        "uint8_to_le_bytes" | "uint8_to_be_bytes" => Some(BaseTyp::Named(
                            (
                                TopLevelIdent {
                                    string: String::from("U8Word"),
                                    kind: TopLevelIdentKind::Type,
                                },
                                DUMMY_SP.into(),
                            ),
                            None,
                        )),
                        _ => result_typ,
                    },
                    (vec![], args),
                ),
            }
        }
        Some((prefix, _)) => {
            let (module_name, prefix_info) =
                translate_prefix_for_func_name(prefix.clone(), top_ctx);
            let func_ident = translate_ident(name.clone());
            let mut additional_args = Vec::new();
            let mut args_ass = Vec::new();
            // We add the modulo value for nat_mod
            match (
                format!("{}", module_name.pretty(0)).as_str(),
                format!("{}", func_ident.pretty(0)).as_str(),
            ) {
                (NAT_MODULE, "from_literal") | (NAT_MODULE, "pow2") => {
                    match &prefix_info {
                        FuncPrefix::NatMod(modulo, _) => {
                            if modulo == "unknown" {
                                additional_args.push(RcDoc::as_string("_"));
                            } else {
                                additional_args.push(RcDoc::as_string(format!("0x{}", modulo)));
                            }
                        }
                        _ => panic!(), // should not happen
                    }
                }
                _ => (),
            };
            // And the encoding length for certain nat_mod related function
            match (
                format!("{}", module_name.pretty(0)).as_str(),
                format!("{}", func_ident.pretty(0)).as_str(),
            ) {
                (NAT_MODULE, "to_public_byte_seq_le") | (NAT_MODULE, "to_public_byte_seq_be") => {
                    match &prefix_info {
                        // Bit count rounded up to whole bytes.
                        FuncPrefix::NatMod(_, encoding_bits) => additional_args
                            .push(RcDoc::as_string(format!("{}", (encoding_bits + 7) / 8))),
                        _ => panic!(), // should not happen
                    }
                }
                _ => (),
            };
            // And decoding
            match (
                format!("{}", module_name.pretty(0)).as_str(),
                format!("{}", func_ident.pretty(0)).as_str(),
            ) {
                (NAT_MODULE, "from_byte_seq_le") | (NAT_MODULE, "from_byte_seq_be") => {
                    match &prefix_info {
                        FuncPrefix::NatMod(_modulo, _) => {
                            result_typ = Some(prefix.clone());
                        }
                        _ => panic!(), // should not happen
                    }
                }
                _ => (),
            };
            // Then the default value for seqs
            match (
                format!("{}", module_name.pretty(0)).as_str(),
                format!("{}", func_ident.pretty(0)).as_str(),
            ) {
                (ARRAY_MODULE, "new_")
                | (SEQ_MODULE, "new_")
                | (ARRAY_MODULE, "from_slice")
                | (ARRAY_MODULE, "from_slice_range") => {
                    match &prefix_info {
                        FuncPrefix::Array(_, bt) | FuncPrefix::Seq(bt) => {
                            additional_args.push(
                                RcDoc::as_string("default : ")
                                    .append(translate_base_typ(bt.clone())),
                            );
                        }
                        _ => panic!(), // should not happen
                    }
                }
                _ => (),
            };
            // Handle everything with the SeqTrait.
            match (
                format!("{}", module_name.pretty(0)).as_str(),
                format!("{}", func_ident.pretty(0)).as_str(),
            ) {
                m @ ((ARRAY_MODULE, "from_slice")
                | (ARRAY_MODULE, "concat")
                | (ARRAY_MODULE, "from_slice_range")
                | (ARRAY_MODULE, "set_chunk")
                | (ARRAY_MODULE, "update_slice")
                | (ARRAY_MODULE, "update")
                | (ARRAY_MODULE, "update_start")
                | (ARRAY_MODULE, "from_seq")
                | (SEQ_MODULE, "from_slice")
                | (SEQ_MODULE, "concat")
                | (SEQ_MODULE, "from_slice_range")
                | (SEQ_MODULE, "set_chunk")
                | (SEQ_MODULE, "set_exact_chunk")
                | (SEQ_MODULE, "update_slice")
                | (SEQ_MODULE, "update")
                | (SEQ_MODULE, "update_start")
                | (SEQ_MODULE, "from_seq")
                | (SEQ_MODULE, "from_public_seq")
                | (NAT_MODULE, "from_byte_seq_le")
                | (NAT_MODULE, "from_byte_seq_be")
                | (NAT_MODULE, "to_public_byte_seq_le")
                | (NAT_MODULE, "to_public_byte_seq_be")) => {
                    // position in arg list (does not count self)
                    let position = match m {
                        (ARRAY_MODULE, "from_slice")
                        | (ARRAY_MODULE, "concat")
                        | (ARRAY_MODULE, "from_slice_range")
                        | (ARRAY_MODULE, "update_start")
                        | (ARRAY_MODULE, "from_seq")
                        | (SEQ_MODULE, "from_slice")
                        | (SEQ_MODULE, "concat")
                        | (SEQ_MODULE, "from_slice_range")
                        | (SEQ_MODULE, "update_start")
                        | (SEQ_MODULE, "from_seq")
                        | (SEQ_MODULE, "from_public_seq")
                        | (NAT_MODULE, "from_byte_seq_le")
                        | (NAT_MODULE, "from_byte_seq_be")
                        | (NAT_MODULE, "to_public_byte_seq_le")
                        | (NAT_MODULE, "to_public_byte_seq_be") => 0,
                        (ARRAY_MODULE, "update")
                        | (SEQ_MODULE, "update")
                        | (ARRAY_MODULE, "update_slice")
                        | (SEQ_MODULE, "update_slice") => 1,
                        (ARRAY_MODULE, "set_chunk")
                        | (SEQ_MODULE, "set_chunk")
                        | (SEQ_MODULE, "set_exact_chunk") => 2,
                        _ => panic!(),
                    };
                    // Resolve type aliases through the typ_dict before testing
                    // whether the argument is an array.
                    let ty = match args_ty[position].clone() {
                        BaseTyp::Named(p, _) => match top_ctx.typ_dict.get(&p.0) {
                            Some(x) => ((x.0).1).0.clone(),
                            None => args_ty[position].clone(),
                        },
                        _ => args_ty[position].clone(),
                    };
                    // Array argument where a seq is expected: pre-bind
                    // `array_to_seq (arg)` to a temp and pass the temp instead.
                    if let BaseTyp::Array(_, base_ty) = ty {
                        let temp_name = Ident::Local(LocalIdent {
                            id: fresh_codegen_id(),
                            name: String::from("temp"),
                            mutable: false,
                        });
                        let temp_ass: RcDoc<'a, ()> = make_let_binding(
                            Pattern::IdentPat(temp_name.clone(), false),
                            Some(BaseTyp::Seq(base_ty)),
                            RcDoc::as_string("array_to_seq (")
                                .append(args[position].clone())
                                .append(RcDoc::as_string(")")),
                            !inline,
                        );
                        args_ass.push(temp_ass);
                        args[position] = translate_ident(temp_name);
                    }
                }
                _ => (),
            };
            match (
                format!("{}", module_name.pretty(0)).as_str(),
                format!("{}", func_ident.pretty(0)).as_str(),
            ) {
                // Then we add the size for arrays
                (ARRAY_MODULE, "new_")
                | (ARRAY_MODULE, "from_seq")
                | (ARRAY_MODULE, "from_slice")
                | (ARRAY_MODULE, "from_slice_range") => {
                    match &prefix_info {
                        FuncPrefix::Array(ArraySize::Ident(s), _) => {
                            additional_args.push(translate_ident(Ident::TopLevel(s.clone())))
                        }
                        FuncPrefix::Array(ArraySize::Integer(i), _) => {
                            if *i == 0 {
                                additional_args.push(RcDoc::as_string("_"))
                            } else {
                                additional_args.push(RcDoc::as_string(format!("{}", i)))
                            }
                        }
                        FuncPrefix::Seq(_) => {
                            // This is the Seq case, should be alright
                            ()
                        }
                        _ => panic!(), // should not happen
                    }
                }
                _ => (),
            };
            // Infer the call's result type from well-known module methods.
            result_typ = match (
                format!("{}", module_name.pretty(0)).as_str(),
                format!("{}", func_ident.pretty(0)).as_str(),
                prefix_info,
            ) {
                (
                    ARRAY_MODULE,
                    "from_slice" | "from_slice_range" | "set_chunk" | "default" | "create"
                    | "update_slice" | "update" | "update_start" | "from_seq" | "new_",
                    _,
                )
                | (
                    SEQ_MODULE,
                    "slice" | "slice_range" | "from_slice" | "concat" | "concat_owned" | "push"
                    | "push_owned" | "from_slice_range" | "get_exact_chunk"
                    | "get_remainder_chunk" | "set_chunk" | "set_exact_chunk" | "create"
                    | "update_slice" | "update" | "update_start" | "from_seq"
                    | "from_public_seq" | "declassify",
                    _,
                )
                | (
                    NAT_MODULE,
                    "zero" | "one" | "two" | "from_secret_literal" | "from_literal" | "pow"
                    | "pow2",
                    _,
                )
                | ("uint128", "declassify", _) => Some(prefix.clone()),
                (ARRAY_MODULE, "length" | "num_chunks" | "get_chunk_len", _)
                | (SEQ_MODULE, "num_chunks" | "num_exact_chunks" | "len", _) => {
                    Some(BaseTyp::Usize)
                }
                (ARRAY_MODULE, "concat" | "slice" | "slice_range", FuncPrefix::Array(_, typ)) => {
                    Some(BaseTyp::Seq(Box::new((typ, DUMMY_SP.into()))))
                }
                (ARRAY_MODULE, "to_be_bytes", _) => {
                    Some(BaseTyp::Seq(Box::new((BaseTyp::UInt8, DUMMY_SP.into()))))
                }
                (ARRAY_MODULE, "get_chunk", FuncPrefix::Array(_, typ))
                | (SEQ_MODULE, "get_chunk", FuncPrefix::Seq(typ)) => Some(BaseTyp::Tuple(vec![
                    (BaseTyp::Usize, DUMMY_SP.into()),
                    (
                        BaseTyp::Seq(Box::new((typ, DUMMY_SP.into()))),
                        DUMMY_SP.into(),
                    ),
                ])),
                (_, _, _) => result_typ,
            };
            (
                // Method calls become `Module_method` in Coq.
                module_name
                    .clone()
                    .append(RcDoc::as_string("_"))
                    .append(func_ident.clone()),
                additional_args,
                result_typ,
                (args_ass, args),
            )
        }
    }
}

/// Translates a hacspec expression into `(pre-bindings, expression doc)`:
/// side-effecting temporaries are emitted as let-bindings ahead of the
/// expression itself (unless `inline`).
fn translate_expression<'a>(
    e: Expression,
    top_ctx: &'a TopLevelContext,
    inline: bool,
) -> (Vec<RcDoc<'a, ()>>, RcDoc<'a, ()>) {
    match e {
        Expression::QuestionMark(_, _) => todo!(),
        Expression::MonadicLet(_, _, _, _) => todo!(),
        Expression::Binary((op, _), e1, e2, op_typ) => {
            let e1 = e1.0;
            let e2 = e2.0;
            let (ass_e1, trans_e1) = translate_expression(e1, top_ctx, inline);
            let (ass_e2, trans_e2) = translate_expression(e2, top_ctx, inline);
            let mut ass = Vec::new();
            ass.extend(ass_e1);
            ass.extend(ass_e2);
            let temp_name = Ident::Local(LocalIdent {
                id: fresh_codegen_id(),
                name: String::from("temp"),
                mutable: false,
            });
            let temp_ass: RcDoc<'a, ()> = make_let_binding(
                Pattern::IdentPat(temp_name.clone(), false),
                // Arithmetic keeps the operand type; comparisons and boolean
                // connectives yield bool.
                match op {
                    BinOpKind::Add
                    | BinOpKind::Sub
                    | BinOpKind::Mul
                    | BinOpKind::Div
                    | BinOpKind::Rem =>
op_typ.clone().map(|(_, (x, _))| x), BinOpKind::And | BinOpKind::Or => Some(BaseTyp::Bool), BinOpKind::Lt | BinOpKind::Le | BinOpKind::Ne | BinOpKind::Ge | BinOpKind::Gt | BinOpKind::Eq => Some(BaseTyp::Bool), _ => None, }, make_paren(trans_e1) .append(RcDoc::space()) .append(translate_binop( RcDoc::as_string("."), op, op_typ.as_ref().unwrap(), top_ctx, )) .append(RcDoc::space()) .append(make_paren(trans_e2)) .group(), !inline, ); ass.push(temp_ass); (ass, translate_ident(temp_name)) } Expression::MatchWith(arg, arms) => { let (ass_arg_0, trans_arg_0) = translate_expression(arg.0, top_ctx, inline); let (ass_e1_0_iter, trans_e1_0_iter): (Vec<_>, Vec<_>) = arms .into_iter() .map(|(pat, e1)| { let (ass_e1, trans_e1) = translate_expression(e1.0, top_ctx, inline); (ass_e1, (pat, trans_e1)) }) .unzip(); let mut ass = Vec::new(); ass.extend(ass_arg_0); let match_expr = make_paren(code_block_wrap( RcDoc::as_string("match") .append(RcDoc::space()) .append(trans_arg_0) .append(RcDoc::space()) .append(RcDoc::as_string("with")) .append(RcDoc::line()) .append(RcDoc::intersperse( ass_e1_0_iter .into_iter() .zip(trans_e1_0_iter.into_iter()) .map(|(enum_ass, (pat, trans_e1_0))| { RcDoc::as_string("|") .append(RcDoc::space()) .append(translate_pattern(pat.0.clone())) .append(RcDoc::space()) .append(RcDoc::as_string("=>")) .append(RcDoc::concat(enum_ass.into_iter())) .append(RcDoc::space()) .append(RcDoc::as_string("ret")) .append(RcDoc::space()) .append(make_paren(trans_e1_0)) }), RcDoc::line(), )) .append(RcDoc::line()) .append(RcDoc::as_string("end")), None, None, None, )); let temp_name = Ident::Local(LocalIdent { id: fresh_codegen_id(), name: String::from("temp"), mutable: false, }); let temp_ass: RcDoc<'a, ()> = make_let_binding( Pattern::IdentPat(temp_name.clone(), false), None, match_expr, !inline, ); ass.push(temp_ass); (ass, translate_ident(temp_name)) } Expression::FieldAccessor(e1, field) => { unimplemented!() } Expression::EnumInject(enum_name, case_name, payload) => { 
let (ass, trans) = match payload { None => (Vec::new(), RcDoc::nil()), Some(payload) => { let (ass, trans) = translate_expression(*payload.0.clone(), top_ctx, inline); (ass, RcDoc::space().append(make_paren(trans))) } }; ( ass, make_paren( translate_enum_case_name(enum_name.clone(), case_name.0.clone(), true) .append(trans) .append(RcDoc::as_string(" : ")) .append(translate_base_typ(enum_name)), ), ) } Expression::InlineConditional(cond, e_t, e_f) => { let cond = cond.0; let e_t = e_t.0; let e_f = e_f.0; let (ass_cond, trans_cond) = translate_expression(cond, top_ctx, inline); let (ass_e_t, trans_e_t) = translate_expression(e_t, top_ctx, inline); let (ass_e_f, trans_e_f) = translate_expression(e_f, top_ctx, inline); let mut ass = Vec::new(); ass.extend(ass_cond); ass.extend(ass_e_t); ass.extend(ass_e_f); ( ass, make_paren( RcDoc::as_string("if") .append(RcDoc::space()) .append(make_paren(trans_cond)) .append(RcDoc::as_string(":bool_ChoiceEquality")) .append(RcDoc::space()) .append(RcDoc::as_string("then (*inline*)")) .append(RcDoc::space()) .append(make_paren(trans_e_t)) .append(RcDoc::space()) .append(RcDoc::as_string("else")) .append(RcDoc::space()) .append(make_paren(trans_e_f)), ) .group(), ) } Expression::Unary(op, e1, op_typ) => { let e1 = e1.0; let (ass, trans) = translate_expression(e1, top_ctx, inline); ( ass, translate_unop(op, op_typ.as_ref().unwrap().clone()) .append(RcDoc::space()) .append(make_paren(trans)) .group(), ) } Expression::Lit(lit) => (Vec::new(), translate_literal(lit.clone())), Expression::Tuple(es) => { let (ass_iter, trans_iter): (Vec<_>, Vec<_>) = es .into_iter() .map(|(e, _)| { let (ass, trans) = translate_expression(e, top_ctx, inline); (ass, trans) }) .unzip(); ( ass_iter.into_iter().fold(Vec::new(), |mut v, x| { v.extend(x); v }), { let iter = trans_iter.into_iter(); match &iter.size_hint().1 { Some(0) => RcDoc::as_string("tt"), Some(1) => { RcDoc::intersperse(iter, RcDoc::as_string(",").append(RcDoc::line())) .group() } // TODO: 
just get next, instead of using intersperse for a single element _ => RcDoc::as_string("prod_ce").append( RcDoc::as_string("(") .append( RcDoc::line_() .append(RcDoc::intersperse( iter, RcDoc::as_string(",").append(RcDoc::line()), )) .group() .nest(2), ) .append(RcDoc::line_()) .append(RcDoc::as_string(")")) .group(), ), } }, ) } Expression::Named(p) => (Vec::new(), translate_ident(p.clone())), Expression::FuncCall(prefix, name, args, arg_types) => { let (ass_arg_iter, trans_arg_iter): (Vec<_>, Vec<_>) = args .clone() .into_iter() .map(|((arg, _), _)| { let (ass, trans) = translate_expression(arg, top_ctx, inline); (ass, trans) }) .unzip(); let (func_name, additional_args, func_ret_ty, (trans_arg_ass, trans_arg_iter)) = translate_func_name( prefix.clone(), Ident::TopLevel(name.0.clone()), top_ctx, trans_arg_iter, arg_types.unwrap(), inline, ); let total_args = args.len() + additional_args.len(); let mut ass: Vec<RcDoc<'a, ()>> = Vec::new(); ass.extend(ass_arg_iter.into_iter().fold(Vec::new(), |mut v, x| { v.extend(x); v })); ass.extend(trans_arg_ass); let fun_expr = func_name // We append implicit arguments first .append(RcDoc::concat( additional_args .into_iter() .map(|arg| RcDoc::space().append(make_paren(arg))), )) // Then the explicit arguments .append(RcDoc::concat( trans_arg_iter .into_iter() .map(|trans_arg| RcDoc::space().append(make_paren(trans_arg))), )) .append(if total_args == 0 { RcDoc::space() } else { RcDoc::nil() }); let temp_name = Ident::Local(LocalIdent { id: fresh_codegen_id(), name: String::from("temp"), mutable: false, }); let temp_ass: RcDoc<'a, ()> = make_let_binding( Pattern::IdentPat(temp_name.clone(), false), func_ret_ty, fun_expr, !inline, ); ass.push(temp_ass); (ass, translate_ident(temp_name)) } Expression::MethodCall(sel_arg, sel_typ, (f, _), args, arg_types) => { let (ass_sel_arg_0_0, trans_sel_arg_0_0) = translate_expression((sel_arg.0).0, top_ctx, inline); let (ass_arg_iter, trans_arg_iter): (Vec<_>, Vec<_>) = args .into_iter() 
.map(|((arg, _), _)| { let (ass, trans) = translate_expression(arg, top_ctx, inline); (ass, trans) }) .unzip(); let ass_arg = ass_arg_iter.into_iter().fold(Vec::new(), |mut v, x| { v.extend(x); v }); let mut ass = Vec::new(); ass.extend(ass_sel_arg_0_0); ass.extend(ass_arg); let (func_name, additional_args, func_ret_ty, (trans_args_ass, trans_arg_iter)) = translate_func_name( sel_typ.clone().map(|x| x.1), Ident::TopLevel(f.clone()), top_ctx, trans_arg_iter, arg_types.unwrap(), inline, ); ass.extend(trans_args_ass); let arg_trans = // Then the self argument make_paren(trans_sel_arg_0_0) // And finally the rest of the arguments .append(RcDoc::concat(trans_arg_iter.into_iter().map( |trans_arg| { RcDoc::space().append(make_paren(trans_arg)) }, ))); // Ignore "clone" if f.string == "clone" { (ass, arg_trans) } else { let method_call_expr = func_name // We append implicit arguments first .append(RcDoc::concat( additional_args .into_iter() .map(|arg| RcDoc::space().append(make_paren(arg))), )) .append(RcDoc::space()) .append(arg_trans.clone()); let temp_name = Ident::Local(LocalIdent { id: fresh_codegen_id(), name: String::from("temp"), mutable: false, }); let temp_ass: RcDoc<'a, ()> = make_let_binding( Pattern::IdentPat(temp_name.clone(), false), func_ret_ty, method_call_expr, !inline, ); ass.push(temp_ass); (ass, translate_ident(temp_name)) } } Expression::ArrayIndex(x, e2, typ) => { let e2 = e2.0; let array_or_seq = array_or_seq(typ.clone().unwrap(), top_ctx); let (ass_e2, trans_e2) = translate_expression(e2, top_ctx, inline); let mut ass = Vec::new(); ass.extend(ass_e2); let temp_name = Ident::Local(LocalIdent { id: fresh_codegen_id(), name: String::from("temp"), mutable: false, }); let temp_ass: RcDoc<'a, ()> = make_let_binding( Pattern::IdentPat(temp_name.clone(), false), match typ.clone() { Some((_, (BaseTyp::Array(_, bt) | BaseTyp::Seq(bt), _))) => Some((*bt).0), _ => None, }, array_or_seq .append(RcDoc::as_string("_index")) .append(RcDoc::space()) 
.append(make_paren(translate_ident(x.0.clone()))) .append(RcDoc::space()) .append(make_paren(trans_e2)), !inline, ); ass.push(temp_ass); (ass, translate_ident(temp_name)) } // Expression::NewArray(_array_name, inner_ty, args) => { Expression::NewArray(_array_name, inner_ty, args) => { let inner_ty = inner_ty.unwrap(); // inner_ty is the type of the cell elements // TODO: do the case when _array_name is None (the Seq case) let (ass_iter, trans_iter): (Vec<_>, Vec<_>) = args .into_iter() .map(|(e, _)| { let (ass, trans) = translate_expression(e.clone(), top_ctx, inline); (ass, trans) }) .unzip(); let mut ass = ass_iter.into_iter().fold(Vec::new(), |mut v, x| { v.extend(x); v }); let array_typ = BaseTyp::Array( (ArraySize::Integer(trans_iter.len()), DUMMY_SP.into()), Box::new((inner_ty.clone(), DUMMY_SP.into())), ); let trans = make_list(trans_iter); let cast_trans = match _array_name { // Seq case None => RcDoc::as_string("seq_from_list _") .append(RcDoc::space()) .append(trans), Some(_) => // Array case { let temp_name = Ident::Local(LocalIdent { id: fresh_codegen_id(), name: String::from("temp"), mutable: false, }); let temp_ass: RcDoc<'a, ()> = make_let_binding( Pattern::IdentPat(temp_name.clone(), false), Some(array_typ), RcDoc::as_string(format!("@{}_from_list", ARRAY_MODULE)) .append(RcDoc::space()) .append(make_paren(translate_base_typ(inner_ty.clone()))) .append(RcDoc::space()) .append(trans), !inline, ); ass.push(temp_ass); translate_ident(temp_name) } }; (ass, cast_trans) } Expression::IntegerCasting(x, new_t, old_t) => { let old_t = old_t.unwrap(); match old_t { BaseTyp::Usize | BaseTyp::Isize => { let new_t_doc = match &new_t.0 { BaseTyp::UInt8 => RcDoc::as_string("pub_u8"), BaseTyp::UInt16 => RcDoc::as_string("pub_u16"), BaseTyp::UInt32 => RcDoc::as_string("pub_u32"), BaseTyp::UInt64 => RcDoc::as_string("pub_u64"), BaseTyp::UInt128 => RcDoc::as_string("pub_u128"), BaseTyp::Usize => RcDoc::as_string("usize"), BaseTyp::Int8 => RcDoc::as_string("pub_i8"), 
BaseTyp::Int16 => RcDoc::as_string("pub_i16"), BaseTyp::Int32 => RcDoc::as_string("pub_i32"), BaseTyp::Int64 => RcDoc::as_string("pub_i64"), BaseTyp::Int128 => RcDoc::as_string("pub_i28"), BaseTyp::Isize => RcDoc::as_string("isize"), _ => panic!(), // should not happen }; let (ass_x, trans_x) = translate_expression(x.0.clone(), top_ctx, inline); ( ass_x, new_t_doc.append(RcDoc::space()).append(make_paren(trans_x)), ) } _ => { let new_t_doc = match &new_t.0 { BaseTyp::UInt8 => String::from("uint8"), BaseTyp::UInt16 => String::from("uint16"), BaseTyp::UInt32 => String::from("uint32"), BaseTyp::UInt64 => String::from("uint64"), BaseTyp::UInt128 => String::from("uint128"), BaseTyp::Usize => String::from("uint32"), BaseTyp::Int8 => String::from("int8"), BaseTyp::Int16 => String::from("int16"), BaseTyp::Int32 => String::from("int32"), BaseTyp::Int64 => String::from("int64"), BaseTyp::Int128 => String::from("int128"), BaseTyp::Isize => String::from("int32"), BaseTyp::Named((TopLevelIdent { string: s, .. 
}, _), None) => s.clone(), _ => panic!(), // should not happen }; let _secret = match &new_t.0 { BaseTyp::Named(_, _) => true, _ => false, }; let (ass_x, trans_x) = translate_expression(x.as_ref().0.clone(), top_ctx, inline); ( ass_x, RcDoc::as_string("(fun x => lift_to_both0 (repr (unsigned x)))") .append(make_paren(trans_x)) .group(), ) } } } } } fn translate_statements<'a>( mut statements: Iter<Spanned<Statement>>, top_ctx: &'a TopLevelContext, inline: bool, smv: ScopeMutableVars, function_dependencies: FunctionDependencies, ) -> RcDoc<'a, ()> { let s = match statements.next() { None => return RcDoc::nil(), Some(s) => s.clone(), }; match s.0 { Statement::LetBinding((pat, _), typ, (expr, _), carrier, question_mark) => { let (ass_expr, trans_expr) = translate_expression(expr.clone(), top_ctx, inline); let trans_stmt = translate_statements( statements, top_ctx, inline, smv.clone(), function_dependencies.clone(), ); let expr = match question_mark { Some((smv_bind, function_dependencies_bind)) => bind_code( code_block_wrap( RcDoc::concat(ass_expr.into_iter()) .append(RcDoc::as_string("@ret _ ").append(make_paren(trans_expr))), Some(make_paren(fset_from_scope(smv_bind.clone()))), Some(function_dependencies_to_interface( function_dependencies_bind.clone(), top_ctx, )), None, ), carrier, typ.map(|((_, (x, _)), _)| x), if let Pattern::IdentPat(_i, true) = pat.clone() { true } else { false }, pat.clone(), code_block_wrap( trans_stmt, Some(make_paren(fset_from_scope(smv.clone()))), Some(function_dependencies_to_interface( function_dependencies.clone(), top_ctx, )), None, ), smv.clone(), ), None => if let Pattern::IdentPat(_i, true) = pat.clone() { // TODO: encapsulate in scope its own varaible make_put_binding( pat.clone(), typ.map(|((_, (base_typ, _)), _)| base_typ), RcDoc::concat(ass_expr.into_iter()).append( RcDoc::as_string("ret") .append(RcDoc::space()) .append(make_paren(trans_expr)), ), ) } else { make_let_binding( pat.clone(), typ.map(|((_, (base_typ, _)), _)| 
base_typ), RcDoc::concat(ass_expr.into_iter()).append( RcDoc::as_string("ret") .append(RcDoc::space()) .append(make_paren(trans_expr)), ), !inline, ) } .append(trans_stmt), }; expr } Statement::Reassignment((x, _), x_typ, (e1, _), carrier, question_mark) => { let (ass_e1, trans_e1) = translate_expression(e1.clone(), top_ctx, inline); let trans_stmt = translate_statements( statements, top_ctx, inline, smv.clone(), function_dependencies.clone(), ); let trans_e1 = make_paren( RcDoc::concat(ass_e1.into_iter()).append( RcDoc::as_string("ret") .append(RcDoc::space()) .append(make_paren(trans_e1)), ), ); let expr = match question_mark { Some((_smv_bind, function_dependencies_bind)) => bind_code( code_block_wrap(trans_e1, None, None, None), carrier, x_typ.clone().map(|((_, (base_typ, _)), _)| base_typ), true, Pattern::IdentPat(x.clone(), true), code_block_wrap( trans_stmt, Some(make_paren(fset_from_scope(smv.clone()))), Some(function_dependencies_to_interface( function_dependencies.clone(), top_ctx, )), None, ), smv.clone(), ), None => make_put_binding( Pattern::IdentPat(x.clone(), true), x_typ.clone().map(|((_, (base_typ, _)), _)| base_typ), trans_e1, ) .append(RcDoc::hardline()) .append(trans_stmt), }; expr } Statement::ArrayUpdate((x, _), (e1, _), (e2, _), carrier, question_mark, typ) => { let array_or_seq = array_or_seq(typ.clone().unwrap(), top_ctx); let (ass_e1, trans_e1) = translate_expression(e1.clone(), top_ctx, inline); let (ass_e2, trans_e2) = translate_expression(e2.clone(), top_ctx, inline); let trans_stmt = translate_statements( statements, top_ctx, inline, smv.clone(), function_dependencies.clone(), ); let expr = match question_mark { Some((_smv_bind, function_dependencies_bind)) => bind_code( trans_e2, carrier, typ.clone().map(|(_, (x, _))| x), false, Pattern::IdentPat( Ident::Local(LocalIdent { id: 0, name: String::from("_temp"), mutable: false, }), false, ), make_let_binding( Pattern::IdentPat(x.clone(), false), typ.clone().map(|(_, (base_typ, _))| 
base_typ), (RcDoc::concat(ass_e1.into_iter())) .append(RcDoc::concat(ass_e2.into_iter())) .append( RcDoc::as_string("ret") .append(RcDoc::space()) .append(make_paren( array_or_seq .append(RcDoc::as_string("_upd")) .append(RcDoc::space()) .append(translate_ident(x.clone())) .append(RcDoc::space()) .append(make_paren(trans_e1)) .append(RcDoc::space()) .append(RcDoc::as_string("_temp")), )), ), false, ) .append(RcDoc::hardline()) .append(trans_stmt), smv.clone(), ), None => { let array_upd_payload = RcDoc::as_string("ret") .append(RcDoc::space()) .append(make_paren( array_or_seq .append(RcDoc::as_string("_upd")) .append(RcDoc::space()) .append(translate_ident(x.clone())) .append(RcDoc::space()) .append(make_paren(trans_e1)) .append(RcDoc::space()) .append(make_paren(trans_e2)), )); make_let_binding( Pattern::IdentPat(x.clone(), false), typ.clone().map(|(_, (x, _))| x), (RcDoc::concat(ass_e1.into_iter())) .append(RcDoc::concat(ass_e2.into_iter())) .append(array_upd_payload), !inline, ) .append(RcDoc::hardline()) .append(trans_stmt) } }; expr } Statement::ReturnExp(e1, t1) => { let (ass_e1, trans_e1) = translate_expression(e1.clone(), top_ctx, inline); RcDoc::concat(ass_e1.into_iter()) .append(RcDoc::as_string("@ret ")) .append(make_paren(match t1 { Some((_, (x, _))) => translate_base_typ(x), None => RcDoc::as_string("_"), })) .append(RcDoc::space()) .append(make_paren(trans_e1)) } Statement::Conditional((cond, _), (mut b1, _), b2, mutated) => { let mutated_info = mutated.unwrap(); let pat = Pattern::Tuple( mutated_info .vars .0 .iter() .sorted() .map(|i| { ( Pattern::IdentPat(Ident::Local(i.clone()), false), DUMMY_SP.into(), ) }) .collect(), ); let b1_question_mark = *b1.contains_question_mark.as_ref().unwrap(); let b2_question_mark = match &b2 { None => false, Some(b2) => *b2.0.contains_question_mark.as_ref().unwrap(), }; let either_blocks_contains_question_mark = b1_question_mark || b2_question_mark; b1.stmts.push(add_ok_if_result( mutated_info.stmt.clone(), if 
either_blocks_contains_question_mark { mutated_info.early_return_type.clone() } else { None }, )); let (ass_cond, trans_cond) = translate_expression(cond.clone(), top_ctx, inline); let mut block_1 = translate_block(b1.clone(), true, top_ctx, inline, false); if !b1_question_mark { let local_vars_b1 = fset_from_scope(b1.mutable_vars.clone()); let interface_b1 = function_dependencies_to_interface(b1.function_dependencies.clone(), top_ctx); block_1 = RcDoc::as_string("let temp_then := ") .append(block_1) .append(RcDoc::as_string(" in")) .append(RcDoc::line()) .append(code_block_wrap( RcDoc::as_string("temp_then"), Some(make_paren(local_vars_b1)), Some(interface_b1), None, )); } let else_expr = match b2.clone() { None => translate_statements( vec![add_ok_if_result( mutated_info.stmt.clone(), if either_blocks_contains_question_mark { mutated_info.early_return_type.clone() } else { None }, )] .iter(), top_ctx, inline, smv.clone(), function_dependencies.clone(), ), Some((mut b2, _)) => { b2.stmts.push(add_ok_if_result( mutated_info.stmt.clone(), if either_blocks_contains_question_mark { mutated_info.early_return_type.clone() } else { None }, )); let block2_expr = translate_block(b2, true, top_ctx, inline, true); RcDoc::space().append(make_paren(block2_expr)) } }; let trans_stmt = translate_statements( statements.clone(), top_ctx, inline, smv.clone(), function_dependencies.clone(), ); let either_expr = if either_blocks_contains_question_mark { let expr = RcDoc::as_string("if") .append(RcDoc::space()) .append(trans_cond) .append(RcDoc::space()) .append(RcDoc::as_string(": bool_ChoiceEquality")) .append(RcDoc::line()) .append(RcDoc::as_string("then (*state*)")) .append(RcDoc::space()) .append(make_paren(code_block_wrap( block_1.clone(), Some(make_paren(fset_from_scope(b1.mutable_vars.clone()))), Some(function_dependencies_to_interface( b1.function_dependencies, top_ctx, )), None, ))) .append(RcDoc::line()) .append(RcDoc::as_string("else")) .append(RcDoc::space()) 
.append(code_block_wrap( else_expr, b2.clone() .map(|(b2, _)| make_paren(fset_from_scope(b2.mutable_vars.clone()))), b2.clone().map(|(b2, _)| { function_dependencies_to_interface(b2.function_dependencies, top_ctx) }), None, )) .append(RcDoc::space()); bind_code( expr, mutated_info.early_return_type.clone(), match mutated_info.stmt { Statement::ReturnExp(e, t) => t.clone().map(|(_, (base_typ, _))| base_typ), _ => None, }, false, pat, code_block_wrap( trans_stmt, Some(make_paren(fset_from_scope(smv.clone()))), Some(function_dependencies_to_interface( function_dependencies, top_ctx, )), None, ), smv.clone(), ) } else { let expr = RcDoc::as_string("if") .append(RcDoc::space()) .append(trans_cond.clone()) .append(RcDoc::as_string(":bool_ChoiceEquality")) .append(RcDoc::line()) .append(RcDoc::as_string("then (*not state*)")) .append(RcDoc::space()) .append(make_paren(block_1.clone())) .append(RcDoc::line()) .append(RcDoc::as_string("else")) .append(RcDoc::space()) .append(else_expr); make_let_binding( pat, match mutated_info.stmt { Statement::ReturnExp(e, t) => t.clone().map(|(_, (base_typ, _))| base_typ), _ => None, }, expr, !inline, ) .append(RcDoc::hardline()) .append(trans_stmt) }; RcDoc::concat(ass_cond.into_iter()).append(either_expr) } Statement::ForLoop(x, (e1, _), (e2, _), (mut b, _)) => { let mutated_info = b.mutated.clone().unwrap(); let b_question_mark = *b.contains_question_mark.as_ref().unwrap(); b.stmts.push(add_ok_if_result( mutated_info.stmt.clone(), if b_question_mark { mutated_info.early_return_type.clone() } else { None }, )); let mut_tuple = { // if there is only one element, just print the identifier instead of making a tuple if mutated_info.vars.0.len() == 1 { match mutated_info.vars.0.iter().next() { None => Pattern::WildCard, Some(i) => Pattern::IdentPat(Ident::Local(i.clone()), false), } } // print as tuple otherwise else { Pattern::Tuple( mutated_info .vars .0 .iter() .sorted() .map(|i| { ( Pattern::IdentPat(Ident::Local(i.clone()), false), 
DUMMY_SP.into(), ) }) .collect(), ) } }; let (ass_e1, trans_e1) = translate_expression(e1.clone(), top_ctx, inline); let (ass_e2, trans_e2) = translate_expression(e2.clone(), top_ctx, inline); let block_expr = translate_block(b.clone(), true, top_ctx, inline, true); let trans_stmt = translate_statements( statements, top_ctx, inline, smv.clone(), function_dependencies.clone(), ); let expr = if b_question_mark { let local_vars_bind = fset_from_scope(b.mutable_vars.clone()); let local_vars_fun = fset_from_scope(smv.clone()); let loop_expr = RcDoc::as_string("foldi_bind_code'") .append(RcDoc::space()) .append(make_paren(trans_e1)) .append(RcDoc::space()) .append(make_paren(trans_e2)) .append(RcDoc::space()) .append(make_paren(match mut_tuple.clone() { Pattern::Tuple(_) => { RcDoc::as_string("prod_ce").append(translate_pattern(mut_tuple.clone())) } _ => translate_pattern(mut_tuple.clone()), })) .append(RcDoc::space()) .append(RcDoc::as_string("(fun")) .append(RcDoc::space()) .append(match x { Some((x, _)) => translate_ident(x.clone()), None => RcDoc::as_string("_"), }) .append(RcDoc::space()) .append(translate_pattern_tick(mut_tuple.clone())) .append(RcDoc::space()) .append(RcDoc::as_string("=>")) .append(RcDoc::line()) .append(block_expr) .append(RcDoc::as_string(")")); bind_code( loop_expr, mutated_info.early_return_type.clone(), match mutated_info.stmt { Statement::ReturnExp(e, t) => t.clone().map(|(_, (base_typ, _))| base_typ), _ => None, }, false, mut_tuple.clone(), code_block_wrap( trans_stmt, Some(make_paren(fset_from_scope(b.mutable_vars.clone()))), Some(function_dependencies_to_interface( b.function_dependencies, top_ctx, )), None, ), smv.clone(), ) } else { let loop_expr = RcDoc::as_string("foldi'") .append(RcDoc::space()) .append(make_paren(trans_e1)) .append(RcDoc::space()) .append(make_paren(trans_e2)) .append(RcDoc::space()) .append(match mut_tuple.clone() { Pattern::Tuple(_) => { RcDoc::as_string("prod_ce").append(translate_pattern(mut_tuple.clone())) } _ 
=> translate_pattern(mut_tuple.clone()),
                    })
                    .append(RcDoc::space())
                    // Explicit location set (L2) and import interface (I2)
                    // instance arguments for foldi'.
                    .append(make_paren(
                        RcDoc::as_string("L2 := ").append(fset_from_scope(smv.clone())),
                    ))
                    .append(RcDoc::space())
                    .append(make_paren(RcDoc::as_string("I2 := ").append(
                        function_dependencies_to_interface(function_dependencies, top_ctx),
                    )))
                    .append(RcDoc::space())
                    .append(RcDoc::as_string("(H_loc_incl := _) (H_opsig_incl := _)"))
                    .append(RcDoc::space())
                    .append(RcDoc::as_string("(fun"))
                    .append(RcDoc::space())
                    .append(match x {
                        Some((x, _)) => translate_ident(x.clone()),
                        None => RcDoc::as_string("_"),
                    })
                    .append(RcDoc::space())
                    .append(translate_pattern_tick(mut_tuple.clone()))
                    .append(RcDoc::space())
                    .append(RcDoc::as_string("=>"))
                    .append(RcDoc::line())
                    .append(block_expr)
                    .append(RcDoc::as_string(")"))
                    .group()
                    .nest(2);
                // Bind the loop result back to the tuple of mutated variables
                // and continue with the remaining statements.
                make_let_binding(
                    mut_tuple.clone(),
                    match mutated_info.stmt {
                        Statement::ReturnExp(e, t) => t.clone().map(|(_, (base_typ, _))| base_typ),
                        _ => None,
                    },
                    loop_expr,
                    !inline,
                )
                .append(RcDoc::hardline())
                .append(trans_stmt)
            };
            // Prepend the temporaries computed for the loop bounds.
            RcDoc::concat(ass_e1.into_iter())
                .append(RcDoc::concat(ass_e2.into_iter()))
                .append(expr)
        }
    }
}

/// Renders the Coq list literal `[ x_loc ; y_loc ; … ]` of every
/// mutable-variable location in scope (external + local), sorted by
/// identifier so the output is deterministic.
fn list_of_loc_vars<'a>(smvars: ScopeMutableVars) -> RcDoc<'a, ()> {
    let mut all = smvars.external_vars.clone();
    // `extend` takes ownership of the cloned locals; no second clone needed.
    all.extend(smvars.local_vars.clone());
    RcDoc::as_string("[")
        .append(RcDoc::intersperse(
            all.into_iter()
                .map(|(i, _)| i)
                .sorted()
                .map(|i| translate_ident(i).append("_loc")),
            RcDoc::space()
                .append(RcDoc::as_string(";"))
                .append(RcDoc::space()),
        ))
        .append(RcDoc::as_string("]"))
}

/// Renders the fset of locations for a scope: `fset.fset0` when the scope
/// has no mutable variables at all, otherwise `CEfset ([ … ])`.
pub(crate) fn fset_from_scope<'a>(smvars: ScopeMutableVars) -> RcDoc<'a, ()> {
    // Empty iff both variable collections are empty — no need to clone and
    // merge them just to check the combined length, as the old code did.
    if smvars.external_vars.is_empty() && smvars.local_vars.is_empty() {
        RcDoc::as_string("fset.fset0")
    } else {
        RcDoc::as_string("CEfset ").append(make_paren(list_of_loc_vars(smvars)))
    }
}

/// Emits one `Definition <x>_loc : ChoiceEqualityLocation := (<typ> ; <id>%nat)`
/// per *local* mutable variable of the scope (external variables are declared
/// by their owning scope). Note: `fresh_codegen_id()` advances a global
/// counter, so each location gets a distinct numeric tag.
fn locations_from_scope<'a>(smvars: ScopeMutableVars) -> RcDoc<'a, ()> {
    RcDoc::intersperse(
        smvars.local_vars.clone().into_iter().map(|(i, typ)| {
            make_definition(
                translate_ident(i).append("_loc"),
                Some(RcDoc::as_string("ChoiceEqualityLocation")),
                make_paren(
                    match typ {
                        Some(typ) => translate_typ(typ),
                        // Type unknown at this point: let Coq infer it.
                        None => RcDoc::as_string("_"),
                    }
                    .append(RcDoc::space())
                    .append(RcDoc::as_string(";"))
                    .append(RcDoc::space())
                    .append(RcDoc::as_string(fresh_codegen_id()))
                    .append(RcDoc::as_string("%nat")),
                ),
            )
        }),
        RcDoc::line(),
    )
}

/// Convenience: both the fset expression and the location definitions for a
/// scope, computed together.
pub(crate) fn fset_and_locations<'a>(smvars: ScopeMutableVars) -> (RcDoc<'a, ()>, RcDoc<'a, ()>) {
    (
        fset_from_scope(smvars.clone()),
        // Last use — move instead of cloning again.
        locations_from_scope(smvars),
    )
}

/// Resolves a set of function dependencies against the top-level context and
/// returns, for each dependency that must appear in the generated package
/// interface, its name, unspanned argument types and return type.
///
/// Known hacspec library primitives (matched by name below) are filtered out:
/// they are provided by the Coq prelude and need no interface entry. The
/// result is sorted by identifier for deterministic output.
pub fn function_dependencies_to_vec<'a>(
    function_dependencies: FunctionDependencies,
    top_ctx: &'a TopLevelContext,
) -> Vec<(TopLevelIdent, Vec<BaseTyp>, BaseTyp)> {
    let mut dep_info = vec![];
    for x in function_dependencies.0 {
        match top_ctx.functions.get(&FnKey::Independent(x.clone())) {
            Some(FnValue::Local(fnsig)) => {
                // Strip spans from the argument types.
                let mut unspanned_args = vec![];
                for ((_a, _), ((_, (bt, _)), _)) in fnsig.args.clone() {
                    unspanned_args.push(bt)
                }
                dep_info.push((x, unspanned_args, fnsig.ret.0.clone()))
            }
            Some(FnValue::External(fnsig)) => {
                match x.string.as_str() {
                    // hacspec library functions
                    "ZERO" | "ONE" | "TWO" | "from_literal" | "from_hex_string" | "get_bit"
                    | "set_bit" | "set" | "rotate_left" | "rotate_right" | "max_val"
                    | "wrap_add" | "wrap_sub" | "wrap_mul" | "wrap_div" | "exp" | "pow_self"
                    | "divide" | "inv" | "equal" | "greater_than" | "greater_than_or_qual"
                    | "less_than" | "less_than_or_equal" | "not_equal_bm" | "equal_bm"
                    | "greater_than_bm" | "greater_than_or_equal_bm" | "less_than_bm"
                    | "less_than_or_equal_bm" | "sub_mod" | "add_mod" | "mul_mod" | "pow_mod"
                    | "modulo" | "signed_modulo" | "absolute" | "classify" | "to_le_bytes"
                    | "to_be_bytes" | "from_le_bytes" | "from_be_bytes"
                    | "extended_euclid_internal" | "cswap_bit" | "cswap" | "cset_bit" | "cadd"
                    | "csub" | "cmul" | "ct_div" | "poly_sub" | "poly_add" | "poly_mul"
                    | "scalar_div" | "mul_poly_naive" | "invert_fermat" | "sub_poly"
                    | "add_poly" | "div_scalar" | "degree_poly" | "extended_euclid"
                    | "extended_euclid_invert" | "poly_to_ring" | "mul_poly_irr" | "new"
                    | "length" | "from_array" | "from_native_slice" | "from_slice" | "concat"
                    | "from_slice_range" | "slice" | "slice_range" | "num_chunks"
                    | "get_chunk_len" | "get_chunk" | "set_chunk" | "default" | "create"
                    | "len" | "iter" | "update_slice" | "update" | "update_start" | "index"
                    | "index_mut" | "from_vec" | "from_seq" | "from_hex" | "fmt"
                    | "declassify_eq" | "from_public_slice" | "from_public_array" | "add"
                    | "sub" | "mul" | "rem" | "not" | "bitor" | "bitxor" | "bitand" | "shr"
                    | "shl" | "to_be_U32s" | "to_le_U32s" | "to_be_U64s" | "to_le_U64s"
                    | "to_U128s_be" | "to_U128s_le" | "to_hex" | "into_le_bytes" | "eq"
                    | "partial_cmp" | "cmp" | "with_capacity" | "reserve" | "native_slice"
                    | "into_slice" | "into_slice_range" | "split_off" | "truncate"
                    | "concat_owned" | "push" | "push_owned" | "num_exact_chunks"
                    | "get_exact_chunk" | "get_remainder_chunk" | "set_exact_chunk"
                    | "from_string" | "from_public_seq" | "declassify" | "into_native"
                    | "to_native" | "get_byte" | "U16_to_le_bytes" | "U16_to_be_bytes"
                    | "U16_from_be_bytes" | "U16_from_le_bytes" | "U32_to_le_bytes"
                    | "U32_to_be_bytes" | "U32_from_be_bytes" | "U32_from_le_bytes"
                    | "U64_to_le_bytes" | "U64_to_be_bytes" | "U64_from_be_bytes"
                    | "U64_from_le_bytes" | "U128_to_le_bytes" | "U128_to_be_bytes"
                    | "U128_from_be_bytes" | "U128_from_le_bytes" | "u16_to_le_bytes"
                    | "u16_to_be_bytes" | "u16_from_be_bytes" | "u16_from_le_bytes"
                    | "u32_to_le_bytes" | "u32_to_be_bytes" | "u32_from_be_bytes"
                    | "u32_from_le_bytes" | "u64_to_le_bytes" | "u64_to_be_bytes"
                    | "u64_from_be_bytes" | "u64_from_le_bytes" | "u128_to_le_bytes"
                    | "u128_to_be_bytes" | "u128_from_be_bytes" | "u128_from_le_bytes"
                    | "hex_string_to_bytes" | "to_array" | "vec_poly_mul" | "vec_poly_add"
                    | "vec_poly_sub" | "pad" | "make_fixed_length" | "monomial" | "normalize"
                    | "leading_coefficient" | "is_zero" | "from_byte_seq_be"
                    | "from_public_byte_seq_be" | "to_byte_seq_be" | "from_public"
                    | "from_secret_declassify" | "to_public_byte_seq_be" | "from_byte_seq_le"
                    | "from_public_byte_seq_le" | "to_byte_seq_le" | "to_public_byte_seq_le"
                    | "from_secret_literal" => (),
                    // Uncaught hacspec library functions?
                    "U8" | "U16" | "U32" | "U64" | "U128" | "clone" | "U8_from_usize"
                    | "declassify_usize_from_U8" => (),
                    _ => {
                        let mut unspanned_args = vec![];
                        for ((_a, _), (bt, _)) in fnsig.args.clone() {
                            unspanned_args.push(bt)
                        }
                        dep_info.push((x, unspanned_args, fnsig.ret.clone()))
                    }
                }
            }
            Some(_) => (),
            None => (),
        }
    }
    // NOTE(review): assumes TopLevelIdent's partial order is total on the
    // identifiers that occur here; `unwrap` panics otherwise — confirm.
    dep_info.sort_by(|(a, _, _), (b, _, _)| a.partial_cmp(b).unwrap());
    dep_info
}

/// Renders the SSProve `[interface #val #[ F ] : F_inp → F_out ; …]`
/// expression listing every non-library dependency of a scope.
pub(crate) fn function_dependencies_to_interface<'a>(
    function_dependencies: FunctionDependencies,
    top_ctx: &'a TopLevelContext,
) -> RcDoc<'a, ()> {
    let dep_info = function_dependencies_to_vec(function_dependencies, top_ctx);
    RcDoc::as_string("[interface")
        .append(if dep_info.is_empty() {
            // Empty interface prints as `[interface]` with no spacing.
            RcDoc::nil()
        } else {
            RcDoc::softline()
                .append(RcDoc::intersperse(
                    // Consume the Vec directly; the old code cloned it twice
                    // (once for the emptiness check, once to iterate).
                    dep_info.into_iter().map(|(x, _v, _r)| {
                        RcDoc::as_string("#val #[ ")
                            .append(RcDoc::as_string(x.string.to_uppercase()))
                            .append(RcDoc::as_string(" ] : "))
                            .append(translate_ident(Ident::TopLevel(x.clone())).append("_inp"))
                            .append(
                                RcDoc::as_string(" → ")
                                    .append(translate_ident(Ident::TopLevel(x)).append("_out")),
                            )
                    }),
                    RcDoc::space()
                        .append(RcDoc::as_string(";"))
                        .append(RcDoc::softline()),
                ))
                .append(RcDoc::softline())
        })
        .append("]")
}

/// Renders, for every dependency, the SSProve import prologue
/// `#import {sig #[ F ] : F_inp → F_out } as f ;; let f := fun x_0 … => f (x_0, …) in`
/// which rebinds the packed single-argument import as a curried function
/// (or applies it to `tt` when it takes no arguments).
pub(crate) fn function_dependencies_to_imports<'a>(
    function_dependencies: FunctionDependencies,
    top_ctx: &'a TopLevelContext,
) -> RcDoc<'a, ()> {
    let dep_info = function_dependencies_to_vec(function_dependencies, top_ctx);
    RcDoc::intersperse(
        // Consume the Vec directly instead of cloning it first.
        dep_info.into_iter().map(|(x, v, _r)| {
            RcDoc::as_string("#import {sig #[ ")
                .append(RcDoc::as_string(x.string.to_uppercase()))
                .append(RcDoc::as_string(" ] : "))
                .append(translate_ident(Ident::TopLevel(x.clone())).append("_inp"))
                .append(RcDoc::as_string(" → "))
                .append(translate_ident(Ident::TopLevel(x.clone())).append("_out"))
                .append(RcDoc::space())
                .append(RcDoc::as_string("} as"))
                .append(RcDoc::space())
                .append(translate_ident(Ident::TopLevel(x.clone())))
                .append(RcDoc::space())
                .append(RcDoc::as_string(";;"))
                .append(RcDoc::line())
                .append(RcDoc::as_string("let "))
                .append(translate_ident(Ident::TopLevel(x.clone())))
                .append(RcDoc::as_string(" := "))
                .append(if !v.is_empty() {
                    // Curried binders: `fun x_0 x_1 … =>`
                    RcDoc::as_string("fun ")
                        .append(RcDoc::intersperse(
                            (0..v.len()).map(|i| RcDoc::as_string(format!("x_{}", i))),
                            RcDoc::space(),
                        ))
                        .append(RcDoc::as_string(" => "))
                } else {
                    RcDoc::nil()
                })
                .append(translate_ident(Ident::TopLevel(x)))
                .append(RcDoc::space())
                .append(if !v.is_empty() {
                    // Pack the curried binders back into the import's tuple.
                    make_paren(RcDoc::intersperse(
                        (0..v.len()).map(|i| RcDoc::as_string(format!("x_{}", i))),
                        RcDoc::as_string(","),
                    ))
                } else {
                    RcDoc::as_string("tt")
                })
                .append(RcDoc::as_string(" in"))
                .append(RcDoc::line())
        }),
        RcDoc::nil(),
    )
}

/// Translates a block of statements. When the block returns unit and
/// `omit_extra_unit` is false, a trailing `ret tt` statement is appended so
/// the generated code is a well-typed computation. With `wrap`, the result is
/// wrapped in a `code_block` carrying the block's location fset and interface.
fn translate_block<'a>(
    b: Block,
    omit_extra_unit: bool,
    top_ctx: &'a TopLevelContext,
    inline: bool,
    wrap: bool,
) -> RcDoc<'a, ()> {
    let mut statements = b.stmts;
    match (&b.return_typ, omit_extra_unit) {
        (None, _) => panic!(), // should not happen,
        (Some(((Borrowing::Consumed, _), (BaseTyp::Tuple(args), _))), false)
            if args.is_empty() =>
        {
            statements.push((
                Statement::ReturnExp(Expression::Lit(Literal::Unit), b.return_typ),
                DUMMY_SP.into(),
            ));
        }
        (Some(_), _) => (),
    }
    let trans_stmt = translate_statements(
        statements.iter(),
        top_ctx,
        inline,
        b.mutable_vars.clone(),
        b.function_dependencies.clone(),
    );
    let local_vars = fset_from_scope(b.mutable_vars);
    let interface = function_dependencies_to_interface(b.function_dependencies.clone(), top_ctx);
    if wrap {
        code_block_wrap(
            trans_stmt.group(),
            Some(make_paren(local_vars)),
            Some(interface),
            None,
        )
    } else {
        trans_stmt.group()
    }
}
pub(crate) fn
translate_item<'a>( item: DecoratedItem, top_ctx: &'a TopLevelContext, ) -> RcDoc<'a, ()> { match &item.item { Item::FnDecl((f, _), sig, (b, _)) => { let interface = function_dependencies_to_interface(sig.function_dependencies.clone(), top_ctx); let fun_imports = function_dependencies_to_imports(sig.function_dependencies.clone(), top_ctx); let block_exprs = translate_block(b.clone(), false, top_ctx, false, true); let (block_vars, block_var_loc_defs) = fset_and_locations(sig.mutable_vars.clone()); let inp_typ = if sig.args.is_empty() { translate_base_typ(UnitTyp) } else { RcDoc::intersperse( sig.args .iter() .map(|((_x, _), (tau, _))| translate_typ(tau.clone())), RcDoc::space() .append(RcDoc::as_string("'×")) .append(RcDoc::space()), ) }; let fun_inp_notation = RcDoc::as_string("Notation") .append(RcDoc::space()) .append(RcDoc::as_string("\"'")) .append( translate_ident(Ident::TopLevel(f.clone())).append(RcDoc::as_string("_inp")), ) .append(RcDoc::as_string("'\"")) .append(RcDoc::space()) .append(RcDoc::as_string(":=")) .append(RcDoc::space()) .append(make_paren( inp_typ.clone().append(RcDoc::as_string(" : choice_type")), )) .append(RcDoc::as_string(" (in custom pack_type at level 2).")); let fun_out_notation = RcDoc::as_string("Notation") .append(RcDoc::space()) .append(RcDoc::as_string("\"'")) .append( translate_ident(Ident::TopLevel(f.clone())).append(RcDoc::as_string("_out")), ) .append(RcDoc::as_string("'\"")) .append(RcDoc::space()) .append(RcDoc::as_string(":=")) .append(RcDoc::space()) .append(make_paren( translate_base_typ(sig.ret.0.clone()) .append(RcDoc::as_string(" : choice_type")), )) .append(RcDoc::as_string(" (in custom pack_type at level 2).")); let fun_ident_def = make_definition( RcDoc::as_string(f.clone().string.to_uppercase()), Some(RcDoc::as_string("nat")), RcDoc::as_string(fresh_codegen_id()), ); let fun_def_sig = translate_ident(Ident::TopLevel(f.clone())).append(RcDoc::line()); let fun_type = RcDoc::as_string("package") 
.append(RcDoc::space()) .append(make_paren(block_vars)) .append(RcDoc::space()) .append(interface) .append(RcDoc::space()) .append( RcDoc::as_string("[interface") .append(RcDoc::softline()) .append(RcDoc::as_string("#val #[ ")) .append(RcDoc::as_string(f.clone().string.to_uppercase())) .append(RcDoc::as_string(" ] : ")) .append(translate_ident(Ident::TopLevel(f.clone())).append("_inp")) .append( RcDoc::as_string(" → ") .append(translate_ident(Ident::TopLevel(f.clone())).append("_out")), ) .append(RcDoc::softline()) .append("]"), ); let package_wraped_code_block = RcDoc::as_string("[package #def #[ ") .append(RcDoc::as_string(f.clone().string.to_uppercase())) .append(RcDoc::as_string(" ] (temp_inp : ")) .append(translate_ident(Ident::TopLevel(f.clone())).append("_inp")) .append(RcDoc::as_string(") : ")) .append(translate_ident(Ident::TopLevel(f.clone())).append("_out")) .append(RcDoc::as_string(" { ")) .append(RcDoc::line()) .append(if !sig.args.is_empty() { RcDoc::as_string("let '") .append(make_paren(RcDoc::intersperse( sig.args .iter() .map(|((x, _), (_tau, _))| translate_ident(x.clone())), RcDoc::space() .append(RcDoc::as_string(",")) .append(RcDoc::space()), ))) .append(RcDoc::as_string(" := temp_inp : ")) .append(inp_typ) .append(RcDoc::as_string(" in")) .append(RcDoc::line()) } else { RcDoc::nil() }) .append(fun_imports) .append(block_exprs.group()) .append(RcDoc::line()) .append(RcDoc::as_string("}]")); let dep_vec = function_dependencies_to_vec(sig.function_dependencies.clone(), top_ctx); let package_def = make_equations( RcDoc::as_string("package_").append(translate_ident(Ident::TopLevel(f.clone()))), Some(RcDoc::as_string("package _ _ _")), if dep_vec.is_empty() { translate_ident(Ident::TopLevel(f.clone())) } else { RcDoc::as_string("seq_link") .append(RcDoc::space()) .append(translate_ident(Ident::TopLevel(f.clone()))) .append(RcDoc::space()) .append(RcDoc::as_string("link_rest")) .append(make_paren(RcDoc::intersperse( dep_vec.into_iter().map(|(x, _, _)| 
{ RcDoc::as_string("package_").append(translate_toplevel_ident(x)) }), RcDoc::as_string(","), ))) }, ); block_var_loc_defs .append(RcDoc::line()) .append(fun_inp_notation) .append(RcDoc::line()) .append(fun_out_notation) .append(RcDoc::line()) .append(fun_ident_def) .append(RcDoc::line()) .append(make_equations( fun_def_sig, Some(fun_type), package_wraped_code_block, )) .append(RcDoc::hardline().append(RcDoc::as_string("Fail Next Obligation."))) .append(RcDoc::hardline()) .append(package_def) .append(RcDoc::hardline().append(RcDoc::as_string("Fail Next Obligation."))) } Item::EnumDecl(name, cases) => make_definition( translate_enum_name(name.0.clone()), Some(RcDoc::as_string("ChoiceEquality")), RcDoc::intersperse( cases.into_iter().map(|(case_name, case_typ)| { translate_base_typ(match case_typ { None => UnitTyp, Some((bty, _)) => bty.clone(), }) }), RcDoc::space() .append(RcDoc::as_string("'+")) .append(RcDoc::space()), ), ) .append(RcDoc::line()) .append(RcDoc::intersperse( cases .into_iter() .enumerate() .map(|(i, (case_name, case_typ))| { let name_ty = BaseTyp::Named(name.clone(), None); let index = if cases.len() == 1 { String::from("") } else { "0".repeat(cases.len() - 1 - i) + if i == 0 { "" } else { "1" } }; make_definition( translate_enum_case_name(name_ty.clone(), case_name.0.clone(), false) .append(match case_typ { Some((bty, _)) => RcDoc::space().append(make_paren( RcDoc::as_string("x : ") .append(translate_base_typ(bty.clone())), )), None => RcDoc::nil(), }), Some(translate_base_typ(name_ty)), RcDoc::intersperse( index.clone().chars().map(|c| match c { '0' => "inl", '1' => "inr", _ => panic!("Should be binary"), }), RcDoc::as_string(" ("), ) .append(match case_typ { Some((bty, _)) => RcDoc::space().append(RcDoc::as_string("x")), None => RcDoc::space().append(RcDoc::as_string("tt")), }) .append(RcDoc::intersperse( index.chars().map(|_| RcDoc::nil()), RcDoc::as_string(")"), )), ) }), RcDoc::line(), )), Item::ArrayDecl(name, size, cell_t, index_typ) => { 
let (ass_size_0, trans_size_0) = translate_expression(size.0.clone(), top_ctx, false); make_definition( translate_ident(Ident::TopLevel(name.0.clone())), None, RcDoc::concat(ass_size_0.into_iter()).append( RcDoc::line() .append(RcDoc::as_string("nseq")) .append(RcDoc::space()) .append(make_paren(translate_base_typ(cell_t.0.clone()))) .append(RcDoc::space()) .append(make_paren(trans_size_0.clone())) .group() .nest(2), ), ) .append(match index_typ { None => RcDoc::nil(), Some(index_typ) => RcDoc::hardline() .append(RcDoc::hardline()) .append(make_definition( translate_ident(Ident::TopLevel(index_typ.0.clone())), None, RcDoc::as_string("nat_mod") .append(RcDoc::space()) .append(make_paren(trans_size_0.clone())), )) .append(RcDoc::hardline()) .append(make_uint_size_coercion(translate_ident(Ident::TopLevel( index_typ.0.clone(), )))), }) } Item::ConstDecl(name, ty, e) => { let (ass_e_0, trans_e_0) = translate_expression(e.0.clone(), top_ctx, true); make_definition( translate_ident(Ident::TopLevel(name.0.clone())), Some(make_paren(translate_base_typ(ty.0.clone()))), RcDoc::concat(ass_e_0.into_iter()).append(make_paren(trans_e_0)), ) } Item::NaturalIntegerDecl(nat_name, _secrecy, canvas_size, info) => { let canvas_size = match &canvas_size.0 { Expression::Lit(Literal::Usize(size)) => size, _ => panic!(), // should not happen by virtue of typchecking }; let canvas_size_bytes = RcDoc::as_string(format!("{}", (canvas_size + 7) / 8)); (match info { Some((canvas_name, _modulo)) => make_definition( translate_ident(Ident::TopLevel(canvas_name.0.clone())), None, RcDoc::as_string("nseq") .append(RcDoc::space()) .append(make_paren(translate_base_typ(BaseTyp::UInt8))) .append(RcDoc::space()) .append(make_paren(canvas_size_bytes.clone())), ) .append(RcDoc::hardline()), None => RcDoc::nil(), }) .append(make_definition( translate_ident(Ident::TopLevel(nat_name.0.clone())), None, RcDoc::as_string("nat_mod") .append(RcDoc::space()) .append(match info { Some((_, modulo)) => 
RcDoc::as_string(format!("0x{}", &modulo.0)), None => RcDoc::as_string(format!("pow2 {}", canvas_size)), }), )) } Item::ImportedCrate((TopLevelIdent { string: kr, .. }, _)) => RcDoc::as_string(format!( "Require Import {}.", str::replace(&kr.to_title_case(), " ", "_"), )), // Aliases are translated to Coq Notations Item::AliasDecl((ident, _), (ty, _)) => RcDoc::as_string("Notation") .append(RcDoc::space()) .append(RcDoc::as_string("\"'")) .append(translate_ident(Ident::TopLevel(ident.clone()))) .append(RcDoc::as_string("'\"")) .append(RcDoc::space()) .append(RcDoc::as_string(":= (")) .append(translate_base_typ(ty.clone())) .append(RcDoc::as_string(") : hacspec_scope.")), } } fn translate_program<'a>(p: &'a Program, top_ctx: &'a TopLevelContext) -> RcDoc<'a, ()> { RcDoc::concat(p.items.iter().map(|(i, _)| { translate_item(i.clone(), top_ctx) .append(RcDoc::hardline()) .append(RcDoc::hardline()) })) } pub fn translate_and_write_to_file( sess: &Session, p: &Program, file: &str, top_ctx: &TopLevelContext, ) { let file = file.trim(); let path = path::Path::new(file); let mut file = match File::create(&path) { Err(why) => { sess.err(format!("Unable to write to output file {}: \"{}\"", file, why).as_str()); return; } Ok(file) => file, }; let width = 80; let mut w = Vec::new(); write!( file, "(** This file was automatically generated using Hacspec **)\n\ Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\ From Crypt Require Import choice_type Package Prelude.\n\ Import PackageNotation.\n\ From extructures Require Import ord fset.\n\ From mathcomp.word Require Import ssrZ word.\n\ From Jasmin Require Import word.\n\ \n\ From Coq Require Import ZArith.\n\ Import List.ListNotations.\n\ Open Scope list_scope.\n\ Open Scope Z_scope.\n\ Open Scope bool_scope.\n\ \n\ Require Import ChoiceEquality.\n\ Require Import LocationUtility.\n\ Require Import Hacspec_Lib_Comparable.\n\ Require Import Hacspec_Lib_Pre.\n\ Require Import Hacspec_Lib.\n\ \n\ Open Scope 
hacspec_scope.\n\n\ Obligation Tactic := try timeout 8 solve_ssprove_obligations.\n", ) .unwrap(); translate_program(p, top_ctx).render(width, &mut w).unwrap(); write!(file, "{}", String::from_utf8(w).unwrap()).unwrap() }
// Auto-generated svd2rust register accessor for GPIOF_HWCFGR4.
// Read-only register (no writer type is generated), exposing one 32-bit field.
#[doc = "Register `GPIOF_HWCFGR4` reader"]
pub type R = crate::R<GPIOF_HWCFGR4_SPEC>;
#[doc = "Field `OSPEED_RES` reader - OSPEED_RES"]
pub type OSPEED_RES_R = crate::FieldReader<u32>;
impl R {
    // The field spans the full register width, so the raw bits are passed through.
    #[doc = "Bits 0:31 - OSPEED_RES"]
    #[inline(always)]
    pub fn ospeed_res(&self) -> OSPEED_RES_R {
        OSPEED_RES_R::new(self.bits)
    }
}
#[doc = "GPIO hardware configuration register 4\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gpiof_hwcfgr4::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct GPIOF_HWCFGR4_SPEC;
impl crate::RegisterSpec for GPIOF_HWCFGR4_SPEC {
    // Register is accessed as a 32-bit word.
    type Ux = u32;
}
#[doc = "`read()` method returns [`gpiof_hwcfgr4::R`](R) reader structure"]
impl crate::Readable for GPIOF_HWCFGR4_SPEC {}
#[doc = "`reset()` method sets GPIOF_HWCFGR4 to value 0"]
impl crate::Resettable for GPIOF_HWCFGR4_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
// Auto-generated svd2rust register accessor for GICV_IIDR.
// Read-only identification register; the reset value below is the ID constant.
#[doc = "Register `GICV_IIDR` reader"]
pub type R = crate::R<GICV_IIDR_SPEC>;
#[doc = "Field `IIDR` reader - IIDR"]
pub type IIDR_R = crate::FieldReader<u32>;
impl R {
    // Full-width field: the raw register bits are the field value.
    #[doc = "Bits 0:31 - IIDR"]
    #[inline(always)]
    pub fn iidr(&self) -> IIDR_R {
        IIDR_R::new(self.bits)
    }
}
#[doc = "The GICV_IIDR is an alias of GICC_IIDR.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gicv_iidr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct GICV_IIDR_SPEC;
impl crate::RegisterSpec for GICV_IIDR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`gicv_iidr::R`](R) reader structure"]
impl crate::Readable for GICV_IIDR_SPEC {}
#[doc = "`reset()` method sets GICV_IIDR to value 0x0102_143b"]
impl crate::Resettable for GICV_IIDR_SPEC {
    const RESET_VALUE: Self::Ux = 0x0102_143b;
}
use std::hash::{Hash, Hasher};

use crate::{Number, Value};

/// Hashes a `Value` by delegating to the hash of its payload.
///
/// NOTE(review): the variant discriminant is not mixed into the hash, so
/// payloads of different variants can collide (e.g. `Null` hashes like the
/// integer `0`). That is legal for `Hash` — collisions are allowed — but
/// worth knowing for hash-map performance.
impl Hash for Value {
    fn hash<H: Hasher>(&self, state: &mut H) {
        match self {
            // `Null` carries no payload; hash a fixed sentinel instead.
            Value::Null => 0.hash(state),
            Value::Boolean(b) => b.hash(state),
            Value::String(s) => s.hash(state),
            Value::Char(c) => c.hash(state),
            Value::Number(n) => n.hash(state),
            Value::Bytes(b) => b.hash(state),
            Value::UUID(u) => u.hash(state),
        }
    }
}

/// Hashes a `Number` by delegating to the hash of its payload.
impl Hash for Number {
    fn hash<H: Hasher>(&self, state: &mut H) {
        match self {
            Number::Bit(v) => v.hash(state),
            Number::Unsigned8(n) => n.hash(state),
            Number::Signed8(n) => n.hash(state),
            Number::Unsigned16(n) => n.hash(state),
            Number::Signed16(n) => n.hash(state),
            Number::Unsigned32(n) => n.hash(state),
            Number::Signed32(n) => n.hash(state),
            Number::Unsigned64(n) => n.hash(state),
            Number::Signed64(n) => n.hash(state),
            Number::Unsigned128(n) => n.hash(state),
            Number::Signed128(n) => n.hash(state),
            // Floats have no stdlib `Hash`; truncate to an integer. This keeps
            // the Hash/Eq contract (equal floats truncate identically — note
            // `0.0` and `-0.0` compare equal and both truncate to 0) at the
            // cost of extra collisions (e.g. 1.25 and 1.75 both hash as 1).
            Number::Float32(n) => (*n as u32).hash(state),
            Number::Float64(n) => (*n as u64).hash(state),
        }
    }
}

/// Two `Number`s are equal iff they are the same variant and their payloads
/// compare equal; different variants are never equal (no numeric coercion,
/// so `Unsigned8(1) != Unsigned16(1)`).
impl PartialEq for Number {
    fn eq(&self, other: &Self) -> bool {
        // A single `match` on the pair replaces the original 13-arm
        // `if let … else if let` chain; semantics are identical.
        match (self, other) {
            (Number::Bit(a), Number::Bit(b)) => a == b,
            (Number::Unsigned8(a), Number::Unsigned8(b)) => a == b,
            (Number::Signed8(a), Number::Signed8(b)) => a == b,
            (Number::Unsigned16(a), Number::Unsigned16(b)) => a == b,
            (Number::Signed16(a), Number::Signed16(b)) => a == b,
            (Number::Unsigned32(a), Number::Unsigned32(b)) => a == b,
            (Number::Signed32(a), Number::Signed32(b)) => a == b,
            (Number::Unsigned64(a), Number::Unsigned64(b)) => a == b,
            (Number::Signed64(a), Number::Signed64(b)) => a == b,
            (Number::Unsigned128(a), Number::Unsigned128(b)) => a == b,
            (Number::Signed128(a), Number::Signed128(b)) => a == b,
            (Number::Float32(a), Number::Float32(b)) => a == b,
            (Number::Float64(a), Number::Float64(b)) => a == b,
            _ => false,
        }
    }
}

// NOTE(review): `Eq` promises reflexivity, but `Float32(f32::NAN)` is not
// equal to itself under the impl above. Callers relying on `Eq` (e.g. as a
// hash-map key) must not store NaN values.
impl Eq for Number {}
use std::convert::{TryFrom, TryInto};
use std::sync::Arc;

use anyhow::Result;
use prost::Message;
use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt, BufReader, BufWriter};
use tokio::net::TcpStream;

use crate::{
    sensor::{SensorId, SensorMessage, Sensors},
    Config, Global, VERSION,
};

// Protobuf types generated by prost at build time into OUT_DIR.
mod proto {
    include!(concat!(env!("OUT_DIR"), "/nino.net.rs"));
}

// Wire-level message tags. Every frame starts with one of these ids as a
// little-endian u16, followed by a little-endian u64 payload length and the
// protobuf-encoded payload (see `send_package` / `receive_package`).
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
enum MessageId {
    Hello = 0,
    Ready = 1,
    Value = 2,
    Sensors = 3,
    SensorConfig = 4,
    AddSensor = 5,
    Pwm = 6,
}

impl TryFrom<u16> for MessageId {
    type Error = anyhow::Error;

    // Maps a raw wire tag back to a `MessageId`, failing on unknown tags so a
    // corrupt or newer-protocol frame is rejected instead of misinterpreted.
    fn try_from(value: u16) -> Result<Self, anyhow::Error> {
        Ok(match value {
            0 => MessageId::Hello,
            1 => MessageId::Ready,
            2 => MessageId::Value,
            3 => MessageId::Sensors,
            4 => MessageId::SensorConfig,
            5 => MessageId::AddSensor,
            6 => MessageId::Pwm,
            _ => anyhow::bail!("{} does not match MessageId", value),
        })
    }
}

// Per-connection driver: performs the Hello/Ready handshake, pushes the
// current sensor list, then loops forever multiplexing between broadcast
// sensor updates (forwarded to the client) and incoming client packages.
// Returns `Err` if the handshake or a send fails; receive errors inside the
// loop are only logged and the loop continues.
pub async fn handle(
    mut socket: TcpStream,
    broadcast: Arc<tokio::sync::broadcast::Sender<SensorMessage>>,
    pwm: crossbeam_channel::Sender<(crate::PwmChannel, f32)>,
) -> Result<()> {
    // Split into buffered halves so reads and writes can be interleaved in
    // the select! below without contending for the socket.
    let (rdr, wrt) = socket.split();
    let mut rdr = BufReader::new(rdr);
    let mut wrt = BufWriter::new(wrt);

    // Say hello to the client
    send_hello(&mut wrt).await?;

    // Expect the client to respond with a ready
    let (rdy, _) = receive_package(&mut rdr).await?;
    if rdy != MessageId::Ready {
        anyhow::bail!("Client did not respond with ready");
    }

    send_sensors(&mut wrt).await?;

    let mut updates = broadcast.subscribe();
    loop {
        tokio::select! {
            Ok(data) = updates.recv() => {
                use SensorMessage::*;
                match data {
                    // Single reading changed: push just that value.
                    Update(id, value) => send_value(id, value, &mut wrt).await?,
                    // Structural/state changes: resend the full sensor list.
                    Config(_) | Error(_) | ClearError(_) => send_sensors(&mut wrt).await?,
                    _ => {}
                }
            },
            rdy = receive_package(&mut rdr) => {
                match rdy {
                    Ok((id, buffer)) => handle_package(id, buffer, &pwm).await?,
                    // NOTE(review): a closed/broken connection surfaces here as a
                    // logged error on every iteration rather than ending the loop
                    // — confirm this is intentional.
                    Err(e) => log::error!("Recv error {:?}", e),
                }
            }
        }
    }
}

// Dispatches one decoded client package. Only `SensorConfig`, `AddSensor`
// and `Pwm` are client-to-server messages; everything else is ignored.
async fn handle_package(
    id: MessageId,
    data: Vec<u8>,
    pwm: &crossbeam_channel::Sender<(crate::PwmChannel, f32)>,
) -> Result<()> {
    let sensors = Sensors::global();
    match id {
        MessageId::SensorConfig => {
            let cfg = proto::SensorConfig::decode(data.as_slice())?;
            let id = SensorId::from_usize(cfg.id as usize);
            // The `optional_*` oneof wrappers appear to be single-variant
            // enums, making the closure patterns below irrefutable — TODO
            // confirm against the generated proto code.
            let rate = cfg
                .optional_rate
                .map(|proto::sensor_config::OptionalRate::Rate(r)| r as usize);
            let source = cfg
                .optional_source
                .map(|proto::sensor_config::OptionalSource::Source(s)| s.into());
            sensors.reconfigure(&id, cfg.alias, cfg.unit, rate, source);
        }
        MessageId::AddSensor => {
            sensors.add_virtual();
        }
        MessageId::Pwm => {
            let p = proto::SetPwm::decode(data.as_slice())?;
            let chan = match p.channel {
                0 => crate::PwmChannel::Pwm0,
                1 => crate::PwmChannel::Pwm1,
                // Unknown channel: ignore rather than fail the connection.
                _ => return Ok(()),
            };
            // Best-effort hand-off to the PWM thread; a full/disconnected
            // channel is logged, not fatal.
            if let Err(e) = pwm.try_send((chan, p.value)) {
                log::error!("Could not send to PWM\n{:?}", e);
            }
        }
        _ => { /* Simply ignore the rest, we dont deal with them here */ }
    }
    Ok(())
}

// Sends the initial `Hello` package: server version/name, retention window,
// and the last persisted PWM duty cycles (with hard-coded fallbacks).
async fn send_hello<T>(socket: &mut T) -> Result<()>
where
    T: AsyncWrite + Unpin,
{
    let cfg = Config::global();
    let database = sled::Db::global();
    // PWM values are stored as big-endian f32 bytes; fall back to the default
    // duty cycle when the key is missing or the bytes are the wrong length.
    let pwm0 = database.get("pwm0").ok().flatten().and_then(|v| {
        let value: &[u8] = &v;
        let value = f32::from_be_bytes(value.try_into().ok()?);
        Some(value)
    }).unwrap_or(0.6);
    let pwm1 = database.get("pwm1").ok().flatten().and_then(|v| {
        let value: &[u8] = &v;
        let value = f32::from_be_bytes(value.try_into().ok()?);
        Some(value)
    }).unwrap_or(0.28);
    let hello = proto::Hello {
        version: VERSION.into(),
        name: cfg.name.clone(),
        retention: cfg.retention as u32,
        pwm0,
        pwm1,
    };
    send_package(socket, MessageId::Hello, hello).await?;
    Ok(())
}

// Sends a single sensor reading as a `Value` package.
async fn send_value<T>(id: SensorId, value: f64, socket: &mut T) -> Result<()>
where
    T: AsyncWrite + Unpin,
{
    let value = proto::Value {
        id: id.to_usize() as u32,
        value,
    };
    send_package(socket, MessageId::Value, value).await?;
    Ok(())
}

// Serializes the complete current sensor list (config, history values and
// optional source/error) and sends it as a `Sensors` package.
async fn send_sensors<T>(socket: &mut T) -> Result<()>
where
    T: AsyncWrite + Unpin,
{
    let sensors = Sensors::global();
    let data = sensors
        .iter()
        .map(|o| proto::sensors::Sensor {
            id: o.key().to_usize() as u32,
            rate: o.rate as u32,
            alias: (&o.alias).into(),
            unit: (&o.unit).into(),
            values: o.values.iter().map(|v| *v).collect(),
            optional_source: o
                .source
                .as_ref()
                .map(|s| proto::sensors::sensor::OptionalSource::Source(s.into())),
            optional_error: o
                .error
                .as_ref()
                .map(|e| proto::sensors::sensor::OptionalError::Error(e.into())),
        })
        .collect();
    let value = proto::Sensors { sensors: data };
    send_package(socket, MessageId::Sensors, value).await?;
    Ok(())
}

// Reads one frame: u16-LE message id, u64-LE payload length, then exactly
// that many payload bytes. Rejects payloads over 10 MiB as a DoS guard.
async fn receive_package<T>(socket: &mut T) -> Result<(MessageId, Vec<u8>)>
where
    T: AsyncRead + Unpin,
{
    let message_id = MessageId::try_from(socket.read_u16_le().await?)?;
    let data_len = (socket.read_u64_le().await?) as usize;
    if data_len > 1024 * 1024 * 10 {
        // Dont accept a payload over 10 mega bytes
        // NOTE(review): "data_lengt" below is a typo in the error text.
        anyhow::bail!("Recv data_lengt exceeds maximum {}", data_len);
    }
    let mut out = vec![0; data_len];
    socket.read_exact(&mut out).await?;
    Ok((message_id, out))
}

// Encodes `package` and writes one frame (id, length, payload), flushing the
// buffered writer so the frame actually hits the wire.
async fn send_package<T, P>(socket: &mut T, id: MessageId, package: P) -> Result<()>
where
    T: AsyncWrite + Unpin,
    P: prost::Message,
{
    let mut buf = Vec::with_capacity(package.encoded_len());
    package.encode(&mut buf)?;
    // Write the message id first
    socket.write_u16_le(id as u16).await?;
    // Write the length of the data then the data
    socket.write_u64_le(buf.len() as u64).await?;
    socket.write_all(&mut buf).await?;
    socket.flush().await?;
    Ok(())
}
// Auto-generated svd2rust register accessor for CONFCHR2 (read/write).
#[doc = "Register `CONFCHR2` reader"]
pub type R = crate::R<CONFCHR2_SPEC>;
#[doc = "Register `CONFCHR2` writer"]
pub type W = crate::W<CONFCHR2_SPEC>;
#[doc = "Field `CONFCH8` reader - Channel 8 configuration"]
pub type CONFCH8_R = crate::FieldReader;
#[doc = "Field `CONFCH8` writer - Channel 8 configuration"]
pub type CONFCH8_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
impl R {
    // Extracts the 2-bit field at offset 0 (mask 0b11).
    #[doc = "Bits 0:1 - Channel 8 configuration"]
    #[inline(always)]
    pub fn confch8(&self) -> CONFCH8_R {
        CONFCH8_R::new((self.bits & 3) as u8)
    }
}
impl W {
    #[doc = "Bits 0:1 - Channel 8 configuration"]
    #[inline(always)]
    #[must_use]
    pub fn confch8(&mut self) -> CONFCH8_W<CONFCHR2_SPEC, 0> {
        CONFCH8_W::new(self)
    }
    // Unsafe: the caller must guarantee the raw value is valid for this
    // register (no field-level masking is applied here).
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "channel configuration register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`confchr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`confchr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CONFCHR2_SPEC;
impl crate::RegisterSpec for CONFCHR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`confchr2::R`](R) reader structure"]
impl crate::Readable for CONFCHR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`confchr2::W`](W) writer structure"]
impl crate::Writable for CONFCHR2_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CONFCHR2 to value 0"]
impl crate::Resettable for CONFCHR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use serde_derive::{Serialize, Deserialize}; use chrono::prelude::*; use std::fmt; use chrono::{DateTime, TimeZone}; // add index for editing #[derive(Serialize, Deserialize, Clone)] pub struct Routine { pub id: u16, pub name: String, pub last_done: DateTime<Local> // maybe an array of last done or notes for each time done } impl fmt::Display for Routine { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[{}] {} last done on {}", self.id, self.name, self.last_done.format("%a %b%e, %Y")) } } // mock pub fn dummy_routines_data() -> [Routine; 3] { return [ Routine { id: 0, name: String::from("Running"), last_done: Local.ymd(2020, 8, 9).and_hms(14, 0, 0) }, Routine { id: 1, name: String::from("Reading"), last_done: Local.ymd(2020, 8, 10).and_hms(0, 0, 0) }, Routine { id: 2, name: String::from("Coding"), last_done: Local::now() } ] } impl Routine { pub fn store_config(&self) { let json = serde_json::to_string(&self).expect("Could not convert to json"); println!("Stored json : {}", json); } }
//! Utilities for working with the [General Security Profile](https://www.unicode.org/reports/tr39/#General_Security_Profile) //! for identifiers use crate::tables::identifier; pub use identifier::IdentifierType; /// Methods for determining characters not restricted from use for identifiers. pub trait GeneralSecurityProfile { /// Returns whether the character is not restricted from use for identifiers. fn identifier_allowed(self) -> bool; /// Returns the [identifier type](https://www.unicode.org/reports/tr39/#Identifier_Status_and_Type) fn identifier_type(self) -> Option<IdentifierType>; } impl GeneralSecurityProfile for char { #[inline] fn identifier_allowed(self) -> bool { identifier::identifier_status_allowed(self) } #[inline] fn identifier_type(self) -> Option<IdentifierType> { identifier::identifier_type(self) } }
//! GitHub issue redirect service #![forbid(unsafe_code)] #![deny(missing_debug_implementations, nonstandard_style, rust_2018_idioms)] use structopt::StructOpt; #[derive(Debug, StructOpt)] struct Opt { #[structopt(short = "p", long = "port", default_value = "80", env = "PORT")] port: u16, #[structopt(short = "H", long = "host", default_value = "0.0.0.0", env = "HOST")] host: String, } use chrono::Duration; use tide::Status; use tide::{http::Url, Request}; #[async_std::main] async fn main() -> std::io::Result<()> { let opt = Opt::from_args(); tide::log::start(); let mut app = tide::new(); app.at("/").get(index); app.at("/:username/issues/weeks/:weeks").get(issues); app.at("/:username/pulls/weeks/:weeks").get(pulls); app.listen((opt.host.as_str(), opt.port)).await?; Ok(()) } pub async fn index(_req: Request<()>) -> tide::Result { let body = r#" <body> <h1>Welcome to GitHub redirect</h1> <header> This page provides permalinks to GitHub queries. Because GitHub only supports absolute URLs in their issue search. So we've built a page that enables permalinks against <em>relative</em> queries. 
</header> <main> <h2>Examples</h2> <p>Visit <a href="/http-rs/issues/weeks/1">/http-rs/issues/weeks/1</a> to see all of the http-rs issues that have seen activity in the past week.<p> <p>Visit <a href="/http-rs/pulls/weeks/1">/http-rs/pulls/weeks/1</a> to see all of the http-rs pull-requests that have seen activity in the past week.</p> </main> </body> "#; let res = tide::Response::builder(200) .content_type(tide::http::mime::HTML) .body(html_index::Builder::new().raw_body(body).build()) .build(); Ok(res) } pub async fn issues(req: Request<()>) -> tide::Result { let username = req.param("username").unwrap(); let weeks = req.param("weeks").unwrap(); let weeks = Duration::weeks(weeks.parse().status(400)?); let now = chrono::offset::Utc::now(); let date = now - weeks; let date = date.to_rfc3339(); let date = date.split("T").next().unwrap(); let mut url = Url::parse("https://github.com/issues").unwrap(); let pairs = [ "user:USERNAME", "is:open", "is:pr", "archived:false", "sort:updated-desc", "-updated:<DATE", ] .join("+") .replace("USERNAME", username) .replace("DATE", date); url.set_query(Some(format!("q={}", pairs).as_str())); Ok(tide::Redirect::new(url.as_str()).into()) } pub async fn pulls(req: Request<()>) -> tide::Result { let username = req.param("username").unwrap(); let weeks = req.param("weeks").unwrap(); let weeks = Duration::weeks(weeks.parse().status(400)?); let now = chrono::offset::Utc::now(); let date = now - weeks; let date = date.to_rfc3339(); let date = date.split("T").next().unwrap(); let mut url = Url::parse("https://github.com/issues").unwrap(); let pairs = [ "user:USERNAME", "is:open", "archived:false", "sort:updated-desc", "-updated:<DATE", ] .join("+") .replace("USERNAME", username) .replace("DATE", date); url.set_query(Some(format!("q={}", pairs).as_str())); Ok(tide::Redirect::new(url.as_str()).into()) }
use crate::prelude::*;
use crate::headers::from_headers::*;
use azure_core::headers::{
    content_type_from_headers, etag_from_headers, session_token_from_headers,
};
use azure_core::{collect_pinned_stream, Request as HttpRequest, Response as HttpResponse};
use chrono::{DateTime, Utc};

/// Options for a "get collection" request; currently only the session
/// consistency level can be customized.
#[derive(Debug, Clone)]
pub struct GetCollectionOptions {
    consistency_level: Option<ConsistencyLevel>,
}

impl GetCollectionOptions {
    /// Creates options with no consistency level set (service default).
    pub fn new() -> Self {
        Self {
            consistency_level: None,
        }
    }

    setters! {
        consistency_level: ConsistencyLevel => Some(consistency_level),
    }

    // Applies these options to an outgoing request by adding the consistency
    // header when one was configured.
    pub(crate) fn decorate_request(&self, request: &mut HttpRequest) -> crate::Result<()> {
        azure_core::headers::add_optional_header2(&self.consistency_level, request)?;
        Ok(())
    }
}

/// Typed view of a "get collection" response: the deserialized collection
/// body plus the Cosmos diagnostic/bookkeeping response headers.
#[derive(Debug, Clone)]
pub struct GetCollectionResponse {
    /// The collection document parsed from the response body.
    pub collection: Collection,
    pub last_state_change: DateTime<Utc>,
    pub etag: String,
    pub collection_partition_index: u64,
    pub collection_service_index: u64,
    /// Logical sequence number headers (lsn / llsn variants).
    pub lsn: u64,
    pub schema_version: String,
    pub alt_content_path: String,
    pub content_path: String,
    pub global_committed_lsn: u64,
    pub number_of_read_regions: u32,
    pub item_lsn: u64,
    pub transport_request_id: u64,
    pub cosmos_llsn: u64,
    pub cosmos_item_llsn: u64,
    /// Request charge (RUs) billed for this call.
    pub charge: f64,
    pub service_version: String,
    pub activity_id: uuid::Uuid,
    pub session_token: String,
    pub gateway_version: String,
    pub server: String,
    pub xp_role: u32,
    pub content_type: String,
    pub content_location: String,
    pub date: DateTime<Utc>,
}

impl GetCollectionResponse {
    /// Consumes the raw HTTP response: collects the body stream, parses the
    /// collection JSON, and extracts every documented header. Fails if the
    /// body is not valid JSON or any expected header is missing/unparsable
    /// (errors surface in the field order below, since `?` short-circuits).
    pub async fn try_from(response: HttpResponse) -> crate::Result<Self> {
        let (_status_code, headers, pinned_stream) = response.deconstruct();
        let body = collect_pinned_stream(pinned_stream).await?;

        Ok(Self {
            collection: serde_json::from_slice(&body)?,
            last_state_change: last_state_change_from_headers(&headers)?,
            etag: etag_from_headers(&headers)?,
            collection_partition_index: collection_partition_index_from_headers(&headers)?,
            collection_service_index: collection_service_index_from_headers(&headers)?,
            lsn: lsn_from_headers(&headers)?,
            schema_version: schema_version_from_headers(&headers)?.to_owned(),
            alt_content_path: alt_content_path_from_headers(&headers)?.to_owned(),
            content_path: content_path_from_headers(&headers)?.to_owned(),
            global_committed_lsn: global_committed_lsn_from_headers(&headers)?,
            number_of_read_regions: number_of_read_regions_from_headers(&headers)?,
            item_lsn: item_lsn_from_headers(&headers)?,
            transport_request_id: transport_request_id_from_headers(&headers)?,
            cosmos_llsn: cosmos_llsn_from_headers(&headers)?,
            cosmos_item_llsn: cosmos_item_llsn_from_headers(&headers)?,
            charge: request_charge_from_headers(&headers)?,
            service_version: service_version_from_headers(&headers)?.to_owned(),
            activity_id: activity_id_from_headers(&headers)?,
            session_token: session_token_from_headers(&headers)?,
            gateway_version: gateway_version_from_headers(&headers)?.to_owned(),
            server: server_from_headers(&headers)?.to_owned(),
            xp_role: role_from_headers(&headers)?,
            content_type: content_type_from_headers(&headers)?.to_owned(),
            content_location: content_location_from_headers(&headers)?.to_owned(),
            date: date_from_headers(&headers)?,
        })
    }
}
/// bindings for ARINC653P1-5 3.7.2.1 buffer pub mod basic { use crate::bindings::*; use crate::Locked; /// ARINC653P1-5 3.7.1 pub type BufferName = ApexName; /// ARINC653P1-5 3.7.1 /// /// According to ARINC 653P1-5 this may either be 32 or 64 bits. /// Internally we will use 64-bit by default. /// The implementing Hypervisor may cast this to 32-bit if needed pub type BufferId = ApexLongInteger; /// ARINC653P1-5 3.7.2.1 required functions for buffer functionality pub trait ApexBufferP1 { /// APEX653P1-5 3.7.2.1.1 /// /// # Errors /// - [ErrorReturnCode::InvalidConfig]: not enough memory is available /// - [ErrorReturnCode::InvalidConfig]: [ApexLimits::SYSTEM_LIMIT_NUMBER_OF_BUFFERS](crate::bindings::ApexLimits::SYSTEM_LIMIT_NUMBER_OF_BUFFERS) was reached /// - [ErrorReturnCode::NoAction]: a buffer with given `buffer_name` already exists /// - [ErrorReturnCode::InvalidParam]: `max_message_size` is zero /// - [ErrorReturnCode::InvalidParam]: `max_nb_message` is too large /// - [ErrorReturnCode::InvalidMode]: our current operating mode is [OperatingMode::Normal](crate::prelude::OperatingMode::Normal) #[cfg_attr(not(feature = "full_doc"), doc(hidden))] fn create_buffer<L: Locked>( buffer_name: BufferName, max_message_size: MessageSize, max_nb_message: MessageRange, queuing_discipline: QueuingDiscipline, ) -> Result<BufferId, ErrorReturnCode>; /// APEX653P1-5 3.7.2.1.2 /// /// # Errors /// - [ErrorReturnCode::InvalidParam]: buffer with `buffer_id` does not exist /// - [ErrorReturnCode::InvalidParam]: `time_out` is invalid /// - [ErrorReturnCode::InvalidMode]: current process holds a mutex /// - [ErrorReturnCode::InvalidMode]: current process is error handler AND `time_out` is not instant. 
/// - [ErrorReturnCode::NotAvailable]: there is no place in the buffer /// - [ErrorReturnCode::TimedOut]: `time_out` elapsed #[cfg_attr(not(feature = "full_doc"), doc(hidden))] fn send_buffer<L: Locked>( buffer_id: BufferId, message: &[ApexByte], time_out: ApexSystemTime, ) -> Result<(), ErrorReturnCode>; /// APEX653P1-5 3.7.2.1.3 /// /// # Errors /// - [ErrorReturnCode::InvalidParam]: buffer with `buffer_id` does not exist /// - [ErrorReturnCode::InvalidParam]: `time_out` is invalid /// - [ErrorReturnCode::InvalidMode]: current process holds a mutex /// - [ErrorReturnCode::InvalidMode]: current process is error handler AND `time_out` is not instant. /// - [ErrorReturnCode::NotAvailable]: there is no message in the buffer /// - [ErrorReturnCode::TimedOut]: `time_out` elapsed /// /// # Safety /// /// This function is safe, as long as the `message` can hold whatever is received #[cfg_attr(not(feature = "full_doc"), doc(hidden))] unsafe fn receive_buffer<L: Locked>( buffer_id: BufferId, time_out: ApexSystemTime, message: &mut [ApexByte], ) -> Result<MessageSize, ErrorReturnCode>; /// APEX653P1-5 3.7.2.1.4 /// /// # Errors /// - [ErrorReturnCode::InvalidConfig]: buffer with `buffer_name` does not exist #[cfg_attr(not(feature = "full_doc"), doc(hidden))] fn get_buffer_id<L: Locked>(buffer_name: BufferName) -> Result<BufferId, ErrorReturnCode>; /// APEX653P1-5 3.7.2.1.5 /// /// # Errors /// - [ErrorReturnCode::InvalidParam]: buffer with `buffer_id` does not exist #[cfg_attr(not(feature = "full_doc"), doc(hidden))] fn get_buffer_status<L: Locked>( buffer_id: BufferId, ) -> Result<BufferStatus, ErrorReturnCode>; } /// ARINC653P1-5 3.7.1 #[derive(Debug, Clone, PartialEq, Eq)] pub struct BufferStatus { /// number of messages in the buffer pub nb_message: MessageRange, /// maximum number of messages which can fit in this buffer pub max_nb_message: MessageRange, pub max_message_size: MessageSize, pub waiting_processes: WaitingRange, } } /// abstraction for ARINC653P1-5 3.7.2.1 
buffer
pub mod abstraction {
    use core::marker::PhantomData;
    use core::sync::atomic::AtomicPtr;

    // Reexport important basic-types for downstream-user
    pub use super::basic::{BufferId, BufferStatus};

    use crate::bindings::*;
    use crate::hidden::Key;
    use crate::prelude::*;

    /// Buffer abstraction struct
    ///
    /// Caches the buffer's configured `max_message_size` and `max_nb_message`
    /// next to the raw [BufferId], so sends/receives can be validated without
    /// asking the hypervisor again on every call.
    #[derive(Debug)]
    pub struct Buffer<B: ApexBufferP1> {
        // Ties this struct to backend `B` without storing one; the
        // `AtomicPtr` wrapper keeps auto-traits independent of `B` itself.
        _b: PhantomData<AtomicPtr<B>>,
        id: BufferId,
        max_size: MessageSize,
        max_number_msgs: MessageRange,
    }

    // Manual impl: a derived Clone would needlessly require `B: Clone`.
    impl<B: ApexBufferP1> Clone for Buffer<B> {
        fn clone(&self) -> Self {
            Self {
                _b: self._b,
                id: self.id,
                max_size: self.max_size,
                max_number_msgs: self.max_number_msgs,
            }
        }
    }

    /// Free extra functions for implementer of [ApexBufferP1]
    pub trait ApexBufferP1Ext: ApexBufferP1 + Sized {
        /// # Errors
        /// - [Error::InvalidConfig]: buffer with `name` does not exist
        fn get_buffer(name: Name) -> Result<Buffer<Self>, Error>;
    }

    impl<B: ApexBufferP1> ApexBufferP1Ext for B {
        fn get_buffer(name: Name) -> Result<Buffer<B>, Error> {
            let id = B::get_buffer_id::<Key>(name.into())?;
            // According to ARINC653P1-5 3.7.2.1.5 this can only fail if the buffer_id
            // does not exist in the current partition.
            // But since we retrieve the buffer_id directly from the hypervisor
            // there is no possible way for it not existing
            let status = B::get_buffer_status::<Key>(id).unwrap();
            Ok(Buffer {
                _b: Default::default(),
                id,
                max_size: status.max_message_size,
                max_number_msgs: status.max_nb_message,
            })
        }
    }

    impl<B: ApexBufferP1> Buffer<B> {
        /// # Errors
        /// - [Error::InvalidConfig]: buffer with `name` does not exist
        pub fn from_name(name: Name) -> Result<Buffer<B>, Error> {
            B::get_buffer(name)
        }

        /// Raw id of this buffer, as assigned by the hypervisor
        pub fn id(&self) -> BufferId {
            self.id
        }

        /// Max [MessageSize] for this [Buffer]
        pub fn size(&self) -> MessageSize {
            self.max_size
        }

        /// Max number of messages in this [Buffer]
        pub fn range(&self) -> MessageRange {
            self.max_number_msgs
        }

        /// Checked Buffer send from specified byte buffer
        ///
        /// # Errors
        /// - [Error::WriteError]: the `buffer` is longer than the `max_message_size` specified for this buffer
        /// - [Error::WriteError]: `buffer` length is zero
        // NOTE(review): `buffer` is only read here; the `&mut` is presumably
        // required by `validate_write` — confirm against its definition.
        pub fn send(&self, buffer: &mut [ApexByte], timeout: SystemTime) -> Result<(), Error> {
            buffer.validate_write(self.max_size)?;
            B::send_buffer::<Key>(self.id, buffer, timeout.into())?;
            Ok(())
        }

        /// Checked Buffer receive into specified byte buffer
        ///
        /// # Errors
        /// - [Error::InvalidParam]: `timeout` is invalid
        /// - [Error::InvalidMode]: current process holds a mutex
        /// - [Error::InvalidMode]: current process is error handler AND `timeout` is not instant.
        /// - [Error::NotAvailable]: there is no message in the buffer
        /// - [Error::TimedOut]: `timeout` elapsed
        /// - [Error::ReadError]: provided `buffer` is too small for this [Buffer]'s `max_message_size`
        pub fn receive<'a>(
            &self,
            buffer: &'a mut [ApexByte],
            timeout: SystemTime,
        ) -> Result<&'a [ApexByte], Error> {
            // validate_read establishes that `buffer` can hold a maximum-size
            // message, which is exactly the safety precondition of
            // `receive_unchecked` below.
            buffer.validate_read(self.max_size)?;
            unsafe { self.receive_unchecked(timeout, buffer) }
        }

        /// Unchecked Buffer receive into specified byte buffer
        ///
        /// # Errors
        /// - [Error::InvalidParam]: `timeout` is invalid
        /// - [Error::InvalidMode]: current process holds a mutex
        /// - [Error::InvalidMode]: current process is error handler AND `timeout` is not instant.
        /// - [Error::NotAvailable]: there is no message in the buffer
        /// - [Error::TimedOut]: `timeout` elapsed
        ///
        /// # Safety
        ///
        /// This function is safe, as long as the buffer can hold whatever is received
        pub unsafe fn receive_unchecked<'a>(
            &self,
            timeout: SystemTime,
            buffer: &'a mut [ApexByte],
        ) -> Result<&'a [ApexByte], Error> {
            // The backend reports how many bytes were actually written; return
            // only that prefix of the caller's buffer.
            let len = B::receive_buffer::<Key>(self.id, timeout.into(), buffer)? as usize;
            Ok(&buffer[..len])
        }

        /// # Panics
        /// if this buffer does not exist anymore
        pub fn status(&self) -> BufferStatus {
            // According to ARINC653P1-5 3.7.2.1.5 this can only fail if the buffer_id
            // does not exist in the current partition.
            // But since we retrieve the buffer_id directly from the hypervisor
            // there is no possible way for it not existing
            B::get_buffer_status::<Key>(self.id).unwrap()
        }
    }

    impl<B: ApexBufferP1> StartContext<B> {
        /// # Errors
        /// - [Error::InvalidConfig]: not enough memory is available
        /// - [Error::InvalidConfig]: [ApexLimits::SYSTEM_LIMIT_NUMBER_OF_BUFFERS](crate::bindings::ApexLimits::SYSTEM_LIMIT_NUMBER_OF_BUFFERS) was reached
        /// - [Error::NoAction]: a buffer with given `name` already exists
        /// - [Error::InvalidParam]: `size` is zero
        /// - [Error::InvalidParam]: `range` is too large
        pub fn create_buffer(
            &mut self,
            name: Name,
            size: MessageSize,
            range: MessageRange,
            qd: QueuingDiscipline,
        ) -> Result<Buffer<B>, Error> {
            let id = B::create_buffer::<Key>(name.into(), size, range, qd)?;
            // Creation parameters are authoritative, so no status query needed.
            Ok(Buffer {
                _b: Default::default(),
                id,
                max_size: size,
                max_number_msgs: range,
            })
        }
    }
}
// NOTE(review): svd2rust-generated accessors for the IMSCR (interrupt mask
// set/clear) register. Do not hand-edit field offsets; fix the SVD and
// regenerate instead.
#[doc = "Register `IMSCR` reader"]
pub type R = crate::R<IMSCR_SPEC>;
#[doc = "Register `IMSCR` writer"]
pub type W = crate::W<IMSCR_SPEC>;
#[doc = "Field `INIM` reader - Input FIFO service interrupt mask"]
pub type INIM_R = crate::BitReader;
#[doc = "Field `INIM` writer - Input FIFO service interrupt mask"]
pub type INIM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OUTIM` reader - Output FIFO service interrupt mask"]
pub type OUTIM_R = crate::BitReader;
#[doc = "Field `OUTIM` writer - Output FIFO service interrupt mask"]
pub type OUTIM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    #[doc = "Bit 0 - Input FIFO service interrupt mask"]
    #[inline(always)]
    pub fn inim(&self) -> INIM_R {
        INIM_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Output FIFO service interrupt mask"]
    #[inline(always)]
    pub fn outim(&self) -> OUTIM_R {
        OUTIM_R::new(((self.bits >> 1) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - Input FIFO service interrupt mask"]
    #[inline(always)]
    #[must_use]
    pub fn inim(&mut self) -> INIM_W<IMSCR_SPEC, 0> {
        INIM_W::new(self)
    }
    #[doc = "Bit 1 - Output FIFO service interrupt mask"]
    #[inline(always)]
    #[must_use]
    pub fn outim(&mut self) -> OUTIM_W<IMSCR_SPEC, 1> {
        OUTIM_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "interrupt mask set/clear register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`imscr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`imscr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct IMSCR_SPEC;
impl crate::RegisterSpec for IMSCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`imscr::R`](R) reader structure"]
impl crate::Readable for IMSCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`imscr::W`](W) writer structure"]
impl crate::Writable for IMSCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets IMSCR to value 0"]
impl crate::Resettable for IMSCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use *; use windows::*; const COLOR_SELECTION_WINDOW_HEIGHT: i32=508; const COLOR_SELECTION_WINDOW_WIDTH: i32=434; const COLOR_SELECTION_WINDOW_X: i32=(SCREEN_WIDTH/2) as i32-COLOR_SELECTION_WINDOW_WIDTH/2; const COLOR_SELECTION_WINDOW_Y: i32=(SCREEN_HEIGHT/2) as i32-COLOR_SELECTION_WINDOW_HEIGHT/2; const COLOR_SELECTION_WINDOW_BACKGROUND: u32=0; const COLOR_SELECTION_SLIDER: u32=1; const COLOR_SELECTOR: u32=2; const SLIDER_X: i32=20; const SLIDER_WIDTH: i32=14; const SLIDER_LENGTH: i32=224; fn get_angle(i: u8)->f64{ (i as f64)*2.0*PI/3.0-PI/2.0 } pub struct ColorSelectionWindow{ pub draw: bool, pub r: u8, pub g: u8, pub b: u8, } impl ColorSelectionWindow{ pub fn new()->Self{ ColorSelectionWindow{ draw: false, r: 0, g: 0, b: 0, } } } impl Window for ColorSelectionWindow{ fn handle_click<'a>( &mut self, x: i32, y: i32, args: &mut WindowArgs<'a>, )->bool{ if !self.draw{ return false; } let x=x+(SCREEN_WIDTH/2) as i32; let y=y+(SCREEN_HEIGHT/2) as i32; //if close to slider //set slider //else //absorb let mut send_command=false; if let Some(r)=get_slider( x, y, get_angle(0), ){ send_command=true; self.r=r; } else if let Some(g)=get_slider( x, y, get_angle(1), ){ send_command=true; self.g=g; } else if let Some(b)=get_slider( x, y, get_angle(2), ){ send_command=true; self.b=b; } if send_command{ args.command_stream.send(Command::SetColor(SetColorCommand{ r: self.r, g: self.g, b: self.b, })).unwrap(); } x>=COLOR_SELECTION_WINDOW_X && x<COLOR_SELECTION_WINDOW_X+COLOR_SELECTION_WINDOW_WIDTH && y>=COLOR_SELECTION_WINDOW_Y && y<COLOR_SELECTION_WINDOW_Y+COLOR_SELECTION_WINDOW_HEIGHT } fn draw( &self, canvas: &mut SCanvas, image_loader: &mut ImageLoader, ){ if !self.draw{ return; } let mid_x=COLOR_SELECTION_WINDOW_X+COLOR_SELECTION_WINDOW_WIDTH/2; let mid_y=COLOR_SELECTION_WINDOW_Y+COLOR_SELECTION_WINDOW_HEIGHT/2; draw_basic_entity_sprite( canvas, image_loader, &BasicEntitySprite{ x: mid_x as f64, y: mid_y as f64, direction: 0.0, sprite: COLOR_SELECTION_WINDOW_BACKGROUND, }, 
0.0, 0.0, DrawEffects::client_side() ); for i in 0..3{ let dir=get_angle(i); let slider_mid_x=mid_x as f64+dir.cos()*(SLIDER_X+SLIDER_LENGTH/2) as f64; let slider_mid_y=mid_y as f64+dir.sin()*(SLIDER_X+SLIDER_LENGTH/2) as f64; draw_basic_entity_sprite( canvas, image_loader, &BasicEntitySprite{ x: slider_mid_x, y: slider_mid_y, direction: dir, sprite: COLOR_SELECTION_SLIDER, }, 0.0, 0.0, DrawEffects::client_side(), ); for j in 0..SLIDER_LENGTH{ let color=(j as f64/SLIDER_LENGTH as f64)*255.0; let color=color as u8; } let relevant=match i{ 0=>self.r, 1=>self.g, 2=>self.b, _=>unreachable!(), } as f64; let dist=SLIDER_X as f64+(SLIDER_LENGTH as f64)*(relevant/255.0); let selector_x=mid_x as f64+dir.cos()*dist; let selector_y=mid_y as f64+dir.sin()*dist; draw_basic_entity_sprite( canvas, image_loader, &BasicEntitySprite{ x: selector_x, y: selector_y, direction: dir, sprite: COLOR_SELECTOR, }, 0.0, 0.0, DrawEffects::client_side(), ); } } } pub fn get_slider(x: i32, y: i32, angle: f64)->Option<u8>{ let mid_x=COLOR_SELECTION_WINDOW_X+COLOR_SELECTION_WINDOW_WIDTH/2; let mid_y=COLOR_SELECTION_WINDOW_Y+COLOR_SELECTION_WINDOW_HEIGHT/2; let x=x-mid_x; let y=mid_y-y; let rx=(x as f64)*angle.cos()-(y as f64)*angle.sin(); let ry=(y as f64)*angle.cos()+(x as f64)*angle.sin(); if ry.abs()>(SLIDER_WIDTH as f64)/2.0{ return None } let rx=rx-(SLIDER_X as f64); if rx<0.0{ return None } if rx>(SLIDER_LENGTH as f64){ return None } Some((255.0*rx/(SLIDER_LENGTH as f64)) as u8) }
use inkwell as llvm;
use crate::parser::expr::TypedExpr;
use super::builder::Builder;
use std::path::Path;

/// Thin wrapper around an LLVM [`llvm::module::Module`] bundling the context
/// and the per-function pass manager used to optimize built functions.
pub struct Module<'ctx, 'a> {
    context: &'ctx llvm::context::Context,
    module: &'a llvm::module::Module<'ctx>,
    // Run on every function that passes LLVM verification in build_function.
    fpm: &'a llvm::passes::PassManager<llvm::values::FunctionValue<'ctx>>
}

impl<'ctx, 'a> Module<'ctx, 'a> {
    /// Wraps `module` and pre-declares the C runtime functions `atoi`,
    /// `puts` and `putchar`.
    // NOTE(review): all three externs share one i64 -> i64 signature, which
    // does not match their C prototypes — presumably every value in this
    // language is modelled as i64; confirm against the codegen conventions.
    pub fn new(context: &'ctx llvm::context::Context,
               module: &'a llvm::module::Module<'ctx>,
               fpm: &'a llvm::passes::PassManager<llvm::values::FunctionValue<'ctx>>) -> Module<'ctx, 'a> {
        let fn_type = context.i64_type().fn_type(&[context.i64_type().into()], false);
        module.add_function("atoi", fn_type, None);
        module.add_function("puts", fn_type, None);
        module.add_function("putchar", fn_type, None);
        Module { context: context, module: module, fpm }
    }

    /// Builds a function `name` taking one i64 parameter per entry of `args`
    /// and returning i64, with `expr` as its body; on successful verification
    /// the pass manager is run over it.
    ///
    /// # Panics
    /// Panics if LLVM fails to verify the built function.
    pub fn build_function<'e>(&self, name: &str, args: &Vec<String>, expr: &Box<TypedExpr<'e>>) {
        let arg_types = vec![self.context.i64_type().into(); args.len()];
        let fn_type = self.context.i64_type().fn_type(arg_types.as_slice(), false);
        let func = self.module.add_function(name, fn_type, None);
        let builder = Builder::new(self.context, &self.module, args, func);
        builder.build_ret(expr);
        if func.verify(false) {
            self.fpm.run_on(&func);
        } else {
            panic!("Couldn't verify function");
        }
    }

    /// Prints the module's textual LLVM IR to stdout.
    pub fn dump(&self) {
        let result = self.module.print_to_string().to_string();
        println!("{}", result);
    }

    /// Emits the module as a native object file for `target` at `path`.
    ///
    /// # Panics
    /// Panics if the target machine cannot write the object file.
    pub fn write_to_file(&self, target: &inkwell::targets::TargetMachine, path: &Path) {
        target.write_to_file(&self.module, inkwell::targets::FileType::Object, path).expect("Failed to write object file");
    }
}
/// An enum to represent all characters in the VedicExtensions block. #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum VedicExtensions { /// \u{1cd0}: '᳐' VedicToneKarshana, /// \u{1cd1}: '᳑' VedicToneShara, /// \u{1cd2}: '᳒' VedicTonePrenkha, /// \u{1cd3}: '᳓' VedicSignNihshvasa, /// \u{1cd4}: '᳔' VedicSignYajurvedicMidlineSvarita, /// \u{1cd5}: '᳕' VedicToneYajurvedicAggravatedIndependentSvarita, /// \u{1cd6}: '᳖' VedicToneYajurvedicIndependentSvarita, /// \u{1cd7}: '᳗' VedicToneYajurvedicKathakaIndependentSvarita, /// \u{1cd8}: '᳘' VedicToneCandraBelow, /// \u{1cd9}: '᳙' VedicToneYajurvedicKathakaIndependentSvaritaSchroeder, /// \u{1cda}: '᳚' VedicToneDoubleSvarita, /// \u{1cdb}: '᳛' VedicToneTripleSvarita, /// \u{1cdc}: '᳜' VedicToneKathakaAnudatta, /// \u{1cdd}: '᳝' VedicToneDotBelow, /// \u{1cde}: '᳞' VedicToneTwoDotsBelow, /// \u{1cdf}: '᳟' VedicToneThreeDotsBelow, /// \u{1ce0}: '᳠' VedicToneRigvedicKashmiriIndependentSvarita, /// \u{1ce1}: '᳡' VedicToneAtharvavedicIndependentSvarita, /// \u{1ce2}: '᳢' VedicSignVisargaSvarita, /// \u{1ce3}: '᳣' VedicSignVisargaUdatta, /// \u{1ce4}: '᳤' VedicSignReversedVisargaUdatta, /// \u{1ce5}: '᳥' VedicSignVisargaAnudatta, /// \u{1ce6}: '᳦' VedicSignReversedVisargaAnudatta, /// \u{1ce7}: '᳧' VedicSignVisargaUdattaWithTail, /// \u{1ce8}: '᳨' VedicSignVisargaAnudattaWithTail, /// \u{1ce9}: 'ᳩ' VedicSignAnusvaraAntargomukha, /// \u{1cea}: 'ᳪ' VedicSignAnusvaraBahirgomukha, /// \u{1ceb}: 'ᳫ' VedicSignAnusvaraVamagomukha, /// \u{1cec}: 'ᳬ' VedicSignAnusvaraVamagomukhaWithTail, /// \u{1ced}: '᳭' VedicSignTiryak, /// \u{1cee}: 'ᳮ' VedicSignHexiformLongAnusvara, /// \u{1cef}: 'ᳯ' VedicSignLongAnusvara, /// \u{1cf0}: 'ᳰ' VedicSignRthangLongAnusvara, /// \u{1cf1}: 'ᳱ' VedicSignAnusvaraUbhayatoMukha, /// \u{1cf2}: 'ᳲ' VedicSignArdhavisarga, /// \u{1cf3}: 'ᳳ' VedicSignRotatedArdhavisarga, /// \u{1cf4}: '᳴' VedicToneCandraAbove, /// \u{1cf5}: 'ᳵ' VedicSignJihvamuliya, /// \u{1cf6}: 'ᳶ' VedicSignUpadhmaniya, /// \u{1cf7}: 
'᳷' VedicSignAtikrama, /// \u{1cf8}: '᳸' VedicToneRingAbove, /// \u{1cf9}: '᳹' VedicToneDoubleRingAbove, /// \u{1cfa}: 'ᳺ' VedicSignDoubleAnusvaraAntargomukha, } impl Into<char> for VedicExtensions { fn into(self) -> char { match self { VedicExtensions::VedicToneKarshana => '᳐', VedicExtensions::VedicToneShara => '᳑', VedicExtensions::VedicTonePrenkha => '᳒', VedicExtensions::VedicSignNihshvasa => '᳓', VedicExtensions::VedicSignYajurvedicMidlineSvarita => '᳔', VedicExtensions::VedicToneYajurvedicAggravatedIndependentSvarita => '᳕', VedicExtensions::VedicToneYajurvedicIndependentSvarita => '᳖', VedicExtensions::VedicToneYajurvedicKathakaIndependentSvarita => '᳗', VedicExtensions::VedicToneCandraBelow => '᳘', VedicExtensions::VedicToneYajurvedicKathakaIndependentSvaritaSchroeder => '᳙', VedicExtensions::VedicToneDoubleSvarita => '᳚', VedicExtensions::VedicToneTripleSvarita => '᳛', VedicExtensions::VedicToneKathakaAnudatta => '᳜', VedicExtensions::VedicToneDotBelow => '᳝', VedicExtensions::VedicToneTwoDotsBelow => '᳞', VedicExtensions::VedicToneThreeDotsBelow => '᳟', VedicExtensions::VedicToneRigvedicKashmiriIndependentSvarita => '᳠', VedicExtensions::VedicToneAtharvavedicIndependentSvarita => '᳡', VedicExtensions::VedicSignVisargaSvarita => '᳢', VedicExtensions::VedicSignVisargaUdatta => '᳣', VedicExtensions::VedicSignReversedVisargaUdatta => '᳤', VedicExtensions::VedicSignVisargaAnudatta => '᳥', VedicExtensions::VedicSignReversedVisargaAnudatta => '᳦', VedicExtensions::VedicSignVisargaUdattaWithTail => '᳧', VedicExtensions::VedicSignVisargaAnudattaWithTail => '᳨', VedicExtensions::VedicSignAnusvaraAntargomukha => 'ᳩ', VedicExtensions::VedicSignAnusvaraBahirgomukha => 'ᳪ', VedicExtensions::VedicSignAnusvaraVamagomukha => 'ᳫ', VedicExtensions::VedicSignAnusvaraVamagomukhaWithTail => 'ᳬ', VedicExtensions::VedicSignTiryak => '᳭', VedicExtensions::VedicSignHexiformLongAnusvara => 'ᳮ', VedicExtensions::VedicSignLongAnusvara => 'ᳯ', 
VedicExtensions::VedicSignRthangLongAnusvara => 'ᳰ', VedicExtensions::VedicSignAnusvaraUbhayatoMukha => 'ᳱ', VedicExtensions::VedicSignArdhavisarga => 'ᳲ', VedicExtensions::VedicSignRotatedArdhavisarga => 'ᳳ', VedicExtensions::VedicToneCandraAbove => '᳴', VedicExtensions::VedicSignJihvamuliya => 'ᳵ', VedicExtensions::VedicSignUpadhmaniya => 'ᳶ', VedicExtensions::VedicSignAtikrama => '᳷', VedicExtensions::VedicToneRingAbove => '᳸', VedicExtensions::VedicToneDoubleRingAbove => '᳹', VedicExtensions::VedicSignDoubleAnusvaraAntargomukha => 'ᳺ', } } } impl std::convert::TryFrom<char> for VedicExtensions { type Error = (); fn try_from(c: char) -> Result<Self, Self::Error> { match c { '᳐' => Ok(VedicExtensions::VedicToneKarshana), '᳑' => Ok(VedicExtensions::VedicToneShara), '᳒' => Ok(VedicExtensions::VedicTonePrenkha), '᳓' => Ok(VedicExtensions::VedicSignNihshvasa), '᳔' => Ok(VedicExtensions::VedicSignYajurvedicMidlineSvarita), '᳕' => Ok(VedicExtensions::VedicToneYajurvedicAggravatedIndependentSvarita), '᳖' => Ok(VedicExtensions::VedicToneYajurvedicIndependentSvarita), '᳗' => Ok(VedicExtensions::VedicToneYajurvedicKathakaIndependentSvarita), '᳘' => Ok(VedicExtensions::VedicToneCandraBelow), '᳙' => Ok(VedicExtensions::VedicToneYajurvedicKathakaIndependentSvaritaSchroeder), '᳚' => Ok(VedicExtensions::VedicToneDoubleSvarita), '᳛' => Ok(VedicExtensions::VedicToneTripleSvarita), '᳜' => Ok(VedicExtensions::VedicToneKathakaAnudatta), '᳝' => Ok(VedicExtensions::VedicToneDotBelow), '᳞' => Ok(VedicExtensions::VedicToneTwoDotsBelow), '᳟' => Ok(VedicExtensions::VedicToneThreeDotsBelow), '᳠' => Ok(VedicExtensions::VedicToneRigvedicKashmiriIndependentSvarita), '᳡' => Ok(VedicExtensions::VedicToneAtharvavedicIndependentSvarita), '᳢' => Ok(VedicExtensions::VedicSignVisargaSvarita), '᳣' => Ok(VedicExtensions::VedicSignVisargaUdatta), '᳤' => Ok(VedicExtensions::VedicSignReversedVisargaUdatta), '᳥' => Ok(VedicExtensions::VedicSignVisargaAnudatta), '᳦' => 
Ok(VedicExtensions::VedicSignReversedVisargaAnudatta), '᳧' => Ok(VedicExtensions::VedicSignVisargaUdattaWithTail), '᳨' => Ok(VedicExtensions::VedicSignVisargaAnudattaWithTail), 'ᳩ' => Ok(VedicExtensions::VedicSignAnusvaraAntargomukha), 'ᳪ' => Ok(VedicExtensions::VedicSignAnusvaraBahirgomukha), 'ᳫ' => Ok(VedicExtensions::VedicSignAnusvaraVamagomukha), 'ᳬ' => Ok(VedicExtensions::VedicSignAnusvaraVamagomukhaWithTail), '᳭' => Ok(VedicExtensions::VedicSignTiryak), 'ᳮ' => Ok(VedicExtensions::VedicSignHexiformLongAnusvara), 'ᳯ' => Ok(VedicExtensions::VedicSignLongAnusvara), 'ᳰ' => Ok(VedicExtensions::VedicSignRthangLongAnusvara), 'ᳱ' => Ok(VedicExtensions::VedicSignAnusvaraUbhayatoMukha), 'ᳲ' => Ok(VedicExtensions::VedicSignArdhavisarga), 'ᳳ' => Ok(VedicExtensions::VedicSignRotatedArdhavisarga), '᳴' => Ok(VedicExtensions::VedicToneCandraAbove), 'ᳵ' => Ok(VedicExtensions::VedicSignJihvamuliya), 'ᳶ' => Ok(VedicExtensions::VedicSignUpadhmaniya), '᳷' => Ok(VedicExtensions::VedicSignAtikrama), '᳸' => Ok(VedicExtensions::VedicToneRingAbove), '᳹' => Ok(VedicExtensions::VedicToneDoubleRingAbove), 'ᳺ' => Ok(VedicExtensions::VedicSignDoubleAnusvaraAntargomukha), _ => Err(()), } } } impl Into<u32> for VedicExtensions { fn into(self) -> u32 { let c: char = self.into(); let hex = c .escape_unicode() .to_string() .replace("\\u{", "") .replace("}", ""); u32::from_str_radix(&hex, 16).unwrap() } } impl std::convert::TryFrom<u32> for VedicExtensions { type Error = (); fn try_from(u: u32) -> Result<Self, Self::Error> { if let Ok(c) = char::try_from(u) { Self::try_from(c) } else { Err(()) } } } impl Iterator for VedicExtensions { type Item = Self; fn next(&mut self) -> Option<Self> { let index: u32 = (*self).into(); use std::convert::TryFrom; Self::try_from(index + 1).ok() } } impl VedicExtensions { /// The character with the lowest index in this unicode block pub fn new() -> Self { VedicExtensions::VedicToneKarshana } /// The character's name, in sentence case pub fn name(&self) -> String { 
let s = std::format!("VedicExtensions{:#?}", self); string_morph::to_sentence_case(&s) } }
// Long term, I would like to put a Lexical-Analyzer Generator in here. // // Nonetheless, I think we are happy with our hand-coded lexer.rs, and // I should strive to remain compatible with it, both for short-term // and perhaps also for long term. extern mod extra; #[allow(unused_imports)] // work around lint bug (warns about use extra::sort) mod grammar { use std::str; use std::cmp; use std::hashmap::HashMap; use extra::treemap::TreeMap; use std::to_bytes; use std::hashmap::HashSet; use std::vec; trait Terminal : IterBytes+Eq+Clone { } trait NonTerminal : IterBytes+Eq+Clone { } impl Terminal for &'static str { } impl NonTerminal for &'static str { } #[deriving(Clone,Eq,IterBytes)] pub enum ProductionSym<T,NT> { T(T), NT(NT) } impl<T,NT:Eq> ProductionSym<T,NT> { fn matches_nt(&self, other:&NT) -> bool { match self { &T(_) => false, &NT(ref nt) => nt == other } } } impl<T:Ord,NT:Ord> Ord for ProductionSym<T,NT> { fn lt(&self, other: &ProductionSym<T,NT>) -> bool { match (self, other) { ( &T(_), &NT(_)) => true, (&NT(_), &T(_)) => false, ( &T(ref t1), &T(ref t2)) => t1 <= t2, (&NT(ref n1), &NT(ref n2)) => n1 <= n2 } } } fn maximal_common_prefix<T:Eq+Clone>(v1: &[T], v2: &[T]) -> ~[T] { let mut accum = ~[]; for (a, b) in v1.iter().zip(v2.iter()) { if (a == b) { accum.push(a.clone()) } else { break; } } accum } fn factor_suffix<'r, T:Eq+Clone>(alpha: &[T], vec: &'r [T]) -> Option<&'r [T]> { if vec.len() < alpha.len() { return None; } let prefix = vec.slice_to(alpha.len()); let suffix = vec.slice_from(alpha.len()); if prefix == alpha { return Some(suffix); } else { return None; } } #[deriving(Clone,IterBytes,Eq)] struct PString<T, NT>(~[ProductionSym<T, NT>]); impl<T:Eq+Clone,NT:Eq+Clone> PString<T,NT> { fn maximal_common_prefix(&self, other: &PString<T,NT>) -> ~[ProductionSym<T,NT>] { maximal_common_prefix(**self, **other) } } #[deriving(Clone,IterBytes,Eq)] struct Prod<T, NT> { head: NT, body: PString<T, NT> } struct Grammar<T, NT> { start: NT, productions: 
~[Prod<T, NT>], } struct ProductionIterator<'self,T,NT>(vec::VecIterator<'self,Prod<T,NT>>); struct TerminalSet<T>(HashSet<T>); struct NonterminalSet<NT>(HashSet<NT>); struct SymbolSet<T,NT>(HashSet<ProductionSym<T,NT>>); impl<T:Clone+IterBytes+Eq,NT:Clone+IterBytes+Eq> Grammar<T,NT> { fn terminals(&self) -> TerminalSet<T> { let mut accum = HashSet::new(); do self.for_each_sym |s| { match s { &T(ref t) => { accum.insert(t.clone()); }, &NT(_) => {} } } TerminalSet(accum) } fn non_terminals(&self) -> NonterminalSet<NT> { let mut accum = HashSet::new(); do self.for_each_sym |s| { match s { &T(_) => {} &NT(ref nt) => { accum.insert(nt.clone()); }, } } NonterminalSet(accum) } fn symbols(&self) -> SymbolSet<T,NT> { let mut accum = HashSet::new(); do self.for_each_sym |s| { accum.insert(s.clone()); } SymbolSet(accum) } fn for_each_sym(&self, f: &fn (&ProductionSym<T,NT>) -> ()) { f(&NT(self.start.clone())); for p in self.productions_iter() { f(&NT(p.head.clone())); for s in p.body.iter() { f(s); } } } fn productions_iter<'a>(&'a self) -> ProductionIterator<'a, T, NT> { ProductionIterator(self.productions.iter()) } } trait ToGrammar<T, NT> { fn to_grammar<'a>(&'a self) -> &'a Grammar<T, NT>; } trait OwnedGrammar<T, NT> { fn owned_grammar(~self) -> ~Grammar<T, NT>; } trait Primable { fn prime(&self) -> Self; } trait GenFresh<Registry> : Primable { fn gensym(&mut Registry) -> Self; } #[deriving(Eq,Clone,IterBytes,Ord,TotalOrd,TotalEq)] pub enum SymVariant<T> { core(T), gensym(T, uint) } struct SymbolRegistry(@mut uint); impl ToStr for SymbolRegistry { fn to_str(&self) -> ~str { ~"SymbolRegistry{ counter: "+(**self).to_str()+" }" } } impl Eq for SymbolRegistry { fn eq(&self, other: &SymbolRegistry) -> bool { use std::managed; managed::mut_ptr_eq(**self, **other) } } impl TotalEq for SymbolRegistry { fn equals(&self, other: &SymbolRegistry) -> bool { use std::managed; managed::mut_ptr_eq(**self, **other) } } impl Ord for SymbolRegistry { fn lt(&self, other: &SymbolRegistry) -> 
bool { use std::ptr; let s = ptr::to_unsafe_ptr(**self); let t = ptr::to_unsafe_ptr(**other); (s as uint) < (t as uint) } } impl TotalOrd for SymbolRegistry { fn cmp(&self, other: &SymbolRegistry) -> Ordering { use std::ptr; let s = ptr::to_unsafe_ptr(**self); let t = ptr::to_unsafe_ptr(**other); (&(s as uint)).cmp(&(t as uint)) } } pub fn new_symbol_registry() -> SymbolRegistry { SymbolRegistry(@mut 0) } impl SymbolRegistry { fn sym<T>(&self, v:T) -> Sym<T> { sym(*self, core(v)) } } #[deriving(Eq, Ord, TotalOrd, TotalEq)] struct Sym<T> { registry: SymbolRegistry, value: SymVariant<T> } impl<T:IterBytes+Eq+Clone> NonTerminal for Sym<T> { } impl<T:IterBytes> IterBytes for Sym<T> { // (don't include the registry in the hash) fn iter_bytes(&self, lsb0: bool, f: to_bytes::Cb) -> bool { self.value.iter_bytes(lsb0, f) } } pub fn sym<T>(registry: SymbolRegistry, value: SymVariant<T>) -> Sym<T> { Sym{ registry: registry, value: value } } impl<T:Clone> Clone for Sym<T> { fn clone(&self) -> Sym<T> { sym(self.registry, self.value.clone()) } } impl<T:Clone> Primable for Sym<T> { fn prime(&self) -> Sym<T> { let g = self.registry; let count : uint = **g; let count = count + 1; **self.registry = count; let t = match self.value { core(ref t) => t, gensym(ref t,_) => t }; sym(self.registry, gensym(t.clone(), count)) } } impl<T:ToStr> ToStr for SymVariant<T> { fn to_str(&self) -> ~str { match self { &core(ref t) => t.to_str(), &gensym(ref t, count) => t.to_str() + "_" + count.to_str() } } } impl<T:ToStr> ToStr for Sym<T> { fn to_str(&self) -> ~str { self.value.to_str() } } impl<T:ToStr,NT:ToStr> ToStr for ProductionSym<T,NT> { fn to_str(&self) -> ~str { match *self { T(ref t) => ~"\"" + t.to_str() + "\"", NT(ref nt) => ~"<" + nt.to_str() + ">", } } } fn fill_left(s:~str, c:char, width:uint) -> ~str { if s.char_len() < width { str::from_char(c).repeat(width - s.char_len()) + s } else { s } } impl<T:ToStr,NT:ToStr> ToStr for Prod<T,NT> { fn to_str(&self) -> ~str { 
self.to_str_head_aligned(0) } } impl<T:ToStr,NT:ToStr> Prod<T,NT> { fn to_str_head_aligned(&self, width:uint) -> ~str { let head = self.head.to_str(); let head = fill_left("<"+head+">", ' ', width+2); head + " ::= " + self.body.map(|x|x.to_str()).connect(" ") } } impl<T:ToStr,NT:ToStr> ToStr for Grammar<T,NT> { fn to_str(&self) -> ~str { let w = self.productions.iter().map(|x|x.head.to_str().char_len()).fold(0u, cmp::max); "Grammar ⟨\n "+self.productions.map(|x|x.to_str_head_aligned(w)).connect("\n ") + "\n⟩" } } pub fn production<T,NT>(h:NT, b: ~[ProductionSym<T,NT>]) -> Prod<T,NT> { Prod { head:h, body:PString(b) } } // The $G argument is the SymbolRegistry. (Originally I used a // tricky of overloading the N to act as a tag and also a binding // for the registry, but was not sufficient since the head of the // production needs the registry (even when the right hand side // has no non-terminals occurrences). macro_rules! production ( ($G:ident $H:ident -> $($T:ident : $S:expr)*) => ( production(sym($G, core(stringify!($H))), ~[$(symbolify!($G $T $S)),*]) ) ) macro_rules! 
symbolify ( ( $G:ident N $NT:expr) => ( NT(sym($G, core(stringify!($NT)))) ); ( $ignored:ident T $T:expr) => ( T($T) ); ) trait GrammarLike<T,NT> { fn start(&self) -> NT; fn productions<'a>(&'a self) -> &'a [Prod<T, NT>]; fn owned_productions(~self) -> ~[Prod<T, NT>]; } pub type StaticGrammar = (SymbolRegistry, Grammar<&'static str, Sym<&'static str>>); type StaticSym = ProductionSym<&'static str, Sym<&'static str>>; type StaticStr = PString<&'static str, Sym<&'static str>>; impl ToGrammar<&'static str, Sym<&'static str>> for StaticGrammar { fn to_grammar<'a>(&'a self) -> &'a Grammar<&'static str, Sym<&'static str>> { let &(_, ref g) = self; g } } impl OwnedGrammar<&'static str, Sym<&'static str>> for StaticGrammar { fn owned_grammar(~self) -> ~Grammar<&'static str, Sym<&'static str>> { let ~(_, g) = self; ~g } } impl GrammarLike<&'static str, Sym<&'static str>> for StaticGrammar { fn start(&self) -> Sym<&'static str> { self.to_grammar().start } fn productions<'a>(&'a self) -> &'a [Prod<&'static str, Sym<&'static str>>] { self.to_grammar().productions.as_slice() } fn owned_productions(~self) -> ~[Prod<&'static str, Sym<&'static str>>] { let ~(_, g) = self; g.productions } } fn example_4_5() -> ~StaticGrammar { let G = new_symbol_registry(); ~(G,Grammar { start: sym(G, core("expression")), productions: ~[ production!( G expression -> N:expression T:"+" N:term ), production!( G expression -> N:expression T:"-" N:term ), production!( G expression -> N:term ), production!( G term -> N:term T:"*" N:factor ), production!( G term -> N:term T:"/" N:factor ), production!( G term -> N:factor ), production!( G factor -> T:"(" N:expression T:")" ), production!( G factor -> T:"id" ), ]})} fn example_4_6() -> ~StaticGrammar { let G = new_symbol_registry(); ~(G,Grammar { start: sym(G, core("E")), productions: ~[ production!( G E -> N:E T:"+" N:T ), // E -> E + T | E - T | T production!( G E -> N:E T:"-" N:T ), production!( G E -> N:T ), production!( G T -> N:T T:"*" N:F ), // T 
-> T * F | T / F | F production!( G T -> N:T T:"/" N:F ), production!( G F -> T:"(" N:E T:")" ), // F -> ( E ) | id production!( G F -> T:"id" ), ]})} fn example_4_7() -> ~StaticGrammar { let G = new_symbol_registry(); ~(G,Grammar { start: sym(G, core("E")), productions: ~[ production!( G E -> N:E T:"+" N:E ), // E -> E + E | E * E | - E | ( E ) | id production!( G E -> N:E T:"*" N:E ), production!( G E -> T:"-" N:E ), production!( G E -> T:"(" N:E T:")" ), production!( G E -> T:"id" ), ]})} fn example_4_13() -> ~StaticGrammar { let G = new_symbol_registry(); ~(G,Grammar { start: sym(G, core("S")), productions: ~[ production!( G S -> T:"(" N:S T:")" ), // S -> ( S ) S | \epsilon production!( G S -> ), ]})} fn exercise_4_2_1() -> ~StaticGrammar { let _ = "aa+a*"; let G = new_symbol_registry(); ~(G,Grammar { start: sym(G, core("S")), productions: ~[ production!( G S -> N:S N:S T:"+" ), // S -> S S + | S S * | a production!( G S -> N:S N:S T:"*" ), production!( G S -> T:"a" ), ]})} fn exercise_4_2_2_help(input: &'static str, n: SymbolRegistry, prods: ~[Prod<&'static str, Sym<&'static str>>]) -> ~StaticGrammar { let _ = input; ~(n,Grammar{ start: sym(n, core("S")), productions: prods }) } fn exercise_4_2_2_a() -> ~StaticGrammar { let G = new_symbol_registry(); exercise_4_2_2_help("000111", G, ~[ production!( G S -> T:"0" N:S T:"1" ), // S -> 0 S 1 | 0 1 production!( G S -> T:"0" T:"1" ), ])} fn exercise_4_2_2_b() -> ~StaticGrammar { let G = new_symbol_registry(); exercise_4_2_2_help("+*aaa", G, ~[ production!( G S -> T:"+" N:S N:S ), // S -> + S S | * S S | a production!( G S -> T:"*" N:S N:S ), production!( G S -> T:"a" ), ])} fn exercise_4_2_2_c() -> ~StaticGrammar { let G = new_symbol_registry(); exercise_4_2_2_help("(()())", G, ~[ production!( G S -> N:S T:"(" N:S T:")" N:S ), // S -> S ( S ) S | \epsilon production!( G S -> ), ])} fn ex_elim_amb_1() -> ~StaticGrammar { let G = new_symbol_registry(); ~(G,Grammar { start: sym(G, core("stmt")), productions: ~[ 
production!( G stmt -> T:"if" N:expr T:"then" N:stmt ), production!( G stmt -> T:"if" N:expr T:"then" N:stmt T:"else" N:stmt ), production!( G stmt -> T:"other" ), ]})} fn ex_elim_amb_2() -> ~StaticGrammar { let G = new_symbol_registry(); ~(G,Grammar { start: sym(G, core("stmt")), productions: ~[ production!( G stmt -> N:matched_stmt ), production!( G stmt -> N:unmatched_stmt ), production!( G matched_stmt -> T:"if" N:expr T:"then" N:matched_stmt T:"else" N:matched_stmt ), production!( G matched_stmt -> T:"other" ), production!( G unmatched_stmt -> T:"if" N:expr T:"then" N:stmt ), production!( G unmatched_stmt -> T:"if" N:expr T:"then" N:matched_stmt T:"else" N:unmatched_stmt ), ]})} fn ex_left_recur_1() -> ~StaticGrammar { let G = new_symbol_registry(); ~(G,Grammar { start: sym(G, core("E")), productions: ~[ production!( G E -> N:E T:"T" N:T ), production!( G E -> N:T ), production!( G T -> N:T T:"*" N:F ), production!( G T -> N:F ), production!( G F -> T:"(" N:E T:")" ), production!( G F -> T:"id" ), ]})} fn ex_left_recur_2() -> ~StaticGrammar { let G = new_symbol_registry(); ~(G,Grammar { start: sym(G, core("E")), productions: ~[ production!( G E -> N:T N:E2 ), production!( G E2 -> T:"+" N:T N:E2 ), production!( G T -> N:F N:T2 ), production!( G T2 -> T:"*" N:F N:T2 ), production!( G T2 -> ), production!( G F -> T:"(" N:E T:")" ), production!( G F -> T:"id" ), ]})} fn ex_left_factor_1() -> ~StaticGrammar { let G = new_symbol_registry(); ~(G,Grammar { start: sym(G, core("stmt")), productions: ~[ production!( G stmt -> T:"if" N:expr T:"then" N:stmt T:"else" N:stmt ), production!( G stmt -> T:"if" N:expr T:"then" N:stmt ), ]})} fn ex_left_factor_2() -> ~StaticGrammar { let G = new_symbol_registry(); ~(G,Grammar { start: sym(G, core("S")), productions: ~[ production!( G S -> T:"i" N:E T:"t" N:S ), production!( G S -> T:"i" N:E T:"t" N:S T:"e" N:S ), production!( G S -> T:"a" ), production!( G E -> T:"b" ), ]})} fn exercise_4_3_1_input() -> ~StaticGrammar { let G = 
new_symbol_registry(); ~(G,Grammar { start: sym(G, core("rexpr")), productions: ~[ production!( G rexpr -> N:rexpr T:"+" N:rterm ), production!( G rexpr -> N:rterm ), production!( G rterm -> N:rterm N:rfactor ), production!( G rterm -> N:rfactor ), production!( G rfactor -> N:rfactor T:"*" ), production!( G rfactor -> N:rprimary ), production!( G rprimary -> T:"a" ), production!( G rprimary -> T:"b" ), ]}) } mod dragon86 { use super::*; pub fn example_4_17() -> ~StaticGrammar { let G = new_symbol_registry(); ~(G,Grammar { start: sym(G, core("E")), productions: ~[ production!( G E -> N:T N:E_ ), // E -> T E' production!( G E_ -> T:"+" N:T N:E_ ), // E' -> + T E' | \epsilon production!( G E_ -> ), production!( G T -> N:F N:T_ ), // T -> F T' production!( G T_ -> T:"*" N:F N:T_ ), // T' -> * F T' | \epsilon production!( G T_ -> ), production!( G F -> T:"(" N:E T:")" ), // F -> ( E ) | id production!( G F -> T:"id" ), ]}) } } fn eliminate_immediate_left_recursion<T:Clone,NT:Eq+Clone+Primable>(prods:&[Prod<T, NT>]) -> ~[Prod<T,NT>] { type P = Prod<T,NT>; type PS = ProductionSym<T,NT>; // Eliminating immediate left recursion for A is the transformation of // // A -> A \alpha_1 | A \alpha_2 | ... | A \alpha_m // | \beta_1 | \beta_2 | ... | \beta_n // // where no \beta_i begins with an A (and no \alpha_i is \epsilon), // replacing the A-productions by: // // A -> \beta_1 A2 | \beta_2 A2 | ... | \beta_n A2 // A2 -> \alpha_1 A2 | \alpha_2 A2 | ... \alpha_m A2 | \epsilon // // where A2 is fresh. 
let mut accum : ~[P] = ~[]; if prods.len() > 0 { let a = &prods[0].head; let bodies : ~[~[PS]] = prods.map(|p| { assert!(p.head == *a); (*p.body).clone() }); let (alphas, betas) : (~[~[PS]], ~[~[PS]]) = bodies.partitioned(|b| b.len() > 0 && b[0].matches_nt(a)); let alphas = alphas.map(|b| { assert!(b.len() > 1); b.slice_from(1).to_owned() }); let a2 = a.prime(); let new_beta_bodies : ~[~[PS]] = betas.map(|b| { let mut b = b.clone(); b.push(NT(a2.clone())); b }); let mut new_alpha_bodies : ~[~[PS]] = alphas.map(|b| { let mut b = b.clone(); b.push(NT(a2.clone())); b }); new_alpha_bodies.push(~[]); for b_body in new_beta_bodies.iter() { accum.push(production(a.clone(), b_body.clone())); } for a_body in new_alpha_bodies.iter() { accum.push(production(a2.clone(), a_body.clone())) } } accum } impl<T,NT:Eq> Prod<T,NT> { fn is_empty(&self) -> bool { self.body.len() == 0 } fn is_left_recursive(&self) -> bool { self.body.len() >= 1 && self.body[0].matches_nt(&self.head) } fn is_trivial_cycle(&self) -> bool { self.body.len() == 1 && self.is_left_recursive() } } fn has_empty<T,NT:Eq>(prods:&[Prod<T,NT>]) -> bool { prods.iter().any(|p| p.is_empty() ) } fn has_left_recursion<T,NT:Eq>(prods:&[Prod<T, NT>]) -> bool { prods.iter().any(|p| p.is_left_recursive() ) } struct LeftFactoring<T,NT> { orig: ~[Prod<T,NT>], fresh: ~[Prod<T,NT>], } fn left_factor_onestep<T:Eq+Ord+Clone, NT:Eq+Ord+Clone+Primable>(rules: &[Prod<T,NT>]) -> Option<LeftFactoring<T,NT>> { use extra::sort; use std::vec; let rules = do sort::merge_sort(rules) |r1, r2| { *r1.body <= *r2.body }; let i0 = rules.iter(); let mut i1 = rules.iter(); i1.next(); let (alt0, alt1) = match do i0.zip(i1).max_by() |&(alt0, alt1)| { alt0.body.maximal_common_prefix(&alt1.body).len() } { Some(e) => e, None => return None, // handle case where rules has only one element. 
    };
    let a = alt0.head.clone();
    // alpha: the maximal common prefix being factored out.
    let alpha = alt0.body.maximal_common_prefix(&alt1.body);
    if alpha.len() == 0 { return None; }
    // Fresh primed nonterminal A'.
    let a_prime = a.prime();
    let mut new_a_rules = ~[];
    let mut new_a2_rules = ~[];
    // A -> alpha A'
    let new_a_body = vec::append_one(alpha.clone(), NT(a_prime.clone()));
    new_a_rules.push(production(a, new_a_body));
    // Every rule whose body factors as alpha ++ suffix contributes
    // A' -> suffix; rules without the prefix are kept unchanged.
    for r in rules.iter() {
        match factor_suffix(alpha, *r.body) {
            None => new_a_rules.push(r.clone()),
            Some(suffix) => new_a2_rules.push(production(a_prime.clone(), suffix.to_owned()))
        }
    }
    Some(LeftFactoring{ orig: new_a_rules, fresh: new_a2_rules })
}

impl<T:Clone+TotalOrd,NT:Eq+Hash+Clone+TotalOrd+Primable> Grammar<T,NT> {
    // Groups this grammar's productions by head nonterminal.
    fn to_nt_map(&self) -> TreeMap<NT, ~[Prod<T,NT>]> {
        type Rules = ~[Prod<T,NT>];
        type NTMap = TreeMap<NT, Rules>;
        let mut rules : NTMap = TreeMap::new();
        for p in self.productions.iter() {
            // Append to an existing entry if present; the `if match` idiom
            // works around holding the find_mut borrow across the insert.
            if match rules.find_mut(&p.head) {
                Some(ref mut v) => { v.push(p.clone()); false },
                None => true,
            } {
                rules.insert(p.head.clone(), ~[p.clone()]);
            }
        }
        rules
    }

    // Inverse of to_nt_map: flattens a head -> rules map back into a
    // Grammar with the given start symbol.
    fn from_nt_map(start: &NT, rules: &TreeMap<NT, ~[Prod<T,NT>]>) -> Grammar<T,NT> {
        let mut new_prods : ~[Prod<T,NT>] = ~[];
        for (_nt, prods) in rules.iter() {
            new_prods.push_all(*prods);
        }
        Grammar { start: start.clone(), productions: new_prods }
    }

    // Eliminating all (immediate and multi-step) left recursion from a
    // grammar, for grammars with no cycles or \epsilon-productions.
    // (Note that the resulting grammar may have \epsilon productions.)
    //
    // Let the nonterminals be A_1, A_2, ..., A_n
    //
    // for i := 1 to n do:
    //   for j := 1 to i-1 do:
    //     replace each production of the form A_i -> A_j \gamma
    //       by the productions
    //       A_i -> \delta_1 \gamma | \delta_2 \gamma | ... | \delta_k \gamma,
    //       where A_j -> \delta_1 | \delta_2 | ... | \delta_k
    //       are all the current A_j-productions;
    //   end
    //   eliminate the immediate left recursion among the A_i productions
    // end
    fn eliminate_left_recursion(&self) -> Grammar<T,NT> {
        use std::vec;
        type Rules = ~[Prod<T,NT>];
        type Bodies = ~[PString<T,NT>];
        type NTMap = TreeMap<NT, Rules>;
        let mut rules : NTMap = self.to_nt_map();
        // Fixed ordering A_1 .. A_n of the nonterminals (TreeMap key order).
        let keys : ~[NT] = rules.iter().map(|(k,_v)|k.clone()).collect();
        for i in range(0, keys.len()) {
            let A_i = keys[i].clone();
            for j in range(0, i) {
                let A_j = keys[j].clone();
                // replace each production of the form A_i -> A_j \gamma
                // by the productions { A_i -> \delta_l \gamma | l in {1..k} }
                // where A_j -> \delta_1 | \delta_2 | ... | \delta_k
                // are all the current A_j-productions;
                let deltas : Bodies = rules.find(&A_j).unwrap().iter().map(|p|p.body.clone()).collect();
                let mut new_rules : Rules = ~[];
                for p in rules.find(&A_i).unwrap().iter() {
                    let body = p.body.clone();
                    if body.len() > 0 && body[0].matches_nt(&A_j) {
                        let gamma = body.slice_from(1);
                        for delta in deltas.iter().map(|pb|(**pb).clone()) {
                            new_rules.push(production(A_i.clone(), vec::append(delta, gamma)));
                        }
                    } else {
                        new_rules.push(p.clone());
                    }
                }
                rules.insert(A_i.clone(), new_rules);
            }
            // Now clear any immediate left recursion among the A_i rules.
            let new_rules = eliminate_immediate_left_recursion(*rules.find(&A_i).unwrap());
            rules.insert(A_i, new_rules);
        }
        Grammar::from_nt_map(&self.start, &rules)
    }
}

impl<T:Clone+Eq+Ord+TotalOrd,NT:Clone+Eq+Ord+TotalOrd+Hash+Primable> Grammar<T,NT> {
    // Repeatedly applies left_factor_onestep to each nonterminal's rules
    // until no further factoring is possible (a fixed point).
    fn left_factor(&self) -> Grammar<T,NT> {
        use extra::sort;
        let mut rules = self.to_nt_map();
        loop {
            let keys : ~[NT] = rules.iter().map(|(k,_v)|k.clone()).collect();
            let mut changed = false;
            for k in keys.iter() {
                match left_factor_onestep(*rules.find(k).unwrap()) {
                    None => {},
                    Some(LeftFactoring{ orig: orig, fresh: fresh }) => {
                        // Replace the A-rules and install the new A'-rules,
                        // then restart the scan from the top.
                        rules.insert(orig[0].head.clone(), orig);
                        rules.insert(fresh[0].head.clone(), fresh);
                        changed = true;
                        break;
                    }
                }
            }
            // `loop;` is this dialect's `continue`.
            if changed { loop; } else { break; }
        }
        Grammar::from_nt_map(&self.start, &rules)
    }
}

struct
// Generator state for building a predictive (LL(1)-style) parser:
// the grammar plus memoized FIRST and FOLLOW sets per nonterminal.
PredictiveParserGen<'self, T,NT> {
    grammar: &'self Grammar<T,NT>,
    precomputed_firsts: HashMap<NT, FirstSet<T>>,
    precomputed_follows: HashMap<NT, FollowSet<T>>,
}

// Table cells hold production sets; more than one entry in a cell means
// the grammar is not LL(1) for that (nonterminal, lookahead) pair.
type MidEntry<T,NT> = HashSet<Prod<T, NT>>;
type EndEntry<T,NT> = HashSet<Prod<T, NT>>;

// M[A, a] for proper terminals a.
struct MidTable<T,NT> { map: HashMap<(NT,T), MidEntry<T,NT>> }
// M[A, $]: the end-of-input column.
struct EndTable<T,NT> { map: HashMap<NT, EndEntry<T,NT>> }

impl<T:Terminal,NT:NonTerminal> MidTable<T,NT> {
    fn new() -> MidTable<T,NT> { MidTable { map: HashMap::new() } }
    // Adds production p to cell M[nt, t], creating the cell if absent.
    fn insert(&mut self, nt: NT, t: T, p: Prod<T,NT>) {
        // `mangle` takes an insert-if-absent closure and an update closure.
        fn new<T:Terminal, NT:NonTerminal>(_: &(NT,T), p: Prod<T,NT>) -> MidEntry<T,NT> {
            let mut s = HashSet::new();
            s.insert(p.clone());
            s
        }
        fn add<'a, T:Terminal,NT:NonTerminal>(_: &(NT,T), prior: &'a mut MidEntry<T,NT>, p: Prod<T,NT>) {
            prior.insert(p);
        }
        self.map.mangle((nt, t), p, new::<T,NT>, add::<T,NT>);
    }
    // Not yet implemented; always fails.
    fn entries(&self, _nt: NT, _input: T, _visit: &fn (&Prod<T,NT>)) {
        fail!("MidTable entries unimplemented");
    }
}

impl <T:Terminal,NT:NonTerminal> EndTable<T,NT> {
    fn new() -> EndTable<T,NT> { EndTable { map: HashMap::new() } }
    // Adds production prod to cell M[nt, $], creating the cell if absent.
    fn insert(&mut self, nt: NT, prod: Prod<T,NT>) {
        fn new<T:Terminal, NT:NonTerminal>(_: &NT, p: &Prod<T,NT>) -> EndEntry<T,NT> {
            let mut s = HashSet::new();
            s.insert(p.clone());
            s
        }
        fn add<T:Terminal, NT:NonTerminal>(_: &NT, prior: &mut EndEntry<T,NT>, p: &Prod<T,NT>) {
            prior.insert(p.clone());
        }
        self.map.mangle(nt, &prod, new::<T,NT>, add::<T,NT>);
    }
    // Not yet implemented; always fails.
    fn entries(&self, _nt: NT, _visit: &fn (&Prod<T,NT>)) {
        fail!("EndTable entries unimplemented");
    }
}

// Complete predictive parsing table: the terminal/nonterminal alphabets
// plus the mid ($-free) and end ($) columns.
struct PredictiveParsingTable<T,NT> {
    terms: HashSet<T>,
    nonterms: HashSet<NT>,
    mid: MidTable<T,NT>,
    end: EndTable<T,NT>,
}

// A single populated table cell, borrowed from the table.
enum Cell<'self, T,NT> {
    MidCell(&'self T, &'self NT, &'self Prod<T,NT>),
    EndCell(&'self NT, &'self Prod<T,NT>),
}

impl<T:Terminal,NT:NonTerminal> PredictiveParsingTable<T,NT> {
    // Internal iteration (pre-Iterator idiom): visit each terminal.
    fn each_term(&self, visit: &fn(&T)) { for t in self.terms.iter() { visit(t); } }
    // Visit each nonterminal.
    fn each_nonterm(&self, visit: &fn(&NT)) { for nt in self.nonterms.iter() { visit(nt); } }
    // Visit each production in cell M[A, a]; no-op if the cell is empty.
    fn each_prod(&self, key: &(NT, T), visit: &fn(&Prod<T,NT>)) {
        match self.mid.map.find(key) {
            None => {},
            Some(s) => { for p in s.iter() { visit(p); } }
        }
    }
    // Visit each production in cell M[A, $]; no-op if the cell is empty.
    fn each_end_prod(&self, key: &NT, visit: &fn(&Prod<T,NT>)) {
        match self.end.map.find(key) {
            None => {},
            Some(s) => { for p in s.iter() { visit(p); } }
        }
    }
    // Visit every populated cell: all mid cells, then all end cells.
    fn each_cell(&self, visit: &fn(&Cell<T,NT>)) {
        do self.each_term |t| {
            do self.each_nonterm |nt| {
                let key = (nt.clone(), t.clone());
                do self.each_prod(&key) |p| {
                    let c = MidCell(t, nt, p);
                    visit(&c);
                }
            }
        }
        do self.each_nonterm |nt| {
            do self.each_end_prod(nt) |p| {
                let c = EndCell(nt, p);
                visit(&c);
            }
        }
    }
}

// A parsing table has its rows labelled by non-terminals and its
// columns labelled by terminals.
impl<T:Terminal+ToStr,NT:NonTerminal+ToStr> ToStr for PredictiveParsingTable<T,NT> {
    // Renders the table as ASCII art; continues on the following lines.
    fn to_str(&self) -> ~str {
        use extra::sort::quick_sort;
        // For proper formatting, we need to know the maximum
        // width of all productions in a column (as well as the
        // length of the string for the token itself labelling the
        // column).
        let mut width_map : ~[int] = ~[];
        // Copy terms and nonterms into vectors to keep stable order
        fn iter_clone_to_vec<T:Clone,I:Iterator<T>>(i: I) -> ~[T] { i.map(|x|x.clone()).collect() }
        // why doesn't this work same as below?
        // let terms : ~[T] = iter_clone_to_vec(self.terms.iter());
        let mut terms : ~[T] = self.terms.iter().map(|x|x.clone()).collect();
        let mut nonterms : ~[NT] = self.nonterms.iter().map(|x|x.clone()).collect();
        // Sort columns/rows by rendered name for a deterministic layout.
        do quick_sort(terms) |a,b| { a.to_str() <= b.to_str() }
        do quick_sort(nonterms) |a,b| { a.to_str() <= b.to_str() }
        let terms = terms;
        let nonterms = nonterms;
        println!("terms: {:?}\n", terms);
        println!("nonterms: {:?}\n", nonterms);
        // One column width per terminal: the widest of the label and any
        // production rendered in that column.
        for t in terms.iter() {
            let mut max_width = t.to_str().len() as int;
            for nt in nonterms.iter() {
                let key = (nt.clone(), t.clone());
                do self.each_prod(&key) |p| {
                    let l = p.to_str().len() as int;
                    if l > max_width { max_width = l }
                }
            }
            width_map.push(max_width);
        }
        // ...
        // and the end productions as well ($ column width; the stray words
        // above were a comment continuation garbled by the paste).
        {
            let mut max_width = 1;
            for nt in nonterms.iter() {
                do self.each_end_prod(nt) |p| {
                    let l = p.to_str().len() as int;
                    if l > max_width { max_width = l }
                }
            }
            width_map.push(max_width);
        }
        // Row-header width: widest nonterminal name.
        let mut row_header_width = 0;
        for nt in nonterms.iter() {
            let l = nt.to_str().len();
            if l > row_header_width { row_header_width = l; }
        }
        let row_header_width = row_header_width;
        // Header row: blank corner, then each terminal centered in its
        // column between backticks, then the $ column after a `||`.
        let mut s = " ".repeat(row_header_width) + " |";
        for i in range(0, terms.len()) {
            let ref t = terms[i];
            let len = width_map[i];
            let t = t.to_str();
            let remainder = len - (t.len() as int);
            let left = remainder / 2;
            let right = remainder - left;
            if remainder < 0 || left < 0 || right < 0 { fail!("negative pad value(s)."); }
            let left = " ".repeat(left as uint);
            let right = " ".repeat(right as uint);
            s = s + "| " + left + "`" + t + "`" + right + " ";
        }
        {
            let i = terms.len();
            let len = width_map[i];
            let t = "$";
            let remainder = len - 1;
            let left = remainder / 2;
            let right = remainder - left;
            if remainder < 0 || left < 0 || right < 0 { fail!("negative pad value(s)."); }
            let left = " ".repeat(left as uint);
            let right = " ".repeat(right as uint);
            s = s + "|| " + left + " " + t + " " + right + " ";
        }
        let _row_width = s.len();
        s = s + "\n";
        // Divider builder: `line` is the fill char, brk1/brk2 the column
        // separators for mid vs. end columns.
        let row_div = |line:&str, brk1: &str, brk2: &str| -> ~str {
            let mut s = ~"";
            s = s + line.repeat(row_header_width) + line + brk1;
            for w in range(0, width_map.len()) {
                s = s + if w+1 < width_map.len() { brk1 } else { brk2 };
                s = s + line.repeat(width_map[w] as uint + 4);
            }
            s = s + "\n";
            s
        };
        s = s + row_div("=", "|", "||");
        // One visual row per nonterminal; a row spans several text lines
        // when some cell holds more than one production (LL(1) conflict).
        for nt in nonterms.iter() {
            let mut entries : ~[~[Prod<T,NT>]] = ~[];
            let mut max_len = 1; // always print a row, even if no entries.
            // Collect this row's cell contents per terminal column...
            for t in terms.iter() {
                let mut prods : ~[Prod<T,NT>] = ~[];
                let key = (nt.clone(), t.clone());
                do self.each_prod(&key) |p| { prods.push(p.clone()); }
                if prods.len() > max_len { max_len = prods.len(); }
                entries.push(prods);
            }
            // ...and for the $ column.
            {
                let mut prods : ~[Prod<T,NT>] = ~[];
                do self.each_end_prod(nt) |p| { prods.push(p.clone()); }
                if prods.len() > max_len { max_len = prods.len(); }
                entries.push(prods);
            }
            let max_len = max_len;
            for i in range(0, max_len) {
                // First text line of the row carries the right-aligned
                // nonterminal label; continuation lines are blank there.
                if i == 0 {
                    let nt = nt.to_str();
                    assert!(nt.len() <= row_header_width);
                    let remainder = row_header_width - nt.len();
                    let left = " ".repeat(remainder);
                    s = s + left + nt + " |";
                } else {
                    s = s + " ".repeat(row_header_width) + " |";
                }
                // Mid columns: i-th production of the cell, or blank fill.
                for j in range(0, terms.len()) {
                    let len = width_map[j];
                    if i < entries[j].len() {
                        let p = entries[j][i].to_str();
                        let remainder = len - (p.len() as int);
                        let left = remainder / 2;
                        let right = remainder - left;
                        if left < 0 || right < 0 {
                            println!("width_map: {:?}", width_map);
                            println!("prod: {}", p);
                            println!("remainder: {} left: {} right: {}", remainder, left, right);
                        }
                        if remainder < 0 || left < 0 || right < 0 { fail!("negative pad value(s)."); }
                        assert!(left >= 0);
                        assert!(right >= 0);
                        let remainder = " ".repeat(remainder as uint);
                        let _left = " ".repeat(left as uint);
                        let _right = " ".repeat(right as uint);
                        s = s + "| " + p + " " + remainder + " ";
                    } else {
                        assert!(len >= 0);
                        let fill = " ".repeat(len as uint);
                        s = s + "| " + fill + " ";
                    }
                }
                // The $ column, same layout but with the `||` separator.
                {
                    let j = terms.len();
                    let len = width_map[j];
                    if i < entries[j].len() {
                        let p = entries[j][i].to_str();
                        let remainder = len - (p.len() as int);
                        let left = remainder / 2;
                        let right = remainder - left;
                        if left < 0 || right < 0 {
                            println!("width_map: {:?} j: {} len: {}", width_map, j, len);
                            println!("prod: {}", p);
                            println!("remainder: {} left: {} right: {}", remainder, left, right);
                        }
                        if remainder < 0 || left < 0 || right < 0 { fail!("negative pad value(s)."); }
                        assert!(left >= 0);
                        assert!(right >= 0);
                        let remainder = " ".repeat(remainder as uint);
                        let
                        _left = " ".repeat(left as uint);
                        let _right = " ".repeat(right as uint);
                        s = s + "|| " + p + " " + remainder + " ";
                    } else {
                        assert!(len >= 0);
                        let fill = " ".repeat(len as uint);
                        s = s + "|| " + fill + " ";
                    }
                }
                s = s + "\n";
            }
            s = s + row_div("-", "+", "++");
        }
        s
    }
}

// FIRST set representation: Empty and Term are lightweight transient forms
// (used while folding over a production body); Many is the general form,
// with has_epsilon recording whether \epsilon is a member.
enum FirstSet<T> { Empty, Term(T), Many{beginnings: HashSet<T>, has_epsilon: bool} }

impl<T:ToStr+Eq+Hash> ToStr for FirstSet<T> {
    // Renders e.g. "{a, b, ε}"; ε appears last when has_epsilon is set.
    fn to_str(&self) -> ~str {
        match self {
            &Empty => ~"{}",
            &Term(ref t) => ~"{" + t.to_str() + "}",
            &Many{beginnings: ref b, has_epsilon: e} => {
                let mut seen = false;
                let ret = do b.iter().fold(~"{") |b, a| {
                    let ret = if seen { b+", "+a.to_str() } else { b+a.to_str() };
                    seen = true;
                    ret
                };
                let ret = if e {
                    let epsilon = "\u03B5";
                    (if seen { ret + ", " } else { ret }) + epsilon
                } else {
                    ret
                };
                ret + "}"
            }
        }
    }
}

impl<T:Eq+IterBytes+Clone> FirstSet<T> {
    // Whether \epsilon is in the set; only a Many can contain it.
    fn contains_epsilon(&self) -> bool {
        match self {
            &Empty => false,
            &Term(*) => false,
            &Many{ beginnings: _, has_epsilon: b } => b,
        }
    }
    // An empty Many with the given epsilon flag.
    fn termless_many(has_epsilon: bool) -> ~FirstSet<T> {
        ~Many{ beginnings: HashSet::new(), has_epsilon: has_epsilon }
    }
    // A one-terminal Many with the given epsilon flag.
    fn singleton_many(t: T, has_epsilon: bool) -> ~FirstSet<T> {
        let mut s = HashSet::new();
        s.insert(t);
        ~Many{ beginnings: s, has_epsilon: has_epsilon }
    }
    // Consumes self, returning an equivalent set with \epsilon added
    // (normalizing Empty/Term to the Many form in the process).
    fn add_epsilon(~self) -> ~FirstSet<T> {
        match self {
            ~Empty => FirstSet::termless_many(true),
            ~Term(t) => FirstSet::singleton_many(t, true),
            ~Many{ beginnings: b, has_epsilon: _ } => {
                ~Many{ beginnings: b, has_epsilon: true }
            }
        }
    }
    // Consumes self, returning self ∪ other. Self is first normalized to
    // Many so the union can be built in place.
    fn union(~self, other: &FirstSet<T>) -> ~FirstSet<T> {
        let mut recv = match self {
            ~Empty => FirstSet::termless_many(false),
            ~Term(t) => FirstSet::singleton_many(t, false),
            ~Many{ beginnings: _, has_epsilon: _ } => self
        };
        match recv {
            // recv was just normalized to Many above.
            ~Empty | ~Term(*) => fail!("cannot happen now"),
            ~Many{ beginnings: ref mut b, has_epsilon: ref mut e } => {
                match other {
                    &Empty => {},
                    &Term(ref t) => { b.insert(t.clone()); },
                    &Many{ beginnings: ref c, has_epsilon: ref f } => {
                        for s in c.iter() {
                            b.insert(s.clone());
                        }
                        // Union also propagates the epsilon flag.
                        *e |= *f;
                    }
                }
            }
        }
        recv
    }
}

impl<T:IterBytes+Eq> FirstSet<T> {
    // Internal iteration over the member terminals (\epsilon excluded).
    fn for_each_term(&self, f: &fn (&T) -> ()) {
        match self {
            &Empty => {},
            &Term(ref t) => f(t),
            &Many{ beginnings: ref b, has_epsilon: _ } => {
                // `all` abused as a for-each; always returns true.
                b.iter().all(|x| { f(x); true });
            },
        }
    }
    // Same as contains_epsilon (duplicate in a differently-bounded impl).
    fn has_epsilon(&self) -> bool {
        match self {
            &Empty | &Term(*) => false,
            &Many{ beginnings: _, has_epsilon: he } => he,
        }
    }
}

// FOLLOW set: the terminals that can appear immediately to the right of a
// nonterminal, plus can_terminate for the end marker $.
struct FollowSet<T> {
    right_neighbors: HashSet<T>,
    can_terminate: bool,
}

impl<T:Hash+Eq+Clone> Clone for FollowSet<T> {
    fn clone(&self) -> FollowSet<T> {
        FollowSet{ right_neighbors: self.right_neighbors.clone(),
                   can_terminate: self.can_terminate }
    }
}

impl<T:ToStr+IterBytes+Eq> ToStr for FollowSet<T> {
    // Renders e.g. "{a, b, $}"; $ appears last when can_terminate is set.
    fn to_str(&self) -> ~str {
        let mut seen = false;
        let ret = do self.right_neighbors.iter().fold(~"{") |b, a| {
            if seen { b+", "+a.to_str() } else { seen = true; b+a.to_str() }
        };
        let ret = if self.can_terminate {
            if seen { ret + ", $" } else { ret + "$" }
        } else {
            ret
        };
        ret + "}"
    }
}

impl<T:Clone+Eq+IterBytes> FollowSet<T> {
    // FOLLOW(start) seed: just { $ }.
    fn just_end_marker() -> FollowSet<T> {
        FollowSet{ right_neighbors: HashSet::new(), can_terminate: true }
    }
}

// Debug helper: "None" or "Some(<rendered>)".
fn opt_to_str<T:ToStr>(x: Option<&T>) -> ~str {
    match x {
        None => ~"None",
        Some(x) => format!("Some({:s})", x.to_str()),
    }
}

impl<'self, T:Terminal+ToStr, NT:NonTerminal+ToStr> PredictiveParserGen<'self, T,NT> {
    // Builds the predictive parsing table from the precomputed FIRST and
    // FOLLOW sets: for each production A -> alpha, every a in FIRST(alpha)
    // yields M[A,a] := p; if \epsilon in FIRST(alpha), FOLLOW(A) fills in
    // the remaining cells (including the $ column). Continues below.
    fn make_parsing_table(&self) -> PredictiveParsingTable<T,NT> {
        type Rule = Prod<T,NT>;
        let mut mid_table = MidTable::new();
        let mut end_table = EndTable::new();
        let mut terms = HashSet::new();
        let mut nonterms = HashSet::new();
        for p in self.grammar.productions_iter() {
            println!("prod: {:s}", p.to_str());
            let A : NT = p.head.clone();
            let newA = || A.clone();
            nonterms.insert(newA());
            let ref alpha = *p.body;
            let first = self.first(*alpha);
            println!("FIRST({:s}): {:s}", A.to_str(), first.to_str());
            do first.for_each_term |a| {
                terms.insert(a.clone());
                println!("  a in FIRST(A) where a = `{:s}`, A = {:s} => M[{1:s},{0:s}] gets {:s}", a.to_str(),
                         A.to_str(), p.to_str());
                mid_table.insert(newA(), a.clone(), p.clone());
            }
            // \epsilon in FIRST(alpha): route FOLLOW(A) lookaheads (and $
            // if FOLLOW(A) can terminate) to this production too.
            if first.has_epsilon() {
                let follow = self.follow(&A);
                println!("FOLLOW({:s}): {:s}", A.to_str(), follow.to_str());
                for b in follow.right_neighbors.iter() {
                    terms.insert(b.clone());
                    mid_table.insert(newA(), b.clone(), p.clone());
                }
                if follow.can_terminate {
                    end_table.insert(newA(), p.clone());
                }
            }
        }
        PredictiveParsingTable {
            terms: terms,
            nonterms: nonterms,
            mid: mid_table,
            end: end_table,
        }
    }

    // Constructs a generator for `grammar`, computing FIRST for every
    // nonterminal and then FOLLOW, each by fixed-point iteration.
    fn make<'a>(grammar: &'a Grammar<T,NT>) -> PredictiveParserGen<'a, T,NT> {
        let mut first : HashMap<NT, FirstSet<T>> = HashMap::new();
        // Seed: heads of \epsilon-productions have \epsilon in FIRST.
        for p in grammar.productions_iter() {
            if p.body.len() == 0 {
                first.insert(p.head.clone(),
                             Many{ beginnings: HashSet::new(), has_epsilon: true });
            }
        }
        // FIRST fixed point: iterate until no set grows.
        loop {
            let mut any_changed = false;
            for p in grammar.productions_iter() {
                // Terminals contributed to FIRST(head) by this production.
                let mut to_add : HashSet<T> = HashSet::new();
                // True while every body symbol seen so far can be \epsilon.
                let mut all_had_epsilon = true;
                // Folds one symbol's FIRST into to_add; returns true when
                // scanning should stop (symbol cannot be \epsilon).
                let update = |first_set:&FirstSet<T>| -> bool {
                    do first_set.for_each_term |s| { to_add.insert(s.clone()); }
                    if !first_set.has_epsilon() {
                        all_had_epsilon = false;
                        true
                    } else {
                        false
                    }
                };
                for s in p.body.iter() {
                    match s {
                        &T(ref t) => {
                            let first_set = Term(t.clone());
                            if update(&first_set) { break; }
                        },
                        &NT(ref nt) => {
                            match first.find(nt) {
                                None => { all_had_epsilon = false; break; }, // wait until entry is filled later.
                                Some(first_set) => {
                                    if update(first_set) { break; }
                                }
                            }
                        }
                    };
                }
                // mangle: insert a fresh Many, or merge to_add into the
                // existing entry, flagging any_changed on real growth.
                let fresh_entry = |_:&NT, to_add| {
                    any_changed = true;
                    Many{ beginnings: to_add,
                          has_epsilon: all_had_epsilon, }
                };
                let update_entry = |_:&NT, prior: &mut FirstSet<T>, to_add:HashSet<T>| {
                    let action = match prior {
                        // NOTE(review): the Empty/Term arms look unreachable
                        // here -- entries are only ever created as Many.
                        &Empty => {
                            Some(Many{ beginnings: to_add,
                                       has_epsilon: all_had_epsilon })
                        }
                        &Term(ref t) => {
                            let mut to_add = to_add.clone();
                            to_add.insert(t.clone());
                            Some(Many{ beginnings: to_add.clone(),
                                       has_epsilon: all_had_epsilon })
                        },
                        &Many{beginnings: ref mut begin_recv, has_epsilon: ref mut eps_recv} => {
                            for t in to_add.iter() {
                                if begin_recv.insert(t.clone()) { any_changed = true; }
                            }
                            if !*eps_recv && all_had_epsilon {
                                *eps_recv = true;
                                any_changed = true;
                            }
                            None
                        }
                    };
                    match action { Some(p) => *prior = p, None => {} }
                };
                first.mangle(p.head.clone(), to_add, fresh_entry, update_entry);
            }
            if !any_changed { break; }
        }
        // Intermediate generator: FIRST is done, FOLLOW still empty; lets
        // the FOLLOW pass below call .first() on symbol strings.
        let prefollows = PredictiveParserGen {
            grammar: grammar,
            precomputed_firsts: first,
            precomputed_follows: HashMap::new(),
        };
        let mut follows : HashMap<NT, FollowSet<T>> = HashMap::new();
        // FOLLOW(start) starts as { $ }.
        follows.insert(grammar.start.clone(), FollowSet::just_end_marker());
        let mut iter_count = 0u;
        // FOLLOW fixed point.
        loop {
            let mut any_change = false;
            iter_count = iter_count + 1;
            // mangle callbacks: seed/extend FOLLOW(B) from FIRST(beta)...
            let copy_first = |_:&NT, first_beta:&FirstSet<T>| {
                any_change = true;
                let mut s = HashSet::new();
                do first_beta.for_each_term |t| { s.insert(t.clone()); }
                FollowSet{ right_neighbors: s, can_terminate: false }
            };
            let add_all_first = |_:&NT, prior: &mut FollowSet<T>, first_beta:&FirstSet<T>| {
                do first_beta.for_each_term |t| {
                    if prior.right_neighbors.insert(t.clone()) { any_change = true; }
                }
            };
            // ...and from FOLLOW(A) when beta can derive \epsilon.
            let copy_follow = |_:&NT, follow_A:&FollowSet<T>| {
                any_change = true;
                follow_A.clone()
            };
            let add_all_follow = |_:&NT, prior: &mut FollowSet<T>, follow_A:&FollowSet<T>| {
                for r in follow_A.right_neighbors.iter() {
                    if prior.right_neighbors.insert(r.clone()) { any_change = true; }
                }
                if !prior.can_terminate && follow_A.can_terminate {
                    any_change = true;
                    prior.can_terminate = true;
                }
            };
            for p in grammar.productions_iter() {
                // Production A -> α B β
                //   implies FOLLOW(B) := FOLLOW(B) U (FIRST(β) \ {ε})
                //
                // Production A -> α B or A -> α B β where ε in FIRST(β)
                //   imples FOLLOW(B) := FOLLOW(B) U FOLLOW(A)
                for i in range(0, p.body.len()) {
                    match p.body[i] {
                        T(*) => {},
                        NT(ref B) => {
                            // beta: everything to the right of B.
                            let beta = p.body.slice(i+1, p.body.len());
                            let first_beta = prefollows.first(beta);
                            // Snapshot FOLLOW(A) first to avoid aliasing
                            // the map while mangling FOLLOW(B).
                            let act = if first_beta.contains_epsilon() {
                                let ref A = p.head;
                                match follows.find(A) {
                                    None => None,
                                    Some(f) => Some(f.clone())
                                }
                            } else {
                                None
                            };
                            follows.mangle(B.clone(), &first_beta,
                                           |nt,fi| copy_first(nt,fi),
                                           |nt,p,fi| add_all_first(nt,p,fi));
                            match act {
                                None => {},
                                Some(ref f) => {
                                    follows.mangle(B.clone(), f,
                                                   |nt,fo| copy_follow(nt,fo),
                                                   |nt,p,fo| add_all_follow(nt,p,fo));
                                }
                            }
                        }
                    }
                }
            }
            if !any_change { break; }
        }
        PredictiveParserGen{
            grammar: grammar,
            precomputed_firsts: prefollows.precomputed_firsts,
            precomputed_follows: follows,
        }
    }

    // FIRST of a single terminal is just itself.
    fn first_for_term(&self, t: T) -> FirstSet<T> { Term(t) }

    // Memoized FIRST for a nonterminal (computed in make()).
    fn first_for_nonterm<'a>(&'a self, nt: &NT) -> &'a FirstSet<T> {
        self.precomputed_firsts.get(nt)
    }

    // FIRST of a symbol string alpha: union symbol FIRSTs left to right,
    // stopping at the first symbol that cannot derive \epsilon; add
    // \epsilon iff every symbol could. Continues on the next lines.
    fn first(&self, alpha: &[ProductionSym<T,NT>]) -> FirstSet<T> {
        let mut accum = ~Empty;
        let mut all_contain_epsilon = true;
        for s in alpha.iter() {
            match s {
                &T(ref t) => {
                    accum = accum.union(&self.first_for_term(t.clone()));
                    all_contain_epsilon = false;
                    break;
                },
                &NT(ref nt) => {
                    let f = self.first_for_nonterm(nt);
                    accum = accum.union(f);
                    if !f.contains_epsilon() {
                        all_contain_epsilon = false;
                        break;
                    }
                }
            }
        }
        // N.B. If alpha is empty, we get right answer here (inherently).
        if all_contain_epsilon { accum = accum.add_epsilon(); }
        *accum
    }

    // Memoized FOLLOW for a nonterminal (computed in make()).
    fn follow<'a>(&'a self, A: &NT) -> &'a FollowSet<T> {
        self.precomputed_follows.get(A)
    }
}

// The tests below are print-and-inspect smoke tests: they exercise the
// transformations and dump results; they do not assert on output.

#[test]
fn elim_immed_left_rec() {
    let g = eliminate_immediate_left_recursion(ex_elim_amb_1().owned_productions());
    println(fmt!("%s\n", g.to_str()));
}

#[test]
fn elim_left_rec() {
    let g = ex_left_recur_1().owned_grammar().eliminate_left_recursion();
    println(fmt!("%s\n", g.to_str()));
}

#[test]
fn left_factor() {
    println(fmt!("left_factor_1:\n%s\n", ex_left_factor_1().to_str()));
    println(fmt!("left_factor_1.left_factor():\n%s\n",
                 ex_left_factor_1().owned_grammar().left_factor().to_str()));
    println(fmt!("left_factor_2:\n%s\n", ex_left_factor_2().to_str()));
    println(fmt!("left_factor_2.left_factor():\n%s\n",
                 ex_left_factor_2().owned_grammar().left_factor().to_str()));
}

#[test]
fn exercise_4_3_1() {
    // Left-factor first, then eliminate left recursion.
    let g = exercise_4_3_1_input();
    println(fmt!("4_3_1:\n%s\n", g.to_str()));
    let h = g.to_grammar().left_factor();
    println(fmt!("4_3_1 left factored:\n%s\n", h.to_str()));
    let i = h.eliminate_left_recursion();
    println(fmt!("4_3_1 left factored, left rec elim:\n%s\n", i.to_str()));
}

#[test]
fn whoa() {
    // Dump every example grammar's rendering.
    let ex4_5 = example_4_5();
    println(fmt!("%s\n", ex4_5.to_str()));
    println(fmt!("%s\n", example_4_6().to_str()));
    println(fmt!("%s\n", example_4_7().to_str()));
    println(fmt!("%s\n", example_4_13().to_str()));
    println(fmt!("%s\n", ex_elim_amb_1().to_str()));
    println(fmt!("left_recur_1:\n%s\n", ex_left_recur_1().to_str()));
    println(fmt!("left_recur_2:\n%s\n", ex_left_recur_2().to_str()));
}

#[test]
fn first() {
    // go: build a generator for `maker`'s grammar and print FIRST of the
    // symbol string that `make_alpha` assembles from its registry.
    fn go(name: ~str, maker: &fn() -> ~StaticGrammar, make_alpha: &fn(&SymbolRegistry) -> StaticStr) {
        let ~(syms, ref g) = maker();
        let ppg = PredictiveParserGen::make(g);
        let alpha : StaticStr = make_alpha(&syms);
        println(fmt!("%s alpha: %s", name, alpha.to_str()));
        println(fmt!("FIRST(alpha): %s", ppg.first(*alpha).to_str()));
    }
    do go(~"eg 4.5", example_4_5) |syms| {
        PString(~[ NT(syms.sym("expression")), ])
    }
    do go(~"eg 4.13", example_4_13) |syms| {
        PString(~[ NT(syms.sym("S")), NT(syms.sym("S")), T("h"), ])
    }
    do go(~"ex 4.2.1", exercise_4_2_1) |syms| {
        PString(~[ NT(syms.sym("S")), NT(syms.sym("S")), ])
    }
    do go(~"ex 4.2.1", ex_elim_amb_1) |syms| {
        PString(~[ NT(syms.sym("stmt")), NT(syms.sym("stmt")), ])
    }
    do go(~"ex elim amb 2", ex_elim_amb_2) |syms| {
        PString(~[ NT(syms.sym("stmt")), NT(syms.sym("unmatched_stmt")), ])
    }
    do go(~"ex elim amb 2", ex_elim_amb_2) |syms| {
        PString(~[ NT(syms.sym("unmatched_stmt")), NT(syms.sym("stmt")), ])
    }
    do go(~"extra (empty) case 4.5", example_4_5) |_syms| {
        PString(~[ ])
    }
}

#[test]
fn follow() {
    let ~(syms, ref g) = example_4_5();
    let ppg = PredictiveParserGen::make(g);
    let t = syms.sym("expression");
    // notably, "id" is not in FOLLOW(<expression>)
    println(fmt!("ex 4.5 T: %s FOLLOW(T): %s", t.to_str(), ppg.follow(&t).to_str()));
    let ~(syms, ref g) = ex_elim_amb_2();
    let ppg = PredictiveParserGen::make(g);
    let t = syms.sym("expr");
    // notably, "id" is not in FOLLOW(<expression>)
    println(fmt!("ex elim amb 2 T: %s FOLLOW(T): %s", t.to_str(), ppg.follow(&t).to_str()));
    let t = syms.sym("matched_stmt");
    println(fmt!("ex elim amb 2 T: %s FOLLOW(T): %s", t.to_str(), ppg.follow(&t).to_str()));
    // FOLLOW(<unmatched_stmtm>) ?= { $ } ?
    let t = syms.sym("unmatched_stmt");
    println(fmt!("ex elim amb 2 T: %s FOLLOW(T): %s", t.to_str(), ppg.follow(&t).to_str()));
}

#[test]
fn test_make_table() {
    // Build and print the predictive parsing table for several grammars.
    let process = |g| {
        let ppg = PredictiveParserGen::make(g);
        let table = ppg.make_parsing_table();
        println(fmt!("grammar: %s, parsing_table: \n%s", g.to_str(), table.to_str()));
    };
    let ~(_syms, ref g) = example_4_5();
    process(g);
    let ~(_syms, ref g) = dragon86::example_4_17();
    process(g);
    let ~(_syms, ref g) = ex_left_factor_2();
    process(g);
    let ~(_syms, ref g) = ex_left_factor_2();
    let ref g = g.left_factor();
    process(g);
}

// Generic clone-collect helpers (pre-Iterator-trait-method era).
fn iter_to_vec<'a, X:Clone, I:Iterator<X>>(i:I) -> ~[X] {
    i.map(|x| x.clone()).collect()
}

fn set_to_vec<X:Eq+Hash+Clone>(s:&HashSet<X>) -> ~[X] {
    s.iter().map(|x| x.clone()).collect()
}
// Closes a scope opened before this chunk (enclosing mod/file view).
}