text
stringlengths
8
4.13M
//! This project is used for creating two different digital sinusoidal signals //! with certain frequencies. //! //! Runs entirely locally without hardware. Rounding might be different than on //! device. Except for when printing you must be vigilent to not become reliant //! on any std tools that can't otherwise port over no no_std without alloc. //! //! `cargo run --example 2_2` use core::f32::consts::{FRAC_PI_4, PI}; use textplots::{Chart, Plot, Shape}; use typenum::Unsigned; type N = heapless::consts::U512; fn main() { let w0 = (0..N::to_usize()) .map(|n| (PI * n as f32 / 128.0).sin()) .collect::<heapless::Vec<f32, N>>(); display::<N, _>("w0:", w0.iter().cloned()); let w1 = (0..N::to_usize()) .map(|n| (FRAC_PI_4 * n as f32).sin()) .collect::<heapless::Vec<f32, N>>(); display::<N, _>("w1:", w1.iter().cloned()); } // Points isn't a great representation as you can lose the line in the graph, // however while Lines occasionally looks good it also can be terrible. // Continuous requires to be in a fn pointer closure which cant capture any // external data so not useful without lots of code duplication. fn display<N, I>(name: &str, input: I) where N: Unsigned, I: Iterator<Item = f32> + core::clone::Clone + std::fmt::Debug, { println!("{:?}: ", name); let display = input .enumerate() .map(|(n, y)| (n as f32, y)) .collect::<Vec<(f32, f32)>>(); Chart::new(120, 60, 0.0, N::to_usize() as f32) .lineplot(Shape::Points(&display[..])) .display(); }
// Import hacspec and all needed definitions.
use hacspec_lib::*;

// WARNING:
// This spec does not provide secret independence, and treats all keys as public.
// Consequently, it should only be used as a FORMAL SPEC, NOT as a reference implementation.

// Type definitions for use in poly1305.
bytes!(PolyKey, 32);

const BLOCKSIZE: usize = 16;

// These are type aliases for convenience
bytes!(PolyBlock, 16);

// These are actual types; fixed-length arrays.
bytes!(Poly1305Tag, 16);

// A byte sequence of length <= BLOCKSIZE
pub type SubBlock = ByteSeq;

// A length <= BLOCKSIZE
pub type BlockIndex = usize;

// This defines the field for modulo 2^130-5.
// In particular `FieldElement` and `FieldCanvas` are defined.
// The `FieldCanvas` is an integer type with 131-bit (to hold 2*(2^130-5)).
// The `FieldElement` is a natural integer modulo 2^130-5.
public_nat_mod!(
    type_name: FieldElement,
    type_of_canvas: FieldCanvas,
    bit_size_of_field: 131, // This amounts to 17 bytes
    modulo_value: "03fffffffffffffffffffffffffffffffb"
);

// Internal Poly1305 State
pub type PolyState = (FieldElement, FieldElement, PolyKey); //(accumulator,r,key)

/// Read the first key half as a little-endian 128-bit value, clamp it, and
/// interpret the result as the field element `r`.
pub fn poly1305_encode_r(b: &PolyBlock) -> FieldElement {
    let mut n = U128_from_le_bytes(U128Word::from_seq(b));
    // Clamping mask: clears the top 4 bits of bytes 3/7/11/15 and the
    // bottom 2 bits of bytes 4/8/12 (RFC 8439, section 2.5.1).
    n = n & U128(0x0fff_fffc_0fff_fffc_0fff_fffc_0fff_ffffu128);
    FieldElement::from_secret_literal(n)
}

/// Encode one full 16-byte message block: its little-endian value plus the
/// high marker bit 2^128.
pub fn poly1305_encode_block(b: &PolyBlock) -> FieldElement {
    let n = U128_from_le_bytes(U128Word::from_seq(b));
    let f = FieldElement::from_secret_literal(n);
    f + FieldElement::pow2(128)
}

// In Poly1305 as used in this spec, pad_len is always the length of b, i.e. there is no padding
// In Chacha20Poly1305, pad_len is set to BLOCKSIZE
pub fn poly1305_encode_last(pad_len: BlockIndex, b: &SubBlock) -> FieldElement {
    let n = U128_from_le_bytes(U128Word::from_slice(b, 0, b.len()));
    let f = FieldElement::from_secret_literal(n);
    // The marker bit sits just past the (padded) block length.
    f + FieldElement::pow2(8 * pad_len)
}

/// Initialise the MAC state: zero accumulator, clamped `r` from the first
/// key half, and the whole key (its second half is used in `finish`).
pub fn poly1305_init(k: PolyKey) -> PolyState {
    let r = poly1305_encode_r(&PolyBlock::from_slice(&k, 0, 16));
    (FieldElement::ZERO(), r, k)
}

/// Absorb one full block: acc = (encode(b) + acc) * r.
pub fn poly1305_update_block(b: &PolyBlock, st: PolyState) -> PolyState {
    let (acc, r, k) = st;
    ((poly1305_encode_block(b) + acc) * r, r, k)
}

/// Absorb all complete BLOCKSIZE-byte chunks of `m`; any remainder is
/// handled separately by `poly1305_update_last`.
pub fn poly1305_update_blocks(m: &ByteSeq, st: PolyState) -> PolyState {
    let mut st = st;
    let n_blocks = m.len() / BLOCKSIZE;
    for i in 0..n_blocks {
        let block = PolyBlock::from_seq(&m.get_exact_chunk(BLOCKSIZE, i));
        st = poly1305_update_block(&block, st);
    }
    st
}

/// Absorb the trailing partial block, if any. A no-op when `b` is empty.
pub fn poly1305_update_last(pad_len: usize, b: &SubBlock, st: PolyState) -> PolyState {
    let mut st = st;
    if b.len() != 0 {
        let (acc, r, k) = st;
        st = ((poly1305_encode_last(pad_len, b) + acc) * r, r, k);
    }
    st
}

/// Absorb a whole message: full blocks first, then the remainder chunk.
pub fn poly1305_update(m: &ByteSeq, st: PolyState) -> PolyState {
    let st = poly1305_update_blocks(m, st);
    let last = m.get_remainder_chunk(BLOCKSIZE);
    poly1305_update_last(last.len(), &last, st)
}

/// Finalise: add `s` (the second key half) to the low 128 bits of the
/// accumulator and return the 16-byte tag.
pub fn poly1305_finish(st: PolyState) -> Poly1305Tag {
    let (acc, _, k) = st;
    let n = U128_from_le_bytes(U128Word::from_slice(&k, 16, 16));
    let aby = acc.to_byte_seq_le();
    // We can't use from_seq here because the accumulator is larger than 16 bytes.
    let a = U128_from_le_bytes(U128Word::from_slice(&aby, 0, 16));
    Poly1305Tag::from_seq(&U128_to_le_bytes(a + n))
}

/// One-shot Poly1305 MAC over message `m` with 32-byte `key`.
pub fn poly1305(m: &ByteSeq, key: PolyKey) -> Poly1305Tag {
    let mut st = poly1305_init(key);
    st = poly1305_update(m, st);
    poly1305_finish(st)
}
use std::thread;
use std::sync::{mpsc, Arc, Mutex};

/// One pool member: owns the OS thread that drains the shared channel.
struct Worker {
    thread: Option<thread::JoinHandle<()>>,
    id: usize,
}

impl Worker {
    /// Spawn the worker thread. It repeatedly locks the shared receiver,
    /// waits for a message, and either runs the job or exits on `Terminate`.
    fn new(id: usize, receiver: Arc<Mutex<mpsc::Receiver<Message>>>) -> Self {
        let handle = thread::spawn(move || loop {
            // The mutex guard is released at the end of this statement, so
            // other workers can receive while this one runs its job.
            let message = receiver.lock().unwrap().recv().unwrap();

            match message {
                Message::NewJob(job) => {
                    println!("Worker {} got job!", id);
                    job();
                    println!("Worker {} finished!", id);
                }
                Message::Terminate => {
                    println!("Worker {} was told to terminate.", id);
                    break;
                }
            }
        });

        Worker {
            thread: Some(handle),
            id,
        }
    }
}

/// A heap-allocated closure to execute on some worker thread.
type Job = Box<dyn FnOnce() + Send + 'static>;

/// Messages workers understand: run a job, or shut down.
pub enum Message {
    NewJob(Job),
    Terminate,
}

/// A fixed-size pool of worker threads fed through a single mpsc channel.
pub struct ThreadPool {
    workers: Vec<Worker>,
    sender: mpsc::Sender<Message>,
}

impl ThreadPool {
    /// Create a pool with `thread_count` workers.
    ///
    /// # Panics
    /// Panics if `thread_count` is zero.
    pub fn new(thread_count: usize) -> Self {
        assert!(thread_count > 0);

        let (sender, receiver) = mpsc::channel();
        let receiver = Arc::new(Mutex::new(receiver));

        let workers = (0..thread_count)
            .map(|id| {
                println!("Created worker {}", id);
                Worker::new(id, Arc::clone(&receiver))
            })
            .collect();

        ThreadPool { workers, sender }
    }

    /// Queue `function` to run on whichever worker picks it up first.
    ///
    /// # Errors
    /// Returns the channel's `SendError` if every receiver is gone.
    pub fn execute<F>(&self, function: F) -> Result<(), mpsc::SendError<Message>>
    where
        F: FnOnce() + Send + 'static,
    {
        println!("Sending message to worker...");
        self.sender.send(Message::NewJob(Box::new(function)))?;
        println!("Sent!");
        Ok(())
    }
}

impl Drop for ThreadPool {
    /// Graceful shutdown: one `Terminate` per worker, then join them all.
    fn drop(&mut self) {
        println!("Terminating all workers.");

        for _ in &self.workers {
            self.sender.send(Message::Terminate).unwrap();
        }

        for worker in &mut self.workers {
            println!("Waiting for worker {} to finish.", worker.id);
            if let Some(thread) = worker.thread.take() {
                thread.join().unwrap();
            }
        }

        println!("All workers are now dead =)");
    }
}
use failure::Error;

use client::Jenkins;
use helpers::Class;

use super::Job;
use action::CommonAction;
use build::ShortBuild;
use property::CommonProperty;
use queue::ShortQueueItem;

use super::{BallColor, HealthReport, JobBuilder};

// The macro declares `WorkflowJob` with the fields common to every job type
// plus the extra fields listed below, and implements the shared `Job` API.
job_build_with_common_fields_and_impl!(/// A pipeline project
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct WorkflowJob {
    /// Description of the job
    pub description: String,
    /// Is concurrent build enabled for the job?
    pub concurrent_build: bool,
});
// Maps the Jenkins `_class` discriminator string to this Rust type for
// polymorphic deserialization.
register_class!("org.jenkinsci.plugins.workflow.job.WorkflowJob" => WorkflowJob);

impl WorkflowJob {
    /// Build this job
    pub fn build(&self, jenkins_client: &Jenkins) -> Result<ShortQueueItem, Error> {
        self.builder(jenkins_client)?.send()
    }

    /// Create a `JobBuilder` to setup a build of a `Job`
    pub fn builder<'a, 'b, 'c, 'd>(
        &'a self,
        jenkins_client: &'b Jenkins,
    ) -> Result<JobBuilder<'a, 'b, 'c, 'd>, Error> {
        JobBuilder::new(self, jenkins_client)
    }
}
// Import hacspec and all needed definitions.
use hacspec_lib::*;

const ROUNDS: usize = 24;
// Rate (bytes absorbed per permutation) for each SHA-3 / SHAKE variant.
pub const SHA3224_RATE: usize = 144;
pub const SHA3256_RATE: usize = 136;
pub const SHA3384_RATE: usize = 104;
pub const SHA3512_RATE: usize = 72;
pub const SHAKE128_RATE: usize = 168;
pub const SHAKE256_RATE: usize = 136;

// The Keccak state: 25 lanes of 64 bits (5x5, row-major).
array!(State, 25, U64);
array!(Row, 5, U64);
bytes!(Digest224, 28);
bytes!(Digest256, 32);
bytes!(Digest384, 48);
bytes!(Digest512, 64);
array!(RoundConstants, ROUNDS, u64);
array!(RotationConstants, 25, usize);

// Round constants for the iota step, one per round.
#[rustfmt::skip]
const ROUNDCONSTANTS: RoundConstants = RoundConstants([
    0x0000_0000_0000_0001u64, 0x0000_0000_0000_8082u64, 0x8000_0000_0000_808au64,
    0x8000_0000_8000_8000u64, 0x0000_0000_0000_808bu64, 0x0000_0000_8000_0001u64,
    0x8000_0000_8000_8081u64, 0x8000_0000_0000_8009u64, 0x0000_0000_0000_008au64,
    0x0000_0000_0000_0088u64, 0x0000_0000_8000_8009u64, 0x0000_0000_8000_000au64,
    0x0000_0000_8000_808bu64, 0x8000_0000_0000_008bu64, 0x8000_0000_0000_8089u64,
    0x8000_0000_0000_8003u64, 0x8000_0000_0000_8002u64, 0x8000_0000_0000_0080u64,
    0x0000_0000_0000_800au64, 0x8000_0000_8000_000au64, 0x8000_0000_8000_8081u64,
    0x8000_0000_0000_8080u64, 0x0000_0000_8000_0001u64, 0x8000_0000_8000_8008u64,
]);

// Per-lane rotation offsets for the rho step.
const ROTC: RotationConstants = RotationConstants([
    0, 1, 62, 28, 27, 36, 44, 6, 55, 20, 3, 10, 43, 25, 39, 41, 45, 15, 21, 8, 18, 2, 61, 56, 14,
]);

// Lane permutation indices for the pi step.
const PI: RotationConstants = RotationConstants([
    0, 6, 12, 18, 24, 3, 9, 10, 16, 22, 1, 7, 13, 19, 20, 4, 5, 11, 17, 23, 2, 8, 14, 15, 21,
]);

/// Keccak theta step: XOR each lane with the parity of two neighbouring columns.
fn theta(mut s: State) -> State {
    let mut b = Row::new();
    // Column parities.
    for i in 0..5 {
        b[i] = s[i] ^ s[i + 5] ^ s[i + 10] ^ s[i + 15] ^ s[i + 20];
    }
    for i in 0..5 {
        let u: U64 = b[(i + 1) % 5];
        let t = b[(i + 4) % 5] ^ u.rotate_left(1);
        for j in 0..5 {
            s[5 * j + i] = s[5 * j + i] ^ t;
        }
    }
    s
}

/// Keccak rho step: rotate each lane by its fixed offset.
fn rho(mut s: State) -> State {
    for i in 0..25 {
        let u: U64 = s[i];
        s[i] = u.rotate_left(ROTC[i]);
    }
    s
}

/// Keccak pi step: permute lane positions.
fn pi(s: State) -> State {
    let mut v = State::new();
    for i in 0..25 {
        v[i] = s[PI[i]];
    }
    v
}

/// Keccak chi step: the only non-linear step, applied row by row.
fn chi(mut s: State) -> State {
    let mut b = Row::new();
    for i in 0..5 {
        for j in 0..5 {
            b[j] = s[5 * i + j];
        }
        for j in 0..5 {
            let u: U64 = b[(j + 1) % 5];
            s[5 * i + j] = s[5 * i + j] ^ (!u) & b[(j + 2) % 5];
        }
    }
    s
}

/// Keccak iota step: XOR the round constant into lane 0.
fn iota(mut s: State, rndconst: u64) -> State {
    s[0] = s[0] ^ U64::classify(rndconst);
    s
}

/// The full Keccak-f[1600] permutation: 24 rounds of the five step mappings.
pub fn keccakf1600(mut s: State) -> State {
    for i in 0..ROUNDS {
        s = theta(s);
        s = rho(s);
        s = pi(s);
        s = chi(s);
        s = iota(s, ROUNDCONSTANTS[i]);
    }
    s
}

/// XOR a rate-sized block into the state (8 bytes per 64-bit lane,
/// little-endian) and run the permutation.
fn absorb_block(mut s: State, block: &ByteSeq) -> State {
    for i in 0..block.len() {
        let w = i >> 3u32; // lane index
        let o = 8 * (i & 7); // bit offset within the lane
        s[w] = s[w] ^ U64_from_U8(block[i]) << o;
    }
    keccakf1600(s)
}

/// Extract `nbytes` of output, permuting again each time a full rate's
/// worth of bytes has been emitted.
fn squeeze(mut s: State, nbytes: usize, rate: usize) -> ByteSeq {
    let mut out = ByteSeq::new(nbytes);
    for i in 0..nbytes {
        let pos = i % rate;
        let w = pos >> 3u32;
        let o = 8 * (pos & 7);
        let b = (s[w] >> o) & U64::classify(0xffu64);
        out[i] = U8_from_U64(b);
        if ((i + 1) % rate) == 0 {
            s = keccakf1600(s);
        }
    }
    out
}

/// Sponge construction: absorb `data`, pad the final (possibly empty)
/// partial block with domain-separation byte `p` and the trailing 0x80 bit,
/// then squeeze `outbytes` of output.
fn keccak(rate: usize, data: &ByteSeq, p: u8, outbytes: usize) -> ByteSeq {
    let mut buf = ByteSeq::new(rate);
    let mut last_block_len = 0;
    let mut s = State::new();
    for i in 0..data.num_chunks(rate) {
        let (block_len, block) = data.get_chunk(rate, i);
        if block_len == rate {
            s = absorb_block(s, &block);
        } else {
            // Partial final chunk: stage it in `buf` for padding below.
            buf = buf.update_start(&block);
            last_block_len = block_len;
        }
    }
    // When data.len() is an exact multiple of the rate, `buf` is all zeros
    // and the padding forms a standalone final block -- as required.
    buf[last_block_len] = U8(p);
    buf[rate - 1] = buf[rate - 1] | U8(128u8);
    s = absorb_block(s, &buf);
    squeeze(s, outbytes, rate)
}

/// SHA3-224: 28-byte digest, domain byte 0x06.
pub fn sha3224(data: &ByteSeq) -> Digest224 {
    let t = keccak(SHA3224_RATE, data, 0x06u8, 28);
    Digest224::from_seq(&t)
}

/// SHA3-256: 32-byte digest, domain byte 0x06.
pub fn sha3256(data: &ByteSeq) -> Digest256 {
    let t = keccak(SHA3256_RATE, data, 0x06u8, 32);
    Digest256::from_seq(&t)
}

/// SHA3-384: 48-byte digest, domain byte 0x06.
pub fn sha3384(data: &ByteSeq) -> Digest384 {
    let t = keccak(SHA3384_RATE, data, 0x06u8, 48);
    Digest384::from_seq(&t)
}

/// SHA3-512: 64-byte digest, domain byte 0x06.
pub fn sha3512(data: &ByteSeq) -> Digest512 {
    let t = keccak(SHA3512_RATE, data, 0x06u8, 64);
    Digest512::from_seq(&t)
}

/// SHAKE128 XOF: arbitrary-length output, domain byte 0x1f.
pub fn shake128(data: &ByteSeq, outlen: usize) -> ByteSeq {
    keccak(SHAKE128_RATE, data, 0x1fu8, outlen)
}

/// SHAKE256 XOF: arbitrary-length output, domain byte 0x1f.
pub fn shake256(data: &ByteSeq, outlen: usize) -> ByteSeq {
    keccak(SHAKE256_RATE, data, 0x1fu8, outlen)
}
//! Provide helpers for making ioctl system calls
//!
//! Currently supports Linux on all architectures. Other platforms welcome!
//!
//! This library is pretty low-level and messy. `ioctl` is not fun.
//!
//! What is an `ioctl`?
//! ===================
//!
//! The `ioctl` syscall is the grab-bag syscall on POSIX systems. Don't want
//! to add a new syscall? Make it an `ioctl`! `ioctl` refers to both the syscall,
//! and the commands that can be sent with it. `ioctl` stands for "IO control",
//! and the commands are always sent to a file descriptor.
//!
//! It is common to see `ioctl`s used for the following purposes:
//!
//! * Provide read/write access to out-of-band data related
//! to a device such as configuration (for instance, setting
//! serial port options)
//! * Provide a mechanism for performing full-duplex data
//! transfers (for instance, xfer on SPI devices).
//! * Provide access to control functions on a device (for example,
//! on Linux you can send commands like pause, resume, and eject
//! to the CDROM device).
//! * Do whatever else the device driver creator thought made most sense.
//!
//! `ioctl`s are synchronous system calls and are similar to read and
//! write calls in that regard.
//!
//! What does this module support?
//! ===============================
//!
//! This library provides the `ioctl!` macro, for binding `ioctl`s. It also tries
//! to bind every `ioctl` supported by the system with said macro, but
//! some `ioctl`s require some amount of manual work (usually by
//! providing `struct` declaration) that this library does not support yet.
//!
//! Additionally, in `etc`, there are scripts for scraping system headers for
//! `ioctl` definitions, and generating calls to `ioctl!` corresponding to them.
//!
//! How do I get the magic numbers?
//! ===============================
//!
//! For Linux, look at your system's headers. For example, `/usr/include/linux/input.h` has a lot
//! of lines defining macros which use `_IOR`, `_IOW`, `_IOC`, and `_IORW`. These macros
//! correspond to the `ior!`, `iow!`, `ioc!`, and `iorw!` macros defined in this crate.
//! Additionally, there is the `ioctl!` macro for creating a wrapper around `ioctl` that is
//! somewhat more type-safe.
//!
//! Most `ioctl`s have no or little documentation. You'll need to scrounge through
//! the source to figure out what they do and how they should be used.
//!
//! # Interface Overview
//!
//! This ioctl module seeks to tame the ioctl beast by providing a set of safer (although not safe)
//! functions implementing the most common ioctl access patterns.
//!
//! The most common access patterns for ioctls are as follows:
//!
//! 1. `read`: A pointer is provided to the kernel which is populated
//! with a value containing the "result" of the operation. The
//! result may be an integer or structure. The kernel may also
//! read values from the provided pointer (usually a structure).
//! 2. `write`: A pointer is provided to the kernel containing values
//! that the kernel will read in order to perform the operation.
//! 3. `execute`: The operation is passed to the kernel but no
//! additional pointer is passed. The operation is enough
//! and it either succeeds or results in an error.
//!
//! Where appropriate, versions of these interface functions are provided
//! taking either references or pointers. The pointer versions are
//! necessary for cases (notably slices) where a reference cannot
//! be generically cast to a pointer.
// Select the platform implementation at compile time: each `#[path]`
// attribute points the single `platform` module at the matching
// OS-specific source file; exactly one of these cfg blocks is active.
#[cfg(any(target_os = "linux", target_os = "android"))]
#[path = "platform/linux.rs"]
#[macro_use]
mod platform;

#[cfg(target_os = "macos")]
#[path = "platform/macos.rs"]
#[macro_use]
mod platform;

#[cfg(target_os = "ios")]
#[path = "platform/ios.rs"]
#[macro_use]
mod platform;

#[cfg(target_os = "freebsd")]
#[path = "platform/freebsd.rs"]
#[macro_use]
mod platform;

#[cfg(target_os = "openbsd")]
#[path = "platform/openbsd.rs"]
#[macro_use]
mod platform;

#[cfg(target_os = "dragonfly")]
#[path = "platform/dragonfly.rs"]
#[macro_use]
mod platform;

// Re-export whatever the selected platform module defines.
pub use self::platform::*;

// liblibc has the wrong decl for linux :| hack until #26809 lands.
extern "C" {
    #[doc(hidden)]
    pub fn ioctl(fd: libc::c_int, req: libc::c_ulong, ...) -> libc::c_int;
}

/// A hack to get the macros to work nicely.
#[doc(hidden)]
pub use ::libc as libc;
// Demonstrates variable shadowing and block scoping; produces no output.
fn main() {
    let x = 5; // `x` starts life as an integer...
    let x = "something"; // ...and is shadowed by a &str with the same name.
    {
        // Bindings inside this block shadow the outer ones and are dropped
        // at the closing brace.
        let x = 5;
        let y = 10;
    }
    let y = "new"; // First `y` in the outer scope; the inner `y` is gone.
}
use sudo_test::{Command, Env};

use crate::{Result, HOSTNAME};

// Wrapper so snapshot files are not prefixed with this module's path.
macro_rules! assert_snapshot {
    ($($tt:tt)*) => {
        insta::with_settings!({
            prepend_module_to_snapshot => false,
        }, {
            insta::assert_snapshot!($($tt)*)
        })
    };
}

// NOTE all the input sudoers files have extra whitespaces to check that `--list` pretty prints the
// sudoers entries

// Runs `sudo -l -l` in a test environment whose sudoers file is `sudoers`
// and returns the captured stdout for snapshot comparison.
fn sudo_ll_of(sudoers: &str) -> Result<String> {
    let env = Env(sudoers).hostname(HOSTNAME).build()?;

    Command::new("sudo")
        .args(["-l", "-l"])
        .output(&env)?
        .stdout()
}

// --- runas specifier variants ---

#[test]
fn no_runas() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = ALL ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn empty_runas() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = ( ) ALL ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn user_runas() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = ( ferris ) ALL ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn user_id_runas() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = ( #0 ) ALL ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn user_group_runas() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = ( %root ) ALL ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn user_group_id_runas() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = ( %#0 ) ALL ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn user_non_unix_group_runas() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = ( %:root ) ALL ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn user_non_unix_group_id_runas() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = ( %:#0 ) ALL ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn not_user_runas() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = ( ! ferris ) ALL ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn multiple_users_runas() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = ( ferris , root ) ALL ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn group_runas() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = ( : crabs ) ALL ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn not_group_runas() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = ( : ! crabs ) ALL ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn multiple_group_runas() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = ( : crabs , root ) ALL ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn complex_runas() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = ( ! ferris , root : crabs , !root ) ALL ")?;
    assert_snapshot!(stdout);
    Ok(())
}

// --- command lists, aliases and arguments ---

#[test]
fn command_alias() -> Result<()> {
    let stdout = sudo_ll_of(
        "Cmnd_Alias COMMANDS = /usr/bin/true, /usr/bin/false ALL ALL = /usr/bin/ls, COMMANDS ",
    )?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn negated_command_alias() -> Result<()> {
    let stdout = sudo_ll_of(
        "Cmnd_Alias COMMANDS = /usr/bin/true, !/usr/bin/false ALL ALL = /usr/bin/ls, !COMMANDS ",
    )?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn command_arguments() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = /usr/bin/true a b c , /usr/bin/false ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn multiple_commands() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = /usr/bin/true , /usr/bin/false ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn multiple_runas_groups() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = ( root ) /usr/bin/true , ( ferris ) /usr/bin/false ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn implicit_runas_group() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = /usr/bin/true , ( ferris ) /usr/bin/false ")?;
    assert_snapshot!(stdout);
    Ok(())
}

// --- CWD modifiers ---

#[test]
fn cwd_any() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = CWD = * /usr/bin/true ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn cwd_path() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = CWD = /home /usr/bin/true ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn cwd_multiple_commands() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = CWD = * /usr/bin/true , /usr/bin/false ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn cwd_multiple_runas_groups() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = CWD = * /usr/bin/true , ( ferris ) /usr/bin/false ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn cwd_override() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = CWD = * /usr/bin/true , CWD = /home /usr/bin/false ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn cwd_not_in_first_position() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = /usr/bin/true , CWD = * /usr/bin/false ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn cwd_across_runas_groups() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = CWD = * /usr/bin/true , (ferris) /usr/bin/false ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn cwd_override_across_runas_groups() -> Result<()> {
    let stdout = sudo_ll_of(
        " ALL ALL = CWD = * /usr/bin/true , (ferris) /usr/bin/false , CWD = /home /usr/bin/ls ",
    )?;
    assert_snapshot!(stdout);
    Ok(())
}

// --- PASSWD / NOPASSWD tags ---

#[test]
fn passwd() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = PASSWD : /usr/bin/true ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn nopasswd() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = NOPASSWD : /usr/bin/true ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn passwd_nopasswd_override() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = PASSWD : /usr/bin/true , NOPASSWD: /usr/bin/false ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn nopasswd_passwd_override() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = NOPASSWD : /usr/bin/true , PASSWD: /usr/bin/false ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn nopasswd_passwd_on_same_command() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = NOPASSWD : PASSWD : /usr/bin/true ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn nopasswd_across_runas_groups() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = NOPASSWD : /usr/bin/true , ( ferris ) /usr/bin/false ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn passwd_across_runas_groups() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = PASSWD : /usr/bin/true , ( ferris ) /usr/bin/false ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn nopasswd_passwd_override_across_runas_groups() -> Result<()> {
    let stdout = sudo_ll_of(
        " ALL ALL = NOPASSWD : /usr/bin/true , ( ferris ) /usr/bin/false , PASSWD : /usr/bin/ls ",
    )?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn cwd_nopasswd() -> Result<()> {
    let stdout = sudo_ll_of(" ALL ALL = CWD = * NOPASSWD : /usr/bin/true ")?;
    assert_snapshot!(stdout);
    Ok(())
}

#[test]
fn multiple_lines() -> Result<()> {
    let stdout = sudo_ll_of(
        " ALL ALL = /usr/bin/true , /usr/bin/false root ALL = /usr/bin/ls ",
    )?;
    assert_snapshot!(stdout);
    Ok(())
}
extern crate rusqlite;

use std::result::Result;
use std::string::String;

// TODO: use rusqlite::Connection
/// Thin wrapper around an SQLite connection for the artist/release schema.
pub struct Database {
    conn: rusqlite::Connection,
}

/// One row of the `artists` table.
#[derive(Debug)]
pub struct Artist {
    pub id: i32,
    pub name: String,
    pub musicbrainz_id: String,
    // Unix timestamp of the last lookup (stored in `lastChecked`).
    pub last_checked: i64
}

impl Database {
    /// Open (or create) the SQLite file at `filename`.
    ///
    /// # Panics
    /// Panics if the database cannot be opened.
    pub fn new(filename: String) -> Database {
        Database {
            conn: rusqlite::Connection::open(filename).unwrap(),
        }
    }

    /// Create the `artists` table. Fails if the table already exists.
    // NOTE(review): `try!` and the `&[]` params argument are the pre-2018
    // rusqlite API -- keep in sync with the crate version in use.
    pub fn create_artists_table(&self) -> Result<(), rusqlite::Error> {
        try!(self.conn.execute(
            "CREATE TABLE artists (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                name TEXT,
                musicbrainzId TEXT,
                lastChecked INTEGER
            )",
            &[])
        );
        Ok(())
    }

    /// Create the `releases` table. Fails if the table already exists.
    pub fn create_releases_table(&self) -> Result<(), rusqlite::Error> {
        try!(self.conn.execute(
            "CREATE TABLE releases (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                artistId INTEGER,
                title TEXT,
                releaseGroupId TEXT,
                releaseDate TEXT,
                releaseType TEXT,
                releaseStatus TEXT,
                collected INTEGER, -- 0 = don't have, 1 = have, 2 = temporary ignore, 3 = permanent ignore
                notes TEXT
            )",
            &[])
        );
        Ok(())
    }

    /// Return the artist with the oldest `lastChecked` timestamp, or
    /// `QueryReturnedNoRows` if the table is empty.
    pub fn get_least_recently_checked_artist(&self) -> Result<Artist, rusqlite::Error> {
        let mut stmt = self.conn.prepare("SELECT id, name, musicbrainzId, lastChecked FROM artists ORDER BY lastChecked ASC LIMIT 1").unwrap();
        let mut artist_iter = stmt.query_map(&[], |row| {
            Artist {
                id: row.get(0),
                name: row.get(1),
                musicbrainz_id: row.get(2),
                last_checked: row.get(3)
            }
        }).unwrap();
        for artist in artist_iter {
            //println!("Found artist {:?}", artist);
            return artist; // Return the first one we find
        }
        Err(rusqlite::Error::QueryReturnedNoRows)
    }
}
use shorthand::ShortHand;

// Exercises the `try_into` feature of the `ShortHand` derive: setters named
// `try_<field>` that accept anything `TryInto<FieldType>`.
#[derive(ShortHand, Default, PartialEq, Debug)]
#[shorthand(enable(try_into))]
struct Example {
    value: String,
    other: Option<String>,
}

#[test]
fn test_try_into() {
    // The generated setter returns a Result; &str -> String is infallible.
    let _: Result<&mut Example, ::core::convert::Infallible> = Example::default().try_value("s");

    assert_eq!(
        Ok(&mut Example {
            value: "hi".to_string(),
            other: None,
        }),
        Example::default().try_value::<&'static str>("hi")
    );
}
use std::collections::vec_deque::*;

/// A book is `(id, score)`.
type Book = (usize, usize);

#[derive(Debug, PartialEq)]
pub enum LibraryState {
    /// Signup has not begun.
    SignUpNotStarted,
    /// Signup in progress; payload is the number of signup days.
    SignUpStarted(usize),
    SignedUp,
    /// All books have been pulled.
    Empty,
}

#[derive(Debug)]
pub struct Library {
    pub state: LibraryState,
    /// Books still to ship, highest score first.
    pub books: VecDeque<Book>,
    signup_days: usize,
    books_per_day: usize,
}

impl Library {
    /// Build a library from metadata `[_, signup_days, books_per_day]` and
    /// its books, sorted by descending score so the most valuable ship first.
    ///
    /// # Panics
    /// Panics if `library_metadata` has fewer than three elements.
    pub fn new(library_metadata: Vec<usize>, mut books: Vec<Book>) -> Self {
        books.sort_by(|(_, score_one), (_, score_two)| score_two.cmp(score_one));
        Self {
            state: LibraryState::SignUpNotStarted,
            signup_days: library_metadata[1],
            books_per_day: library_metadata[2],
            books: VecDeque::from(books),
        }
    }

    /// Begin the signup countdown (no-op unless signup hasn't started yet).
    pub fn start_signup(&mut self) {
        if self.state == LibraryState::SignUpNotStarted {
            self.state = LibraryState::SignUpStarted(self.signup_days);
        }
    }

    /// Ship one day's worth of books (up to `books_per_day`), marking the
    /// library `Empty` once it runs out.
    ///
    /// BUG FIX: the original iterated `0..self.signup_days`, leaving the
    /// `books_per_day` field entirely unused; the daily shipping capacity
    /// is `books_per_day`, not the signup duration.
    pub fn pull_books(&mut self) -> Vec<Book> {
        let mut pulled_books = Vec::with_capacity(self.books_per_day);
        for _ in 0..self.books_per_day {
            match self.books.pop_front() {
                Some(book) => pulled_books.push(book),
                None => {
                    self.state = LibraryState::Empty;
                    break;
                }
            }
        }
        pulled_books
    }
}
mod inner_decoder;

use crate::frame_data::FrameData;
use inner_decoder::InnerDecoder;
use stainless_ffmpeg::prelude::*;

/// Wraps an audio or video FFmpeg decoder plus an optional filter graph
/// applied to each decoded frame.
pub struct Decoder {
    inner_decoder: InnerDecoder,
    graph: Option<FilterGraph>,
}

impl Decoder {
    /// Build a `Decoder` around an audio codec context.
    pub fn new_audio_decoder(decoder: AudioDecoder, graph: Option<FilterGraph>) -> Self {
        let inner_decoder = InnerDecoder::AudioDecoder(decoder);
        Decoder {
            inner_decoder,
            graph,
        }
    }

    /// Build a `Decoder` around a video codec context.
    pub fn new_video_decoder(decoder: VideoDecoder, graph: Option<FilterGraph>) -> Self {
        let inner_decoder = InnerDecoder::VideoDecoder(decoder);
        Decoder {
            inner_decoder,
            graph,
        }
    }

    /// Decode one packet into at most one frame, routed through the filter
    /// graph when one is configured.
    ///
    /// # Errors
    /// Returns the FFmpeg error string produced by `check_result!` when
    /// sending the packet or receiving the frame fails.
    // NOTE(review): assumes one decoded frame per packet -- additional
    // frames buffered by the codec are never drained; confirm upstream
    // packets are one-frame. Also, when the graph path returns a cloned
    // frame, the original `av_frame` does not appear to be freed here --
    // possible leak, verify against stainless_ffmpeg's ownership rules.
    pub fn decode(&mut self, packet: &Packet) -> std::result::Result<Option<FrameData>, String> {
        match &self.inner_decoder {
            InnerDecoder::AudioDecoder(audio_decoder) => {
                unsafe {
                    log::warn!(
                        "[FFmpeg] Send packet to audio decoder {:?} {}",
                        (*packet.packet).size,
                        (*audio_decoder.codec_context).codec_id as i32
                    );
                }
                let av_frame = unsafe {
                    // send/receive pair: push the packet in, pull one frame out.
                    let ret_code = avcodec_send_packet(audio_decoder.codec_context, packet.packet);
                    check_result!(ret_code);
                    let av_frame = av_frame_alloc();
                    let ret_code = avcodec_receive_frame(audio_decoder.codec_context, av_frame);
                    check_result!(ret_code);
                    let frame = Frame {
                        frame: av_frame,
                        name: Some("audio_source_1".to_string()),
                        index: 1,
                    };
                    if let Some(graph) = &self.graph {
                        // Run the decoded frame through the filter graph and
                        // hand back a clone of its first output frame.
                        if let Ok((audio_frames, _video_frames)) = graph.process(&[frame], &[]) {
                            log::trace!("[FFmpeg] Output graph count {} frames", audio_frames.len());
                            let frame = audio_frames.first().unwrap();
                            av_frame_clone((*frame).frame)
                        } else {
                            // Graph failure falls back to the unfiltered frame.
                            av_frame
                        }
                    } else {
                        av_frame
                    }
                };
                let frame = Frame {
                    frame: av_frame,
                    name: Some("audio".to_string()),
                    index: 1,
                };
                Ok(Some(FrameData::AudioVideo(frame)))
            }
            InnerDecoder::VideoDecoder(video_decoder) => {
                log::trace!("[FFmpeg] Send packet to video decoder");
                let av_frame = unsafe {
                    let ret_code = avcodec_send_packet(video_decoder.codec_context, packet.packet);
                    check_result!(ret_code);
                    let av_frame = av_frame_alloc();
                    let ret_code = avcodec_receive_frame(video_decoder.codec_context, av_frame);
                    check_result!(ret_code);
                    let frame = Frame {
                        frame: av_frame,
                        name: Some("video_source_1".to_string()),
                        index: 1,
                    };
                    if let Some(graph) = &self.graph {
                        if let Ok((_audio_frames, video_frames)) = graph.process(&[], &[frame]) {
                            log::trace!("[FFmpeg] Output graph count {} frames", video_frames.len());
                            let frame = video_frames.first().unwrap();
                            av_frame_clone((*frame).frame)
                        } else {
                            av_frame
                        }
                    } else {
                        av_frame
                    }
                };
                let frame = Frame {
                    frame: av_frame,
                    name: Some("video".to_string()),
                    index: 1,
                };
                Ok(Some(FrameData::AudioVideo(frame)))
            }
            // InnerDecoder::EbuTtmlLiveDecoder(ebu_ttml_live_decoder) => {
            //     let result = match ebu_ttml_live_decoder.decode(packet)? {
            //         Some(ttml_content) => Some(FrameData::EbuTtmlLive(Box::new(ttml_content))),
            //         None => None,
            //     };
            //     Ok(result)
            // }
        }
    }
}
use mio::unix::SourceFd;
use mio::{Events, Interest, Poll, Token};
use nix::errno::Errno;
// vim: shiftwidth=2
use nix::fcntl::{open, OFlag};
use nix::sys::stat::Mode;
use nix::unistd::{read, write};
use nix::Error;
use libc::input_event;
use std::mem::size_of;
use uinput_sys::{ui_set_evbit, EV_SYN, EV_KEY, EV_MSC, ui_dev_create, ui_set_keybit};
use crate::struct_ser::StructSerializer;
use std::os::unix::io::RawFd;
use crate::keys::Event;
use num_traits::FromPrimitive;
use std::path::Path;
use ioctls::{eviocgkey, eviocgrab};

/// Reads raw `input_event`s from an evdev device file descriptor.
pub struct DevInputReader {
  pub fd: RawFd
}

/// How (and whether) to grab the device for exclusive access.
pub enum Exclusion {
  NoExclusion,
  #[allow(dead_code)]
  ImmediateExclusion,
  // Waits for all keys to be released. Could block for a long time.
  WaitReleaseAndExclude
}

impl DevInputReader {
  /// Block until the next key press/release (EV_KEY with value 0 or 1)
  /// that maps to a known key code; other events are skipped.
  // NOTE(review): the byte offsets 16..24 assume the kernel's 64-bit
  // input_event layout (16-byte timestamp header) -- TODO confirm this is
  // never built for 32-bit targets.
  pub fn next(self: &mut DevInputReader) -> Result<Event, Error> {
    loop {
      let size = size_of::<input_event>();
      let mut buf: Vec<u8> = vec![0; size];
      read(self.fd, &mut buf)?;
      let type_ = u16::from_ne_bytes([buf[16], buf[17]]);
      let code = u16::from_ne_bytes([buf[18], buf[19]]);
      let value = i32::from_ne_bytes([buf[20], buf[21], buf[22], buf[23]]);
      // type 1 == EV_KEY; value 1 == press, 0 == release (2 = autorepeat, ignored).
      if type_ == 1 && (value == 0 || value == 1) {
        match FromPrimitive::from_u16(code) {
          Some(k) => match value {
            1 => return Ok(Event::Pressed(k)),
            0 => return Ok(Event::Released(k)),
            _ => ()
          },
          None => ()
        }
      }
    }
  }

  /// Open the evdev node at `path`, optionally non-blocking, and apply the
  /// requested exclusive-grab policy (EVIOCGRAB).
  pub fn open(path: &Path, exclusion: Exclusion, nonblock: bool) -> Result<DevInputReader, Error> {
    let fd = open(path, if nonblock {OFlag::O_RDONLY | OFlag::O_NONBLOCK} else {OFlag::O_RDONLY}, Mode::empty())?;
    match exclusion {
      Exclusion::NoExclusion => { },
      Exclusion::ImmediateExclusion => {
        unsafe {
          // EVIOCGRAB(1): take exclusive access right away.
          if eviocgrab(fd, &*Box::new(1)) == -1 {
            return Err(Error::last());
          }
        }
      },
      Exclusion::WaitReleaseAndExclude => {
        // Grab only once every key is released, so a key held during
        // startup cannot get stuck down for other consumers.
        do_exclusion_loop(fd)?;
        unsafe {
          if eviocgrab(fd, &*Box::new(1)) == -1 {
            return Err(Error::last());
          }
        }
      }
    };
    Ok(DevInputReader { fd })
  }
}

/// Poll the device's key bitmap (EVIOCGKEY) until no key is held down.
fn do_exclusion_loop(fd: RawFd) -> Result<(), Error> {
  // One bit per possible key code.
  let num_bytes = ( (uinput_sys::KEY_MAX + 7) / 8 ) as usize;
  let mut bytes = vec![0u8; num_bytes];
  loop {
    unsafe {
      // Get which keys are currently pressed
      if eviocgkey(fd, bytes.as_mut_ptr(), bytes.len()) == -1 {
        return Err(Error::last());
      }
    }
    let all_zero = bytes.iter().all(|x| *x == 0);
    if all_zero {
      break;
    }
    else {
      // Don't check again until the next key event; otherwise, it is pointless to check.
      wait_for_any_activity(fd)?;
      bytes.fill(0);
    }
  }
  Ok(())
}

/// Block until the fd has any input available. Handles non-blocking fds by
/// falling back to a mio poll when `read` returns EAGAIN.
fn wait_for_any_activity(fd: RawFd) -> Result<(), Error> {
  let size = size_of::<input_event>();
  let mut buf: Vec<u8> = vec![0; size];
  match read(fd, &mut buf) {
    Err(e) => match e {
      Error::Sys(code) => match code {
        nix::errno::Errno::EAGAIN => {
          let mut poll = Poll::new().unwrap();
          poll.registry().register(&mut SourceFd(&fd), Token(0), Interest::READABLE).unwrap();
          let mut events = Events::with_capacity(24);
          match poll.poll(&mut events, None) {
            Err(e) => {
              // Translate the io::Error back into a nix errno.
              return Err(Error::Sys(Errno::from_i32(e.raw_os_error().unwrap_or(0))));
            },
            Ok(_) => ()
          };
        },
        _ => {
          return Err(e);
        }
      },
      _ => {
        return Err(e);
      }
    },
    Ok(_) => ()
  };
  Ok(())
}

/// Emits synthetic key events through a uinput virtual keyboard device.
pub struct DevInputWriter {
  fd: RawFd
}

impl DevInputWriter {
  /// Create the uinput device: enable key/syn/msc events, register every
  /// key code, write the `uinput_user_dev` descriptor, and create the node.
  pub fn open() -> Result<DevInputWriter, Error> {
    let fdo = open("/dev/uinput", OFlag::O_WRONLY | OFlag::O_NONBLOCK, Mode::empty())?;

    unsafe {
      ui_set_evbit(fdo, EV_SYN);
      ui_set_evbit(fdo, EV_KEY);
      ui_set_evbit(fdo, EV_MSC);
    }

    // Register key codes 1..=561 as emittable.
    for i in 1 .. 562 {
      unsafe {
        ui_set_keybit(fdo, i);
      }
    }

    {
      // Hand-serialised `uinput_user_dev`: name, bus/vendor/product/version
      // ids, ff_effects_max, then the four absolute-axis arrays (unused).
      let mut user_dev_data = StructSerializer {
        sink: Vec::new()
      };
      user_dev_data.add_string_in_buf("totalmapper", 80);
      user_dev_data.add_u16(3);
      user_dev_data.add_u16(1);
      user_dev_data.add_u16(1);
      user_dev_data.add_u16(1);
      user_dev_data.add_u32(0);
      user_dev_data.add_i32_array(&[0; 64]);
      user_dev_data.add_i32_array(&[0; 64]);
      user_dev_data.add_i32_array(&[0; 64]);
      user_dev_data.add_i32_array(&[0; 64]);
      write(fdo, &user_dev_data.sink).unwrap();
    }

    unsafe {
      ui_dev_create(fdo);
    }

    Ok(DevInputWriter { fd: fdo })
  }

  /// Serialise and write `evs` as raw `input_event`s (zero timestamps),
  /// followed by a single EV_SYN report so the kernel flushes the batch.
  pub fn send(self: &mut DevInputWriter, evs: &Vec<Event>) -> Result<(), Error> {
    let mut input_event_data = StructSerializer {
      sink: Vec::new()
    };

    let mut send_type_code_value = |type_, code, value| {
      // 16 zero bytes stand in for the struct's timestamp fields.
      input_event_data.add_i64(0);
      input_event_data.add_i64(0);
      input_event_data.add_u16(type_);
      input_event_data.add_u16(code);
      input_event_data.add_i32(value);
    };

    for ev in evs {
      let k = match ev {
        Event::Pressed(k) => k,
        Event::Released(k) => k,
      };
      let value = match ev {
        Event::Pressed(_) => 1,
        Event::Released(_) => 0
      };
      let code = (*k) as u16;
      send_type_code_value(1, code, value);
    }

    // Trailing SYN_REPORT (type 0, code 0, value 0).
    send_type_code_value(0, 0, 0);

    write(self.fd, &input_event_data.sink)?;
    Ok(())
  }
}
use serde::{Deserialize, Serialize}; use futures::FutureExt; use rstreams::{events::Event, BotInvocationEvent, LeoCheckpointOptions, LeoReadOptions, LeoSdk, AllProviders, Error}; use rstreams::aws::AWSProvider; use rusoto_signature::Region; use lambda::{run, handler_fn}; use anyhow::{anyhow}; pub struct ExampleSdkConfig; impl ExampleSdkConfig { pub fn bus_config() -> AllProviders { AllProviders::AWS(AWSProvider::new( Region::UsEast1, "", "", "", "", "", "", )) } } #[derive(Deserialize, Serialize, Debug)] struct MyReadEvent { suborder_id: usize, order_created: String, number_of_line_items: usize, po_number: String, order_status: String, } #[tokio::main] async fn main() -> Result<(), Error> { run(handler_fn(offload)).await?; Ok(()) } async fn offload(event: BotInvocationEvent<()>, context: lambda::Context) -> Result<(), Error> { let sdk = LeoSdk::new(ExampleSdkConfig::bus_config()); let bot_id = &event.bot_id; let source_queue = "SOURCE_TOKEN"; sdk.cron(&event.__cron, &context, || async { sdk.offload( bot_id, source_queue, LeoReadOptions::default(), LeoCheckpointOptions::Enabled.with_initial_values(&event), |event: Event<MyReadEvent>| async move { //offload processing here let results = processing_function(event.eid.clone()).await; match results { Ok(..) => Some(Ok(())), Err(err) => { Some(Err(anyhow!(format!("Failed to process event {}", err)))) } } }, ) .await .map_err(|e| e.into()) }) .then(|r| async move { println!("Handler done!"); r }) .await?; Ok(()) } async fn processing_function(eid: String) -> anyhow::Result<()> { println!("Handling event eid: {}", eid); Ok(()) }
use Entity;

/// The whole game world, partitioned into regions addressed by 2D coordinates.
pub struct World {
    // NOTE(review): `Map` and `Vector2` are defined outside this chunk —
    // presumably a hash map and a 2-component vector; confirm in the crate.
    regions: Map<Vector2<u64>, Region>,
}

/// One region of the world: a collection of chunks addressed by 2D coordinates.
pub struct Region {
    chunks: Map<Vector2<u64>, Chunk>,
}

/// A 32 x 32 x 32 cube of blocks.
pub struct Chunk {
    blocks: [[[Block;32];32];32],
}

/// A single block, defined entirely by its material.
pub struct Block {
    material: Material,
}

/// Physical properties of a block material.
pub struct Material {
    // How hard the material is to break — units unspecified here; TODO confirm.
    resistance: f32,
    // Light blocking factor — presumably 0.0 transparent … 1.0 opaque; TODO confirm range.
    opacity: f32,
}
use crate::SizeComparison;
use structopt::StructOpt;

// Command-line interface for `fnd`, parsed by structopt/clap.
//
// NOTE(review): `///` doc comments on fields below become the generated
// `--help` text at runtime, so review notes are added as plain `//` comments
// to avoid changing the CLI output.
#[derive(Debug, StructOpt)]
#[structopt(
    name = "fnd",
    about = "A tiny command line tool to find file paths based on substring matching or regular expressions",
    setting = structopt::clap::AppSettings::ColoredHelp
)]
pub struct Flags {
    // Positional search query; optional (behavior without a query lives in the caller).
    pub query: Option<String>,

    // -a / --all
    #[structopt(short, long)]
    pub all: bool,

    // -r / --regex: interpret the query as a regular expression.
    #[structopt(short, long)]
    pub regex: bool,

    // --hidden. NOTE(review): `short` here derives `-h`, which usually
    // collides with clap's built-in help flag — confirm this parses at runtime.
    #[structopt(short, long)]
    pub hidden: bool,

    // -i / --case-insensitive (short spelled out to get `-i` rather than `-c`).
    #[structopt(short = "i", long)]
    pub case_insensitive: bool,

    // allow_hyphen_values lets "-500k" style values through the parser;
    // verbatim_doc_comment preserves the help text's line breaks.
    #[structopt(short, long, allow_hyphen_values = true, verbatim_doc_comment)]
    /// Match files on very close match (+/- 512 bytes), or above or below a certain threshold.
    ///
    /// To run a close match, provide the number and corresponding designation of bytes:
    /// k kilobytes (1024 bytes)
    /// M megabytes (1024 kilobytes)
    /// G gigabytes (1024 megabytes)
    /// T terabytes (1024 gigabytes)
    /// P petabytes (1024 terabytes)
    ///
    /// For example, to find all roughly 1gb JPGs:
    ///
    /// fnd "\.jpg$" -r -s 1G
    ///
    /// To select files above or below a particular threshold, prefix with a '+' or '-'.
    ///
    /// For example, to find all CSVs larger than 500mb:
    ///
    /// fnd "\.csv$" -r -s +500M
    ///
    /// To find all SVGs under 500kb
    ///
    /// fnd "\.svg$" -r -s -500k
    pub size: Option<SizeComparison>,

    #[structopt(short, long)]
    /// The maximum directory depth when looking for matches
    pub depth: Option<usize>,
}
extern crate pnet_datalink;
extern crate actix_web;

use actix_web::{http, server, App, Path, Responder};

/// Greeting handler: pulls `(id, name)` out of the request path and echoes them.
fn index(info: Path<(u32, String)>) -> impl Responder {
    let id = info.0;
    let name = &info.1;
    format!("Hello {}! id:{}", name, id)
}

/// Prints every local network interface's IP addresses, then serves the
/// greeting endpoint on 127.0.0.1:8080 (blocks forever).
fn main() {
    for interface in pnet_datalink::interfaces().iter() {
        println!("{:?}", interface.ips);
    }

    let make_app = || App::new().route("/{id}/{name}/index.html", http::Method::GET, index);

    server::new(make_app)
        .bind("127.0.0.1:8080")
        .unwrap()
        .run();
}
use git2::Repository;
use git2::DiffFormat;
use std::path::PathBuf;

/// CLI arguments (currently hard-coded to the current directory).
struct Arguments {
    path: PathBuf,
}

/// Walks the repository history (first-parent only) starting at HEAD and, for
/// every pair of consecutive commits, prints the diff stats followed by a
/// patch-format dump of each hunk and line.
fn main() {
    let args = Arguments { path: ".".into() };

    let repo = match Repository::open(args.path) {
        Ok(repo) => repo,
        Err(e) => panic!("failed to open: {}", e),
    };

    let mut revwalker = repo.revwalk().expect("Unable to create Revwalker.");
    revwalker.simplify_first_parent().expect("Settings error");
    revwalker.push_head().expect("Couldn't push head.");

    // One tree per commit, in revwalk order (newest first).
    let trees: Vec<_> = revwalker
        .map(|oid| oid.and_then(|oid| repo.find_commit(oid).and_then(|commit| commit.tree())))
        .collect();

    // BUG FIX: the original used `chunks(2)`, which pairs commits as
    // (0,1), (2,3), … and silently skips the (1,2), (3,4), … pairs.
    // `windows(2)` yields every consecutive pair, so each commit is
    // actually diffed against its neighbor in history.
    for pair in trees.windows(2) {
        if let [prev, next] = pair {
            let diff = repo
                .diff_tree_to_tree(
                    Some(prev.as_ref().unwrap()),
                    Some(next.as_ref().unwrap()),
                    None,
                )
                .expect("Couldn't diff.");

            println!("{:?}", diff.stats());

            // `print` returns a Result the original silently dropped
            // (a must_use warning) — surface the error instead.
            diff.print(DiffFormat::Patch, |_delta, hunk, line| {
                println!(
                    "Hunk: {:?}",
                    String::from_utf8_lossy(hunk.map(|h| h.header()).unwrap_or_default())
                );
                println!("Line: {:?}", String::from_utf8_lossy(line.content()));
                true
            })
            .expect("Couldn't print diff.");
        }
    }
}
//! Top-level lib.rs for `cretonne_simplejit`. #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)] #![warn(unused_import_braces, unstable_features)] #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))] #![cfg_attr(feature = "cargo-clippy", allow(new_without_default, new_without_default_derive))] #![cfg_attr(feature="cargo-clippy", warn( float_arithmetic, mut_mut, nonminimal_bool, option_map_unwrap_or, option_map_unwrap_or_else, print_stdout, unicode_not_nfc, use_self, ))] extern crate cretonne_codegen; extern crate cretonne_module; extern crate cretonne_native; extern crate errno; extern crate region; extern crate libc; #[cfg(target_os = "windows")] extern crate winapi; mod backend; mod memory; pub use backend::{SimpleJITBuilder, SimpleJITBackend};
use::std::ops;
/// Configuration for a single valve, deserialized from the settings source
/// (hence the `Deserialize` derive).
#[derive(Debug, Deserialize, PartialEq, Eq, Clone)]
pub struct ValveSettings {
    // Human-readable identifier for this valve.
    pub name: String,
    // NOTE(review): presumably the address/path of the controlling socket —
    // confirm the expected format against the code that consumes it.
    pub socket: String,
    // GPIO pin number driving the valve — numbering scheme (BCM vs. board)
    // not visible here; TODO confirm.
    pub gpio: u8
}
//! Components that are useful for all types of entities pub mod npc; pub mod map;
use actix_web::{ web, web::{post, resource, ServiceConfig}, HttpResponse, Result, }; use reqwest::{self, Error, Response}; use serde::{Deserialize, Serialize}; use serde_json::json; #[derive(Debug, Deserialize, Serialize)] pub struct UserNotificationForm { token: String, from: String, to: String, message: String, } pub fn config(cfg: &mut ServiceConfig) { cfg.service(resource("/api/v2/notification").route(post().to(send_notification))); } pub async fn notify_creation(to: &str, name: &str, ticket_id: i32) -> Result<Response, Error> { let host = "https://mon-ticket.azap.io"; let msg = format!( r#"Félicitations {}, ⌛ Tu es bien inscrit dans la file, tu peux consulter ton temps d'attente ici : {}/mobile-view/{} 📩 On te prévient par message avant ton passage 👀 Ne t'éloigne pas trop et surveille ton portable pour ne pas perdre ta place A tout de suite"#, name, host, ticket_id ); notify_client(to, &msg).await } //#[cfg!(feature(print-sms))] pub async fn notify_your_turn(to: &str, name: &str) -> Result<Response, Error> { notify_client( to, &format!("{}, c'est à toi !\n\nMerci d'avoir patienté 😉", name), ) .await } pub async fn notify_get_closer(to: &str, name: &str, id: i32) -> Result<Response, Error> { let host = "https://mon-ticket.azap.io"; let msg = format!( r#" {}, La personne juste avant toi viens de passer sur le siège 🔜💺, rapproche toi du salon c'est bientôt à toi : {}/mobile-view/{} "#, name, host, id ); notify_client(to, &msg).await } async fn notify_client(to: &str, message: &str) -> Result<Response, Error> { //fixme: retrive from DB / use store ID? 
//fixme: get url from env and build test server with echo let token = "123".into(); let from = "+33766322917".into(); let param = UserNotificationForm { token, from, to: to.into(), message: message.into(), }; let client = reqwest::Client::new(); client .post("https://azap-sms-gateway.herokuapp.com/send-sms/") .json(&param) .send() .await } #[derive(Deserialize)] #[serde(rename_all(deserialize = "camelCase"))] pub struct NotificationForm { message: String, phone_number: String, } pub async fn send_notification( notification_form: web::Json<NotificationForm>, ) -> Result<HttpResponse> { let notification_form = notification_form.into_inner(); let res = notify_client(&notification_form.phone_number, &notification_form.message).await; match res { Ok(_) => Ok(HttpResponse::Ok().json(json!({"status":"ok"}))), Err(_) => Ok(HttpResponse::BadRequest().json(json!({}))), } }
use crate::utils::Ops;
use erased_serde::serialize_trait_object;
use std::any::Any;

/// Base trait implemented by every AST node type.
///
/// The supertraits make boxed trait objects (`Box<dyn AstBase>`) cloneable
/// (`dyn_clone::DynClone`), serializable (`erased_serde::Serialize`) and
/// debug-printable; the macro invocations below wire those capabilities up
/// for the trait-object case.
pub trait AstBase: dyn_clone::DynClone + erased_serde::Serialize + std::fmt::Debug {
    /// Discriminant describing which kind of node this is.
    fn get_type(&self) -> Ops;
    /// Downcasting escape hatch: exposes the concrete node as `&dyn Any`.
    fn as_self(&self) -> &dyn Any;
}

// Enable `Clone` and serde serialization on `Box<dyn AstBase>` trait objects.
dyn_clone::clone_trait_object!(AstBase);
serialize_trait_object!(AstBase);
#![doc(html_root_url = "https://docs.rs/rustfm-scrobble/1.0.0")] #![deny(clippy::all)] #![deny(clippy::pedantic)] //! # rustfm-scrobble //! //! Client for the Last.fm Scrobble API v2.0. Allows easy access to the most-commonly used Scrobble/Now Playing //! endpoints in the Last.fm API, as well as robust support for multiple authentication flows. More advanced API //! features such as metadata correction are also exposed to help build more sophisticated Scrobble clients. //! //! The primary types to use are `Scrobbler` - the actual client, which you will authenticate and then use to send //! scrobble requests - and `Scrobble` - which represents a single track played at a point in time. An example using //! these types to scrobble a track to Last.fm is given below. //! //! # Example usage //! ```ignore //! use rustfm_scrobble::{Scrobble, Scrobbler}; //! use std::error::Error; //! //! fn main() -> Result<(), Box<dyn Error>> { //! let api_key = "{{api_key}}"; //! let api_secret = "{{api_secret}}"; //! let username = "{{username}}"; //! let password = "{{password}}"; //! //! let mut scrobbler = Scrobbler::new(api_key, api_secret); //! //! let response = scrobbler.authenticate_with_password(username, password)?; //! println!("Authenticated! {:#?}", response); //! //! let track = Scrobble::new("Los Campesinos!", "To Tundra", "No Blues"); //! let response = scrobbler.now_playing(&track)?; //! println!("Sent now playing! {:#?}", response); //! //! let response = scrobbler.scrobble(&track)?; //! println!("Sent scrobble! {:#?}", response); //! //! Ok(()) //! } //! ``` //! //! *Note:* This crate does not implement any of the logic to comply with Last.fm's scrobbling rules. Typical //! ("real-time") implementations will likely want to adhere to these rules, outlined in Last.fm's //! [API Documentation](https://www.last.fm/api/scrobbling#scrobble-requests). Other implementations may choose to //! ignore these guidelines. 
This crate provides the flexibility to develop any type of Scrobbling application. //! #[macro_use] extern crate wrapped_vec; mod auth; mod client; mod error; mod models; mod scrobbler; pub use crate::models::metadata::{Scrobble, ScrobbleBatch}; pub use crate::scrobbler::Scrobbler; pub use crate::error::ScrobblerError; /// Last.fm API Response Types /// /// Types used to represent responses from the Last.fm API pub mod responses { pub use crate::models::responses::{ BatchScrobbleResponse, NowPlayingResponse, ScrobbleResponse, SessionResponse, }; /// Data types used to represent values in API Response types pub mod values { pub use crate::models::responses::{CorrectableString, ScrobbleList}; } }
impl Solution {
    /// LeetCode 1846: after rearranging the array and decrementing elements so
    /// that `arr[0] == 1` and adjacent elements differ by at most 1, return the
    /// largest possible value of the maximum element.
    ///
    /// Greedy: sort ascending, pin the first element to 1, then cap every
    /// element at `previous + 1`; the last element is the answer. O(n log n).
    pub fn maximum_element_after_decrementing_and_rearranging(mut arr: Vec<i32>) -> i32 {
        // Robustness: the original indexed `arr[0]` unconditionally and
        // panicked on an empty vector; an empty arrangement has maximum 0.
        if arr.is_empty() {
            return 0;
        }
        arr.sort_unstable();
        arr[0] = 1;
        for i in 1..arr.len() {
            arr[i] = arr[i].min(arr[i - 1] + 1);
        }
        // Non-empty is guaranteed by the guard above.
        *arr.last().unwrap()
    }
}
use totsu::prelude::*; use totsu::MatBuild; use totsu::totsu_core::{MatOp, solver::{Operator, LinAlg}}; use super::La; // pub struct ProbOpB<'a> { x_sz: usize, t_sz: usize, target_lx_norm1: f64, one: MatBuild<La>, xh: MatOp<'a, La>, } impl<'a> ProbOpB<'a> { pub fn new(width: usize, height: usize, ratio: f64, vec_xh: &'a[f64]) -> Self { let target_lx_norm1 = (width * height) as f64 * ratio; log::info!("target_lx_norm1: {}", target_lx_norm1); ProbOpB { x_sz: width * height, t_sz: (width - 2) * (height - 2), target_lx_norm1: target_lx_norm1, one: MatBuild::new(MatType::General(width * height, 1)) .by_fn(|_, _| 1.0), xh: MatOp::new(MatType::General(width * height, 1), vec_xh), } } } impl<'a> Operator<La> for ProbOpB<'a> { fn size(&self) -> (usize, usize) { (self.t_sz * 2 + 1 + self.x_sz * 2 + 1 + self.x_sz, 1) } fn op(&self, alpha: f64, x: &[f64], beta: f64, y: &mut[f64]) { let (y_lp_ln, y_rest) = y.split_at_mut(self.t_sz * 2); let (y_l1, y_rest) = y_rest.split_at_mut(1); let (y_xp, y_rest) = y_rest.split_at_mut(self.x_sz); let (y_xn, y_rest) = y_rest.split_at_mut(self.x_sz); let (y_sz, y_sx) = y_rest.split_at_mut(1); La::scale(beta, y_lp_ln); y_l1[0] = alpha * self.target_lx_norm1 * x[0] + beta * y_l1[0]; La::scale(beta, y_xp); self.one.as_op().op(alpha, x, beta, y_xn); La::scale(beta, y_sz); self.xh.op(-alpha, x, beta, y_sx); } fn trans_op(&self, alpha: f64, x: &[f64], beta: f64, y: &mut[f64]) { let (_x_lp_ln, x_rest) = x.split_at(self.t_sz * 2); let (x_l1, x_rest) = x_rest.split_at(1); let (_x_xp, x_rest) = x_rest.split_at(self.x_sz); let (x_xn, x_rest) = x_rest.split_at(self.x_sz); let (_x_sz, x_sx) = x_rest.split_at(1); self.one.as_op().trans_op(alpha, x_xn, beta, y); self.xh.trans_op(-alpha, x_sx, 1.0, y); y[0] += alpha * self.target_lx_norm1 * x_l1[0]; } fn absadd_cols(&self, tau: &mut[f64]) { tau[0] += self.target_lx_norm1.abs() + self.x_sz as f64; self.xh.absadd_cols(tau); } fn absadd_rows(&self, sigma: &mut[f64]) { let (_sigma_lp_ln, sigma_rest) = 
sigma.split_at_mut(self.t_sz * 2); let (sigma_l1, sigma_rest) = sigma_rest.split_at_mut(1); let (_sigma_xp, sigma_rest) = sigma_rest.split_at_mut(self.x_sz); let (sigma_xn, sigma_rest) = sigma_rest.split_at_mut(self.x_sz); let (_sigma_sz, sigma_sx) = sigma_rest.split_at_mut(1); sigma_l1[0] += self.target_lx_norm1.abs(); La::adds(1., sigma_xn); self.xh.absadd_rows(sigma_sx); } } #[test] fn test_trans_op() { use float_eq::assert_float_eq; let n = 32; let vec_xh = vec![1.0; n * n]; let op = ProbOpB::new(n, n, 1.0, &vec_xh); let sz = op.size(); let xi = vec![1.; sz.0]; let mut yo = vec![0.; sz.1]; op.trans_op(1., &xi, 0., &mut yo); let mut yo_ref = vec![0.; sz.1]; utils::operator_ref::trans_op::<La, _>( op.size(), |x, y| op.op(1., x, 0., y), 1., &xi, 0., &mut yo_ref); assert_float_eq!(yo, yo_ref, abs_all <= 1e-6); } #[test] fn test_abssum_cols() { use float_eq::assert_float_eq; let n = 32; let vec_xh = vec![1.0; n * n]; let op = ProbOpB::new(n, n, 1.0, &vec_xh); let sz = op.size(); let mut tau = vec![0.; sz.1]; op.absadd_cols(&mut tau); let mut tau_ref = vec![0.; sz.1]; utils::operator_ref::absadd_cols::<La, _>( op.size(), |x, y| op.op(1., x, 0., y), &mut tau_ref ); assert_float_eq!(tau, tau_ref, abs_all <= 1e-6); } #[test] fn test_abssum_rows() { use float_eq::assert_float_eq; let n = 32; let vec_xh = vec![1.0; n * n]; let op = ProbOpB::new(n, n, 1.0, &vec_xh); let sz = op.size(); let mut sigma = vec![0.; sz.0]; op.absadd_rows(&mut sigma); let mut sigma_ref = vec![0.; sz.0]; utils::operator_ref::absadd_rows::<La, _>( op.size(), |x, y| op.trans_op(1., x, 0., y), &mut sigma_ref ); assert_float_eq!(sigma, sigma_ref, abs_all <= 1e-6); }
use crate::cli::login::Login; use crate::cli::news::News; use crate::cli::query::Query; use crate::cli::thread::Thread; use crate::cli::tree::Tree; use crate::cli::HnCommand; use crate::error::HnError; use clap::App; use clap::ArgMatches; /// Top level parser/cmd for the cli pub struct HackerNews; impl HnCommand for HackerNews { const NAME: &'static str = "hackernews"; fn parser<'a, 'b>() -> App<'a, 'b> { App::new(Self::NAME) .subcommand(Query::parser()) .subcommand(Tree::parser()) .subcommand(News::parser()) .subcommand(Login::parser()) .subcommand(Thread::parser()) } fn cmd(matches: &ArgMatches) -> Result<(), Box<HnError>> { match matches.subcommand() { (Query::NAME, Some(matches)) => Query::cmd(matches).map_err(|e| { log::error!("hackernews subcommand {:?} failed", Query::NAME); e }), (Tree::NAME, Some(matches)) => Tree::cmd(matches).map_err(|e| { log::error!("hackernews subcommand {:?} failed", Tree::NAME); e }), (News::NAME, Some(matches)) => News::cmd(matches).map_err(|e| { log::error!("hackernews subcommand {:?} failed", News::NAME); e }), (Login::NAME, Some(matches)) => Login::cmd(matches).map_err(|e| { log::error!("hackernews subcommand {:?} failed", Login::NAME); e }), (Thread::NAME, Some(matches)) => Thread::cmd(matches).map_err(|e| { log::error!("hackernews subcommand {:?} failed", Thread::NAME); e }), // Lack of a subcommand defaults to listing the current HN front page _ => News::cmd(matches).map_err(|e| { log::error!("hackernews subcommand {:?} failed", News::NAME); e }), } } }
use std::env; fn gcd2(mut a: u64, mut b: u64) -> u64 { while b != 0 { let c = b; b = a % b; a = c; } a } fn main() { // XXX skip(1) to skip program name: let nums = env::args().skip(1).map(|s| s.parse().unwrap()); let gcd = nums.fold(0, gcd2); println!("{}", gcd); }
use impl_ops::*; use rand::Rng; use std::ops::{self, Neg, Range}; #[derive(Debug, Clone, Copy)] pub struct Vec3 { x: f64, y: f64, z: f64, } impl Neg for Vec3 { type Output = Self; fn neg(self) -> Self::Output { self * -1.0 } } impl_op_ex!(+ |lhs: &Vec3, rhs: &Vec3| -> Vec3 { Vec3 { x: lhs.x + rhs.x, y: lhs.y + rhs.y, z: lhs.z + rhs.z, } }); impl_op_ex!(-|lhs: &Vec3, rhs: &Vec3| -> Vec3 { Vec3 { x: lhs.x - rhs.x, y: lhs.y - rhs.y, z: lhs.z - rhs.z, } }); impl_op_ex!(*|lhs: &Vec3, rhs: &Vec3| -> Vec3 { Vec3 { x: lhs.x * rhs.x, y: lhs.y * rhs.y, z: lhs.z * rhs.z, } }); impl_op_ex!(*|lhs: &Vec3, rhs: f64| -> Vec3 { Vec3 { x: lhs.x * rhs, y: lhs.y * rhs, z: lhs.z * rhs, } }); impl_op_ex!(*|lhs: f64, rhs: &Vec3| -> Vec3 { rhs * lhs }); impl_op_ex!(/ |lhs: &Vec3, rhs: f64| -> Vec3 { Vec3 { x: lhs.x / rhs, y: lhs.y / rhs, z: lhs.z / rhs, } }); impl_op_ex!(/ |lhs: f64, rhs: &Vec3| -> Vec3 { rhs / lhs }); impl Vec3 { pub fn new(x: f64, y: f64, z: f64) -> Self { Self { x, y, z } } pub fn x(&self) -> f64 { self.x } pub fn y(&self) -> f64 { self.y } pub fn z(&self) -> f64 { self.z } pub fn random_in_unit_sphere() -> Self { let mut rng = rand::thread_rng(); loop { let v = Self { x: rng.gen_range::<f64, Range<f64>>(-1.0..1.0), y: rng.gen_range::<f64, Range<f64>>(-1.0..1.0), z: rng.gen_range::<f64, Range<f64>>(-1.0..1.0), }; if v.length_squared() < 1.0 { return v; } } } pub fn dot(&self, rhs: &Self) -> f64 { self.x * rhs.x + self.y * rhs.y + self.z * rhs.z } pub fn cross(&self, rhs: &Self) -> Self { Self { x: self.y * rhs.z - self.z * rhs.y, y: self.z * rhs.x - self.x * rhs.z, z: self.x * rhs.y - self.y * rhs.x, } } pub fn reflect(&self, norm: &Self) -> Self { self - 2.0 * self.dot(norm) * norm } pub fn refract(&self, norm: &Self, refraction_ratio: f64) -> Self { let uv = self.unit_vector(); let cos_theta = (-uv).dot(norm).min(1.0); let r_out_perp = refraction_ratio * (uv + cos_theta * norm); let r_out_parallel = -(1.0 - r_out_perp.length_squared()).abs().sqrt() * norm; r_out_perp 
+ r_out_parallel } pub fn unit_vector(&self) -> Self { self / self.length() } pub fn length(&self) -> f64 { self.length_squared().sqrt() } pub fn length_squared(&self) -> f64 { self.dot(self) } pub fn near_zero(&self) -> bool { self.x.abs() < 1e-8 && self.y.abs() < 1e-8 && self.z.abs() < 1e-8 } } pub type Color = Vec3;
use std::io::{stdin, Read, StdinLock}; use std::str::FromStr; #[allow(dead_code)] struct Scanner<'a> { cin: StdinLock<'a>, } #[allow(dead_code)] impl<'a> Scanner<'a> { fn new(cin: StdinLock<'a>) -> Scanner<'a> { Scanner { cin: cin } } fn read<T: FromStr>(&mut self) -> Option<T> { let token = self .cin .by_ref() .bytes() .map(|c| c.unwrap() as char) .skip_while(|c| c.is_whitespace()) .take_while(|c| !c.is_whitespace()) .collect::<String>(); token.parse::<T>().ok() } fn input<T: FromStr>(&mut self) -> T { self.read().unwrap() } fn vec<T: FromStr>(&mut self, len: usize) -> Vec<T> { (0..len).map(|_| self.input()).collect() } fn mat<T: FromStr>(&mut self, row: usize, col: usize) -> Vec<Vec<T>> { (0..row).map(|_| self.vec(col)).collect() } } fn gcd(m: i64, n: i64) -> i64 { if m < n { gcd(n, m) } else { if n == 0 { m } else { gcd(n, m % n) } } } fn mod_pow(x: i64, n: i64, modulo: i64) -> i64 { let mut n = n; let mut x = x; let mut res = 1; while n > 0 { if n & 1 == 1 { res = res * x % modulo; } x = x * x % modulo; n >>= 1; } res } #[derive(Clone, Copy)] struct ModInt(usize); const MOD: usize = 1_000_000_007; #[allow(dead_code)] impl ModInt { fn new(n: usize) -> ModInt { ModInt(n % MOD) } fn zero() -> ModInt { ModInt(0) } fn one() -> ModInt { ModInt(1) } fn pow(self, mut n: usize) -> ModInt { let mut ret = ModInt::one(); let mut x = self; while n > 0 { if n & 1 == 1 { ret *= x; } x *= x; n >>= 1; } ret } fn inv(self) -> ModInt { assert!(self.0 > 0); self.pow(MOD - 2) } } impl std::ops::Add for ModInt { type Output = ModInt; fn add(self, rhs: ModInt) -> Self::Output { let mut d = self.0 + rhs.0; if d >= MOD { d -= MOD; } ModInt(d) } } impl std::ops::AddAssign for ModInt { fn add_assign(&mut self, rhs: ModInt) { *self = *self + rhs; } } impl std::ops::Sub for ModInt { type Output = ModInt; fn sub(self, rhs: ModInt) -> Self::Output { let mut d = self.0 + MOD - rhs.0; if d >= MOD { d -= MOD; } ModInt(d) } } impl std::ops::SubAssign for ModInt { fn sub_assign(&mut self, rhs: 
ModInt) { *self = *self - rhs; } } impl std::ops::Mul for ModInt { type Output = ModInt; fn mul(self, rhs: ModInt) -> Self::Output { ModInt((self.0 * rhs.0 % MOD)) } } impl std::ops::MulAssign for ModInt { fn mul_assign(&mut self, rhs: ModInt) { *self = *self * rhs; } } impl std::ops::Neg for ModInt { type Output = ModInt; fn neg(self) -> Self::Output { ModInt(if self.0 == 0 { 0 } else { MOD - self.0 }) } } impl std::fmt::Display for ModInt { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", self.0) } } impl std::str::FromStr for ModInt { type Err = std::num::ParseIntError; fn from_str(s: &str) -> Result<Self, Self::Err> { let val = s.parse::<usize>()?; Ok(ModInt::new(val)) } } use std::collections::BTreeMap; fn main() { let cin = stdin(); let cin = cin.lock(); let mut sc = Scanner::new(cin); let n: usize = sc.input(); let mut arr: Vec<(i64, i64)> = (0..n).map(|_| (sc.input(), sc.input())).collect(); let mut map = BTreeMap::new(); for p in arr.iter_mut() { if p.0 == 0 || p.1 == 0 { if p.0 != 0 { p.0 = 1; } if p.1 != 0 { p.1 = 1; } } else { let g = gcd(p.0.abs(), p.1.abs()); if p.0 < 0 { p.0 = -p.0; p.1 = -p.1; } p.0 /= g; p.1 /= g; } *map.entry((p.0, p.1)).or_insert(0) += 1; } let mut ans = ModInt::one(); let zero = map.remove(&(0, 0)).map_or(ModInt::zero(), |c| ModInt(c)); while let Some((&key, &c)) = map.iter().next() { map.remove(&key); let p = if key.1 < 0 { (-key.1, key.0) } else { (key.1, -key.0) }; if let Some(d) = map.remove(&p) { ans *= ModInt(2).pow(c) + ModInt(2).pow(d) - ModInt::one(); } else { ans *= ModInt(2).pow(c); } } ans += zero - ModInt::one(); println!("{}", ans); }
//! Provides information about the initial status of the system. mod freestanding; mod multiboot; mod multiboot2; #[cfg(target_arch = "x86_64")] use arch::{self, vga_buffer, Architecture}; use memory::{Address, MemoryArea, PhysicalAddress, PAGE_SIZE}; /// Lists possiblities for boot sources. enum BootMethod { /// No known bootloader could be found. Unknown, /// The system was booted using multiboot. Multiboot, /// The system was booted using multiboot2. Multiboot2 } /// The memory area containing the initramfs. fn initramfs() -> MemoryArea<PhysicalAddress> { let area = get_initramfs_area(); // Align to the previous page. let initramfs_start = area.start_address().page_align_down(); // Round up the the next page boundary. let initramfs_length = area.length(); let initramfs_length = if initramfs_length > 0 { (initramfs_length - 1) / PAGE_SIZE * PAGE_SIZE + PAGE_SIZE } else { 0 }; MemoryArea::new(initramfs_start, initramfs_length) } /// Provides an iterator for a memory map. pub struct MemoryMapIterator { multiboot_iterator: multiboot::MemoryMapIterator, to_exclude: [MemoryArea<PhysicalAddress>; 2], current_entry: Option<MemoryArea<PhysicalAddress>>, exclude_index: usize } impl MemoryMapIterator { /// Creates a new memory map iterator. 
fn new() -> MemoryMapIterator { let kernel_area = arch::Current::get_kernel_area(); let initramfs_area = initramfs(); let to_exclude = if kernel_area.start_address() <= initramfs_area.start_address() { [kernel_area, initramfs_area] } else { [initramfs_area, kernel_area] }; let mut multiboot_iterator = multiboot::get_memory_map(); let current_entry = multiboot_iterator.next(); let exclude_index = 0; MemoryMapIterator { multiboot_iterator, to_exclude, current_entry, exclude_index } } } impl Iterator for MemoryMapIterator { type Item = MemoryArea<PhysicalAddress>; fn next(&mut self) -> Option<MemoryArea<PhysicalAddress>> { // NOTE: This assumes function makes a few assumptions to work properly: // - The to_exclude list must be ordered by the start addresses. // - The to_exclude entries must not overlap. // - The memory areas must not overlap. // - A to_exclude entry must lie completely within a memory area. let get_next_entry = |iterator: &mut MemoryMapIterator| match *get_boot_method() { BootMethod::Multiboot => iterator.multiboot_iterator.next(), _ => unimplemented!() }; loop { return if let Some(current_entry) = self.current_entry { if self.exclude_index >= self.to_exclude.len() { // If all the exclude areas were handled. self.current_entry = get_next_entry(self); Some(current_entry) } else { // Handle the exclude areas. if self.to_exclude[self.exclude_index].is_contained_in(current_entry) { // The area to exclude is contained in the current free entry. 
let (entry_before, entry_after) = { let exclude_area = &self.to_exclude[self.exclude_index]; ( MemoryArea::new( current_entry.start_address(), exclude_area.start_address() - current_entry.start_address() ), MemoryArea::new( exclude_area.end_address(), current_entry.end_address() - exclude_area.end_address() ) ) }; self.exclude_index += 1; if entry_after.end_address() == entry_after.start_address() { self.current_entry = get_next_entry(self); } else { self.current_entry = Some(entry_after); } if entry_before.end_address() == entry_before.start_address() { continue; } else { Some(entry_before) } } else { self.current_entry = get_next_entry(self); Some(current_entry) } } } else { None }; } } } /// The method that the system was booted with. // This will only be set once very early. After that it can be assumed to be // static. static mut BOOT_METHOD: BootMethod = BootMethod::Unknown; /// Initializes the boot module and all the data it provides. pub fn init(magic_number: u32, information_structure_address: usize) { assert_has_not_been_called!("Boot information should only be initialized once."); set_boot_method(magic_number); match *get_boot_method() { BootMethod::Multiboot2 => multiboot2::init(information_structure_address), BootMethod::Multiboot => multiboot::init(information_structure_address), _ => freestanding::init() }; } /// Identifies the boot method. fn set_boot_method(magic_number: u32) { unsafe { BOOT_METHOD = match magic_number { 0x36d76289 => BootMethod::Multiboot2, 0x2badb002 => BootMethod::Multiboot, _ => BootMethod::Unknown } } } /// Returns the method the system was booted with. fn get_boot_method() -> &'static BootMethod { unsafe { &BOOT_METHOD } } /// Returns information about the VGA buffer. #[cfg(target_arch = "x86_64")] pub fn get_vga_info() -> vga_buffer::Info { match *get_boot_method() { BootMethod::Multiboot2 => multiboot2::get_vga_info(), _ => freestanding::get_vga_info() } } /// Returns the name of the boot loader. 
pub fn get_bootloader_name() -> &'static str { match *get_boot_method() { BootMethod::Multiboot2 => multiboot2::get_bootloader_name(), BootMethod::Multiboot => multiboot::get_bootloader_name(), _ => "no boot loader" } } /// Returns the memory area of the initramfs. pub fn get_initramfs_area() -> MemoryArea<PhysicalAddress> { match *get_boot_method() { BootMethod::Multiboot => multiboot::get_initramfs_area(), _ => unimplemented!() } } /// Returns an iterator for the map of usable memory. pub fn get_memory_map() -> MemoryMapIterator { MemoryMapIterator::new() }
use crate::{config::Config, Server}; use anyhow::{anyhow, Context, Result}; use futures::{io::BufReader, AsyncBufReadExt, AsyncReadExt, StreamExt}; use log::{debug, error, info}; use serde::Serialize; use spa_server::re_export::{ error::{ErrorBadRequest, ErrorInternalServerError}, get, http::StatusCode, post, web, HttpMessage, HttpRequest, HttpResponse, }; use std::{ convert::{TryFrom, TryInto}, fs::{self, create_dir_all}, io::{ErrorKind, Write}, path::{Path, PathBuf}, process::{Command, Stdio}, sync::Arc, }; use tokio::{ sync::{Mutex, RwLock}, time, }; pub struct Git { http_backend: PathBuf, config: Arc<RwLock<Config>>, #[doc(hidden)] pub inited: Mutex<bool>, #[doc(hidden)] pub busy: Mutex<bool>, } struct GitCmd<'a> { cmd: &'static str, dir: Option<&'a Path>, } #[cfg(test)] mod test { use super::convert_args; #[test] fn test_convert_args() { let test_str = "commit -m \"change config.json to mirror\""; let result = vec!["commit", "-m", "\"change config.json to mirror\""]; assert_eq!(result, convert_args(test_str).unwrap()) } } fn convert_args(args: &str) -> anyhow::Result<Vec<String>> { let args: Vec<&str> = args.split_whitespace().collect(); let mut args_fixed = Vec::new(); let mut quote_args = String::new(); let mut in_quote = false; for index in 0..args.len() { if args[index].starts_with('"') { quote_args = args[index].to_string(); in_quote = true; continue; } if in_quote { quote_args.push(' '); quote_args.push_str(args[index]); if args[index].ends_with('"') { args_fixed.push(quote_args.clone()); in_quote = false; } } else { args_fixed.push(args[index].to_string()); } } if in_quote { return Err(anyhow!("found single quote args, abort")); } Ok(args_fixed) } impl<'a> GitCmd<'a> { fn dir<'b: 'a>(dir: &'b Path) -> Self { GitCmd { cmd: "git", dir: Some(dir), } } #[allow(dead_code)] fn new() -> Self { GitCmd { cmd: "git", dir: None, } } #[allow(dead_code)] fn cmd(cmd: &'static str) -> Self { GitCmd { cmd, dir: None } } fn run(&self, args: impl AsRef<str>) -> 
anyhow::Result<String> { debug!("command input : git {}", args.as_ref()); let mut cmd = Command::new(self.cmd); if let Some(dir) = self.dir { cmd.current_dir(dir); } let args = convert_args(args.as_ref())?; debug!("args: {:?}", args); let output = cmd .args(args) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn()? .wait_with_output() .context("run git command failed")?; if output.status.success() { let o = String::from_utf8_lossy(&*output.stdout); let o = o.trim(); debug!("command output: {}", o); Ok(o.to_string()) } else { Err( anyhow!("{}", String::from_utf8_lossy(&*output.stderr).to_string()) .context("git command error"), ) } } } #[derive(Serialize)] struct IndexConfig { dl: String, api: String, } impl Git { pub async fn new(cfg: Arc<RwLock<Config>>) -> anyhow::Result<Arc<Self>> { let http_backend = find_git_http_backend()?; let git = Arc::new(Git { http_backend, config: cfg.clone(), inited: Mutex::new(false), busy: Mutex::new(false), }); let git_clone = git.clone(); let _ = tokio::spawn(async move { let mut interval = cfg.read().await.registry.interval; debug!("schedule start running, next: {}s", interval.as_secs()); loop { time::sleep(interval).await; interval = cfg.read().await.registry.interval; if *git_clone.inited.lock().await { info!( "sync upstream by schedule now, next: {}s", interval.as_secs() ); if let Err(e) = git_clone.sync_upstream().await { error!("sync upstream by schedule failed: {:?}", e); } else { if let Err(e) = git_clone.sync_upstream().await { error!("sync index by schedule failed: {:?}", e); } } } } }); Ok(git) } pub async fn initialize(&self) -> Result<()> { self.init_repo().await?; self.sync_upstream().await?; self.modify_config_json().await?; self.sync_index().await?; Ok(()) } pub async fn sync_index(&self) -> Result<()> { GitCmd::dir(&self.config.read().await.git.working_path) .run("push origin master") .context("sync with index failed")?; Ok(()) } pub async fn commit(&self, message: impl AsRef<str>) -> anyhow::Result<()> { let 
working_path = &self.config.read().await.git.working_path; GitCmd::dir(working_path).run("add .")?; GitCmd::dir(working_path).run(format!("commit -m \"{}\"", message.as_ref()))?; Ok(()) } async fn modify_config_json(&self) -> Result<()> { let cfg = self.config.read().await; let config_json_path = cfg.git.working_path.join("config.json"); let content = fs::read_to_string(config_json_path).context("can not read config.json content")?; let config_json = serde_json::to_string_pretty(&IndexConfig { dl: format!("{}/api/v1/crates", cfg.registry.address), api: cfg.registry.address.clone(), }) .context("generate config.json failed")?; if content != config_json { fs::write(cfg.git.working_path.join("config.json"), config_json) .context("write config.json failed")?; GitCmd::dir(&cfg.git.working_path) .run("add .") .context("modify config json, run git add . failed")?; GitCmd::dir(&cfg.git.working_path) .run("commit -m \"change config.json to mirror\"") .context("modify config json, run git commit failed")?; } Ok(()) } async fn sync_upstream(&self) -> anyhow::Result<()> { // here --progress flag must be set GitCmd::dir(&self.config.read().await.git.working_path) .run("pull --progress upstream master") .context("sync with upstream failed")?; Ok(()) } async fn init_repo(&self) -> Result<()> { let cfg = self.config.read().await; let mut bare_repo_inited = false; let mut work_tree_inited = false; if !Path::new(&cfg.git.index_path).exists() { create_dir_all(&cfg.git.index_path)?; } if let Ok(r) = GitCmd::dir(&cfg.git.index_path).run("rev-parse --is-bare-repository") { if r == "true" { bare_repo_inited = true; debug!("{:?} is a bare repo already", cfg.git.index_path); } } if !bare_repo_inited { info!("git init --bare for {:?}", cfg.git.index_path); if let Err(e) = GitCmd::dir(&cfg.git.index_path).run("init --bare") { return Err(e.context("git init --bare failed")); } } if !Path::new(&cfg.git.working_path).exists() { create_dir_all(&cfg.git.working_path)?; } if let Ok(r) = 
GitCmd::dir(&cfg.git.working_path).run("rev-parse --is-inside-work-tree") { if r == "true" { debug!("{:?} is a work tree already", cfg.git.working_path); work_tree_inited = true; } } if !work_tree_inited { info!("git clone for {:?}", cfg.git.working_path); if let Err(e) = GitCmd::dir(&cfg.git.working_path.parent().ok_or(anyhow!( "work tree {:?} has no parent", cfg.git.working_path ))?) .run(format!( "clone {} {}", cfg.git.index_path.to_string_lossy().to_string(), cfg.git .working_path .file_name() .ok_or(anyhow!("work tree {:?} has no filename"))? .to_string_lossy() .to_string() )) { return Err(e.context("git clone failed")); } info!("git remote add upstream {}", cfg.git.upstream_url); if let Err(e) = GitCmd::dir(&cfg.git.working_path) .run(format!("remote add upstream {}", cfg.git.upstream_url)) { return Err(e.context("git add remote failed")); } } Ok(()) } } fn find_git_http_backend() -> Result<PathBuf> { if let Err(e) = Command::new("git").stdout(Stdio::null()).spawn() { if ErrorKind::NotFound == e.kind() { panic!("git not found, you need install git first"); } } let output = Command::new("which").arg("git").output()?; if output.status.success() { let git_path = std::str::from_utf8(&*output.stdout)?.trim(); let backend_path1 = Path::new(&git_path.replace("bin/git", "lib")).join("git-core/git-http-backend"); let backend_path2 = Path::new(&git_path.replace("bin/git", "libexec")).join("git-core/git-http-backend"); let http_backend_path = if backend_path1.exists() { backend_path1 } else if backend_path2.exists() { backend_path2 } else { panic!("can not found git-http-backend, upgrade your git"); }; info!("git-http-backend path: {:?}", http_backend_path); return Ok(http_backend_path); } panic!("which command failed! on windows? 
not implement yet"); } #[post("/crates.io-index/.*")] pub(crate) async fn http_backend_post( req: HttpRequest, body: web::Payload, data: web::Data<Server>, ) -> spa_server::re_export::Result<HttpResponse> { http_backend(req, Some(body), data).await } #[get("/crates.io-index/.*")] pub(crate) async fn http_backend_get( req: HttpRequest, data: web::Data<Server>, ) -> spa_server::re_export::Result<HttpResponse> { http_backend(req, None, data).await } async fn http_backend( req: HttpRequest, body: Option<web::Payload>, data: web::Data<Server>, ) -> spa_server::re_export::Result<HttpResponse> { if !*data.git.inited.lock().await { return Err(ErrorBadRequest("System not initialized")); } debug!("git req:{:?}", req); let request_method = req.method().to_string(); let mut path_info = req .uri() .to_string() .replace("/registry/crates.io-index", ""); if let Some(i) = path_info.find('?') { path_info = path_info.split_at(i).0.to_string(); } let mut child = Command::new(&data.git.http_backend) .env("REQUEST_METHOD", request_method) .env("GIT_PROJECT_ROOT", &data.config.read().await.git.index_path) .env("PATH_INFO", path_info) .env("REMOTE_USER", "raven") .env("REMOTE_ADDR", "dahua") .env("QUERY_STRING", req.query_string()) .env("CONTENT_TYPE", req.content_type()) .env("GIT_HTTP_EXPORT_ALL", "") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn()?; if let Some(mut body) = body { let child_stdin = child .stdin .as_mut() .ok_or(ErrorInternalServerError("can not open cgi stdin"))?; while let Some(b) = body.next().await { let b = b?; child_stdin.write_all(&*b)?; } drop(child_stdin); } let o = child.wait_with_output()?; let err = String::from_utf8_lossy(&*o.stderr); if !err.is_empty() { error!("cgi error: {}", err.trim()); } let mut reader = BufReader::new(&*o.stdout); let mut status_code = 200u16; let mut headers = Vec::new(); let mut line = String::new(); while reader.read_line(&mut line).await? 
!= 0 { let l = line.trim(); if l.is_empty() { // next is body part break; } if l.starts_with("Status:") { status_code = l.split(':').collect::<Vec<&str>>()[1] .split(' ') .collect::<Vec<&str>>()[1] .parse() .map_err(|e| ErrorInternalServerError(format!("parse cgi status code: {:?}", e)))?; continue; } if let Some(i) = l.find(':') { let (k, v) = l.split_at(i + 1); headers.push((k.trim_end_matches(':').to_string(), v.trim().to_string())); } else { return Err(ErrorInternalServerError(format!( "unknown part of cgi response: {:?}", l ))); } line.clear(); } let mut body = Vec::new(); reader.read_to_end(&mut body).await?; debug!("cgi return:\n{:?}\nbody size:{}", headers, body.len()); let cgi_resp = CgiResponse { code: status_code, headers: headers .into_iter() .map(|(a, b)| (a.to_string(), b.to_string())) .collect(), body, }; Ok(cgi_resp.try_into()?) } struct CgiResponse { code: u16, headers: Vec<(String, String)>, body: Vec<u8>, } impl TryFrom<CgiResponse> for HttpResponse { type Error = spa_server::re_export::Error; fn try_from(cr: CgiResponse) -> Result<Self, Self::Error> { let mut builder = HttpResponse::build(StatusCode::from_u16(cr.code).map_err(|e| { ErrorInternalServerError(format!("invalid cgi status code: {:?}", e)) })?); for header in cr.headers { builder.append_header(header); } if !cr.body.is_empty() { Ok(builder.body(cr.body)) } else { Ok(builder.finish()) } } }
fn main() {
    println!("Hello, world!");

    // Start from a fully specified struct literal.
    let mut account = User {
        username: String::from("user_i"),
        email: String::from("test@gmail.com"),
        sign_in_count: 2,
        active: true,
    };

    // Fields of a `mut` binding can be reassigned individually.
    account.email = String::from("test");

    println!(
        "email: {}",
        build_user(String::from("test@gmail.com"), String::from("user_i")).email
    );

    // Struct-update syntax: take the remaining fields from `account`.
    let _second_account = User {
        email: String::from("email2"),
        username: String::from("user2"),
        ..account
    };

    // Tuple structs: distinct named types with positional fields.
    struct Color(i32, i32, i32);
    struct Point(i32, i32, i32);

    let black = Color(0, 0, 0);
    let _origin = Point(0, 0, 0);

    // Tuple structs can be destructured just like plain tuples.
    let Color(red, green, blue) = black;
    println!("{}, {}, {}", red, green, blue);
}

/// Builds a `User` from the given credentials, using the field-init
/// shorthand; activity stats get fixed starter values.
fn build_user(email: String, username: String) -> User {
    User {
        email,
        username,
        sign_in_count: 2,
        active: true,
    }
}

/// A basic account record used by the examples above.
struct User {
    username: String,
    email: String,
    sign_in_count: u64,
    active: bool,
}
#[macro_use]
extern crate serde_derive;
mod config;

use native_tls::{TlsConnector, TlsStream};
use std::io::Write;
use std::net::TcpStream;
use std::fs::File;
use std::thread;
use std::time::Duration;

use mqtt::{Encodable, Decodable};
use mqtt::packet::*;
use mqtt::{TopicName};
use mqtt::control::variable_header::ConnectReturnCode;
use clap::{Arg, App};
use config::read_config;

/// Opens a TLS connection to `broker` and performs the MQTT CONNECT
/// handshake with the given credentials.
///
/// `verify_name` is the hostname the server certificate is validated
/// against (it may differ from the address actually dialed).
/// Panics on any connection, handshake, or protocol failure.
fn connect(broker: String, username: String, password: String, client_id: String, verify_name: String) -> TlsStream<TcpStream> {
    let connector = TlsConnector::builder().build().unwrap();
    let stream = TcpStream::connect(&broker).unwrap();
    // TLS handshake; the certificate is checked against `verify_name`.
    let mut stream = connector.connect(&verify_name, stream).unwrap();

    // Build and send the MQTT CONNECT packet: clean session + credentials.
    let mut conn = ConnectPacket::new("MQTT", &client_id);
    conn.set_clean_session(true);
    conn.set_user_name(Some(username).to_owned());
    conn.set_password(Some(password.to_owned()));
    conn.set_client_identifier(client_id);
    let mut buf = Vec::new();
    conn.encode(&mut buf).unwrap();
    stream.write_all(&buf[..]).unwrap();

    // The broker answers with CONNACK; anything but ConnectionAccepted
    // is treated as fatal.
    let connack = ConnackPacket::decode(&mut stream).unwrap();
    if connack.connect_return_code() != ConnectReturnCode::ConnectionAccepted {
        panic!("Failed to connect to server, return code {:?}", connack.connect_return_code());
    }
    return stream;
}

/// Publishes `msg` to `topic` with QoS 1 (fixed packet identifier 10).
/// Panics if encoding or the socket write fails.
fn publish(stream: &mut TlsStream<TcpStream>, msg: String, topic: TopicName) {
    let packet = PublishPacket::new(topic, QoSWithPacketIdentifier::Level1(10), msg);
    let mut buf = Vec::new();
    packet.encode(&mut buf).unwrap();
    stream.write_all(&buf).unwrap();
}

/// Reads the TOML configuration, connects to the broker, then publishes an
/// incrementing counter every 3 seconds forever.
fn main() {
    // CLI: one optional flag pointing at the configuration file.
    let matches = App::new("MQTT publisher")
        .version("0.2.0")
        .author("Claus Matzinger. <claus.matzinger+kb@gmail.com>")
        .about("Sends data to an MQTT broker")
        .arg(Arg::with_name("config")
            .short("c")
            .long("config")
            .help("Sets a custom config file [default: config.toml]")
            .value_name("config.toml")
            .takes_value(true))
        .get_matches();

    let config_filename = matches.value_of("config").unwrap_or("config.toml");
    let mut f = File::open(config_filename)
        .expect(&format!("Can't open configuration file: {}", config_filename));
    let settings = read_config(&mut f).expect("Can't read configuration file.");

    println!("Connecting to mqtts://{}", settings.mqtt.broker_address);
    let topic_name = TopicName::new(settings.mqtt.topic.clone()).unwrap();
    // NOTE(review): `settings.mqtt.broker` is used as the certificate
    // verification name while `broker_address` is dialed — confirm this
    // split is intentional.
    let mut stream = connect(settings.mqtt.broker_address, settings.mqtt.username,
        settings.mqtt.password, settings.mqtt.client_id, settings.mqtt.broker);
    let mut i = 0;
    loop {
        i += 1;
        let msg = format!("{}", i);
        println!("Sending message '{}' to topic: '{}'", msg, settings.mqtt.topic);
        publish(&mut stream, msg, topic_name.clone());
        thread::sleep(Duration::from_millis(3000));
    }
}
use once_cell::sync::OnceCell;
use std::io::{Result, Write};
use std::sync::{Mutex, MutexGuard, PoisonError};
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream as Stream, WriteColor};

/// Process-wide terminal handle, created lazily on first use.
static TERM: OnceCell<Mutex<Term>> = OnceCell::new();

/// Locks the global terminal for exclusive use.
// A poisoned lock is recovered rather than propagated: terminal styling is
// cosmetic, so continuing after a panic elsewhere is harmless.
pub fn lock() -> MutexGuard<'static, Term> {
    TERM.get_or_init(|| Mutex::new(Term::new()))
        .lock()
        .unwrap_or_else(PoisonError::into_inner)
}

/// Switches subsequent output to bold.
pub fn bold() {
    lock().set_color(ColorSpec::new().set_bold(true));
}

/// Switches subsequent output to the given foreground color.
pub fn color(color: Color) {
    lock().set_color(ColorSpec::new().set_fg(Some(color)));
}

/// Switches subsequent output to bold with the given foreground color.
pub fn bold_color(color: Color) {
    lock().set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)));
}

/// Restores the default terminal style.
pub fn reset() {
    lock().reset();
}

// Shadow std's `print!` so all output goes through the global `Term`
// (stderr, with the current color). Write errors are deliberately ignored.
#[deny(unused_macros)]
macro_rules! print {
    ($($args:tt)*) => {{
        use std::io::Write;
        let _ = std::write!($crate::term::lock(), $($args)*);
    }};
}

// Same as the `print!` shadow above, with a trailing newline.
#[deny(unused_macros)]
macro_rules! println {
    ($($args:tt)*) => {{
        use std::io::Write;
        let _ = std::writeln!($crate::term::lock(), $($args)*);
    }};
}

/// Color-capable wrapper around stderr.
pub struct Term {
    // Desired style; applied lazily at the start of each written line.
    spec: ColorSpec,
    stream: Stream,
    // True when the next write begins a new line (or the style changed)
    // and the color must be (re)applied.
    start_of_line: bool,
}

impl Term {
    fn new() -> Self {
        Term {
            spec: ColorSpec::new(),
            stream: Stream::stderr(ColorChoice::Auto),
            start_of_line: true,
        }
    }

    // Records the desired style; the escape codes themselves are emitted
    // by `write` so each output line is individually colored.
    fn set_color(&mut self, spec: &ColorSpec) {
        if self.spec != *spec {
            self.spec = spec.clone();
            self.start_of_line = true;
        }
    }

    fn reset(&mut self) {
        self.spec = ColorSpec::new();
        let _ = self.stream.reset();
    }
}

impl Write for Term {
    // Color one line at a time because Travis does not preserve color setting
    // across output lines.
    fn write(&mut self, mut buf: &[u8]) -> Result<usize> {
        // Fast path: no styling requested, pass straight through.
        if self.spec.is_none() {
            return self.stream.write(buf);
        }
        let len = buf.len();
        while !buf.is_empty() {
            if self.start_of_line {
                let _ = self.stream.set_color(&self.spec);
            }
            match buf.iter().position(|byte| *byte == b'\n') {
                Some(line_len) => {
                    // Write up to and including the newline, then mark the
                    // next line so the color is re-applied.
                    self.stream.write_all(&buf[..line_len + 1])?;
                    self.start_of_line = true;
                    buf = &buf[line_len + 1..];
                }
                None => {
                    // No newline left: write the rest as a partial line.
                    self.stream.write_all(buf)?;
                    self.start_of_line = false;
                    break;
                }
            }
        }
        Ok(len)
    }

    fn flush(&mut self) -> Result<()> {
        self.stream.flush()
    }
}
// Interval timer kinds matching the C `ITIMER_*` constants used with
// `setitimer`/`getitimer`.
// NOTE(review): values mirror the standard POSIX/Linux definitions —
// confirm against the target platform's headers.

/// Timer that decrements in real (wall-clock) time.
pub const ITIMER_REAL: i32 = 0;
/// Timer that decrements only while the process executes in user mode.
pub const ITIMER_VIRTUAL: i32 = 1;
/// Timer that decrements during both user and system execution.
pub const ITIMER_PROF: i32 = 2;
use std::env;
use std::fs;

/// Common interface for password-policy checks parsed from puzzle input.
trait Validator<T> {
    /// Parses one input line of the form `<a>-<b> <char>: <password>`.
    fn new(line: &str) -> T;
    /// Reports whether the stored password satisfies the policy.
    fn valid(&self) -> bool;
}

/// Policy: `character` must occur between `min` and `max` times inclusive.
struct CharacterCountValidator {
    character: char,
    min: u32,
    max: u32,
    password: String,
}

impl Validator<CharacterCountValidator> for CharacterCountValidator {
    fn new(line: &str) -> CharacterCountValidator {
        // "<min>-<max> <char>: <password>"
        let pieces = line.split(": ").collect::<Vec<&str>>();
        let password = pieces
            .last()
            .expect("Password not found on line!")
            .to_string();
        let policy = pieces
            .first()
            .expect("Line could not be parsed!")
            .split(" ")
            .collect::<Vec<&str>>();
        let character = policy
            .last()
            .expect("Character could not be found in the line!")
            .chars()
            .nth(0)
            .expect("Character was empty!");
        let bounds = policy
            .first()
            .expect("Character limits could not be found in line!")
            .split("-")
            .collect::<Vec<&str>>();
        let min = bounds
            .first()
            .expect("Minimal limit could not be split from input line!")
            .parse()
            .unwrap();
        let max = bounds
            .last()
            .expect("Maximum limit could not be split from input line!")
            .parse()
            .unwrap();
        CharacterCountValidator { character, min, max, password }
    }

    fn valid(&self) -> bool {
        let occurrences = self
            .password
            .chars()
            .filter(|&candidate| candidate == self.character)
            .count() as u32;
        (self.min..=self.max).contains(&occurrences)
    }
}

/// Policy: `character` must appear at exactly one of the two `positions`
/// (used directly as 0-based indexes into the password).
struct CharacterPositionValidator {
    character: char,
    positions: [usize; 2],
    password: String,
}

impl CharacterPositionValidator {
    /// `Some(true)` when the password's char at `position` equals
    /// `character`; `None` when the password is too short.
    fn position_valid(&self, position: usize) -> Option<bool> {
        self.password
            .chars()
            .nth(position)
            .map(|candidate| candidate == self.character)
    }
}

impl Validator<CharacterPositionValidator> for CharacterPositionValidator {
    fn new(line: &str) -> CharacterPositionValidator {
        let pieces = line.split(": ").collect::<Vec<&str>>();
        let password = pieces
            .last()
            .expect("Password not found on line!")
            .to_string();
        let policy = pieces
            .first()
            .expect("Line could not be parsed!")
            .split(" ")
            .collect::<Vec<&str>>();
        let character = policy
            .last()
            .expect("Character could not be found in the line!")
            .chars()
            .nth(0)
            .expect("Character was empty!");
        let spots = policy
            .first()
            .expect("Character positions could not be found in line!")
            .split("-")
            .collect::<Vec<&str>>();
        let positions = [
            spots
                .first()
                .expect("First position could not be found in line")
                .parse::<usize>()
                .expect("Could not parse first position!"),
            spots
                .last()
                .expect("First position could not be found in line")
                .parse::<usize>()
                .expect("Could not parse second position!"),
        ];
        CharacterPositionValidator { character, password, positions }
    }

    fn valid(&self) -> bool {
        // Count matching positions; any position past the end of the
        // password invalidates the whole line.
        let mut matching = 0;
        for &position in self.positions.iter() {
            match self.position_valid(position) {
                Some(true) => matching += 1,
                Some(false) => {}
                None => return false,
            }
        }
        matching == 1
    }
}

/// A list of parsed validators of one policy flavour.
struct Challenge<T> {
    challenges: Vec<T>
}

trait ChallengeAnswerer<T> {
    /// Builds a challenge by parsing every line with the validator type.
    fn new<'a, I>(lines: I) -> Challenge<T>
    where
        I: IntoIterator<Item = &'a str>;
    /// Number of lines whose password satisfies the policy.
    fn answer(&self) -> usize;
}

impl ChallengeAnswerer<CharacterPositionValidator> for Challenge<CharacterPositionValidator> {
    fn new<'a, I>(lines: I) -> Challenge<CharacterPositionValidator>
    where
        I: IntoIterator<Item = &'a str>,
    {
        let challenges = lines
            .into_iter()
            .map(CharacterPositionValidator::new)
            .collect();
        Challenge::<CharacterPositionValidator> { challenges }
    }

    fn answer(&self) -> usize {
        self.challenges.iter().filter(|v| v.valid()).count()
    }
}

impl ChallengeAnswerer<CharacterCountValidator> for Challenge<CharacterCountValidator> {
    fn new<'a, I>(lines: I) -> Challenge<CharacterCountValidator>
    where
        I: IntoIterator<Item = &'a str>,
    {
        let challenges = lines
            .into_iter()
            .map(CharacterCountValidator::new)
            .collect();
        Challenge::<CharacterCountValidator> { challenges }
    }

    fn answer(&self) -> usize {
        self.challenges.iter().filter(|v| v.valid()).count()
    }
}

/// Wrapper around the puzzle input file.
struct Input {
    filename: String,
}

impl Input {
    fn new(filename: String) -> Input {
        Input { filename }
    }

    /// Parses every line as a position-based policy.
    fn parse_as_character_position_challenge(&self) -> Challenge<CharacterPositionValidator> {
        let contents = self.file_contents();
        let lines = contents.lines().collect::<Vec<&str>>();
        Challenge::<CharacterPositionValidator>::new(lines)
    }

    /// Parses every line as a count-based policy.
    fn parse_as_character_count_challenge(&self) -> Challenge<CharacterCountValidator> {
        let contents = self.file_contents();
        let lines = contents.lines().collect::<Vec<&str>>();
        Challenge::<CharacterCountValidator>::new(lines)
    }

    /// Reads the whole input file, panicking if it cannot be loaded.
    fn file_contents(&self) -> String {
        println!("Loading contents from file: {}", self.filename);
        fs::read_to_string(&self.filename)
            .expect("Something went wrong loading contents from file")
    }
}

fn main() {
    // First CLI argument is the input file; default to "input.txt".
    let input = Input::new(env::args().nth(1).unwrap_or_else(|| "input.txt".to_string()));
    let challenge_one = input.parse_as_character_position_challenge();
    let challenge_two = input.parse_as_character_count_challenge();
    println!("Answer one: {}", challenge_one.answer());
    println!("Answer two: {}", challenge_two.answer());
}
use amethyst::{
    core::transform::Transform,
    ecs::prelude::{Entities, Join, ReadStorage, System},
};

/// System that removes entities once they sink below y = -10.
pub struct BelowZero;

impl<'s> System<'s> for BelowZero {
    type SystemData = (Entities<'s>, ReadStorage<'s, Transform>);

    fn run(&mut self, (entities, transforms): Self::SystemData) {
        // Walk every entity that carries a transform and cull the ones
        // that have fallen out of the playable area.
        for (entity, transform) in (&entities, &transforms).join() {
            if transform.translation().y <= -10.0 {
                entities.delete(entity).unwrap();
            }
        }
    }
}
use std::collections::HashSet;
use std::io::Cursor;

use meilidb_core::DocumentId;
use meilidb_schema::SchemaAttr;
use rmp_serde::decode::{Deserializer as RmpDeserializer, ReadReader};
use rmp_serde::decode::{Error as RmpError};
use serde::{de, forward_to_deserialize_any};

use crate::database::Index;

/// Serde deserializer that reconstructs a stored document from the index.
///
/// Attributes are looked up by `document_id`; when `fields` is `Some`,
/// only the attributes contained in that set are exposed.
pub struct Deserializer<'a> {
    pub document_id: DocumentId,
    pub index: &'a Index,
    pub fields: Option<&'a HashSet<SchemaAttr>>,
}

impl<'de, 'a, 'b> de::Deserializer<'de> for &'b mut Deserializer<'a> {
    type Error = RmpError;

    // Documents are always maps, so every self-describing entry point
    // funnels into `deserialize_map`.
    fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where V: de::Visitor<'de>
    {
        self.deserialize_map(visitor)
    }

    forward_to_deserialize_any! {
        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
        bytes byte_buf option unit unit_struct newtype_struct seq tuple
        tuple_struct struct enum identifier ignored_any
    }

    fn deserialize_map<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where V: de::Visitor<'de>
    {
        let schema = &self.index.lease_inner().schema;
        let documents = &self.index.lease_inner().raw.documents;
        let document_attributes = documents.document_fields(self.document_id);
        // Storage errors are skipped rather than surfaced, so one corrupt
        // field does not make the whole document unreadable.
        let document_attributes = document_attributes.filter_map(|result| {
            match result {
                Ok(value) => Some(value),
                Err(e) => {
                    // TODO: must log the error
                    // error!("sled iter error; {}", e);
                    None
                },
            }
        });

        // Keep only the requested attributes and pair each with its schema
        // name and a lazily-decoded MessagePack value.
        let iter = document_attributes.filter_map(|(attr, value)| {
            if self.fields.map_or(true, |f| f.contains(&attr)) {
                let attribute_name = schema.attribute_name(attr);
                Some((attribute_name, Value::new(value)))
            } else {
                None
            }
        });

        let map_deserializer = de::value::MapDeserializer::new(iter);
        visitor.visit_map(map_deserializer)
    }
}

/// One attribute's raw MessagePack bytes, wrapped so it can act as a serde
/// deserializer inside the `MapDeserializer` above.
struct Value<A>(RmpDeserializer<ReadReader<Cursor<A>>>) where A: AsRef<[u8]>;

impl<A> Value<A> where A: AsRef<[u8]> {
    fn new(value: A) -> Value<A> {
        Value(RmpDeserializer::new(Cursor::new(value)))
    }
}

impl<'de, A> de::IntoDeserializer<'de, RmpError> for Value<A>
where A: AsRef<[u8]>,
{
    type Deserializer = Self;

    fn into_deserializer(self) -> Self::Deserializer {
        self
    }
}

impl<'de, 'a, A> de::Deserializer<'de> for Value<A>
where A: AsRef<[u8]>,
{
    type Error = RmpError;

    // Values are self-describing MessagePack; delegate everything to the
    // inner rmp-serde deserializer (note: unlike the document-level impl
    // above, `map` is also forwarded here).
    fn deserialize_any<V>(mut self, visitor: V) -> Result<V::Value, Self::Error>
    where V: de::Visitor<'de>
    {
        self.0.deserialize_any(visitor)
    }

    forward_to_deserialize_any! {
        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
        bytes byte_buf option unit unit_struct newtype_struct seq tuple
        tuple_struct map struct enum identifier ignored_any
    }
}
use std::mem::size_of; use cfg_if::cfg_if; use super::buffers; pub trait Encoder { fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)>; fn logpack_sizer(&self) -> usize; } macro_rules! simple { ($a:tt) => { impl Encoder for $a { #[inline(always)] fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> { buf.put(self) } #[inline(always)] fn logpack_sizer(&self) -> usize { size_of::<Self>() } } } } simple!(usize); simple!(u64); simple!(u32); simple!(u16); simple!(u8); simple!(isize); simple!(i64); simple!(i32); simple!(i16); simple!(i8); simple!(bool); impl Encoder for () { #[inline(always)] fn logpack_encode(&self, _buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> { Ok(()) } #[inline(always)] fn logpack_sizer(&self) -> usize { 0 } } pub fn encoded_string_len(value: &str) -> usize { let bytes = value.as_bytes(); let size = bytes.len(); if size < 0x40 { return 1 + size }; if size < 0x4000 { return 2 + size }; if size < 0x4000_0000 { return 4 + size }; if size < 0x4000_0000_0000_0000 { return 8 + size }; panic!("string length {}", size); } pub fn encode_stored_string(value: &str, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> { let bytes = value.as_bytes(); let size = bytes.len(); // TODO: fix little-endian assumption if size < 0x40 { (0u8 | ((size as u8) << 2) ).logpack_encode(buf)?; } else if size < 0x4000 { (1u16 | ((size as u16) << 2) ).logpack_encode(buf)?; } else if size < 0x4000_0000 { (2u32 | ((size as u32) << 2) ).logpack_encode(buf)?; } else if size < 0x4000_0000_0000_0000 { (3u64 | ((size as u64) << 2) ).logpack_encode(buf)?; } else { panic!("string length {}", size); } unsafe { let space = buf.reserve_space_by_size(size)?; ::std::ptr::copy_nonoverlapping(bytes.as_ptr(), space, size); } Ok(()) } impl<'a> Encoder for &'a str { #[inline(always)] fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> { encode_stored_string(self, buf) } 
#[inline(always)] fn logpack_sizer(&self) -> usize { encoded_string_len(self) } } impl Encoder for String { #[inline(always)] fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> { encode_stored_string(self.as_str(), buf) } #[inline(always)] fn logpack_sizer(&self) -> usize { encoded_string_len(self.as_str()) } } macro_rules! array_impls { ($($len:tt)+) => { $( impl<T> Encoder for [T; $len] where T: Encoder, { fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> { for i in 0..$len { self[i].logpack_encode(buf)? } Ok(()) } fn logpack_sizer(&self) -> usize { let mut size = 0; for i in 0..$len { size += self[i].logpack_sizer(); } size } } )+ } } array_impls!(00 01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32); macro_rules! tuple { ($(($type:ident, $num:tt)),*) => { impl<$($type),*> Encoder for ($($type),*) where $($type : Encoder),* { fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> { $( $type::logpack_encode(&self.$num, buf)?; )* Ok(()) } fn logpack_sizer(&self) -> usize { let mut size = 0; $( size += $type::logpack_sizer(&self.$num); )* size } } } } tuple!((A, 0), (B, 1)); tuple!((A, 0), (B, 1), (C, 2)); tuple!((A, 0), (B, 1), (C, 2), (D, 3)); tuple!((A, 0), (B, 1), (C, 2), (D, 3), (E, 4)); tuple!((A, 0), (B, 1), (C, 2), (D, 3), (E, 4), (F, 5)); tuple!((A, 0), (B, 1), (C, 2), (D, 3), (E, 4), (F, 5), (G, 6)); tuple!((A, 0), (B, 1), (C, 2), (D, 3), (E, 4), (F, 5), (G, 6), (H, 7)); tuple!((A, 0), (B, 1), (C, 2), (D, 3), (E, 4), (F, 5), (G, 6), (H, 7), (I, 8)); tuple!((A, 0), (B, 1), (C, 2), (D, 3), (E, 4), (F, 5), (G, 6), (H, 7), (I, 8), (J, 9)); tuple!((A, 0), (B, 1), (C, 2), (D, 3), (E, 4), (F, 5), (G, 6), (H, 7), (I, 8), (J, 9), (K, 10)); tuple!((A, 0), (B, 1), (C, 2), (D, 3), (E, 4), (F, 5), (G, 6), (H, 7), (I, 8), (J, 9), (K, 10), (L, 11)); tuple!((A, 0), (B, 1), (C, 2), (D, 3), (E, 4), (F, 5), (G, 6), (H, 7), (I, 
8), (J, 9), (K, 10), (L, 11), (M, 12)); tuple!((A, 0), (B, 1), (C, 2), (D, 3), (E, 4), (F, 5), (G, 6), (H, 7), (I, 8), (J, 9), (K, 10), (L, 11), (M, 12), (N, 13)); tuple!((A, 0), (B, 1), (C, 2), (D, 3), (E, 4), (F, 5), (G, 6), (H, 7), (I, 8), (J, 9), (K, 10), (L, 11), (M, 12), (N, 13), (O, 14)); tuple!((A, 0), (B, 1), (C, 2), (D, 3), (E, 4), (F, 5), (G, 6), (H, 7), (I, 8), (J, 9), (K, 10), (L, 11), (M, 12), (N, 13), (O, 14), (P, 15)); impl<T> Encoder for [T] where T: Encoder { #[inline(always)] fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> { let size : u64 = self.len() as u64; size.logpack_encode(buf)?; for i in 0..size { self[i as usize].logpack_encode(buf)?; } Ok(()) } #[inline(always)] fn logpack_sizer(&self) -> usize { let mut size = 0; for i in 0..size { size += self[i as usize].logpack_sizer(); } size } } impl<T> Encoder for Box<T> where T: Encoder { #[inline(always)] fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> { (**self).logpack_encode(buf) } #[inline(always)] fn logpack_sizer(&self) -> usize { (**self).logpack_sizer() } } impl<T> Encoder for *mut T { #[inline(always)] fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> { (*self as u64).logpack_encode(buf) } #[inline(always)] fn logpack_sizer(&self) -> usize { (*self as u64).logpack_sizer() } } impl<T> Encoder for *const T { #[inline(always)] fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> { (*self as u64).logpack_encode(buf) } #[inline(always)] fn logpack_sizer(&self) -> usize { (*self as u64).logpack_sizer() } } impl<T> Encoder for Option<T> where T: Encoder { #[inline(always)] fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> { match self { &None => { (0u8).logpack_encode(buf) } &Some(ref val) => { (1u8).logpack_encode(buf)?; val.logpack_encode(buf) } } } #[inline(always)] fn logpack_sizer(&self) -> usize { match self 
{ &None => 1, &Some(ref val) => { 1 + val.logpack_sizer() } } } } impl<T, E> Encoder for Result<T, E> where T: Encoder, E: Encoder { #[inline(always)] fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> { match self { &Ok(ref val) => { (0u8).logpack_encode(buf)?; val.logpack_encode(buf) } &Err(ref val) => { (1u8).logpack_encode(buf)?; val.logpack_encode(buf) } } } #[inline(always)] fn logpack_sizer(&self) -> usize { match self { &Ok(ref val) => { 1 + val.logpack_sizer() } &Err(ref val) => { 1 + val.logpack_sizer() } } } } ////////////////////////////////////////////////////////////////////// use std::time::Duration; impl Encoder for Duration { #[inline(always)] fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> { let secs: u64 = self.as_secs(); secs.logpack_encode(buf)?; let nanos: u32 = self.subsec_nanos(); nanos.logpack_encode(buf)?; Ok(()) } #[inline(always)] fn logpack_sizer(&self) -> usize { let secs: u64 = 0; let nanos: u32 = 0; secs.logpack_sizer() + nanos.logpack_sizer() } } cfg_if! { if #[cfg(unix)] { use std::time::Instant; #[cfg(not(any(target_os = "macos", target_os = "ios")))] impl Encoder for Instant { #[inline(always)] fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> { use libc::timespec; let timespec = unsafe { ::std::mem::transmute::<_, &timespec>(&self) }; let secs: u64 = timespec.tv_sec as u64; secs.logpack_encode(buf)?; let nanos: u32 = timespec.tv_nsec as u32; nanos.logpack_encode(buf)?; Ok(()) } #[inline(always)] fn logpack_sizer(&self) -> usize { let secs: u64 = 0; let nanos: u32 = 0; secs.logpack_sizer() + nanos.logpack_sizer() } } } }
#![doc = "generated by AutoRust 0.1.0"] #![allow(unused_mut)] #![allow(unused_variables)] #![allow(unused_imports)] use super::{models, API_VERSION}; #[non_exhaustive] #[derive(Debug, thiserror :: Error)] #[allow(non_camel_case_types)] pub enum Error { #[error(transparent)] Namespaces_ListAuthorizationRules(#[from] namespaces::list_authorization_rules::Error), #[error(transparent)] Namespaces_GetAuthorizationRule(#[from] namespaces::get_authorization_rule::Error), #[error(transparent)] Namespaces_CreateOrUpdateAuthorizationRule(#[from] namespaces::create_or_update_authorization_rule::Error), #[error(transparent)] Namespaces_DeleteAuthorizationRule(#[from] namespaces::delete_authorization_rule::Error), #[error(transparent)] Namespaces_ListKeys(#[from] namespaces::list_keys::Error), #[error(transparent)] Namespaces_RegenerateKeys(#[from] namespaces::regenerate_keys::Error), #[error(transparent)] Queues_ListAuthorizationRules(#[from] queues::list_authorization_rules::Error), #[error(transparent)] Queues_GetAuthorizationRule(#[from] queues::get_authorization_rule::Error), #[error(transparent)] Queues_CreateOrUpdateAuthorizationRule(#[from] queues::create_or_update_authorization_rule::Error), #[error(transparent)] Queues_DeleteAuthorizationRule(#[from] queues::delete_authorization_rule::Error), #[error(transparent)] Queues_ListKeys(#[from] queues::list_keys::Error), #[error(transparent)] Queues_RegenerateKeys(#[from] queues::regenerate_keys::Error), #[error(transparent)] Topics_ListAuthorizationRules(#[from] topics::list_authorization_rules::Error), #[error(transparent)] Topics_GetAuthorizationRule(#[from] topics::get_authorization_rule::Error), #[error(transparent)] Topics_CreateOrUpdateAuthorizationRule(#[from] topics::create_or_update_authorization_rule::Error), #[error(transparent)] Topics_DeleteAuthorizationRule(#[from] topics::delete_authorization_rule::Error), #[error(transparent)] Topics_ListKeys(#[from] topics::list_keys::Error), #[error(transparent)] 
Topics_RegenerateKeys(#[from] topics::regenerate_keys::Error), #[error(transparent)] Namespaces_CheckNameAvailability(#[from] namespaces::check_name_availability::Error), #[error(transparent)] DisasterRecoveryConfigs_CheckNameAvailability(#[from] disaster_recovery_configs::check_name_availability::Error), #[error(transparent)] DisasterRecoveryConfigs_List(#[from] disaster_recovery_configs::list::Error), #[error(transparent)] DisasterRecoveryConfigs_Get(#[from] disaster_recovery_configs::get::Error), #[error(transparent)] DisasterRecoveryConfigs_CreateOrUpdate(#[from] disaster_recovery_configs::create_or_update::Error), #[error(transparent)] DisasterRecoveryConfigs_Delete(#[from] disaster_recovery_configs::delete::Error), #[error(transparent)] DisasterRecoveryConfigs_BreakPairing(#[from] disaster_recovery_configs::break_pairing::Error), #[error(transparent)] DisasterRecoveryConfigs_FailOver(#[from] disaster_recovery_configs::fail_over::Error), #[error(transparent)] DisasterRecoveryConfigs_ListAuthorizationRules(#[from] disaster_recovery_configs::list_authorization_rules::Error), #[error(transparent)] DisasterRecoveryConfigs_GetAuthorizationRule(#[from] disaster_recovery_configs::get_authorization_rule::Error), #[error(transparent)] DisasterRecoveryConfigs_ListKeys(#[from] disaster_recovery_configs::list_keys::Error), #[error(transparent)] EventHubs_ListByNamespace(#[from] event_hubs::list_by_namespace::Error), #[error(transparent)] Namespaces_Migrate(#[from] namespaces::migrate::Error), #[error(transparent)] MigrationConfigs_List(#[from] migration_configs::list::Error), #[error(transparent)] MigrationConfigs_Get(#[from] migration_configs::get::Error), #[error(transparent)] MigrationConfigs_CreateAndStartMigration(#[from] migration_configs::create_and_start_migration::Error), #[error(transparent)] MigrationConfigs_Delete(#[from] migration_configs::delete::Error), #[error(transparent)] MigrationConfigs_CompleteMigration(#[from] 
migration_configs::complete_migration::Error), #[error(transparent)] MigrationConfigs_Revert(#[from] migration_configs::revert::Error), #[error(transparent)] Namespaces_List(#[from] namespaces::list::Error), #[error(transparent)] Namespaces_ListByResourceGroup(#[from] namespaces::list_by_resource_group::Error), #[error(transparent)] Namespaces_Get(#[from] namespaces::get::Error), #[error(transparent)] Namespaces_CreateOrUpdate(#[from] namespaces::create_or_update::Error), #[error(transparent)] Namespaces_Update(#[from] namespaces::update::Error), #[error(transparent)] Namespaces_Delete(#[from] namespaces::delete::Error), #[error(transparent)] Namespaces_GetNetworkRuleSet(#[from] namespaces::get_network_rule_set::Error), #[error(transparent)] Namespaces_CreateOrUpdateNetworkRuleSet(#[from] namespaces::create_or_update_network_rule_set::Error), #[error(transparent)] Namespaces_ListNetworkRuleSets(#[from] namespaces::list_network_rule_sets::Error), #[error(transparent)] Operations_List(#[from] operations::list::Error), #[error(transparent)] PremiumMessagingRegions_List(#[from] premium_messaging_regions::list::Error), #[error(transparent)] Queues_ListByNamespace(#[from] queues::list_by_namespace::Error), #[error(transparent)] Queues_Get(#[from] queues::get::Error), #[error(transparent)] Queues_CreateOrUpdate(#[from] queues::create_or_update::Error), #[error(transparent)] Queues_Delete(#[from] queues::delete::Error), #[error(transparent)] Rules_ListBySubscriptions(#[from] rules::list_by_subscriptions::Error), #[error(transparent)] Rules_Get(#[from] rules::get::Error), #[error(transparent)] Rules_CreateOrUpdate(#[from] rules::create_or_update::Error), #[error(transparent)] Rules_Delete(#[from] rules::delete::Error), #[error(transparent)] Regions_ListBySku(#[from] regions::list_by_sku::Error), #[error(transparent)] Subscriptions_ListByTopic(#[from] subscriptions::list_by_topic::Error), #[error(transparent)] Subscriptions_Get(#[from] subscriptions::get::Error), 
#[error(transparent)] Subscriptions_CreateOrUpdate(#[from] subscriptions::create_or_update::Error), #[error(transparent)] Subscriptions_Delete(#[from] subscriptions::delete::Error), #[error(transparent)] Topics_ListByNamespace(#[from] topics::list_by_namespace::Error), #[error(transparent)] Topics_Get(#[from] topics::get::Error), #[error(transparent)] Topics_CreateOrUpdate(#[from] topics::create_or_update::Error), #[error(transparent)] Topics_Delete(#[from] topics::delete::Error), } pub mod namespaces { use super::{models, API_VERSION}; pub async fn list_authorization_rules( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, subscription_id: &str, ) -> std::result::Result<models::SbAuthorizationRuleListResult, list_authorization_rules::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/AuthorizationRules", operation_config.base_path(), subscription_id, resource_group_name, namespace_name ); let mut url = url::Url::parse(url_str).map_err(list_authorization_rules::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_authorization_rules::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_authorization_rules::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await 
.map_err(list_authorization_rules::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbAuthorizationRuleListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_authorization_rules::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| list_authorization_rules::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_authorization_rules::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list_authorization_rules { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get_authorization_rule( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, authorization_rule_name: &str, subscription_id: &str, ) -> std::result::Result<models::SbAuthorizationRule, get_authorization_rule::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/AuthorizationRules/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, authorization_rule_name ); let mut url = 
url::Url::parse(url_str).map_err(get_authorization_rule::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_authorization_rule::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(get_authorization_rule::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_authorization_rule::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbAuthorizationRule = serde_json::from_slice(rsp_body) .map_err(|source| get_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| get_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_authorization_rule::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to 
serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update_authorization_rule( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, authorization_rule_name: &str, parameters: &models::SbAuthorizationRule, subscription_id: &str, ) -> std::result::Result<models::SbAuthorizationRule, create_or_update_authorization_rule::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/AuthorizationRules/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update_authorization_rule::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update_authorization_rule::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(create_or_update_authorization_rule::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(create_or_update_authorization_rule::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await 
.map_err(create_or_update_authorization_rule::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbAuthorizationRule = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update_authorization_rule::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod create_or_update_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete_authorization_rule( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, authorization_rule_name: &str, subscription_id: &str, ) -> std::result::Result<delete_authorization_rule::Response, delete_authorization_rule::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/AuthorizationRules/{}", operation_config.base_path(), subscription_id, resource_group_name, 
namespace_name, authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(delete_authorization_rule::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete_authorization_rule::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(delete_authorization_rule::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(delete_authorization_rule::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete_authorization_rule::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete_authorization_rule::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| delete_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete_authorization_rule::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod delete_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] 
ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_keys( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, authorization_rule_name: &str, subscription_id: &str, ) -> std::result::Result<models::AccessKeys, list_keys::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/AuthorizationRules/{}/listKeys", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(list_keys::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_keys::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_keys::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_keys::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::AccessKeys = serde_json::from_slice(rsp_body).map_err(|source| 
list_keys::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| list_keys::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_keys::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list_keys { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn regenerate_keys( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, authorization_rule_name: &str, parameters: &models::RegenerateAccessKeyParameters, subscription_id: &str, ) -> std::result::Result<models::AccessKeys, regenerate_keys::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/AuthorizationRules/{}/regenerateKeys", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(regenerate_keys::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response 
= token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(regenerate_keys::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(regenerate_keys::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(regenerate_keys::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(regenerate_keys::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::AccessKeys = serde_json::from_slice(rsp_body) .map_err(|source| regenerate_keys::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| regenerate_keys::Error::DeserializeError(source, rsp_body.clone()))?; Err(regenerate_keys::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod regenerate_keys { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] 
GetTokenError(azure_core::Error), } } pub async fn check_name_availability( operation_config: &crate::OperationConfig, subscription_id: &str, parameters: &models::CheckNameAvailability, ) -> std::result::Result<models::CheckNameAvailabilityResult, check_name_availability::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.ServiceBus/CheckNameAvailability", operation_config.base_path(), subscription_id ); let mut url = url::Url::parse(url_str).map_err(check_name_availability::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(check_name_availability::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(check_name_availability::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(check_name_availability::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(check_name_availability::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::CheckNameAvailabilityResult = serde_json::from_slice(rsp_body) .map_err(|source| check_name_availability::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| check_name_availability::Error::DeserializeError(source, 
rsp_body.clone()))?; Err(check_name_availability::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod check_name_availability { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn migrate( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, subscription_id: &str, parameters: &models::SbNamespaceMigrate, ) -> std::result::Result<(), migrate::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/migrate", operation_config.base_path(), subscription_id, resource_group_name, namespace_name ); let mut url = url::Url::parse(url_str).map_err(migrate::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(migrate::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", 
"application/json"); let req_body = azure_core::to_json(parameters).map_err(migrate::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(migrate::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(migrate::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| migrate::Error::DeserializeError(source, rsp_body.clone()))?; Err(migrate::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod migrate { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, ) -> std::result::Result<models::SbNamespaceListResult, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.ServiceBus/namespaces", operation_config.base_path(), subscription_id ); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = 
operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbNamespaceListResult = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_by_resource_group( operation_config: &crate::OperationConfig, 
resource_group_name: &str, subscription_id: &str, ) -> std::result::Result<models::SbNamespaceListResult, list_by_resource_group::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces", operation_config.base_path(), subscription_id, resource_group_name ); let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_resource_group::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_by_resource_group::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_resource_group::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbNamespaceListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_by_resource_group::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list_by_resource_group { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { 
#[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, subscription_id: &str, ) -> std::result::Result<models::SbNamespace, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let 
rsp_body = rsp.body(); let rsp_value: models::SbNamespace = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, parameters: &models::SbNamespace, subscription_id: &str, ) -> std::result::Result<create_or_update::Response, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = 
token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbNamespace = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: models::SbNamespace = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Created201(rsp_value)) } http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod create_or_update { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::SbNamespace), Created201(models::SbNamespace), Accepted202, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: 
models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn update( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, parameters: &models::SbNamespaceUpdateParameters, subscription_id: &str, ) -> std::result::Result<update::Response, update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name ); let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PATCH); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?; let rsp = 
http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbNamespace = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: models::SbNamespace = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(update::Response::Created201(rsp_value)) } http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?; Err(update::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod update { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::SbNamespace), Created201(models::SbNamespace), Accepted202, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, subscription_id: &str, ) -> std::result::Result<delete::Response, delete::Error> { let 
http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod delete { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, Accepted202, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to 
build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get_network_rule_set( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, subscription_id: &str, ) -> std::result::Result<models::NetworkRuleSet, get_network_rule_set::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/networkRuleSets/default", operation_config.base_path(), subscription_id, resource_group_name, namespace_name ); let mut url = url::Url::parse(url_str).map_err(get_network_rule_set::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_network_rule_set::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_network_rule_set::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_network_rule_set::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::NetworkRuleSet = serde_json::from_slice(rsp_body) 
.map_err(|source| get_network_rule_set::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| get_network_rule_set::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_network_rule_set::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get_network_rule_set { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update_network_rule_set( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, subscription_id: &str, parameters: &models::NetworkRuleSet, ) -> std::result::Result<models::NetworkRuleSet, create_or_update_network_rule_set::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/networkRuleSets/default", operation_config.base_path(), subscription_id, resource_group_name, namespace_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update_network_rule_set::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = 
operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update_network_rule_set::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(create_or_update_network_rule_set::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(create_or_update_network_rule_set::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update_network_rule_set::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::NetworkRuleSet = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update_network_rule_set::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update_network_rule_set::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update_network_rule_set::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod create_or_update_network_rule_set { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: 
{0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_network_rule_sets( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, subscription_id: &str, ) -> std::result::Result<models::NetworkRuleSetListResult, list_network_rule_sets::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/networkRuleSets", operation_config.base_path(), subscription_id, resource_group_name, namespace_name ); let mut url = url::Url::parse(url_str).map_err(list_network_rule_sets::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_network_rule_sets::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_network_rule_sets::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_network_rule_sets::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::NetworkRuleSetListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_network_rule_sets::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let 
rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| list_network_rule_sets::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_network_rule_sets::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list_network_rule_sets { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod queues { use super::{models, API_VERSION}; pub async fn list_authorization_rules( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, queue_name: &str, subscription_id: &str, ) -> std::result::Result<models::SbAuthorizationRuleListResult, list_authorization_rules::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/queues/{}/authorizationRules", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, queue_name ); let mut url = url::Url::parse(url_str).map_err(list_authorization_rules::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) 
.await .map_err(list_authorization_rules::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_authorization_rules::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_authorization_rules::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbAuthorizationRuleListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_authorization_rules::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| list_authorization_rules::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_authorization_rules::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list_authorization_rules { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get_authorization_rule( operation_config: 
&crate::OperationConfig, resource_group_name: &str, namespace_name: &str, queue_name: &str, authorization_rule_name: &str, subscription_id: &str, ) -> std::result::Result<models::SbAuthorizationRule, get_authorization_rule::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/queues/{}/authorizationRules/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, queue_name, authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(get_authorization_rule::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_authorization_rule::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(get_authorization_rule::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_authorization_rule::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbAuthorizationRule = serde_json::from_slice(rsp_body) .map_err(|source| get_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| get_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; 
Err(get_authorization_rule::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update_authorization_rule( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, queue_name: &str, authorization_rule_name: &str, parameters: &models::SbAuthorizationRule, subscription_id: &str, ) -> std::result::Result<models::SbAuthorizationRule, create_or_update_authorization_rule::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/queues/{}/authorizationRules/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, queue_name, authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update_authorization_rule::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update_authorization_rule::Error::GetTokenError)?; req_builder = 
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(create_or_update_authorization_rule::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(create_or_update_authorization_rule::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update_authorization_rule::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbAuthorizationRule = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update_authorization_rule::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod create_or_update_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] 
GetTokenError(azure_core::Error), } } pub async fn delete_authorization_rule( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, queue_name: &str, authorization_rule_name: &str, subscription_id: &str, ) -> std::result::Result<delete_authorization_rule::Response, delete_authorization_rule::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/queues/{}/authorizationRules/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, queue_name, authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(delete_authorization_rule::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete_authorization_rule::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(delete_authorization_rule::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(delete_authorization_rule::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete_authorization_rule::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete_authorization_rule::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| delete_authorization_rule::Error::DeserializeError(source, 
rsp_body.clone()))?; Err(delete_authorization_rule::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod delete_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_keys( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, queue_name: &str, authorization_rule_name: &str, subscription_id: &str, ) -> std::result::Result<models::AccessKeys, list_keys::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/queues/{}/authorizationRules/{}/ListKeys", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, queue_name, authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(list_keys::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_keys::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, 
format!("Bearer {}", token_response.token.secret())); }
// POST with an empty body: CONTENT_LENGTH is set to 0 explicitly since no payload is sent.
url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_keys::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_keys::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::AccessKeys = serde_json::from_slice(rsp_body).map_err(|source| list_keys::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| list_keys::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_keys::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error types for `list_keys` above.
pub mod list_keys { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// POST .../queues/{queue}/authorizationRules/{rule}/regenerateKeys
// Regenerates primary/secondary keys; `parameters` is JSON-serialized as the request body and
// a 200 response is parsed as models::AccessKeys.
pub async fn regenerate_keys( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, queue_name: &str, authorization_rule_name: &str, parameters: &models::RegenerateAccessKeyParameters, subscription_id:
&str, ) -> std::result::Result<models::AccessKeys, regenerate_keys::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/queues/{}/authorizationRules/{}/regenerateKeys" , operation_config . base_path () , subscription_id , resource_group_name , namespace_name , queue_name , authorization_rule_name) ; let mut url = url::Url::parse(url_str).map_err(regenerate_keys::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(regenerate_keys::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(regenerate_keys::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(regenerate_keys::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(regenerate_keys::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::AccessKeys = serde_json::from_slice(rsp_body) .map_err(|source| regenerate_keys::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| regenerate_keys::Error::DeserializeError(source, rsp_body.clone()))?; Err(regenerate_keys::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error types for `regenerate_keys` above.
pub mod regenerate_keys { use
super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// GET .../namespaces/{ns}/queues — lists queues in a namespace.
// Optional `skip`/`top` are forwarded as OData `$skip`/`$top` query parameters only when Some.
pub async fn list_by_namespace( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, subscription_id: &str, skip: Option<i64>, top: Option<i64>, ) -> std::result::Result<models::SbQueueListResult, list_by_namespace::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/queues", operation_config.base_path(), subscription_id, resource_group_name, namespace_name ); let mut url = url::Url::parse(url_str).map_err(list_by_namespace::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_namespace::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(top) = top {
url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_by_namespace::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_namespace::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbQueueListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_by_namespace::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| list_by_namespace::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_by_namespace::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error types for `list_by_namespace` above.
pub mod list_by_namespace { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// GET .../namespaces/{ns}/queues/{queue} — fetches a single queue; 200 is parsed as models::SbQueue.
pub async fn get( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, queue_name: &str, subscription_id: &str, ) -> std::result::Result<models::SbQueue, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/queues/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, queue_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbQueue = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error types for `get` above.
pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// PUT .../namespaces/{ns}/queues/{queue} — creates or updates a queue.
// `parameters` is JSON-serialized as the request body; a 200 response is parsed as models::SbQueue.
pub async fn create_or_update( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, queue_name: &str, parameters: &models::SbQueue, subscription_id: &str, ) -> std::result::Result<models::SbQueue, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/queues/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, queue_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbQueue =
serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error types for `create_or_update` above.
pub mod create_or_update { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// DELETE .../namespaces/{ns}/queues/{queue} — deletes a queue.
// 200 -> Ok200, 204 -> NoContent204; other statuses map to DefaultResponse with the parsed error body.
pub async fn delete( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, queue_name: &str, subscription_id: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/queues/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, queue_name ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential
.get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Response and Error types for `delete` above. The final `}` afterwards closes the queues module.
pub mod delete { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
// Operations on Service Bus topics and their authorization rules (mirrors the queues module above).
pub mod topics { use super::{models, API_VERSION};
// GET .../namespaces/{ns}/topics/{topic}/authorizationRules — lists authorization rules for a topic.
pub async fn list_authorization_rules( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name:
&str, subscription_id: &str, ) -> std::result::Result<models::SbAuthorizationRuleListResult, list_authorization_rules::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}/authorizationRules", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name ); let mut url = url::Url::parse(url_str).map_err(list_authorization_rules::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_authorization_rules::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_authorization_rules::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_authorization_rules::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbAuthorizationRuleListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_authorization_rules::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| list_authorization_rules::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_authorization_rules::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error types for `list_authorization_rules` above.
pub mod list_authorization_rules { use
super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// GET .../topics/{topic}/authorizationRules/{rule} — fetches one authorization rule;
// 200 is parsed as models::SbAuthorizationRule.
pub async fn get_authorization_rule( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, authorization_rule_name: &str, subscription_id: &str, ) -> std::result::Result<models::SbAuthorizationRule, get_authorization_rule::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}/authorizationRules/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name, authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(get_authorization_rule::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_authorization_rule::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body =
bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(get_authorization_rule::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_authorization_rule::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbAuthorizationRule = serde_json::from_slice(rsp_body) .map_err(|source| get_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| get_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_authorization_rule::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error types for `get_authorization_rule` above.
pub mod get_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// PUT .../topics/{topic}/authorizationRules/{rule} — creates or updates an authorization rule.
// `parameters` is JSON-serialized as the request body; 200 is parsed as models::SbAuthorizationRule.
pub async fn create_or_update_authorization_rule( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, authorization_rule_name: &str, parameters: &models::SbAuthorizationRule, subscription_id: &str, ) -> std::result::Result<models::SbAuthorizationRule, create_or_update_authorization_rule::Error> { let
http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}/authorizationRules/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name, authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update_authorization_rule::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update_authorization_rule::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(create_or_update_authorization_rule::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(create_or_update_authorization_rule::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update_authorization_rule::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbAuthorizationRule = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update_authorization_rule::Error::DefaultResponse {
status_code, value: rsp_value, }) } } }
// Error types for `create_or_update_authorization_rule` above.
pub mod create_or_update_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// DELETE .../topics/{topic}/authorizationRules/{rule} — removes a topic authorization rule.
// 200 -> Ok200, 204 -> NoContent204; other statuses map to DefaultResponse with the parsed error body.
pub async fn delete_authorization_rule( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, authorization_rule_name: &str, subscription_id: &str, ) -> std::result::Result<delete_authorization_rule::Response, delete_authorization_rule::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}/authorizationRules/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name, authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(delete_authorization_rule::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete_authorization_rule::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); }
url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(delete_authorization_rule::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(delete_authorization_rule::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete_authorization_rule::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete_authorization_rule::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| delete_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete_authorization_rule::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Response and Error types for `delete_authorization_rule` above.
pub mod delete_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// POST .../topics/{topic}/authorizationRules/{rule}/ListKeys
// Retrieves the access keys for a topic authorization rule; 200 is parsed as models::AccessKeys.
pub async fn list_keys( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, authorization_rule_name: &str, subscription_id: &str, ) -> std::result::Result<models::AccessKeys, list_keys::Error> { let http_client =
operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}/authorizationRules/{}/ListKeys", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name, authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(list_keys::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_keys::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// POST with an empty body: CONTENT_LENGTH is set to 0 explicitly since no payload is sent.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_keys::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_keys::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::AccessKeys = serde_json::from_slice(rsp_body).map_err(|source| list_keys::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| list_keys::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_keys::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error types for `list_keys` above.
pub mod list_keys { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value:
models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// POST .../topics/{topic}/authorizationRules/{rule}/regenerateKeys
// Regenerates primary/secondary keys; `parameters` is JSON-serialized as the request body and
// a 200 response is parsed as models::AccessKeys.
pub async fn regenerate_keys( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, authorization_rule_name: &str, parameters: &models::RegenerateAccessKeyParameters, subscription_id: &str, ) -> std::result::Result<models::AccessKeys, regenerate_keys::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}/authorizationRules/{}/regenerateKeys" , operation_config .
base_path () , subscription_id , resource_group_name , namespace_name , topic_name , authorization_rule_name) ; let mut url = url::Url::parse(url_str).map_err(regenerate_keys::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(regenerate_keys::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(regenerate_keys::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(regenerate_keys::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(regenerate_keys::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::AccessKeys = serde_json::from_slice(rsp_body) .map_err(|source| regenerate_keys::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| regenerate_keys::Error::DeserializeError(source, rsp_body.clone()))?; Err(regenerate_keys::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod regenerate_keys { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to 
build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_by_namespace( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, subscription_id: &str, skip: Option<i64>, top: Option<i64>, ) -> std::result::Result<models::SbTopicListResult, list_by_namespace::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics", operation_config.base_path(), subscription_id, resource_group_name, namespace_name ); let mut url = url::Url::parse(url_str).map_err(list_by_namespace::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_namespace::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_by_namespace::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await 
// NOTE(review): generated Azure Service Bus client code (azure-sdk-for-rust style);
// formatting is collapsed, so each physical line below holds parts of several items.
// Code is left byte-identical; only orientation comments are added at token-safe points.
// Line below: response handling for `list_by_namespace` (OK -> models::SbTopicListResult,
// any other status -> models::ErrorResponse wrapped in Error::DefaultResponse), the
// `list_by_namespace` error module, and the start of `get`
// (GET {base}/subscriptions/{sid}/resourceGroups/{rg}/.../topics/{topicName}).
.map_err(list_by_namespace::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbTopicListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_by_namespace::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| list_by_namespace::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_by_namespace::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list_by_namespace { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, subscription_id: &str, ) -> std::result::Result<models::SbTopic, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = 
// `get` continues: bearer token added only when a token credential is configured;
// empty request body; OK -> models::SbTopic, other statuses -> models::ErrorResponse.
// Then `get`'s error module, and the opening of `create_or_update(` — its parameter
// list continues on the next line.
req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbTopic = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update( 
// `create_or_update` body: PUT to .../topics/{topicName} with `parameters`
// (models::SbTopic) serialized as JSON (content-type: application/json);
// OK -> models::SbTopic.
operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, parameters: &models::SbTopic, subscription_id: &str, ) -> std::result::Result<models::SbTopic, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbTopic = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; 
// Non-OK branch of `create_or_update` -> Error::DefaultResponse; then its error
// module; then `delete` (DELETE .../topics/{topicName}, empty body) begins.
Err(create_or_update::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod create_or_update { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, subscription_id: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = 
// `delete` continues: 200 -> delete::Response::Ok200, 204 -> NoContent204; its
// Response/Error types follow. The enclosing module then closes and
// `pub mod disaster_recovery_configs` opens with `check_name_availability`
// (POST; its URL `format!` continues on the next line).
req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod delete { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod disaster_recovery_configs { use super::{models, API_VERSION}; pub async fn check_name_availability( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, subscription_id: &str, parameters: &models::CheckNameAvailability, ) -> std::result::Result<models::CheckNameAvailabilityResult, check_name_availability::Error> { let http_client = operation_config.http_client(); let url_str = & format ! 
// `check_name_availability` body (signature begins on the previous file line):
// POST .../namespaces/{namespaceName}/disasterRecoveryConfigs/CheckNameAvailability
// with `parameters` serialized as JSON; OK -> models::CheckNameAvailabilityResult,
// other statuses -> models::ErrorResponse in Error::DefaultResponse.
// NOTE(review): no comment is inserted between the next two lines — the line break
// there falls inside a string literal of an #[error(...)] attribute.
("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/disasterRecoveryConfigs/CheckNameAvailability" , operation_config . base_path () , subscription_id , resource_group_name , namespace_name) ; let mut url = url::Url::parse(url_str).map_err(check_name_availability::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(check_name_availability::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(check_name_availability::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(check_name_availability::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(check_name_availability::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::CheckNameAvailabilityResult = serde_json::from_slice(rsp_body) .map_err(|source| check_name_availability::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| check_name_availability::Error::DeserializeError(source, rsp_body.clone()))?; Err(check_name_availability::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod check_name_availability { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP 
status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, subscription_id: &str, ) -> std::result::Result<models::ArmDisasterRecoveryListResult, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/disasterRecoveryConfigs", operation_config.base_path(), subscription_id, resource_group_name, namespace_name ); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { 
// Line above ends inside `list`'s `match rsp.status() {`; the arms continue on the
// next file line.
// Response handling for `disaster_recovery_configs::list`
// (OK -> models::ArmDisasterRecoveryListResult), its error module, then `get` for a
// single alias (GET .../disasterRecoveryConfigs/{alias}).
http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ArmDisasterRecoveryListResult = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, alias: &str, subscription_id: &str, ) -> std::result::Result<models::ArmDisasterRecovery, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/disasterRecoveryConfigs/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, alias ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let 
// `get` continues: OK -> models::ArmDisasterRecovery; then its error module; then the
// signature of `create_or_update` for an alias begins (parameters continue below).
token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ArmDisasterRecovery = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, alias: &str, 
// `create_or_update` body: PUT .../disasterRecoveryConfigs/{alias} with the
// ArmDisasterRecovery payload as JSON; 200 -> Response::Ok200(body),
// 201 -> Response::Created201 (no body deserialized on 201).
parameters: &models::ArmDisasterRecovery, subscription_id: &str, ) -> std::result::Result<create_or_update::Response, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/disasterRecoveryConfigs/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, alias ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ArmDisasterRecovery = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => Ok(create_or_update::Response::Created201), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, 
// Non-200/201 branch -> Error::DefaultResponse; then the Response/Error types for
// `create_or_update`; then `delete` (DELETE .../disasterRecoveryConfigs/{alias}) begins.
rsp_body.clone()))?; Err(create_or_update::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod create_or_update { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::ArmDisasterRecovery), Created201, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, alias: &str, subscription_id: &str, ) -> std::result::Result<(), delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/disasterRecoveryConfigs/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, alias ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", 
// `delete` continues: 200 -> Ok(()) (unit result, no Response enum here); its error
// module; then `break_pairing` (POST .../disasterRecoveryConfigs/{alias}/breakPairing)
// begins.
super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod delete { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn break_pairing( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, alias: &str, subscription_id: &str, ) -> std::result::Result<(), break_pairing::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/disasterRecoveryConfigs/{}/breakPairing", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, alias ); let mut url = url::Url::parse(url_str).map_err(break_pairing::Error::ParseUrlError)?; let mut req_builder = 
// `break_pairing` body: POST with empty body and an explicit Content-Length: 0
// header; 200 -> Ok(()). Its error module follows, then `fail_over` opens (its
// parameter list continues on the next file line).
http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(break_pairing::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(break_pairing::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(break_pairing::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| break_pairing::Error::DeserializeError(source, rsp_body.clone()))?; Err(break_pairing::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod break_pairing { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn fail_over( operation_config: 
&crate::OperationConfig, resource_group_name: &str, namespace_name: &str, alias: &str, subscription_id: &str, ) -> std::result::Result<(), fail_over::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/disasterRecoveryConfigs/{}/failover", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, alias ); let mut url = url::Url::parse(url_str).map_err(fail_over::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(fail_over::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(fail_over::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(fail_over::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| fail_over::Error::DeserializeError(source, rsp_body.clone()))?; Err(fail_over::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod fail_over { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse 
request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_authorization_rules( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, alias: &str, subscription_id: &str, ) -> std::result::Result<models::SbAuthorizationRuleListResult, list_authorization_rules::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/disasterRecoveryConfigs/{}/AuthorizationRules" , operation_config . base_path () , subscription_id , resource_group_name , namespace_name , alias) ; let mut url = url::Url::parse(url_str).map_err(list_authorization_rules::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_authorization_rules::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_authorization_rules::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await 
.map_err(list_authorization_rules::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbAuthorizationRuleListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_authorization_rules::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| list_authorization_rules::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_authorization_rules::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list_authorization_rules { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get_authorization_rule( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, alias: &str, authorization_rule_name: &str, subscription_id: &str, ) -> std::result::Result<models::SbAuthorizationRule, get_authorization_rule::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/disasterRecoveryConfigs/{}/AuthorizationRules/{}" , operation_config . 
base_path () , subscription_id , resource_group_name , namespace_name , alias , authorization_rule_name) ; let mut url = url::Url::parse(url_str).map_err(get_authorization_rule::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_authorization_rule::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(get_authorization_rule::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_authorization_rule::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbAuthorizationRule = serde_json::from_slice(rsp_body) .map_err(|source| get_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| get_authorization_rule::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_authorization_rule::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Per-operation error enum for get_authorization_rule (same variant set as the sibling operations).
pub mod get_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// POST .../disasterRecoveryConfigs/{alias}/AuthorizationRules/{rule}/listKeys
// Retrieves the primary/secondary connection strings for the rule. Sends an empty body with an
// explicit Content-Length: 0 header; 200 -> models::AccessKeys.
pub async fn list_keys( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, alias: &str, authorization_rule_name: &str, subscription_id: &str, ) -> std::result::Result<models::AccessKeys, list_keys::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/disasterRecoveryConfigs/{}/AuthorizationRules/{}/listKeys" , operation_config . base_path () , subscription_id , resource_group_name , namespace_name , alias , authorization_rule_name) ; let mut url = url::Url::parse(url_str).map_err(list_keys::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_keys::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_keys::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_keys::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::AccessKeys = serde_json::from_slice(rsp_body).map_err(|source| list_keys::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| list_keys::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_keys::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Per-operation error enum for list_keys.
pub mod list_keys { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
// Operations on Event Hubs exposed through a Service Bus namespace.
pub mod event_hubs { use super::{models, API_VERSION};
// GET .../namespaces/{ns}/eventhubs — lists event hubs in the namespace; 200 -> models::EventHubListResult.
pub async fn list_by_namespace( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, subscription_id: &str, ) -> std::result::Result<models::EventHubListResult, list_by_namespace::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/eventhubs", operation_config.base_path(), subscription_id, resource_group_name, namespace_name ); let mut url = url::Url::parse(url_str).map_err(list_by_namespace::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = 
operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_namespace::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_by_namespace::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_namespace::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::EventHubListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_by_namespace::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| list_by_namespace::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_by_namespace::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Per-operation error enum for list_by_namespace.
pub mod list_by_namespace { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
// Operations on namespace migration configurations (Standard -> Premium migration lifecycle).
pub mod migration_configs { use super::{models, API_VERSION};
// GET .../namespaces/{ns}/migrationConfigurations — lists all migration configs; 200 -> models::MigrationConfigListResult.
pub async fn list( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, subscription_id: &str, ) -> std::result::Result<models::MigrationConfigListResult, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/migrationConfigurations", operation_config.base_path(), subscription_id, resource_group_name, namespace_name ); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::MigrationConfigListResult = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Per-operation error enum for migration_configs::list (definition continues on the next chunk).
pub mod list { use super::{models, API_VERSION}; #[derive(Debug, 
thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// GET .../migrationConfigurations/{config} — fetches one migration configuration;
// 200 -> models::MigrationConfigProperties.
pub async fn get( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, config_name: &str, subscription_id: &str, ) -> std::result::Result<models::MigrationConfigProperties, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/migrationConfigurations/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, config_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::MigrationConfigProperties = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Per-operation error enum for migration_configs::get.
pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// PUT .../migrationConfigurations/{config} — creates a migration configuration and starts the
// migration; sends `parameters` as a JSON body (content-type: application/json).
// 200 -> Response::Ok200(MigrationConfigProperties), 201 -> Response::Created201.
pub async fn create_and_start_migration( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, config_name: &str, parameters: &models::MigrationConfigProperties, subscription_id: &str, ) -> std::result::Result<create_and_start_migration::Response, create_and_start_migration::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/migrationConfigurations/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, config_name ); let mut url = 
url::Url::parse(url_str).map_err(create_and_start_migration::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_and_start_migration::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(create_and_start_migration::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(create_and_start_migration::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_and_start_migration::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::MigrationConfigProperties = serde_json::from_slice(rsp_body) .map_err(|source| create_and_start_migration::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_and_start_migration::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => Ok(create_and_start_migration::Response::Created201), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| create_and_start_migration::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_and_start_migration::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Response enum (Ok200 carries the updated properties; Created201 has no body) and the
// per-operation error enum for create_and_start_migration.
pub mod create_and_start_migration { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::MigrationConfigProperties), Created201, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// DELETE .../migrationConfigurations/{config} — removes the migration configuration.
// 200 -> Response::Ok200, 204 -> Response::NoContent204 (neither carries a body).
pub async fn delete( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, config_name: &str, subscription_id: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/migrationConfigurations/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, config_name ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Response and error enums for migration_configs::delete.
pub mod delete { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// POST .../migrationConfigurations/{config}/upgrade — completes the migration; empty body with
// Content-Length: 0; 200 returns unit, anything else -> Error::DefaultResponse.
pub async fn complete_migration( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, config_name: &str, subscription_id: &str, ) -> std::result::Result<(), complete_migration::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/migrationConfigurations/{}/upgrade", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, config_name ); let mut url = url::Url::parse(url_str).map_err(complete_migration::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = 
req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(complete_migration::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(complete_migration::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(complete_migration::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| complete_migration::Error::DeserializeError(source, rsp_body.clone()))?; Err(complete_migration::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Per-operation error enum for complete_migration.
pub mod complete_migration { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// POST .../migrationConfigurations/{config}/revert — reverts the migration; empty body with
// Content-Length: 0; 200 returns unit.
pub async fn revert( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, config_name: &str, subscription_id: &str, ) -> std::result::Result<(), revert::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/migrationConfigurations/{}/revert", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, config_name ); let mut url = url::Url::parse(url_str).map_err(revert::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(revert::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(revert::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(revert::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| revert::Error::DeserializeError(source, rsp_body.clone()))?; Err(revert::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Per-operation error enum for revert.
pub mod revert { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
// Provider-level operation metadata.
pub mod operations { use super::{models, API_VERSION};
// GET {base}/providers/Microsoft.ServiceBus/operations — lists the available REST operations;
// 200 -> models::OperationListResult. Note: this URL has no subscription segment.
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::OperationListResult, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/providers/Microsoft.ServiceBus/operations", operation_config.base_path(),); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::OperationListResult = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse 
= serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Per-operation error enum for operations::list.
pub mod list { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
// Premium messaging region queries (subscription-scoped, no resource group).
pub mod premium_messaging_regions { use super::{models, API_VERSION};
// GET {base}/subscriptions/{sub}/providers/Microsoft.ServiceBus/premiumMessagingRegions —
// lists the premium messaging regions; 200 -> models::PremiumMessagingRegionsListResult.
pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, ) -> std::result::Result<models::PremiumMessagingRegionsListResult, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.ServiceBus/premiumMessagingRegions", operation_config.base_path(), subscription_id ); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::PremiumMessagingRegionsListResult = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Per-operation error enum for premium_messaging_regions::list.
pub mod list { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
// Operations on topic-subscription rules.
pub mod rules { use super::{models, API_VERSION};
// GET .../topics/{topic}/subscriptions/{sub}/rules — lists rules of a topic subscription.
// Optional `skip`/`top` paging parameters are forwarded as `$skip`/`$top` query pairs only when
// Some; 200 -> models::RuleListResult.
pub async fn list_by_subscriptions( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, subscription_name: &str, subscription_id: &str, skip: Option<i64>, top: Option<i64>, ) -> std::result::Result<models::RuleListResult, list_by_subscriptions::Error> { let http_client = operation_config.http_client(); let url_str = &format!( 
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}/subscriptions/{}/rules", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name, subscription_name ); let mut url = url::Url::parse(url_str).map_err(list_by_subscriptions::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_subscriptions::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_by_subscriptions::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_subscriptions::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::RuleListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_by_subscriptions::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| list_by_subscriptions::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_by_subscriptions::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Per-operation error enum for list_by_subscriptions.
pub mod list_by_subscriptions { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// GET .../topics/{topic}/subscriptions/{sub}/rules/{rule} — fetches one rule; 200 -> models::Rule.
pub async fn get( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, subscription_name: &str, rule_name: &str, subscription_id: &str, ) -> std::result::Result<models::Rule, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}/subscriptions/{}/rules/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name, subscription_name, rule_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::Rule = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Per-operation error enum for rules::get.
pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// PUT .../topics/{topic}/subscriptions/{sub}/rules/{rule} — creates or updates a rule; sends
// `parameters` as a JSON body; 200 -> models::Rule.
pub async fn create_or_update( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, subscription_name: &str, rule_name: &str, parameters: &models::Rule, subscription_id: &str, ) -> std::result::Result<models::Rule, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}/subscriptions/{}/rules/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name, subscription_name, rule_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::Rule = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Per-operation error enum for create_or_update (definition continues on the next chunk).
pub mod create_or_update { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to 
execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, subscription_name: &str, rule_name: &str, subscription_id: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}/subscriptions/{}/rules/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name, subscription_name, rule_name ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let 
rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod delete { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod regions { use super::{models, API_VERSION}; pub async fn list_by_sku( operation_config: &crate::OperationConfig, subscription_id: &str, sku: &str, ) -> std::result::Result<models::PremiumMessagingRegionsListResult, list_by_sku::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.ServiceBus/sku/{}/regions", operation_config.base_path(), subscription_id, sku ); let mut url = url::Url::parse(url_str).map_err(list_by_sku::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_sku::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); 
} url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_by_sku::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_sku::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::PremiumMessagingRegionsListResult = serde_json::from_slice(rsp_body).map_err(|source| list_by_sku::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| list_by_sku::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_by_sku::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list_by_sku { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod subscriptions { use super::{models, API_VERSION}; pub async fn list_by_topic( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, subscription_id: &str, skip: Option<i64>, top: Option<i64>, ) -> std::result::Result<models::SbSubscriptionListResult, 
list_by_topic::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}/subscriptions", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name ); let mut url = url::Url::parse(url_str).map_err(list_by_topic::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_topic::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_by_topic::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_topic::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbSubscriptionListResult = serde_json::from_slice(rsp_body).map_err(|source| list_by_topic::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| list_by_topic::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_by_topic::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list_by_topic { use super::{models, 
API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, subscription_name: &str, subscription_id: &str, ) -> std::result::Result<models::SbSubscription, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}/subscriptions/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name, subscription_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = 
req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbSubscription = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, subscription_name: &str, parameters: &models::SbSubscription, subscription_id: &str, ) -> std::result::Result<models::SbSubscription, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}/subscriptions/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name, 
subscription_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::SbSubscription = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod create_or_update { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to 
execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete( operation_config: &crate::OperationConfig, resource_group_name: &str, namespace_name: &str, topic_name: &str, subscription_name: &str, subscription_id: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ServiceBus/namespaces/{}/topics/{}/subscriptions/{}", operation_config.base_path(), subscription_id, resource_group_name, namespace_name, topic_name, subscription_name ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: models::ErrorResponse = 
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod delete { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
/// Running score for a two-team match.
///
/// `Default` yields a 0–0 score with `in_goal_last_frame` unset, matching the
/// start-of-game state. The previous hand-written `Default` impl set exactly
/// the all-zero/false values, so it is replaced by the derive.
#[derive(Default)]
pub struct Score {
    /// Goals scored by team 1.
    pub team1: u32,
    /// Goals scored by team 2.
    pub team2: u32,
    /// Whether the ball was inside a goal on the previous frame.
    /// NOTE(review): presumably used to debounce goal detection across
    /// frames — confirm with the caller that updates it.
    pub in_goal_last_frame: bool,
}
use crate::typing::*; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; use std::borrow::Cow; use crate::TelegramApiMethod; pub mod webhook; pub mod send; pub mod info; pub mod inline; use webhook::*; use send::*; use info::*; use inline::*; #[derive(Debug, Serialize, Deserialize, Clone)] pub struct GetMe {} macro_rules! impl_api_method { ($($name: ty : $method_name: tt -> $ret: ty),*) => { $( impl TelegramApiMethod for $name { const METHOD: &'static str = $method_name; type Response = $ret; } )* }; } #[rustfmt::skip] impl_api_method!( GetMe: "GetMe" -> User, GetUpdates: "GetUpdates" -> Vec<Update>, GetWebhookInfo: "GetWebhookInfo" -> WebhookInfo, SetWebhook: "SetWebhook" -> bool, DeleteWebhook: "deleteWebhook" -> bool, SendMessage<'_>: "SendMessage" -> Message, ForwardMessage<'_>: "ForwardMessage"-> Message, SendPhoto<'_>: "SendPhoto" -> Message, SendAudio<'_>: "SendAudio" -> Message, SendDocument<'_>: "SendDocument" -> Message, SendVideo<'_>: "SendVideo" -> Message, SendAnimation: "SendAnimation" -> Message, SendVoice: "SendVoice" -> Message, SendVideoNote: "SendVideoNote" -> Message, SendMediaGroup: "SendMediaGroup" -> Message, SendLocation: "SendLocation" -> Message, // todo either Message or bool EditMessageLiveLocation: "EditMessageLiveLocation" -> Message, StopMessageLiveLocation: "StopMessageLiveLocation" -> Message, SendVenue: "SendVenue" -> Message, SendContact: "SendContact" -> Message, SendPoll: "SendPoll" -> Message, SendChatAction: "SendChatAction" -> bool, GetUserProfilePhotos: "GetUserProfilePhotos" -> UserProfilePhotos, GetFile: "GetFile" -> File ); #[cfg(test)] mod test { use crate::method::GetMe; #[test] fn serde_get_me() { let me = GetMe {}; let result = serde_json::to_string(&me).unwrap(); let expected = r#"{}"#; assert_eq!(result, expected); } }
use crate::libs::bcdice::js::CommandResult;

/// A parsed chat message: an ordered sequence of [`MessageToken`]s.
#[derive(Debug, Clone)]
pub struct Message(Vec<MessageToken>);

/// One syntactic element of a [`Message`].
#[derive(Debug, Clone)]
pub enum MessageToken {
    /// Plain text.
    Text(String),
    /// A brace-delimited reference (see [`Reference`]).
    Reference(Reference),
    /// A brace-delimited command (see [`Command`]).
    Command(Command),
}

/// A reference token: a `::`-separated name path, optional arguments and an
/// optional trailing `.option` part.
#[derive(Debug, Clone)]
pub struct Reference {
    pub name: Vec<Message>,
    pub args: Vec<Argument>,
    pub option: Option<Message>,
}

/// A command token: `\name`, optional `[args]` and a body text.
#[derive(Debug, Clone)]
pub struct Command {
    pub name: Message,
    pub args: Vec<Argument>,
    pub text: Message,
}

/// A single argument: a value plus an optional `=option` part.
#[derive(Debug, Clone)]
pub struct Argument {
    pub value: Message,
    pub option: Option<Message>,
}

impl Message {
    /// Wrap a token sequence as a `Message`.
    pub fn new(msg_tokens: Vec<MessageToken>) -> Self {
        Self(msg_tokens)
    }

    /// Parse `text` with the message grammar.
    ///
    /// # Panics
    /// Panics if the parser rejects the input (`unwrap` on the parse result).
    pub fn from_str(text: &str) -> Self {
        super::message_parser::message(text).unwrap()
    }

    /// Expand every token through `f` and splice each resulting message's
    /// tokens back into one flat message.
    pub fn map(self, mut f: impl FnMut(MessageToken) -> Message) -> Self {
        // flat_map replaces the former map(f).map(|m| m.0).flatten() chain;
        // the iteration and ordering are identical.
        Self::new(self.0.into_iter().flat_map(|token| f(token).0).collect())
    }

    /// Recursively flatten nested sub-messages and merge runs of adjacent
    /// `Text` tokens into a single token.
    pub fn flatten(self) -> Self {
        let mut flattened: Vec<MessageToken> = vec![];
        for m_token in self.0 {
            match m_token {
                MessageToken::Command(Command { name, args, text }) => {
                    flattened.push(MessageToken::Command(Command {
                        name: name.flatten(),
                        args: Self::flatten_args(args),
                        text: text.flatten(),
                    }));
                }
                MessageToken::Reference(Reference { name, args, option }) => {
                    flattened.push(MessageToken::Reference(Reference {
                        name: name.into_iter().map(|name| name.flatten()).collect(),
                        args: Self::flatten_args(args),
                        option: option.map(|option| option.flatten()),
                    }));
                }
                MessageToken::Text(text) => {
                    // Merge with the preceding token when it is also text;
                    // otherwise start a new text token.
                    if let Some(MessageToken::Text(prev)) = flattened.last_mut() {
                        *prev += &text;
                    } else {
                        flattened.push(MessageToken::Text(text));
                    }
                }
            }
        }
        Self(flattened)
    }

    /// Flatten each argument's value and optional part. Shared by the
    /// `Command` and `Reference` branches of [`Message::flatten`], which
    /// previously duplicated this mapping inline.
    fn flatten_args(args: Vec<Argument>) -> Vec<Argument> {
        args.into_iter()
            .map(|arg| Argument {
                value: arg.value.flatten(),
                option: arg.option.map(|arg_option| arg_option.flatten()),
            })
            .collect()
    }
}

impl std::ops::Deref for Message {
    type Target = Vec<MessageToken>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl std::ops::DerefMut for Message { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl std::convert::Into<Vec<MessageToken>> for Message { fn into(self) -> Vec<MessageToken> { self.0 } } impl std::convert::From<Vec<MessageToken>> for Message { fn from(m_tokens: Vec<MessageToken>) -> Self { Self(m_tokens) } } impl std::convert::From<&CommandResult> for Message { fn from(command_result: &CommandResult) -> Self { Self(vec![MessageToken::Text(command_result.text.clone())]) } } impl Reference { pub fn to_ref_text(&self) -> String { let name = self .name .iter() .map(|a_name| format!("{}", a_name)) .collect::<Vec<_>>() .join("::"); let args = if self.args.len() > 0 { format!( "[{}]", self.args .iter() .map(|arg| format!("{}", arg)) .collect::<Vec<_>>() .join(",") ) } else { String::from("") }; let option = if let Some(option) = &self.option { format!(".{}", option) } else { String::from("") }; format!("{}{}{}", name, args, option) } } impl std::fmt::Display for Message { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str( &self .iter() .map(|m_token| format!("{}", m_token)) .collect::<Vec<_>>() .join(""), ) } } impl std::fmt::Display for MessageToken { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Text(text) => write!(f, "{}", text), Self::Command(command) => write!(f, "{}", command), Self::Reference(reference) => write!(f, "{}", reference), } } } impl std::fmt::Display for Command { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { if self.args.len() > 0 { write!( f, "{{\\{}[{}]{}}}", self.name, self.args .iter() .map(|arg| format!("{}", arg)) .collect::<Vec<_>>() .join(","), self.text ) } else { let text = format!("{}", self.text); if text.len() > 0 { write!(f, "{{\\{} {}}}", self.name, text) } else { write!(f, "{{\\{}}}", self.name) } } } } impl std::fmt::Display for Reference { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, 
"{{{}}}", self.to_ref_text()) } } impl std::fmt::Display for Argument { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { if let Some(option) = &self.option { write!(f, "{}={}", self.value, option) } else { write!(f, "{}", self.value) } } }
use nannou::noise::{NoiseFn, Perlin}; use nannou::prelude::*; struct Model {} fn main() { nannou::app(model).update(update).simple_window(view).run(); } fn model(_app: &App) -> Model { Model {} } fn update(_app: &App, _model: &mut Model, _update: Update) {} fn view(app: &App, _model: &Model, frame: Frame) { let draw = app.draw(); let t = app.time; draw.background().color(Rgb::new(0.1,0.1,0.1)); draw.to_frame(app, &frame).unwrap(); } struct Grid { pub cells: Vec<Cell>, pub margin: f32, pub row_heights: Vec<f32>, pub col_widths: Vec<f32>, pub palette: Vec<f32> } impl Grid { // pub fn new() -> Grid { // // } pub fn draw(){ } } struct Cell{ color: Color, orientation: Direction } enum Direction { Up, Left, Down, Right, } fn direction_theta(dir: Direction) -> f32 { match dir { Direction::Up=> 0.0, Direction::Left=> 90.0, Direction::Down => 180.0, Direction::Right => 270.0 } }
use dataloader::{
    cached::{Item, Loader as CachedLoader},
    Loader,
};
use std::collections::BTreeMap;

pub mod character;
pub mod movie;
pub mod movie_character;
pub mod user;

// Convenience aliases over the `dataloader` crate that fix the loader's
// context type parameter to `()` for this crate.

// Uncached loader. Presumably unused at the moment — the leading underscore
// suppresses the dead-code warning; confirm before removing.
type _DataLoader<K, V, B> = Loader<K, V, (), B>;
// Cached loader whose storage is the `Cache` alias below.
type CachedDataLoader<K, V, B> = CachedLoader<K, V, (), B, Cache<K, V, B>>;
// BTreeMap-backed cache storage; `F` is the batch-function type threaded
// through `Item`.
type Cache<K, V, F> = BTreeMap<K, Item<K, V, (), F>>;
#![cfg(all(test, feature = "test_e2e"))]
use azure_core::prelude::*;
use azure_cosmos::prelude::*;
use azure_cosmos::responses::QueryDocumentsResponseRaw;
use futures::stream::StreamExt;
mod setup;

// JavaScript body installed as the UDF under test: a toy progressive tax
// function, chosen so the query results asserted below have known values.
const FN_BODY: &str = r#" function tax(income) { if (income == undefined) throw 'no input'; if (income < 1000) return income * 0.1; else if (income < 10000) return income * 0.2; else return income * 0.4; }"#;

// End-to-end exercise of the user-defined-function (UDF) API surface:
// create, list, replace, invoke via SQL query, delete. Each call chains the
// consistency level from the previous response, so the call order matters.
#[tokio::test]
async fn user_defined_function00() -> Result<(), azure_cosmos::Error> {
    const DATABASE_NAME: &str = "test-cosmos-db-udf";
    const COLLECTION_NAME: &str = "test-udf";
    const USER_DEFINED_FUNCTION_NAME: &str = "test";

    let client = setup::initialize().unwrap();

    // create a temp database
    let _create_database_response = client
        .create_database(
            azure_core::Context::new(),
            DATABASE_NAME,
            CreateDatabaseOptions::new(),
        )
        .await
        .unwrap();
    let database_client = client.into_database_client(DATABASE_NAME);

    // create a temp collection
    let _create_collection_response = database_client
        .create_collection(
            Context::new(),
            COLLECTION_NAME,
            CreateCollectionOptions::new("/id"),
        )
        .await
        .unwrap();

    let collection_client = database_client
        .clone()
        .into_collection_client(COLLECTION_NAME);
    let user_defined_function_client = collection_client
        .clone()
        .into_user_defined_function_client(USER_DEFINED_FUNCTION_NAME);

    // Create the UDF with a deliberate placeholder body; the real body is
    // installed by the replace call below so replace is also covered.
    let ret = user_defined_function_client
        .create_user_defined_function()
        .execute("body")
        .await?;

    // Listing must see exactly the one UDF just created.
    let stream = collection_client
        .list_user_defined_functions()
        .max_item_count(3)
        .consistency_level(&ret);
    let mut stream = Box::pin(stream.stream());
    while let Some(ret) = stream.next().await {
        let ret = ret.unwrap();
        assert_eq!(ret.item_count, 1);
    }

    // Swap in the real function body.
    let ret = user_defined_function_client
        .replace_user_defined_function()
        .consistency_level(&ret)
        .execute(FN_BODY)
        .await?;

    // Invoke the UDF via a query: 100 < 1000, so tax = 100 * 0.1 = 10.
    let query_stmt = format!("SELECT udf.{}(100)", USER_DEFINED_FUNCTION_NAME);
    let ret: QueryDocumentsResponseRaw<serde_json::Value> = collection_client
        .query_documents()
        .consistency_level(&ret)
        .max_item_count(2i32)
        .execute(&query_stmt)
        .await?
        .into_raw();
    assert_eq!(ret.item_count, 1);
    // The result row is an object; take the first (and presumably only)
    // key's value.
    let fn_return = ret.results[0].as_object().unwrap();
    let value = fn_return.iter().take(1).next().unwrap().1.as_f64().unwrap();
    assert_eq!(value, 10.0);

    // 10000 is not < 10000, so the top bracket applies: 10000 * 0.4 = 4000.
    let query_stmt = format!("SELECT udf.{}(10000)", USER_DEFINED_FUNCTION_NAME);
    let ret: QueryDocumentsResponseRaw<serde_json::Value> = collection_client
        .query_documents()
        .consistency_level(&ret)
        .max_item_count(2i32)
        .execute(&query_stmt)
        .await?
        .into_raw();
    assert_eq!(ret.item_count, 1);
    let fn_return = ret.results[0].as_object().unwrap();
    let value = fn_return
        .into_iter()
        .take(1)
        .next()
        .unwrap()
        .1
        .as_f64()
        .unwrap();
    assert_eq!(value, 4000.0);

    // Remove the UDF explicitly before dropping the database.
    let _ret = user_defined_function_client
        .delete_user_defined_function()
        .consistency_level(&ret)
        .execute()
        .await?;

    // delete the database
    database_client
        .delete_database(Context::new(), DeleteDatabaseOptions::new())
        .await?;

    Ok(())
}
use async_trait::async_trait;

use common::cache::Cache;
use common::infrastructure::cache::InMemCache;
use common::result::Result;

use crate::domain::token::{Data, TokenId, TokenRepository};

/// In-memory [`TokenRepository`] backed by an [`InMemCache`].
#[derive(Default)]
pub struct InMemTokenRepository {
    cache: InMemCache<TokenId, Data>,
}

impl InMemTokenRepository {
    /// Create an empty repository.
    pub fn new() -> Self {
        InMemTokenRepository {
            cache: InMemCache::new(),
        }
    }

    /// Borrow the underlying cache (useful for inspection in tests).
    pub fn cache(&self) -> &InMemCache<TokenId, Data> {
        &self.cache
    }
}

// The repository is a thin delegation layer: every cache operation is
// forwarded to the inner `InMemCache` unchanged.
#[async_trait]
impl Cache<TokenId, Data> for InMemTokenRepository {
    async fn get(&self, token_id: &TokenId) -> Option<Data> {
        self.cache.get(token_id).await
    }

    async fn set(&self, token_id: TokenId, data: Data) -> Result<()> {
        self.cache.set(token_id, data).await
    }

    async fn delete(&self, token_id: &TokenId) -> Result<()> {
        self.cache.delete(token_id).await
    }
}

impl TokenRepository for InMemTokenRepository {}

#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn test() {
        // Compile-time proof that the type satisfies `TokenRepository`.
        fn check_trait_impl<T: TokenRepository>(_repo: &T) {}

        let repository = InMemTokenRepository::new();
        check_trait_impl(&repository);

        let mut token_data = Data::new();
        token_data.add("user_id", "U002");

        // Store the same payload under two distinct token ids.
        repository
            .set(TokenId::from("T123"), token_data.clone())
            .await
            .unwrap();
        repository
            .set(TokenId::from("T124"), token_data.clone())
            .await
            .unwrap();

        // Reading back one of them yields the stored payload.
        let stored = repository.get(&TokenId::from("T123")).await.unwrap();
        assert!(stored.get("user_id").is_some());
        assert_eq!(token_data.get("user_id"), stored.get("user_id"));

        // An unknown id misses; known ids hit.
        assert!(repository.get(&TokenId::from("T777")).await.is_none());
        assert!(repository.get(&TokenId::from("T123")).await.is_some());

        // Deleting one key must not disturb the other.
        assert!(repository.delete(&TokenId::from("T123")).await.is_ok());
        assert!(repository.get(&TokenId::from("T124")).await.is_some());
    }
}
use crate::shape::bar::Bar; use crate::{ BandScale, BarLabelPosition, BarsValues, Error, LinearScale, Orientation, Scale, View, }; use std::collections::HashMap; use svg::node::Node; const DEFAULT_BAR_LABEL_VISIBLE: bool = true; const DEFAULT_BAR_LABEL_POSITION: BarLabelPosition = BarLabelPosition::Center; /// VerticalBarView represents a chart view with vertical bars. #[derive(Clone)] pub struct VerticalBarView { x_scale: BandScale, y_scale: LinearScale, bars: Vec<Bar>, bar_label_visible: bool, bar_label_position: BarLabelPosition, } impl VerticalBarView { /// Create a new VerticalBarView. pub fn new(x_scale: BandScale, y_scale: LinearScale) -> Self { Self { x_scale, y_scale, bars: Vec::new(), bar_label_visible: DEFAULT_BAR_LABEL_VISIBLE, bar_label_position: DEFAULT_BAR_LABEL_POSITION, } } /// Configure label visibility for bars. pub fn set_bar_label_visible(mut self, bar_label_visible: bool) -> Self { self.bar_label_visible = bar_label_visible; self } /// Configure label position for bars. pub fn set_bar_label_position(mut self, bar_label_position: BarLabelPosition) -> Self { self.bar_label_position = bar_label_position; self } /// Set values for bars. pub fn set_data(mut self, bars_values: &[BarsValues]) -> Result<Self, Error> { if bars_values.is_empty() { return Err(Error::DataIsEmpty); } // Populate a map of category to tuples of (value, fill_color, stroke_color). let x_scale_domain = self.x_scale.ticks(); let mut bars_categories = HashMap::new(); for bv_opts in bars_values.iter() { if bv_opts.values().len() > self.x_scale.ticks().len() { return Err(Error::CategoriesCountIsLess); } for (i, value) in bv_opts.values().iter().enumerate() { let category = &x_scale_domain[i]; bars_categories.entry(category).or_insert_with(Vec::new); if let Some(category_entries) = bars_categories.get_mut(&category) { category_entries.push((value, bv_opts.fill_color(), bv_opts.stroke_color())); }; } } // Create vector of bars from the bars_categories map. 
let mut bars = Vec::new(); for (category, category_entries) in bars_categories.iter() { let mut value_acc = 0_f32; let mut start = self.y_scale.scale(&value_acc); let mut end = start; for category_entry in category_entries.iter() { let value = category_entry.0; let fill_color = category_entry.1; let stroke_color = category_entry.2; value_acc += value; if self.y_scale.is_range_reversed() { end = start; start = self.y_scale.scale(&value_acc); } else { start = end; end = self.y_scale.scale(&value_acc); } let bar = Bar::new( start, end, *value, self.x_scale.bandwidth(), self.x_scale.scale(&category.to_string()), Orientation::Vertical, ) .set_fill_color(fill_color) .set_stroke_color(stroke_color) .set_label_visible(self.bar_label_visible) .set_label_position(self.bar_label_position); bars.push(bar); } } self.bars = bars; Ok(self) } } impl View for VerticalBarView { /// Get bar view SVG representation. fn to_svg(&self) -> svg::node::element::Group { let mut res = svg::node::element::Group::new(); for bar in self.bars.iter() { res.append(bar.to_svg()); } res } } #[cfg(test)] mod tests { use super::*; use crate::color::{COLOR_HEX_BLUE_2, COLOR_HEX_BLUE_4}; use crate::Color; #[test] fn vertical_bar_basic() { let expected_svg_group = r##"<g> <g class="bar" transform="translate(3.2258034,0)"> <rect fill="#5095e5" height="66" shape-rendering="crispEdges" stroke="#1960b2" stroke-width="1" width="29.032257" x="0" y="34"/> <text dy=".35em" fill="#080808" font-family="sans-serif" font-size="14px" text-anchor="middle" x="14.516129" y="67"> 66 </text> </g> </g>"##; let x_scale = BandScale::new( vec!["A".to_string(), "B".to_string(), "C".to_string()], 0, 100, ); let y_scale = LinearScale::new(0_f32, 100_f32, 100, 0); let data = vec![BarsValues::new(vec![66_f32]) .set_fill_color(Color::new_from_hex(COLOR_HEX_BLUE_4)) .set_stroke_color(Color::new_from_hex(COLOR_HEX_BLUE_2))]; let vertical_bar = VerticalBarView::new(x_scale, y_scale) .set_data(&data) .expect("unable to set data"); let 
vertical_bar_svg = vertical_bar.to_svg(); assert_eq!(vertical_bar_svg.to_string(), expected_svg_group); } }
use derive_more::Display;
use thiserror::Error as ThisError;

// `thiserror` supplies the `std::error::Error` impl while `derive_more`
// supplies `Display` via the `#[display(...)]` attributes; the two derives
// are complementary here, not redundant.

/// Errors raised by the infrastructure layer (home directory / favorites
/// lookup).
#[derive(Debug, ThisError, Display)]
pub enum InfraError {
    #[display(fmt = "home directory not found")]
    HomeDirectoryNotFound,
    #[display(fmt = "favorites not found")]
    FavoritesNotFound,
}

/// Errors raised while resolving the project root via git.
#[derive(Debug, ThisError, Display)]
pub enum ProjectRootPathError {
    #[display(fmt = ".git directory not found")]
    DotGitNotFound,
    #[display(fmt = "git command not found")]
    GitCommandNotFound,
    // NOTE(review): `Connot` is a typo for `Cannot`, but this variant is
    // public API — renaming it would break pattern matches in callers.
    #[display(fmt = "cannot convert path to String")]
    ConnotConvertToString,
}

/// Errors raised while resolving the current working directory.
#[derive(Debug, ThisError, Display)]
pub enum CurrentDirectoryPathError {
    #[display(fmt = "cannot access current directory")]
    CannotAccessCurrentDirectory,
    // NOTE(review): same `Connot`/`Cannot` typo as above; kept for API
    // compatibility.
    #[display(fmt = "cannot convert path to String")]
    ConnotConvertToString,
}
// svd2rust-style accessors for the FMC_CSQIER register (command sequencer
// interrupt enables). Bit layout: 0 = TCIE, 1 = SCIE, 2 = SEIE, 3 = SUEIE,
// 4 = CMDTCIE. Each field gets a reader alias, a write proxy, and
// accessor methods on the register-wide `R`/`W` types.

#[doc = "Reader of register FMC_CSQIER"]
pub type R = crate::R<u32, super::FMC_CSQIER>;
#[doc = "Writer for register FMC_CSQIER"]
pub type W = crate::W<u32, super::FMC_CSQIER>;
#[doc = "Register FMC_CSQIER `reset()`'s with value 0x0002_0000"]
impl crate::ResetValue for super::FMC_CSQIER {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x0002_0000
    }
}
#[doc = "Reader of field `TCIE`"]
pub type TCIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TCIE`"]
pub struct TCIE_W<'a> {
    w: &'a mut W,
}
impl<'a> TCIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0: clear the field, then OR in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `SCIE`"]
pub type SCIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SCIE`"]
pub struct SCIE_W<'a> {
    w: &'a mut W,
}
impl<'a> SCIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 1: read-modify-write of a single bit.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `SEIE`"]
pub type SEIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SEIE`"]
pub struct SEIE_W<'a> {
    w: &'a mut W,
}
impl<'a> SEIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 2: read-modify-write of a single bit.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `SUEIE`"]
pub type SUEIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SUEIE`"]
pub struct SUEIE_W<'a> {
    w: &'a mut W,
}
impl<'a> SUEIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 3: read-modify-write of a single bit.
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `CMDTCIE`"]
pub type CMDTCIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMDTCIE`"]
pub struct CMDTCIE_W<'a> {
    w: &'a mut W,
}
impl<'a> CMDTCIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 4: read-modify-write of a single bit.
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
// Field readers on the register-wide reader.
impl R {
    #[doc = "Bit 0 - TCIE"]
    #[inline(always)]
    pub fn tcie(&self) -> TCIE_R {
        TCIE_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - SCIE"]
    #[inline(always)]
    pub fn scie(&self) -> SCIE_R {
        SCIE_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - SEIE"]
    #[inline(always)]
    pub fn seie(&self) -> SEIE_R {
        SEIE_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - SUEIE"]
    #[inline(always)]
    pub fn sueie(&self) -> SUEIE_R {
        SUEIE_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - CMDTCIE"]
    #[inline(always)]
    pub fn cmdtcie(&self) -> CMDTCIE_R {
        CMDTCIE_R::new(((self.bits >> 4) & 0x01) != 0)
    }
}
// Field write proxies on the register-wide writer.
impl W {
    #[doc = "Bit 0 - TCIE"]
    #[inline(always)]
    pub fn tcie(&mut self) -> TCIE_W {
        TCIE_W { w: self }
    }
    #[doc = "Bit 1 - SCIE"]
    #[inline(always)]
    pub fn scie(&mut self) -> SCIE_W {
        SCIE_W { w: self }
    }
    #[doc = "Bit 2 - SEIE"]
    #[inline(always)]
    pub fn seie(&mut self) -> SEIE_W {
        SEIE_W { w: self }
    }
    #[doc = "Bit 3 - SUEIE"]
    #[inline(always)]
    pub fn sueie(&mut self) -> SUEIE_W {
        SUEIE_W { w: self }
    }
    #[doc = "Bit 4 - CMDTCIE"]
    #[inline(always)]
    pub fn cmdtcie(&mut self) -> CMDTCIE_W {
        CMDTCIE_W { w: self }
    }
}
// svd2rust-style accessors for the USART CR1 control register.
// One reader alias and one `BitWriter` alias per field (bits 0..=31),
// then field accessor methods on the register-wide `R`/`W` types, and
// the `CR1_SPEC` marker wiring the register into the generic API.

#[doc = "Register `CR1` reader"]
pub type R = crate::R<CR1_SPEC>;
#[doc = "Register `CR1` writer"]
pub type W = crate::W<CR1_SPEC>;
#[doc = "Field `UE` reader - USART enable"]
pub type UE_R = crate::BitReader;
#[doc = "Field `UE` writer - USART enable"]
pub type UE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `UESM` reader - USART enable in Stop mode"]
pub type UESM_R = crate::BitReader;
#[doc = "Field `UESM` writer - USART enable in Stop mode"]
pub type UESM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RE` reader - Receiver enable"]
pub type RE_R = crate::BitReader;
#[doc = "Field `RE` writer - Receiver enable"]
pub type RE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TE` reader - Transmitter enable"]
pub type TE_R = crate::BitReader;
#[doc = "Field `TE` writer - Transmitter enable"]
pub type TE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IDLEIE` reader - IDLE interrupt enable"]
pub type IDLEIE_R = crate::BitReader;
#[doc = "Field `IDLEIE` writer - IDLE interrupt enable"]
pub type IDLEIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RXNEIE` reader - RXNE interrupt enable"]
pub type RXNEIE_R = crate::BitReader;
#[doc = "Field `RXNEIE` writer - RXNE interrupt enable"]
pub type RXNEIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TCIE` reader - Transmission complete interrupt enable"]
pub type TCIE_R = crate::BitReader;
#[doc = "Field `TCIE` writer - Transmission complete interrupt enable"]
pub type TCIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TXEIE` reader - interrupt enable"]
pub type TXEIE_R = crate::BitReader;
#[doc = "Field `TXEIE` writer - interrupt enable"]
pub type TXEIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PEIE` reader - PE interrupt enable"]
pub type PEIE_R = crate::BitReader;
#[doc = "Field `PEIE` writer - PE interrupt enable"]
pub type PEIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PS` reader - Parity selection"]
pub type PS_R = crate::BitReader;
#[doc = "Field `PS` writer - Parity selection"]
pub type PS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PCE` reader - Parity control enable"]
pub type PCE_R = crate::BitReader;
#[doc = "Field `PCE` writer - Parity control enable"]
pub type PCE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `WAKE` reader - Receiver wakeup method"]
pub type WAKE_R = crate::BitReader;
#[doc = "Field `WAKE` writer - Receiver wakeup method"]
pub type WAKE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `M0` reader - Word length"]
pub type M0_R = crate::BitReader;
#[doc = "Field `M0` writer - Word length"]
pub type M0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `MME` reader - Mute mode enable"]
pub type MME_R = crate::BitReader;
#[doc = "Field `MME` writer - Mute mode enable"]
pub type MME_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CMIE` reader - Character match interrupt enable"]
pub type CMIE_R = crate::BitReader;
#[doc = "Field `CMIE` writer - Character match interrupt enable"]
pub type CMIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OVER8` reader - Oversampling mode"]
pub type OVER8_R = crate::BitReader;
#[doc = "Field `OVER8` writer - Oversampling mode"]
pub type OVER8_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DEDT0` reader - DEDT0"]
pub type DEDT0_R = crate::BitReader;
#[doc = "Field `DEDT0` writer - DEDT0"]
pub type DEDT0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DEDT1` reader - DEDT1"]
pub type DEDT1_R = crate::BitReader;
#[doc = "Field `DEDT1` writer - DEDT1"]
pub type DEDT1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DEDT2` reader - DEDT2"]
pub type DEDT2_R = crate::BitReader;
#[doc = "Field `DEDT2` writer - DEDT2"]
pub type DEDT2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DEDT3` reader - DEDT3"]
pub type DEDT3_R = crate::BitReader;
#[doc = "Field `DEDT3` writer - DEDT3"]
pub type DEDT3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DEDT4` reader - Driver Enable de-assertion time"]
pub type DEDT4_R = crate::BitReader;
#[doc = "Field `DEDT4` writer - Driver Enable de-assertion time"]
pub type DEDT4_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DEAT0` reader - DEAT0"]
pub type DEAT0_R = crate::BitReader;
#[doc = "Field `DEAT0` writer - DEAT0"]
pub type DEAT0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DEAT1` reader - DEAT1"]
pub type DEAT1_R = crate::BitReader;
#[doc = "Field `DEAT1` writer - DEAT1"]
pub type DEAT1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DEAT2` reader - DEAT2"]
pub type DEAT2_R = crate::BitReader;
#[doc = "Field `DEAT2` writer - DEAT2"]
pub type DEAT2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DEAT3` reader - DEAT3"]
pub type DEAT3_R = crate::BitReader;
#[doc = "Field `DEAT3` writer - DEAT3"]
pub type DEAT3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DEAT4` reader - Driver Enable assertion time"]
pub type DEAT4_R = crate::BitReader;
#[doc = "Field `DEAT4` writer - Driver Enable assertion time"]
pub type DEAT4_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RTOIE` reader - Receiver timeout interrupt enable"]
pub type RTOIE_R = crate::BitReader;
#[doc = "Field `RTOIE` writer - Receiver timeout interrupt enable"]
pub type RTOIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EOBIE` reader - End of Block interrupt enable"]
pub type EOBIE_R = crate::BitReader;
#[doc = "Field `EOBIE` writer - End of Block interrupt enable"]
pub type EOBIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `M1` reader - Word length"]
pub type M1_R = crate::BitReader;
#[doc = "Field `M1` writer - Word length"]
pub type M1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FIFOEN` reader - FIFOEN"]
pub type FIFOEN_R = crate::BitReader;
#[doc = "Field `FIFOEN` writer - FIFOEN"]
pub type FIFOEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TXFEIE` reader - TXFEIE"]
pub type TXFEIE_R = crate::BitReader;
#[doc = "Field `TXFEIE` writer - TXFEIE"]
pub type TXFEIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RXFFIE` reader - RXFFIE"]
pub type RXFFIE_R = crate::BitReader;
#[doc = "Field `RXFFIE` writer - RXFFIE"]
pub type RXFFIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Field readers: each extracts one bit of the snapshot taken by `read()`.
impl R {
    #[doc = "Bit 0 - USART enable"]
    #[inline(always)]
    pub fn ue(&self) -> UE_R { UE_R::new((self.bits & 1) != 0) }
    #[doc = "Bit 1 - USART enable in Stop mode"]
    #[inline(always)]
    pub fn uesm(&self) -> UESM_R { UESM_R::new(((self.bits >> 1) & 1) != 0) }
    #[doc = "Bit 2 - Receiver enable"]
    #[inline(always)]
    pub fn re(&self) -> RE_R { RE_R::new(((self.bits >> 2) & 1) != 0) }
    #[doc = "Bit 3 - Transmitter enable"]
    #[inline(always)]
    pub fn te(&self) -> TE_R { TE_R::new(((self.bits >> 3) & 1) != 0) }
    #[doc = "Bit 4 - IDLE interrupt enable"]
    #[inline(always)]
    pub fn idleie(&self) -> IDLEIE_R { IDLEIE_R::new(((self.bits >> 4) & 1) != 0) }
    #[doc = "Bit 5 - RXNE interrupt enable"]
    #[inline(always)]
    pub fn rxneie(&self) -> RXNEIE_R { RXNEIE_R::new(((self.bits >> 5) & 1) != 0) }
    #[doc = "Bit 6 - Transmission complete interrupt enable"]
    #[inline(always)]
    pub fn tcie(&self) -> TCIE_R { TCIE_R::new(((self.bits >> 6) & 1) != 0) }
    #[doc = "Bit 7 - interrupt enable"]
    #[inline(always)]
    pub fn txeie(&self) -> TXEIE_R { TXEIE_R::new(((self.bits >> 7) & 1) != 0) }
    #[doc = "Bit 8 - PE interrupt enable"]
    #[inline(always)]
    pub fn peie(&self) -> PEIE_R { PEIE_R::new(((self.bits >> 8) & 1) != 0) }
    #[doc = "Bit 9 - Parity selection"]
    #[inline(always)]
    pub fn ps(&self) -> PS_R { PS_R::new(((self.bits >> 9) & 1) != 0) }
    #[doc = "Bit 10 - Parity control enable"]
    #[inline(always)]
    pub fn pce(&self) -> PCE_R { PCE_R::new(((self.bits >> 10) & 1) != 0) }
    #[doc = "Bit 11 - Receiver wakeup method"]
    #[inline(always)]
    pub fn wake(&self) -> WAKE_R { WAKE_R::new(((self.bits >> 11) & 1) != 0) }
    #[doc = "Bit 12 - Word length"]
    #[inline(always)]
    pub fn m0(&self) -> M0_R { M0_R::new(((self.bits >> 12) & 1) != 0) }
    #[doc = "Bit 13 - Mute mode enable"]
    #[inline(always)]
    pub fn mme(&self) -> MME_R { MME_R::new(((self.bits >> 13) & 1) != 0) }
    #[doc = "Bit 14 - Character match interrupt enable"]
    #[inline(always)]
    pub fn cmie(&self) -> CMIE_R { CMIE_R::new(((self.bits >> 14) & 1) != 0) }
    #[doc = "Bit 15 - Oversampling mode"]
    #[inline(always)]
    pub fn over8(&self) -> OVER8_R { OVER8_R::new(((self.bits >> 15) & 1) != 0) }
    #[doc = "Bit 16 - DEDT0"]
    #[inline(always)]
    pub fn dedt0(&self) -> DEDT0_R { DEDT0_R::new(((self.bits >> 16) & 1) != 0) }
    #[doc = "Bit 17 - DEDT1"]
    #[inline(always)]
    pub fn dedt1(&self) -> DEDT1_R { DEDT1_R::new(((self.bits >> 17) & 1) != 0) }
    #[doc = "Bit 18 - DEDT2"]
    #[inline(always)]
    pub fn dedt2(&self) -> DEDT2_R { DEDT2_R::new(((self.bits >> 18) & 1) != 0) }
    #[doc = "Bit 19 - DEDT3"]
    #[inline(always)]
    pub fn dedt3(&self) -> DEDT3_R { DEDT3_R::new(((self.bits >> 19) & 1) != 0) }
    #[doc = "Bit 20 - Driver Enable de-assertion time"]
    #[inline(always)]
    pub fn dedt4(&self) -> DEDT4_R { DEDT4_R::new(((self.bits >> 20) & 1) != 0) }
    #[doc = "Bit 21 - DEAT0"]
    #[inline(always)]
    pub fn deat0(&self) -> DEAT0_R { DEAT0_R::new(((self.bits >> 21) & 1) != 0) }
    #[doc = "Bit 22 - DEAT1"]
    #[inline(always)]
    pub fn deat1(&self) -> DEAT1_R { DEAT1_R::new(((self.bits >> 22) & 1) != 0) }
    #[doc = "Bit 23 - DEAT2"]
    #[inline(always)]
    pub fn deat2(&self) -> DEAT2_R { DEAT2_R::new(((self.bits >> 23) & 1) != 0) }
    #[doc = "Bit 24 - DEAT3"]
    #[inline(always)]
    pub fn deat3(&self) -> DEAT3_R { DEAT3_R::new(((self.bits >> 24) & 1) != 0) }
    #[doc = "Bit 25 - Driver Enable assertion time"]
    #[inline(always)]
    pub fn deat4(&self) -> DEAT4_R { DEAT4_R::new(((self.bits >> 25) & 1) != 0) }
    #[doc = "Bit 26 - Receiver timeout interrupt enable"]
    #[inline(always)]
    pub fn rtoie(&self) -> RTOIE_R { RTOIE_R::new(((self.bits >> 26) & 1) != 0) }
    #[doc = "Bit 27 - End of Block interrupt enable"]
    #[inline(always)]
    pub fn eobie(&self) -> EOBIE_R { EOBIE_R::new(((self.bits >> 27) & 1) != 0) }
    #[doc = "Bit 28 - Word length"]
    #[inline(always)]
    pub fn m1(&self) -> M1_R { M1_R::new(((self.bits >> 28) & 1) != 0) }
    #[doc = "Bit 29 - FIFOEN"]
    #[inline(always)]
    pub fn fifoen(&self) -> FIFOEN_R { FIFOEN_R::new(((self.bits >> 29) & 1) != 0) }
    #[doc = "Bit 30 - TXFEIE"]
    #[inline(always)]
    pub fn txfeie(&self) -> TXFEIE_R { TXFEIE_R::new(((self.bits >> 30) & 1) != 0) }
    #[doc = "Bit 31 - RXFFIE"]
    #[inline(always)]
    pub fn rxffie(&self) -> RXFFIE_R { RXFFIE_R::new(((self.bits >> 31) & 1) != 0) }
}
// Field writers: the bit offset is carried as the const generic `O`.
impl W {
    #[doc = "Bit 0 - USART enable"]
    #[inline(always)]
    #[must_use]
    pub fn ue(&mut self) -> UE_W<CR1_SPEC, 0> { UE_W::new(self) }
    #[doc = "Bit 1 - USART enable in Stop mode"]
    #[inline(always)]
    #[must_use]
    pub fn uesm(&mut self) -> UESM_W<CR1_SPEC, 1> { UESM_W::new(self) }
    #[doc = "Bit 2 - Receiver enable"]
    #[inline(always)]
    #[must_use]
    pub fn re(&mut self) -> RE_W<CR1_SPEC, 2> { RE_W::new(self) }
    #[doc = "Bit 3 - Transmitter enable"]
    #[inline(always)]
    #[must_use]
    pub fn te(&mut self) -> TE_W<CR1_SPEC, 3> { TE_W::new(self) }
    #[doc = "Bit 4 - IDLE interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn idleie(&mut self) -> IDLEIE_W<CR1_SPEC, 4> { IDLEIE_W::new(self) }
    #[doc = "Bit 5 - RXNE interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn rxneie(&mut self) -> RXNEIE_W<CR1_SPEC, 5> { RXNEIE_W::new(self) }
    #[doc = "Bit 6 - Transmission complete interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn tcie(&mut self) -> TCIE_W<CR1_SPEC, 6> { TCIE_W::new(self) }
    #[doc = "Bit 7 - interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn txeie(&mut self) -> TXEIE_W<CR1_SPEC, 7> { TXEIE_W::new(self) }
    #[doc = "Bit 8 - PE interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn peie(&mut self) -> PEIE_W<CR1_SPEC, 8> { PEIE_W::new(self) }
    #[doc = "Bit 9 - Parity selection"]
    #[inline(always)]
    #[must_use]
    pub fn ps(&mut self) -> PS_W<CR1_SPEC, 9> { PS_W::new(self) }
    #[doc = "Bit 10 - Parity control enable"]
    #[inline(always)]
    #[must_use]
    pub fn pce(&mut self) -> PCE_W<CR1_SPEC, 10> { PCE_W::new(self) }
    #[doc = "Bit 11 - Receiver wakeup method"]
    #[inline(always)]
    #[must_use]
    pub fn wake(&mut self) -> WAKE_W<CR1_SPEC, 11> { WAKE_W::new(self) }
    #[doc = "Bit 12 - Word length"]
    #[inline(always)]
    #[must_use]
    pub fn m0(&mut self) -> M0_W<CR1_SPEC, 12> { M0_W::new(self) }
    #[doc = "Bit 13 - Mute mode enable"]
    #[inline(always)]
    #[must_use]
    pub fn mme(&mut self) -> MME_W<CR1_SPEC, 13> { MME_W::new(self) }
    #[doc = "Bit 14 - Character match interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn cmie(&mut self) -> CMIE_W<CR1_SPEC, 14> { CMIE_W::new(self) }
    #[doc = "Bit 15 - Oversampling mode"]
    #[inline(always)]
    #[must_use]
    pub fn over8(&mut self) -> OVER8_W<CR1_SPEC, 15> { OVER8_W::new(self) }
    #[doc = "Bit 16 - DEDT0"]
    #[inline(always)]
    #[must_use]
    pub fn dedt0(&mut self) -> DEDT0_W<CR1_SPEC, 16> { DEDT0_W::new(self) }
    #[doc = "Bit 17 - DEDT1"]
    #[inline(always)]
    #[must_use]
    pub fn dedt1(&mut self) -> DEDT1_W<CR1_SPEC, 17> { DEDT1_W::new(self) }
    #[doc = "Bit 18 - DEDT2"]
    #[inline(always)]
    #[must_use]
    pub fn dedt2(&mut self) -> DEDT2_W<CR1_SPEC, 18> { DEDT2_W::new(self) }
    #[doc = "Bit 19 - DEDT3"]
    #[inline(always)]
    #[must_use]
    pub fn dedt3(&mut self) -> DEDT3_W<CR1_SPEC, 19> { DEDT3_W::new(self) }
    #[doc = "Bit 20 - Driver Enable de-assertion time"]
    #[inline(always)]
    #[must_use]
    pub fn dedt4(&mut self) -> DEDT4_W<CR1_SPEC, 20> { DEDT4_W::new(self) }
    #[doc = "Bit 21 - DEAT0"]
    #[inline(always)]
    #[must_use]
    pub fn deat0(&mut self) -> DEAT0_W<CR1_SPEC, 21> { DEAT0_W::new(self) }
    #[doc = "Bit 22 - DEAT1"]
    #[inline(always)]
    #[must_use]
    pub fn deat1(&mut self) -> DEAT1_W<CR1_SPEC, 22> { DEAT1_W::new(self) }
    #[doc = "Bit 23 - DEAT2"]
    #[inline(always)]
    #[must_use]
    pub fn deat2(&mut self) -> DEAT2_W<CR1_SPEC, 23> { DEAT2_W::new(self) }
    #[doc = "Bit 24 - DEAT3"]
    #[inline(always)]
    #[must_use]
    pub fn deat3(&mut self) -> DEAT3_W<CR1_SPEC, 24> { DEAT3_W::new(self) }
    #[doc = "Bit 25 - Driver Enable assertion time"]
    #[inline(always)]
    #[must_use]
    pub fn deat4(&mut self) -> DEAT4_W<CR1_SPEC, 25> { DEAT4_W::new(self) }
    #[doc = "Bit 26 - Receiver timeout interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn rtoie(&mut self) -> RTOIE_W<CR1_SPEC, 26> { RTOIE_W::new(self) }
    #[doc = "Bit 27 - End of Block interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn eobie(&mut self) -> EOBIE_W<CR1_SPEC, 27> { EOBIE_W::new(self) }
    #[doc = "Bit 28 - Word length"]
    #[inline(always)]
    #[must_use]
    pub fn m1(&mut self) -> M1_W<CR1_SPEC, 28> { M1_W::new(self) }
    #[doc = "Bit 29 - FIFOEN"]
    #[inline(always)]
    #[must_use]
    pub fn fifoen(&mut self) -> FIFOEN_W<CR1_SPEC, 29> { FIFOEN_W::new(self) }
    #[doc = "Bit 30 - TXFEIE"]
    #[inline(always)]
    #[must_use]
    pub fn txfeie(&mut self) -> TXFEIE_W<CR1_SPEC, 30> { TXFEIE_W::new(self) }
    #[doc = "Bit 31 - RXFFIE"]
    #[inline(always)]
    #[must_use]
    pub fn rxffie(&mut self) -> RXFFIE_W<CR1_SPEC, 31> { RXFFIE_W::new(self) }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Control register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CR1_SPEC;
impl crate::RegisterSpec for CR1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cr1::R`](R) reader structure"]
impl crate::Readable for CR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cr1::W`](W) writer structure"]
impl crate::Writable for CR1_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CR1 to value 0"]
impl crate::Resettable for CR1_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
//! Loop for accepting new connections //! and passing on all network packets use types::event::*; use types::*; use std::fmt::Debug; use std::io::Write; use std::net::ToSocketAddrs; use std::sync::mpsc::Sender; use std::sync::Mutex; use futures::{Future, Stream}; use websocket::server::async::Server; use websocket::OwnedMessage; #[cfg(feature = "proxied")] mod hyperuse { pub use hyper::header::{Header, HeaderFormat}; use hyper::Error as HyperError; use std::fmt::{Formatter, Result as FmtResult}; pub use std::net::IpAddr; use std::str; #[derive(Clone, Debug)] pub struct XForwardedFor { pub addrs: Vec<IpAddr>, } impl Header for XForwardedFor { fn header_name() -> &'static str { return "X-Forwarded-For"; } fn parse_header(raw: &[Vec<u8>]) -> Result<Self, HyperError> { if raw.len() != 1 { return Err(HyperError::Header); } let s = match str::from_utf8(&raw[0]) { Ok(s) => s, Err(e) => return Err(HyperError::Utf8(e)), }; let mut addrs = vec![]; for s in s.split(',') { addrs.push(match s.parse() { Ok(v) => v, Err(_) => return Err(HyperError::Header), }); } Ok(Self { addrs }) } } impl HeaderFormat for XForwardedFor { fn fmt_header(&self, fmt: &mut Formatter) -> FmtResult { let strs = self .addrs .iter() .map(|x| x.to_string()) .collect::<Vec<String>>(); write!(fmt, "{}", strs.join(", ")) } } } #[cfg(feature = "proxied")] use self::hyperuse::*; use tokio_core::reactor::Core; const RESPONSE_STR: &'static [u8] = b"\ 418 IM_A_TEAPOT\n\ Content-Type: text/html\n\ \n\ <body>\ <img src=\"https://upload.wikimedia.org/wikipedia/commons/thumb/4/44/Black_tea_pot_cropped.jpg/330px-Black_tea_pot_cropped.jpg\"\ </body>"; pub fn run_acceptor<A>(addr: A, channel: Sender<ConnectionEvent>) where A: ToSocketAddrs + Debug, { info!( target: "server", "starting server at {:?}", addr ); let mut reactor = Core::new().unwrap(); let handle = reactor.handle(); let socket = Server::bind(addr, &handle).unwrap(); let f = socket .incoming() .map_err(|e| { info!( "A client failed to connect with error: 
{}", e.error ); if let Some(mut stream) = e.stream { // Make a best-effort attempt to // send a response, if this fails // we ignore it stream.write_all(RESPONSE_STR).err(); } }) // The following two operators filter out // all connection errors from the stream. // We don't want to crash the server when // somebody connects directly. We end up // simply dropping connections when this // happens, causing nginx to return a 502 // (if we are proxying with nginx for https) .then(|v| -> Result<_, ()> { match v { Ok(inner) => Ok(Some(inner)), Err(_) => Ok(None) } }) .filter_map(|x| x) .for_each(move |(upgrade, addr)| { let id = ConnectionId::new(); // Make a best-effort attempt to // set TCP_NODELAY. If this fails, // then the client will just be // using a less optimal stream. #[cfg(feature="nodelay")] upgrade.stream.set_nodelay(true).err(); let origin = upgrade.origin().map(|x| x.to_owned()); #[cfg(feature="proxied")] let realaddr = match upgrade.request.headers.get::<XForwardedFor>() { Some(v) => match v.addrs.get(0) { Some(v) => *v, None => addr.ip() }, None => addr.ip() }; #[cfg(not(feature="proxied"))] let realaddr = addr.ip(); let f = upgrade.accept() .and_then({ let channel = channel.clone(); move |(s, _)| { info!( "Created new connection with id {} and addr {}", id.0, realaddr ); let (sink, stream) = s.split(); channel.send(ConnectionEvent::ConnectionOpen(ConnectionOpen { conn: id, sink: Mutex::new(Some(sink)), addr: realaddr, origin: origin })).map_err(|e| { error!(target: "server", "Channel send error: {}", e) }) // Swallow error since if this errors // we are most likely shutting down. // The error will be logged anyway. 
.err(); stream.take_while(|m| Ok(!m.is_close())).for_each({ let channel = channel.clone(); move |m| { if m != OwnedMessage::Binary(vec![5]) { debug!( "{:?} sent {:?}", id, m ); } channel.send(ConnectionEvent::Message(Message{ conn: id, msg: m })).map_err(|e| { error!(target: "server", "Channel send error: {}", e) }) // Swallow error since we logged it // and are probably shutting down. .err(); Ok(()) } }) } }); handle.spawn( f.map_err({ let channel = channel.clone(); move |e| { info!( "Connection {:?} closed with error: {}", id, e ); channel .send(ConnectionEvent::ConnectionClose(ConnectionClose { conn: id, })) .map_err(|e| error!("Channel send error: {}", e)) .unwrap(); } }).map({ let channel = channel.clone(); move |_| { info!( "Connection {:?} closed", id ); channel .send(ConnectionEvent::ConnectionClose(ConnectionClose { conn: id, })) .map_err(|e| error!("Channel send error: {}", e)) .unwrap(); } }) .or_else(|_| -> Result<(), ()> { Ok(()) }), ); Ok(()) }); reactor.run(f).unwrap(); }
use std::cmp::Ordering;

/// If the list size grows greater than the load factor, we split it.
/// If the list size shrinks below the load factor, we join two lists.
pub const DEFAULT_LOAD_FACTOR: usize = 1000;

/// Inserts `val` into `vec` while maintaining a preexisting ascending
/// ordering.
///
/// When `val` compares equal to an existing element it is inserted at the
/// index `binary_search` reports (no stability guarantee among equals).
pub fn insert_sorted<T: Ord>(vec: &mut Vec<T>, val: T) {
    // Ok(i): an equal element already sits at i; Err(i): i is the sorted
    // insertion point. Inserting at i preserves the ordering either way.
    match vec.binary_search(&val) {
        Ok(i) | Err(i) => vec.insert(i, val),
    }
}

/// Inserts a value into a list of lists, as in SortedList.
///
/// Does not handle empty sublists except for a single empty list.
/// Returns the index of the sublist that was inserted into.
pub fn insert_list_of_lists<T: Ord>(list_list: &mut Vec<Vec<T>>, val: T) -> usize {
    // Special case: a freshly created structure holds exactly one empty
    // sublist; everything goes there.
    if list_list.len() == 1 && list_list[0].is_empty() {
        list_list[0].push(val);
        return 0;
    }

    // Binary-search over whole sublists using their [first, last] range.
    // Sublists are non-empty here (see contract above), so unwrap is safe.
    let list_i = match list_list.binary_search_by(|list| {
        let first = list.first().unwrap();
        let last = list.last().unwrap();
        if last < &val {
            Ordering::Less
        } else if first > &val {
            Ordering::Greater
        } else {
            Ordering::Equal
        }
    }) {
        // `val` falls inside the range of sublist i.
        Ok(i) => i,
        // `val` sorts before every sublist: prepend to the first one.
        Err(0) => 0,
        // `val` falls in a gap between sublists (or past the end): append
        // to the sublist just before the gap.
        Err(n) => n - 1, // TODO: how fair is this?
    };
    insert_sorted(&mut list_list[list_i], val);
    list_i
}
// use diesel::PgConnection; // use diesel::BelongingToDsl; use chrono::NaiveDate; // use crate::schema; use crate::schema::courses; // use crate::schema::user_courses; // use crate::db_connection::PgPooledConnection; #[derive(Identifiable, Queryable, Serialize, Deserialize, Debug, Clone, PartialEq)] // #[table_name="courses"] pub struct Course { pub id: i32, pub title: String, pub thumbnail: Option<String>, pub video_url: Option<String>, pub description: Option<String>, pub cate_id: i32, pub price: f64, pub created_at: NaiveDate, } #[derive(Insertable, Deserialize, Serialize, AsChangeset, Debug, Clone, PartialEq)] #[table_name="courses"] pub struct NewCourse { pub title: String, pub thumbnail: Option<String>, pub video_url: Option<String>, pub description: Option<String>, pub cate_id: Option<i32>, pub price: f64, pub created_at: NaiveDate, }
mod data_storage; mod error_helper; mod file_storage; mod metadata_storage; pub use file_storage::FileStorage; pub use metadata_storage::ROOT_INODE;
// Copyright 2019 King's College London. // Created by the Software Development Team <http://soft-dev.org/>. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! ykpack -- Serialiser and deserialiser for carrying data from compile-time to run-time. //! //! This crate allows ykrustc to serialise various compile-time information for later //! deserialisation by the Yorick runtime. //! //! The encoder and decoder API is structured in such a way that each item -- or "Pack" -- can be //! streamed to/from the serialised format one item at a time. This helps to reduce memory //! consumption. //! //! The MIR data is serialised in the msgpack format in the following form: //! //! ----------- //! pack_0: \ //! ... - Packs. //! pack_n / //! sentinel -- End of packs marker. //! ----------- //! //! Where each pack_i is an instance of `Some(Pack)` and the sentinel is a `None`. //! //! The version field is automatically written and checked by the `Encoder` and `Decoder` //! respectively. mod decode; mod encode; mod types; pub use decode::Decoder; pub use encode::Encoder; pub use types::*; #[cfg(test)] mod tests { use super::{ BasicBlock, Decoder, DefId, Encoder, Mir, Pack, Place, Rvalue, Statement, Terminator, }; use fallible_iterator::{self, FallibleIterator}; use std::io::{Cursor, Seek, SeekFrom}; // Get a cursor to serialise to and deserialise from. For real, we'd be reading from a file, // but for tests we use a vector of bytes. fn get_curs() -> Cursor<Vec<u8>> { let buf: Vec<u8> = Vec::new(); Cursor::new(buf) } // Rewind a cursor to the beginning. fn rewind_curs(curs: &mut Cursor<Vec<u8>>) { curs.seek(SeekFrom::Start(0)).unwrap(); } // Makes some sample stuff to round trip test. 
fn get_sample_packs() -> Vec<Pack> {
        let dummy_term = Terminator::Abort;

        // First sample body: two basic blocks of no-op statements.
        let stmts1_b1 = vec![Statement::Nop; 16];
        let stmts1_b2 = vec![Statement::Nop; 3];
        let blocks1 = vec![
            BasicBlock::new(stmts1_b1, dummy_term.clone()),
            BasicBlock::new(stmts1_b2, dummy_term.clone()),
        ];
        let mir1 = Pack::Mir(Mir::new(DefId::new(1, 2), String::from("item1"), blocks1));

        // Second sample body: three basic blocks with varied statement counts
        // (including a large one) to exercise the streaming encoder.
        let stmts2_b1 = vec![Statement::Nop; 7];
        let stmts2_b2 = vec![Statement::Nop; 200];
        let stmts2_b3 = vec![Statement::Nop; 1];
        let blocks2 = vec![
            BasicBlock::new(stmts2_b1, dummy_term.clone()),
            BasicBlock::new(stmts2_b2, dummy_term.clone()),
            BasicBlock::new(stmts2_b3, dummy_term.clone()),
        ];
        let mir2 = Pack::Mir(Mir::new(DefId::new(4, 5), String::from("item2"), blocks2));

        vec![mir1, mir2]
    }

    // Check serialising and deserialising works for zero packs.
    #[test]
    fn test_empty() {
        let mut curs = get_curs();
        let enc = Encoder::from(&mut curs);
        enc.done().unwrap();

        rewind_curs(&mut curs);
        let mut dec = Decoder::from(&mut curs);
        // Only the sentinel was written, so the first read reports end-of-packs.
        assert!(dec.next().unwrap().is_none());
    }

    // Check a typical serialising and deserialising session.
    #[test]
    fn test_basic() {
        let inputs = get_sample_packs();
        let mut curs = get_curs();
        let mut enc = Encoder::from(&mut curs);
        for md in &inputs {
            enc.serialise(md.clone()).unwrap();
        }
        enc.done().unwrap();

        rewind_curs(&mut curs);
        let dec = Decoder::from(&mut curs);

        // Obtain two fallible iterators, so we can zip them.
        let expect_iter = fallible_iterator::convert(inputs.into_iter().map(|e| Ok(e)));

        // Compare each decoded pack against the corresponding input pack.
        let mut itr = dec.zip(expect_iter);
        while let Some((got, expect)) = itr.next().unwrap() {
            assert_eq!(expect, got);
        }
    }

    #[test]
    #[should_panic(expected = "not marked done")]
    fn test_encode_not_done() {
        let inputs = get_sample_packs();
        let mut curs = get_curs();
        let mut enc = Encoder::from(&mut curs);
        for md in &inputs {
            enc.serialise(md.clone()).unwrap();
        }
        // We expect this to panic, as the encoder wasn't finalised with a call to `enc.done()`.
        // NOTE(review): the panic presumably fires when `enc` is dropped at the
        // end of this scope — confirm against the Drop impl in encode.rs.
}

    // Check the `Display` text dump of packs matches the expected TIR layout,
    // including a body with no blocks at all.
    #[test]
    fn test_text_dump() {
        // One body with interesting statements (assignments and a phi)...
        let stmts_t1_b0 = vec![
            Statement::Nop,
            Statement::Assign(Place::Local(42), Rvalue::Place(Place::Local(43))),
            Statement::Assign(
                Place::Local(44),
                Rvalue::Phi(vec![Place::Local(100), Place::Local(200)]),
            ),
        ];
        let term_t1_b0 = Terminator::Abort;
        let stmts_t1_b1 = vec![Statement::Unimplemented];
        let term_t1_b1 = Terminator::Goto { target_bb: 50 };
        let blocks_t1 = vec![
            BasicBlock::new(stmts_t1_b0, term_t1_b0),
            BasicBlock::new(stmts_t1_b1, term_t1_b1),
        ];
        // ...and one empty body.
        let tirs = vec![
            Pack::Mir(Mir::new(DefId::new(1, 2), String::from("item1"), blocks_t1)),
            Pack::Mir(Mir::new(
                DefId::new(3, 4),
                String::from("item2"),
                Vec::new(),
            )),
        ];

        let mut got = String::new();
        for pack in tirs {
            got.push_str(&format!("{}", pack));
        }
        let got_lines = got.split("\n");

        let expect = "[Begin TIR for item1]\n\
            DefId(1, 2):
            bb0:
                Nop
                Assign(Local(42), Place(Local(43)))
                Assign(Local(44), Phi([Local(100), Local(200)]))
                term: Abort
            bb1:
                Unimplemented
                term: Goto { target_bb: 50 }
            [End TIR for item1]
            [Begin TIR for item2]
            DefId(3, 4):
            [End TIR for item2]\n";
        let expect_lines = expect.split("\n");

        // Compare line-wise with trimming, so the test is insensitive to the
        // exact indentation of the dump (only content and line count matter).
        assert_eq!(got_lines.clone().count(), expect_lines.clone().count());
        for (got, expect) in got_lines.zip(expect_lines) {
            assert_eq!(got.trim(), expect.trim());
        }
    }
}
//! Our fleet of remote cameras.
//!
//! This is the umbrella module for code that works with all of our remote cameras as a fleet,
//! hence the plural.

pub mod handlers;

mod camera;
mod config;
mod image;

pub use self::config::{CameraConfig, Config};
//!# A custom derive implementation for `#[derive(Logpack)]` #![crate_type = "proc-macro"] #![recursion_limit = "250"] extern crate proc_macro; mod type_derive; mod encode_derive; use std::process::Command; use std::collections::HashSet; use proc_macro::TokenStream; use proc_macro2::{TokenStream as Tokens}; use syn::{DeriveInput, GenericParam, Generics}; use quote::quote; #[proc_macro_derive(Logpack, attributes(Logpack))] pub fn derive(input: TokenStream) -> TokenStream { let input: DeriveInput = syn::parse(input).unwrap(); let name = &input.ident; let a = type_derive::derive(&input); let b = encode_derive::derive(&input); let res = quote!(#a #b); if let Some((_, value)) = std::env::vars().find(|(key, _)| key.as_str() == "LOGPACK_DERIVE_SAVE_DIR") { let dir = std::path::Path::new(value.as_str()); tokens_to_rustfmt_file(&dir.join(format!("derive_logpack_{}.rs", name)), &res); } res.into() } fn tokens_to_rustfmt_file(filename: &std::path::Path, expanded: &Tokens) { let mut file = std::fs::File::create(&filename).unwrap(); use std::io::Write; file.write_all(format!("{}", expanded).as_bytes()).unwrap(); Command::new("rustfmt") .args(&[filename]) .output() .expect("failed to execute process"); } fn add_trait_bounds( mut generics: Generics, skip_set: &HashSet<String>, trait_names: &[Tokens], ) -> Generics { for param in &mut generics.params { if let GenericParam::Type(ref mut type_param) = *param { if let Some(_) = skip_set.get(&type_param.ident.to_string()) { continue; } for trait_name in trait_names { let bound = syn::parse(quote! { #trait_name }.into()).unwrap(); type_param.bounds.push(bound); } let bound = syn::parse(quote! { 'static }.into()).unwrap(); type_param.bounds.push(bound); } } generics }
#[macro_use] extern crate nom; use crate::event::{guard_event, Event, EventType}; use chrono::Timelike; use nom::types::CompleteStr; use std::collections::HashMap; mod event; /** You've sneaked into another supply closet - this time, it's across from the prototype suit manufacturing lab. You need to sneak inside and fix the issues with the suit, but there's a guard stationed outside the lab, so this is as close as you can safely get. As you search the closet for anything that might help, you discover that you're not the first person to want to sneak in. Covering the walls, someone has spent an hour starting every midnight for the past few months secretly observing this guard post! They've been writing down the ID of the one guard on duty that night - the Elves seem to have decided that one guard was enough for the overnight shift - as well as when they fall asleep or wake up while at their post (your puzzle input). For example, consider the following records, which have already been organized into chronological order: [1518-11-01 00:00] Guard #10 begins shift [1518-11-01 00:05] falls asleep [1518-11-01 00:25] wakes up [1518-11-01 00:30] falls asleep [1518-11-01 00:55] wakes up [1518-11-01 23:58] Guard #99 begins shift [1518-11-02 00:40] falls asleep [1518-11-02 00:50] wakes up [1518-11-03 00:05] Guard #10 begins shift [1518-11-03 00:24] falls asleep [1518-11-03 00:29] wakes up [1518-11-04 00:02] Guard #99 begins shift [1518-11-04 00:36] falls asleep [1518-11-04 00:46] wakes up [1518-11-05 00:03] Guard #99 begins shift [1518-11-05 00:45] falls asleep [1518-11-05 00:55] wakes up Timestamps are written using year-month-day hour:minute format. The guard falling asleep or waking up is always the one whose shift most recently started. Because all asleep/awake times are during the midnight hour (00:00 - 00:59), only the minute portion (00 - 59) is relevant for those events. 
Visually, these records show that the guards are asleep at these times: Date ID Minute 000000000011111111112222222222333333333344444444445555555555 012345678901234567890123456789012345678901234567890123456789 11-01 #10 .....####################.....#########################..... 11-02 #99 ........................................##########.......... 11-03 #10 ........................#####............................... 11-04 #99 ....................................##########.............. 11-05 #99 .............................................##########..... The columns are Date, which shows the month-day portion of the relevant day; ID, which shows the guard on duty that day; and Minute, which shows the minutes during which the guard was asleep within the midnight hour. (The Minute column's header shows the minute's ten's digit in the first row and the one's digit in the second row.) Awake is shown as ., and asleep is shown as #. Note that guards count as asleep on the minute they fall asleep, and they count as awake on the minute they wake up. For example, because Guard #10 wakes up at 00:25 on 1518-11-01, minute 25 is marked as awake. If you can figure out the guard most likely to be asleep at a specific time, you might be able to trick that guard into working tonight so you can have the best chance of sneaking in. You have two strategies for choosing the best guard/minute combination. Strategy 1: Find the guard that has the most minutes asleep. What minute does that guard spend asleep the most? In the example above, Guard #10 spent the most minutes asleep, a total of 50 minutes (20+25+5), while Guard #99 only slept for a total of 30 minutes (10+10+10). Guard #10 was asleep most during minute 24 (on two days, whereas any other minute the guard was asleep was only seen on one day). While this example listed the entries in chronological order, your entries are in the order you found them. You'll need to organize them before they can be analyzed. 
What is the ID of the guard you chose multiplied by the minute you chose?
*/
fn main() -> Result<(), std::io::Error> {
    let file = include_str!("../input");

    // Parse every input line into an Event, then sort chronologically —
    // the raw records are not in order (see the puzzle text above).
    let mut events: Vec<Event> = Vec::new();
    for line in file.lines() {
        let (_, event) = guard_event(CompleteStr(line)).unwrap();
        events.push(event);
    }
    events.sort();

    // guard id -> list of half-open (fell-asleep, woke-up) minute ranges.
    let mut minutes_asleep: HashMap<u32, Vec<(u32, u32)>> = HashMap::new();
    let mut id: u32 = 0;
    let mut start_sleep: Option<u32> = None;
    for event in events {
        match event.kind {
            EventType::StartsShift(guard_id) => {
                id = guard_id;
                start_sleep = None;
            }
            EventType::FallsAsleep => {
                start_sleep = Some(event.time.minute());
            }
            EventType::WakesUp => {
                // Since events are sorted, a WakesUp always follows a
                // FallsAsleep for the current guard; the unwrap relies on
                // well-formed puzzle input.
                let list = minutes_asleep.entry(id).or_insert_with(Vec::new);
                list.push((start_sleep.unwrap(), event.time.minute()));
            }
        }
    }

    // Strategy 1, step 1: find the guard with the most total minutes asleep.
    let mut most_minutes: u32 = 0;
    for (guard_id, list) in minutes_asleep.iter() {
        let sum: u32 = list.iter().fold(0, |mut sum, &val| {
            sum += val.1 - val.0;
            sum
        });
        if sum > most_minutes {
            id = *guard_id;
            most_minutes = sum;
        }
    }
    println!("Id: {}", id);
    println!("Minutes: {}", most_minutes);

    // Step 2: histogram of how often that guard was asleep in each of the
    // 60 midnight minutes. The wake-up minute is exclusive (the guard counts
    // as awake on the minute they wake), hence the half-open range.
    let mut mins = [0u32; 60];
    let list = &minutes_asleep[&id];
    for range in list.iter() {
        for min in (*range).0..(*range).1 {
            mins[min as usize] += 1;
        }
    }
    let max: u32 = *mins.iter().max().unwrap();
    let max_index: u32 = mins.iter().position(|&x| x == max).unwrap() as u32;
    println!("Minute: {}({})", max_index, max);
    println!("Answer: {}", id * max_index);
    Ok(())
}
#![allow(dead_code, unused_must_use, unused_imports, unstable)]

// NOTE(review): this module targets a pre-1.0 Rust toolchain. Several APIs
// used below (`PathExt`, `write_line`, `std::rand`, `TempDir`, `as_slice`,
// a bare `String` parameter in a trait, `try!` in a `-> bool` fn) no longer
// exist in modern Rust — confirm the intended toolchain before touching this.
extern crate rustc_serialize;
extern crate log;

// Old imports, refine last
use utils;
use std::fmt;
use std::str;
use std::string;
use std::ops::Drop;
use self::rustc_serialize::base64::{STANDARD, FromBase64, ToBase64};

// New imports for Rust 1.0
use std::path::Path;
use std::io::{BufWriter, BufReader};
use std::fs::{File, PathExt, remove_file};

/// A result type that's specific to the Reader module.
/// TODO Decide if this is necessary
pub type ReaderResult<T, E> = Result<T, E>;

/// Reader struct of its basic properties.
pub struct Reader<'a> {
    /// Path to file where the Reader is created.
    path: &'a Path,
    /// BufferedReader for reading the file. Initialized with the Path.
    read_buffer: BufReader<File>,
    /// BufferedWriter for writing to the file. Initialized with the Path.
    write_buffer: BufWriter<File>,
    /// Index counter to know how many records exist.
    id_count: u64,
}

/// ReaderFile traits
pub trait ReaderFile {
    // Opens a new File to the Path provided.
    // Returns a boxed File.
    fn open(&self) -> File;
    // Inserts a string to the database.
    fn insert_string(&mut self, String);
}

impl fmt::Debug for Reader {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Reader: ( path: {} )", self.path.display())
    }
}

impl ToString for Reader {
    // Delegates to the Debug representation above (used by test_reader_show).
    fn to_string(&self) -> String {
        format!("{:?}", self)
    }
}

impl Reader {
    /// Creates a new Reader from the Path provided.
    /// Opens a new BufReader and BufWriter (with Append mode) to the file.
    /// If the file doesn't exist, it is created.
    // TODO: create a .lock file to let other readers know the database is in use (see: #2).
    pub fn new<P: AsRef<Path>>(path: P) -> Reader {
        Reader::file_lock_create(path);
        // if file_lock exists, panic and crash with appropriate error.
        // Error: A .lock file already exists, if this is from a previous session, consider
        // deleting the .lock file.

        // Check if file exists or not. If not, create it.
        // NOTE(review): this condition looks inverted relative to the doc
        // comment above — the file is created/truncated (with a "0" record
        // count) when the path already IS a file. Confirm intended behaviour.
        if path.is_file() {
            let mut file = File::create(&path);
            file.write_all(b"0");
            file.flush();
        }

        // Create a buffer_writer and buffer_reader.
        let buffer_reader = BufReader::new(&path);
        let buffer_writer = BufWriter::new(&path);

        // Get the current_record count or set it to zero. See old_controller for logic.
        // NOTE(review): `record_count_string` is never initialised and the
        // parsed count is commented out below; `id_count` always starts at 0.
        let mut record_count_string;
        buffer_reader.read_line(record_count_string);
        //let record_count = record_count_string.trim().parse::<u64>();

        Reader {
            path: path.clone(),
            read_buffer: buffer_reader,
            write_buffer: buffer_writer,
            //id_count: record_count,
            id_count: 0,
        }
    }

    /// This is a helper function that realistically shouldn't exist in production.
    /// Used primarily for "spilling" the entire database file into a Vec<String>
    fn spill(&mut self) -> Vec<String> {
        let mut result: Vec<String> = Vec::new();
        // Opens a fresh reader so the spill starts from the top of the file,
        // independent of `self.read_buffer`'s position.
        let mut buffer_reader = BufReader::new(File::open(&self.path.clone()));
        for line_iter in buffer_reader.lines() {
            result.push(line_iter.unwrap().trim().to_string());
        }
        return result;
    }

    /// Inserts a &str into the database.
    fn insert_str(&mut self, item: &str) {
        self.write_buffer.write_line(item);
        self.write_buffer.flush();
        // Record the new entry and persist the updated count to disk.
        self.id_count = self.id_count + 1;
        self.update_counter(self.id_count);
    }

    /// Inserts a byte array into the database.
    fn insert(&mut self, item: &[u8]) {
        self.write_buffer.write(item);
        self.write_buffer.flush();
        self.id_count = self.id_count + 1;
        self.update_counter(self.id_count);
    }

    /// Read a &str from the database
    fn read_line(&mut self) -> String {
        match self.read_buffer.read_line() {
            Ok(string) => { string.to_string() },
            Err(..) => {
                // Errors are logged and swallowed; callers receive "".
                error!("Unable to read next line. BufReader error.");
                "".to_string()
            },
        }
    }

    /// Creates a .lock file to let other processes know that the database is in use.
    /// This is still unfinished and should be considered broken.
    fn file_lock_create(lockpath: &Path) -> Path {
        // Check if lock path exists, if not return false and create a new lock file
        if lockpath.exists() {
            return lockpath
        }
        // Create a filename.lock
        // NOTE(review): the non-existing case is unimplemented — this fn
        // currently has no value to return here.
    }

    /// Removes .lock file when the reader process is completed.
    fn file_lock_remove(&self, filelock: &Path) -> bool {
        try!(remove_file(&filelock.clone()));
        // Reports whether the lock file still exists after removal.
        filelock.exists()
    }

    /// Updates database counter on disk.
    fn update_counter(&self, value: u64) {
        // Get file to open for writing
        let mut buffer_writer = BufWriter::new(self.file);
        buffer_writer.write_line(value.to_string().as_slice());
        buffer_writer.flush();
    }
}

impl Drop for Reader {
    fn drop(&mut self) {
        // Remove the lock file.
        // NOTE(review): unimplemented — locks created by new() are never
        // cleaned up.
    }
}

impl ReaderFile for Reader {
    fn open(&self) -> File {
        // Open file
        // NOTE(review): unimplemented stub.
    }

    fn insert_string(&mut self, item: String) {
        // Borrow the String and reuse the &str insertion path.
        self.insert_str(&*item);
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_open_file() {
        let reader = Reader::new(&Path::new("tests/base-test.txt"));
    }

    #[test]
    fn test_create_file() {
        use std::rand;
        let mut path_str = String::from_str("tests/");
        path_str.push_str(&*rand::random::<usize>().to_string());
        path_str.push_str(".txt");
        let (tempdir, apath) = setup();
        let path = tempdir.path().join(rand::random::<usize>().to_string());
        assert!(!path.exists());
        let reader = Reader::new(&path.clone());
        assert!(path.exists());
        fs::unlink(&path);
    }

    #[test]
    fn test_read_file() {
        // We should output the entire contents of the database file we open
        // into standard output.
        let (tempdir, path) = setup();
        let mut reader = Reader::new(&path);
        // First record is the count ("2"), followed by the two data rows.
        let expected = vec!["2".to_string(), "10 11".to_string(), "20 21".to_string()];
        assert_eq!(expected, reader.spill());
    }

    #[test]
    fn test_write_string_to_file() {
        let (tempdir, path) = setup();
        let mut reader = Reader::new(&path);
        // Inserting bumps the on-disk count from 2 to 3.
        let expected = vec!["3".to_string(), "10 11".to_string(), "20 21".to_string(),
                            "30 31".to_string()];
        reader.insert_string("30 31".to_string());
        assert_eq![expected, reader.spill()];
    }

    #[test]
    fn test_write_str_to_file() {
        let (tempdir, path) = setup();
        let mut reader = Reader::new(&path);
        let expected = vec!["3".to_string(), "10 11".to_string(), "20 21".to_string(),
                            "30 31".to_string()];
        reader.insert_str("30 31");
        assert_eq![expected, reader.spill()];
    }

    #[test]
    fn test_file_path_lock() {
        let (tempdir, path) = setup();
        let mut expected = path.clone();
        expected.pop();
        // Surely, there's a less ugly way to take the filename of a Path and convert it to a string?!
        let mut filename_lock: String =
            str::from_utf8(path.filename().unwrap()).unwrap().to_string();
        filename_lock.push_str(".lock");
        expected = expected.join(filename_lock);
        let reader = Reader::new(&expected.clone());
        assert!(expected.exists() && expected.is_file());
    }

    #[test]
    fn test_reader_show() {
        let reader: Reader = Reader::new(&Path::new("tests/file.txt"));
        assert_eq!("Reader: ( path: tests/file.txt )", reader.to_string());
    }

    /// Test setup code. Current functions:
    /// - Create a new file with `TempDir` and a random name.
    /// - Write a 2x2 matrix of records into the base-test.txt file
    /// - Returns a tuple of `TempDir` and `Path` to the file.
    /// - The path is for r/w access and `TempDir` is so that the directory
    /// isn't deleted before the test is completed.
    #[allow(dead_code, unused_must_use)]
    fn setup() -> (TempDir, Path) {
        use std::rand;
        let tmpdir = match TempDir::new("txtdb-tests") {
            Ok(dir) => dir,
            Err(..) => panic!("Cannot create test directory.
                               Tests will fail."),
        };
        let final_dir = tmpdir.path().join(rand::random::<usize>().to_string());
        let mut file = File::create(&final_dir.clone());
        file.write_str("2\n10 11\n20 21\n");
        (tmpdir, final_dir)
    }
}
use rayon::prelude::*; use std::collections::HashSet; use std::fs::DirEntry; use std::fs::Metadata; use std::io; use std::path::Path; use std::sync::Mutex; use rust_decimal::Decimal; use rust_decimal::prelude::FromPrimitive; use rust_decimal::prelude::Zero; use rust_decimal::prelude::One; use nom::is_digit; use nom::types::CompleteByteSlice; extern crate rayon; pub type OutputSize = Decimal; pub struct ShardedSet { _internal: [Mutex<HashSet<u64>>; 8], } // unsafe impl Sync for ShardedMap {} impl ShardedSet { fn insert(&self, val: u64) -> bool { self._internal[(val % 8) as usize] .lock() .unwrap() .insert(val) } pub fn new() -> ShardedSet { ShardedSet { _internal: [ Mutex::new(HashSet::new()), Mutex::new(HashSet::new()), Mutex::new(HashSet::new()), Mutex::new(HashSet::new()), Mutex::new(HashSet::new()), Mutex::new(HashSet::new()), Mutex::new(HashSet::new()), Mutex::new(HashSet::new()), ], } } } fn calculate_size<SizeReader>( config: &Config<SizeReader>, depth: u64, dir: &Path, terminating_char: char, record: &ShardedSet, ) -> OutputSize where SizeReader: Fn(&Metadata) -> OutputSize + Sync + Send, { let metadata = if config.follow_symlink { dir.metadata() } else { dir.symlink_metadata() }; match metadata { Ok(ref metadata) => { if should_skip(&metadata, &record) { Decimal::new(0, 0) } else { let file_size = (config.size_reader)(metadata); if metadata.is_dir() { let size: OutputSize = dir .read_dir() .unwrap() .collect::<Vec<_>>() .par_chunks(8) .map(|e: &[io::Result<DirEntry>]| { e.into_iter() .map(|e| match &e { Ok(p) => calculate_size( config, depth + 1, &p.path(), terminating_char, record.clone(), ), _ => unimplemented!(), }) .sum::<OutputSize>() }) .sum::<OutputSize>() + file_size; if depth <= config.max_depth { print!( "{}\t{}{}", config.convert_size(size), dir.to_str().unwrap(), terminating_char ); } size } else { if config.display_files && depth <= config.max_depth { print!( "{}\t{}{}", config.convert_size(file_size as OutputSize), dir.to_str().unwrap(), 
terminating_char ); } file_size } } } Err(e) => { println!("{:?} at {}", e, dir.to_str().unwrap()); Decimal::new(0, 0) } } } trait SizeConverterType = Fn(OutputSize) -> String + Sync + Send; pub struct Config<SizeReader> where SizeReader: Fn(&Metadata) -> OutputSize + Sync + Send, { pub display_files: bool, pub max_depth: u64, pub follow_symlink: bool, pub block_size: OutputSize, pub size_reader: SizeReader, pub human_readable: bool, } const SIZE_CHARS: [char; 9] = [std::char::MAX, 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']; fn byte_size() -> Decimal { Decimal::from_parts(1024, 0, 0, false, 0) } impl<SizeReader> Config<SizeReader> where SizeReader: Fn(&Metadata) -> OutputSize + Sync + Send, { pub fn convert_human_readable(&self, size: OutputSize) -> String { let mut output = size; // human readable are always in 1024 blocks let mut iterations = 0; while output >= byte_size() && SIZE_CHARS.len() >= iterations { iterations += 1; output /= byte_size(); } let str_part = if output.fract().is_zero() { format!("{:.0}", output) } else { format!("{:.1}", output.round_dp(1)) }; if SIZE_CHARS[iterations] == std::char::MAX { format!("{}", str_part) } else { format!("{}{}", str_part, SIZE_CHARS[iterations]) } } pub fn convert_size(&self, size: OutputSize) -> String { if self.human_readable { // convert with human readable return self.convert_human_readable(size) } let mut k = size / self.block_size; k += if (size % self.block_size).is_zero() { Decimal::zero() } else { Decimal::one() }; // k += if size - k * block_size > 0 { 1 } else { 0 }; k.to_string() } } pub fn unsigned_numeric(v: String) -> Result<(), String> { if let Err(_) = v.parse::<u64>() { Err(String::from("Value has to be a number and >= 0")) } else { Ok(()) } } #[derive(PartialEq, Eq, Debug)] pub struct BlockSize(u64, usize, usize); // block_size_multiplier, block_size_power, block_size pub fn block_size_builder(block_size: BlockSize) -> OutputSize { let BlockSize(multiplier, power, block_size) = block_size; let 
multiplier: OutputSize = Decimal::from_u64(multiplier).unwrap(); let mut block_size = Decimal::from_u32(block_size as u32).unwrap(); // no checked_pow for decimal, argh... for _ in 0..power { block_size *= block_size } block_size * multiplier } named!(block_size_parser<CompleteByteSlice, (Option<CompleteByteSlice>, Option<char>, Option<char>)>, do_parse!( numeric: opt!(complete!(take_while1!( is_digit ))) >> unit: opt!( complete!(one_of!("KMGTPEZY")) ) >> unit2: opt!( complete!(one_of!("B")) ) >> ((numeric, unit, unit2)) ) ); pub fn block_size(input: &[u8]) -> BlockSize { let result = block_size_parser(CompleteByteSlice(input)).unwrap().1; BlockSize( result.0.map_or(1u64, |u| { std::str::from_utf8(*u).unwrap().parse::<u64>().unwrap() }), match result.1 { Some('K') => 1, Some('M') => 2, Some('G') => 3, Some('T') => 4, Some('P') => 5, Some('E') => 6, Some('Z') => 7, Some('Y') => 8, None => 1, _ => unreachable!(), }, match result.2 { Some('B') => 1000, None => 1024, _ => unreachable!(), }, ) } #[cfg(test)] mod tests { use super::*; #[test] fn test_block_size_reader() { assert_eq!(block_size("123KB".as_bytes()), BlockSize(123, 1, 1000)); assert_eq!(block_size("KB".as_bytes()), BlockSize(1, 1, 1000)); assert_eq!(block_size("".as_bytes()), BlockSize(1, 1, 1024)); assert_eq!(block_size("1".as_bytes()), BlockSize(1, 1, 1024)); assert_eq!(block_size("M".as_bytes()), BlockSize(1, 2, 1024)); } } #[cfg(target_os = "macos")] fn should_skip(metadata: &Metadata, record: &ShardedSet) -> bool { use std::os::unix::fs::MetadataExt; !record.insert(metadata.ino()) } #[cfg(target_os = "linux")] fn should_skip(metadata: &Metadata, record: &ShardedSet) -> bool { use std::os::linux::fs::MetadataExt; !record.insert(metadata.st_ino()) } #[cfg(target_os = "macos")] pub fn size_block_reader(metadata: &Metadata) -> OutputSize { use std::os::unix::fs::MetadataExt; Decimal::from_u64(metadata.blocks()).unwrap() * Decimal::new(512, 0) } #[cfg(target_os = "macos")] pub fn 
apparent_size_reader(metadata: &Metadata) -> OutputSize { use std::os::unix::fs::MetadataExt; Decimal::from_u64(metadata.size()).unwrap() } #[cfg(target_os = "linux")] pub fn size_block_reader(metadata: &Metadata) -> OutputSize { use std::os::linux::fs::MetadataExt; Decimal::from_u64(metadata.st_blocks()).unwrap() * Decimal::new(512, 0) } #[cfg(target_os = "linux")] pub fn apparent_size_reader(metadata: &Metadata) -> OutputSize { use std::os::linux::fs::MetadataExt; Decimal::from_u64(metadata.st_size()).unwrap() } pub fn execute<SizeReader>( paths: &Vec<&Path>, config: &Config<SizeReader>, terminating_char: char, record: &ShardedSet, ) -> OutputSize where SizeReader: Fn(&Metadata) -> OutputSize + Sync + Send, { paths .into_par_iter() .map(|p| calculate_size(&config, 0, p, terminating_char, &record)) .sum() }
use crate::Guid;
use core::{
    prefab::{Prefab, PrefabValue},
    Scalar,
};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

/// How one entity points at another: unset, by human-readable name, or
/// by globally unique id.
#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
pub enum Reference {
    None,
    Named(String),
    Guid(Guid),
}

impl Default for Reference {
    fn default() -> Self {
        Self::None
    }
}

/// Root of a serialized node-graph script program: everything it
/// declares (types, traits, functions, events, globals, operations).
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Program {
    pub version: usize,
    #[serde(default)]
    pub name: String,
    #[serde(default)]
    pub types: Vec<Type>,
    #[serde(default)]
    pub traits: Vec<Trait>,
    #[serde(default)]
    pub functions: Vec<Function>,
    #[serde(default)]
    pub events: Vec<Event>,
    #[serde(default)]
    pub variables: Vec<Variable>,
    #[serde(default)]
    pub operations: Vec<Operation>,
}

impl Prefab for Program {}

/// An event handler: a node graph with typed input/output constraints
/// and local variables.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Event {
    pub name: String,
    #[serde(default)]
    pub input_constrains: Vec<TypeConstraint>,
    #[serde(default)]
    pub output_constrains: Vec<TypeConstraint>,
    #[serde(default)]
    pub variables: Vec<Variable>,
    pub nodes: Vec<Node>,
}

impl Prefab for Event {}

/// A named, typed variable slot.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Variable {
    pub name: String,
    pub type_name: String,
}

impl Prefab for Variable {}

/// A user-defined data type: its fields and the trait methods it
/// implements.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Type {
    pub name: String,
    #[serde(default)]
    pub fields: Vec<Field>,
    // Trait name -> methods this type implements for that trait.
    #[serde(default)]
    pub traits_implementation: HashMap<String, Vec<Method>>,
    #[serde(default)]
    pub export: bool,
}

impl Prefab for Type {}

/// A single field of a `Type`.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Field {
    pub name: String,
    pub type_name: String,
    #[serde(default)]
    pub public: bool,
}

impl Prefab for Field {}

/// A trait: a named collection of methods.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Trait {
    pub name: String,
    #[serde(default)]
    pub methods: Vec<Method>,
    #[serde(default)]
    pub export: bool,
}

impl Prefab for Trait {}

/// A method: a node graph plus constraints, locals and metadata.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Method {
    pub name: String,
    // Reference to the trait this method belongs to (may be `None`).
    pub owner_trait: Reference,
    #[serde(default)]
    pub input_constrains: Vec<TypeConstraint>,
    #[serde(default)]
    pub output_constrains: Vec<TypeConstraint>,
    #[serde(default)]
    pub variables: Vec<Variable>,
    // presumably marks associated (instance-less) methods — TODO confirm
    // against the interpreter.
    #[serde(default)]
    pub associated: bool,
    #[serde(default)]
    pub nodes: Vec<Node>,
    #[serde(default)]
    pub public: bool,
    #[serde(default)]
    pub help: String,
}

impl Prefab for Method {}

/// Constraint on the type a value may have at a node boundary.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum TypeConstraint {
    Any,
    Type(Reference),
    ImplementTraits(Vec<Reference>),
    Node,
}

impl Prefab for TypeConstraint {}

impl Default for TypeConstraint {
    fn default() -> Self {
        Self::Any
    }
}

/// A built-in operation signature (constraints and help text, no body).
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Operation {
    pub name: String,
    #[serde(default)]
    pub input_constrains: Vec<TypeConstraint>,
    #[serde(default)]
    pub output_constrains: Vec<TypeConstraint>,
    #[serde(default)]
    pub help: String,
}

impl Prefab for Operation {}

/// A free function: a node graph with constraints and local variables.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Function {
    pub name: String,
    #[serde(default)]
    pub input_constrains: Vec<TypeConstraint>,
    #[serde(default)]
    pub output_constrains: Vec<TypeConstraint>,
    #[serde(default)]
    pub variables: Vec<Variable>,
    pub nodes: Vec<Node>,
    #[serde(default)]
    pub help: String,
}

impl Prefab for Function {}

/// A data connection feeding a node input: absent, or the `usize`-th
/// output of the referenced node.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Link {
    None,
    NodeIndexed(Reference, usize),
}

impl Prefab for Link {}

impl Default for Link {
    fn default() -> Self {
        Self::None
    }
}

/// One node of a graph: its behavior (`node_type`), the node executed
/// next, its input links, and its (x, y) editor position.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Node {
    pub id: Reference,
    pub node_type: NodeType,
    #[serde(default)]
    pub next_node: Reference,
    #[serde(default)]
    pub input_links: Vec<Link>,
    #[serde(default)]
    pub x: Scalar,
    #[serde(default)]
    pub y: Scalar,
}

impl Prefab for Node {}

/// Successor nodes for the two branches of an `IfElse` node.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct IfElse {
    #[serde(default)]
    pub next_node_true: Reference,
    #[serde(default)]
    pub next_node_false: Reference,
}

impl Prefab for IfElse {}

/// Every kind of node a graph can contain.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum NodeType {
    None,
    Entry,
    Knot,
    Halt,
    /// body entry node
    Loop(Reference),
    IfElse(IfElse),
    Break,
    Continue,
    GetInstance,
    GetGlobalVariable(String),
    GetLocalVariable(String),
    GetInput(usize),
    SetOutput(usize),
    GetValue(Value),
    GetListItem(usize),
    GetObjectItem(String),
    MutateValue,
    CallOperation(String),
    CallFunction(String),
    /// (type name, method name)
    CallMethod(String, String),
}

impl NodeType {
    /// True only for the `Entry` variant.
    pub fn is_entry(&self) -> bool {
        matches!(self, Self::Entry)
    }

    /// Per-variant capability flags, in the order suggested by the
    /// method name: (input, output, flow-in, flow-out).
    /// NOTE(review): semantics inferred from the name — confirm against
    /// callers before relying on this description.
    pub fn is_input_output_flow_in_out(&self) -> (bool, bool, bool, bool) {
        match self {
            Self::None => (false, false, false, false),
            Self::Entry => (false, false, false, true),
            Self::Knot => (false, false, true, true),
            Self::Halt => (false, false, true, true),
            Self::Loop(_) => (false, false, true, true),
            Self::IfElse(_) => (true, false, true, true),
            Self::Break => (false, false, true, false),
            Self::Continue => (false, false, true, false),
            Self::GetInstance => (false, true, false, true),
            Self::GetGlobalVariable(_) => (false, true, false, true),
            Self::GetLocalVariable(_) => (false, true, false, true),
            Self::GetInput(_) => (false, true, false, true),
            Self::SetOutput(_) => (true, false, true, true),
            Self::GetValue(_) => (false, true, false, true),
            Self::GetListItem(_) => (true, true, true, true),
            Self::GetObjectItem(_) => (true, true, true, true),
            Self::MutateValue => (true, false, true, true),
            Self::CallOperation(_) => (true, true, true, true),
            Self::CallFunction(_) => (true, true, true, true),
            Self::CallMethod(_, _) => (true, true, true, true),
        }
    }
}

impl Prefab for NodeType {}

impl Default for NodeType {
    fn default() -> Self {
        Self::None
    }
}

/// A literal value tagged with its type name.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Value {
    pub type_name: String,
    pub data: PrefabValue,
}

impl Prefab for Value {}
// NOTE(review): register-access boilerplate in the svd2rust style
// (`R`/`W` proxies over raw bits). This is presumably machine-generated
// from an SVD description — prefer regenerating over hand-editing.

#[doc = "Reader of register CTB_SW_SQ_CTRL"]
pub type R = crate::R<u32, super::CTB_SW_SQ_CTRL>;
#[doc = "Writer for register CTB_SW_SQ_CTRL"]
pub type W = crate::W<u32, super::CTB_SW_SQ_CTRL>;
#[doc = "Register CTB_SW_SQ_CTRL `reset()`'s with value 0"]
impl crate::ResetValue for super::CTB_SW_SQ_CTRL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `P2_SQ_CTRL23`"]
pub type P2_SQ_CTRL23_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `P2_SQ_CTRL23`"]
pub struct P2_SQ_CTRL23_W<'a> {
    w: &'a mut W,
}
impl<'a> P2_SQ_CTRL23_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 10, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
        self.w
    }
}
#[doc = "Reader of field `P3_SQ_CTRL23`"]
pub type P3_SQ_CTRL23_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `P3_SQ_CTRL23`"]
pub struct P3_SQ_CTRL23_W<'a> {
    w: &'a mut W,
}
impl<'a> P3_SQ_CTRL23_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 11, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
        self.w
    }
}
impl R {
    #[doc = "Bit 10 - for D51"]
    #[inline(always)]
    pub fn p2_sq_ctrl23(&self) -> P2_SQ_CTRL23_R {
        P2_SQ_CTRL23_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 11 - for D52, D62"]
    #[inline(always)]
    pub fn p3_sq_ctrl23(&self) -> P3_SQ_CTRL23_R {
        P3_SQ_CTRL23_R::new(((self.bits >> 11) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bit 10 - for D51"]
    #[inline(always)]
    pub fn p2_sq_ctrl23(&mut self) -> P2_SQ_CTRL23_W {
        P2_SQ_CTRL23_W { w: self }
    }
    #[doc = "Bit 11 - for D52, D62"]
    #[inline(always)]
    pub fn p3_sq_ctrl23(&mut self) -> P3_SQ_CTRL23_W {
        P3_SQ_CTRL23_W { w: self }
    }
}
#[cfg(test)] extern crate mockall; use std::collections::HashMap; #[cfg_attr(test, mockall::automock)] pub trait Shape { fn spec(&self) -> HashMap<String, u8>; fn area(&self) -> u16; } pub struct Square { pub x: u8, pub y: u8, } impl Shape for Square { fn spec<'a>(&self) -> HashMap<String, u8> { [ ("x".to_string(), self.x), ("y".to_string(), self.y), ].iter().cloned().collect() } fn area(&self) -> u16 { self.x as u16 * self.y as u16 } } #[cfg(test)] mod tests { use super::*; #[test] fn test_spec() { let x = 7; let y = 3; let mut expected : HashMap<String, u8> = HashMap::new(); expected.insert("x".to_string(), x); expected.insert("y".to_string(), y); let under_test = Square{ x:x, y:y }; let actual = under_test.spec(); assert_eq!(expected, actual); } #[test] fn test_area() { let x : u8 = 7; let y : u8 = 3; let expected : u16 = x as u16 * y as u16; let under_test = Square{ x:x, y:y }; let actual = under_test.area(); assert_eq!(expected, actual); } #[test] fn area_doesnt_overflow() { let x : u8 = 255; let y = x; let expected : u16 = 255*255; let under_test = Square{ x:x, y:y }; let actual = under_test.area(); assert_eq!(expected, actual); } }
use std::sync::Arc; use common::error::Error; use common::result::Result; use crate::domain::token::{Token, TokenService}; pub struct AuthorizationService { token_serv: Arc<TokenService>, } impl AuthorizationService { pub fn new(token_serv: Arc<TokenService>) -> Self { AuthorizationService { token_serv } } pub async fn authorize(&self, token: &Token) -> Result<String> { if let Ok(data) = self.token_serv.validate(token).await { if let Some(user_id) = data.get("user_id") { return Ok(user_id.to_string()); } } Err(Error::new("authorization", "unauthorized") .set_status(401) .set_message("User is not logged in") .build()) } } #[cfg(test)] mod tests { use super::*; use crate::domain::token::Data; use crate::mocks; #[tokio::test] async fn authorize() { let c = mocks::container(); let mut user = mocks::validated_user1(); c.user_repo().save(&mut user).await.unwrap(); let mut data = Data::new(); data.add("user_id", user.base().id().value()); let token = c.token_serv().create(data).await.unwrap(); let serv = c.authorization_serv(); let user_id = serv.authorize(&token).await.unwrap(); assert_eq!(user_id, user.base().id().to_string()); assert!(serv.authorize(&Token::new("invalid")).await.is_err()); } }
//! This crate implements the "eytzinger" (aka BFS) array layout where
//! a binary search tree is stored by layer (instead of as a sorted array).
//! This can have significant performance benefits
//! (see [Khuong, Paul-Virak, and Pat Morin. "Array layouts for comparison-based searching."][1]).
//!
//! # Usage
//!
//! ```
//! use eytzinger::SliceExt;
//! let mut data = [0, 1, 2, 3, 4, 5, 6];
//! data.eytzingerize(&mut eytzinger::permutation::InplacePermutator);
//! assert_eq!(data, [3, 1, 5, 0, 2, 4, 6]);
//! assert_eq!(data.eytzinger_search(&5), Some(2));
//! assert_eq!(data.eytzinger_search_by(|x| x.cmp(&6)), Some(6));
//! ```
//!
//! [1]: https://arxiv.org/pdf/1509.05053.pdf
#![warn(missing_docs, missing_debug_implementations)]

use std::cmp::{Ord, Ordering};
use std::borrow::Borrow;
use permutation::*;

/// The basic building blocks this crate is made of.
pub mod foundation {
    /// Given an array size (`n`), tree layer index (`ipk`) and element index (`li`),
    /// this function computes the index of this value in a sorted array.
    ///
    /// This is basically the core of this crate, everything else is trivial.
    /// Also, you usually want to use `PermutationGenerator` instead for convenience.
    ///
    /// # How it works
    ///
    /// This computes the magic function:
    ///
    /// ```text
    /// f(n, k) = 2^floor(log2(n+1)) * 2k - max(0, (1+k) * 2^floor(log2(n+1)) - (n + 1)) - 1
    /// (where n ∈ ℕ, k ∈ [0, 1])
    /// ```
    ///
    /// Because this is integer math: `k = zk * 2^-ipk`
    /// And because we only care about certain values of zk: `zk = li * 2 + 1`
    ///
    /// Even though I discovered this on my own and am quite certain that it's correct,
    /// I only have a very vague feeling about **why** it works. If you want to understand this
    /// (you really don't!), have a look at this sequence:
    ///
    /// ```text
    /// a_n = (2n - 2^floor(log2(2n)) + 1) / 2^floor(log2(2n))
    /// (1/2, 1/4, 3/4, 1/8, 3/8, 5/8, 7/8, 1/16, 3/16, 5/16, 7/16, 9/16, 11/16, 13/16, 15/16, ...)
    /// ```
    ///
    /// The basic idea is that this sequence basically establishes a mapping between a sorted array
    /// and an eytzinger array: If you look at the plot sideways (literally), you can see the tree
    /// with all its layers. The only secret sauce here is `2^floor(log2(x))` which is elementary to
    /// get exponentially growing windows (to build tree layers).
    #[inline]
    pub fn get_permutation_element_by_node(n: usize, ipk: usize, li: usize) -> usize {
        let zk = li * 2 + 1; // k = zk * 2^-ipk
        // 2^floor(log2(n+1)) from the magic function above.
        let last_power_of_two = (n + 2).next_power_of_two() / 2;
        let y = (last_power_of_two >> (ipk - 1)) * zk;
        let kp = y >> 1;
        let x = kp + last_power_of_two; // (1+k) * last_power_of_two
        // The max(0, ...) term, expressed with unsigned saturation.
        let x = x.saturating_sub(n + 1);
        //println!("n={} x={} y={} z={} kp={} lpot={}", n, x,y,z, kp, last_power_of_two);
        y - x - 1
    }

    /// Converts an index in an eytzinger array to the corresponding tree coordinates `(ipk, li)`.
    #[inline]
    pub fn index_to_node(i: usize) -> (usize, usize) {
        // Layer number: position of the highest set bit of (i + 1).
        let ipk = (i + 2).next_power_of_two().trailing_zeros() as usize;
        // Offset within that layer.
        let li = i + 1 - (1 << (ipk - 1));
        (ipk, li)
    }

    /// Given an array size (`n`) and an index into the eytzinger array (`i`),
    /// this function computes the index of this value in a sorted array.
    ///
    /// This is simply `index_to_node` + `get_permutation_element_by_node`.
    #[inline]
    pub fn get_permutation_element(n: usize, i: usize) -> usize {
        let (ipk, li) = index_to_node(i);
        get_permutation_element_by_node(n, ipk, li)
    }
}

/// Abstractions around applying generic permutations using generic implementations.
///
/// You should pick one that matches your use case.
pub mod permutation {
    use std::iter::{Cloned, Enumerate};
    use std::slice::Iter;

    /// A generic permutation.
    pub trait Permutation {
        /// An iterator through the permutation.
        /// This may be more efficient than indexing a counter.
        type Iter: Iterator<Item=usize>;

        /// Get an iterator.
        fn iterable(&self) -> Self::Iter;

        /// Index into this permutation.
        fn index(&self, i: usize) -> usize;
    }

    impl<'a> Permutation for &'a [usize] {
        type Iter = Cloned<Iter<'a, usize>>;

        #[inline]
        fn iterable(&self) -> Self::Iter {
            self.iter().cloned()
        }

        #[inline]
        fn index(&self, i: usize) -> usize {
            self[i]
        }
    }

    /// A generic permutator.
    pub trait Permutator<T, P: ?Sized + Permutation> {
        /// Applies the given permutation to the given array.
        fn permute(&mut self, data: &mut [T], permutation: &P);
    }

    /// Simple permutator that does not allocate.
    ///
    /// Worst-case runtime is in `O(n^2)`, so you should only use this for small permutations.
    #[derive(Clone, Copy, Debug, Default)]
    pub struct InplacePermutator;

    impl<T, P: ?Sized + Permutation> Permutator<T, P> for InplacePermutator {
        #[inline]
        fn permute(&mut self, data: &mut [T], permutation: &P) {
            for (i, mut p) in permutation.iterable().enumerate() {
                // Positions below `i` are already final; chase the cycle
                // until we find where that element currently lives.
                while p < i {
                    p = permutation.index(p);
                }
                if p > i {
                    data.swap(i, p);
                }
            }
        }
    }

    /// Simple permutator that stack-allocates a copy of the data (using recursion).
    ///
    /// Worst-case runtime is `O(n)`, but this takes `O(n)` stack space so it WILL NOT work for large permutations.
    #[derive(Clone, Copy, Debug, Default)]
    pub struct StackCopyPermutator;

    // Clones data[p] into a stack frame before recursing, then writes it
    // to its destination on the way back out.
    fn recursive_permute<T: Clone, P: ?Sized + Permutation>(data: &mut [T], permutation: &mut Enumerate<P::Iter>) {
        if let Some((i, p)) = permutation.next() {
            let item = data[p].clone();
            recursive_permute::<T, P>(data, permutation);
            data[i] = item;
        }
    }

    impl<T: Clone, P: ?Sized + Permutation> Permutator<T, P> for StackCopyPermutator {
        #[inline]
        fn permute(&mut self, data: &mut [T], permutation: &P) {
            let mut iter = permutation.iterable().enumerate();
            recursive_permute::<T, P>(data, &mut iter);
        }
    }

    /// Simple permutator that heap-allocates a copy of the data.
    ///
    /// Worst-case runtime is `O(n)`, taking `O(n)` heap space in a reusable buffer.
    /// This is an acceptable permutator for large permutations, provided that the data
    /// is (efficiently) cloneable.
    #[derive(Debug, Default)]
    pub struct HeapCopyPermutator<T> {
        buffer: Vec<T>,
    }

    impl<T: Clone, P: ?Sized + Permutation> Permutator<T, P> for HeapCopyPermutator<T> {
        #[inline]
        fn permute(&mut self, data: &mut [T], permutation: &P) {
            self.buffer.clear();
            self.buffer.extend(permutation.iterable().map(|i| data[i].clone()));
            for (i, t) in self.buffer.drain(..).enumerate() {
                data[i] = t;
            }
        }
    }

    /// Permutator that uses an auxiliary heap buffer to ensure linear runtime.
    ///
    /// Worst-case runtime is `O(n)`, taking `O(n)` heap space in a reusable buffer.
    /// The buffer we allocate uses exactly `sizeof(usize) * data.len()` bytes.
    /// This is a decent permutator for large permutations.
    #[derive(Debug, Default)]
    #[cfg(feature = "heap-permutator")]
    pub struct HeapPermutator {
        buffer: Vec<Option<nonmax::NonMaxUsize>>,
    }

    #[cfg(feature = "heap-permutator")]
    impl<T, P: ?Sized + Permutation> Permutator<T, P> for HeapPermutator {
        #[inline]
        fn permute(&mut self, data: &mut [T], permutation: &P) {
            use std::convert::TryInto;
            self.buffer.clear();
            self.buffer.resize(data.len(), None);
            for mut i in 0..data.len() {
                let mut j = permutation.index(i);
                if j < i {
                    // That slot was already swapped away; the buffer
                    // remembers where its original element went.
                    j = self.buffer[j].take().unwrap().get();
                }
                data.swap(i, j);
                if let Some(x) = self.buffer[i].take() {
                    i = x.get();
                }
                if j != i {
                    self.buffer[j] = Some(i.try_into().unwrap());
                    self.buffer[i] = Some(j.try_into().unwrap());
                }
            }
        }
    }

    /// Permutator that uses an auxiliary heap buffer to ensure linear runtime.
    ///
    /// Worst-case runtime is `O(n)`, worst-case memory usage is also `O(n)`.
    /// The difference to `HeapPermutator` is that this implementation uses a `HashMap`
    /// instead of a `Vec` for storage. Hence, the exact memory usage can no longer be
    /// predicted - but in return, it should be lower in most cases.
    /// This is a decent permutator for large permutations.
    #[derive(Debug, Default)]
    #[cfg(feature = "heap-permutator-sparse")]
    pub struct SparseHeapPermutator {
        buffer: std::collections::HashMap<usize, usize, nohash_hasher::BuildNoHashHasher<usize>>,
    }

    #[cfg(feature = "heap-permutator-sparse")]
    impl<T, P: ?Sized + Permutation> Permutator<T, P> for SparseHeapPermutator {
        #[inline]
        fn permute(&mut self, data: &mut [T], permutation: &P) {
            self.buffer.clear();
            for mut i in 0..data.len() {
                let mut j = permutation.index(i);
                if j < i {
                    j = self.buffer.remove(&j).unwrap();
                }
                data.swap(i, j);
                if let Some(x) = self.buffer.remove(&i) {
                    i = x;
                }
                if j != i {
                    self.buffer.insert(j, i);
                    self.buffer.insert(i, j);
                }
            }
        }
    }
}

/// Generates a permutation that transforms a sorted array into an eytzinger array.
///
/// This is an iterator which yields a permutation (indexes into the sorted array)
/// in the order of an eytzinger array.
#[derive(Clone, Debug)]
pub struct PermutationGenerator {
    // Length of the array being permuted.
    size: usize,
    // Current tree layer (1-based).
    ipk: usize,
    // Next element index within the current layer.
    li: usize,
}

impl PermutationGenerator {
    /// Generate a new permutation for a sorted array of a given size.
    #[inline]
    pub fn new(size: usize) -> PermutationGenerator {
        PermutationGenerator {
            size,
            ipk: 1,
            li: 0,
        }
    }
}

impl Iterator for PermutationGenerator {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<usize> {
        // k2 = capacity of the current layer (2^(ipk - 1)).
        let k2 = 1 << (self.ipk - 1);
        if k2 + self.li - 1 >= self.size {
            return None;
        }
        // Current layer exhausted: move on to the next one.
        if self.li >= k2 {
            self.li = 0;
            self.ipk += 1;
        }
        let li = self.li;
        self.li += 1;
        Some(foundation::get_permutation_element_by_node(self.size, self.ipk, li))
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let k2 = 1 << (self.ipk - 1);
        let size = self.size - (k2 + self.li - 1);
        (size, Some(size))
    }
}

impl ExactSizeIterator for PermutationGenerator {}

impl Permutation for PermutationGenerator {
    type Iter = PermutationGenerator;

    #[inline]
    fn iterable(&self) -> PermutationGenerator {
        self.clone()
    }

    #[inline]
    fn index(&self, i: usize) -> usize {
        foundation::get_permutation_element(self.size, i)
    }
}

/// Converts a sorted array to its eytzinger representation.
///
/// # Example
///
/// ```
/// let mut data = [0, 1, 2, 3, 4, 5, 6];
/// eytzinger::eytzingerize(&mut data, &mut eytzinger::permutation::InplacePermutator);
/// assert_eq!(data, [3, 1, 5, 0, 2, 4, 6]);
/// ```
#[inline]
pub fn eytzingerize<T, P: Permutator<T, PermutationGenerator>>(data: &mut [T], permutator: &mut P) {
    let len = data.len();
    permutator.permute(data, &PermutationGenerator::new(len))
}

/// Eytzinger extension methods for slices.
pub trait SliceExt<T> {
    /// Converts an already sorted array to its eytzinger representation.
    ///
    /// # Example
    ///
    /// ```
    /// use eytzinger::SliceExt;
    /// let mut data = [0, 1, 2, 3, 4, 5, 6];
    /// data.eytzingerize(&mut eytzinger::permutation::InplacePermutator);
    /// assert_eq!(data, [3, 1, 5, 0, 2, 4, 6]);
    /// ```
    fn eytzingerize<P: Permutator<T, PermutationGenerator>>(&mut self, permutator: &mut P);

    /// Binary searches this eytzinger slice for a given element.
    ///
    /// If the value is found then `Some` is returned, containing the index of the matching element;
    /// if the value is not found then `None` is returned.
    ///
    /// # Example
    ///
    /// ```
    /// use eytzinger::SliceExt;
    /// let s = [3, 1, 5, 0, 2, 4, 6];
    /// assert_eq!(s.eytzinger_search(&5), Some(2));
    /// assert_eq!(s.eytzinger_search(&6), Some(6));
    /// assert_eq!(s.eytzinger_search(&7), None);
    /// ```
    fn eytzinger_search<Q: ?Sized>(&self, x: &Q) -> Option<usize>
        where Q: Ord, T: Borrow<Q>;

    /// Binary searches this eytzinger slice with a comparator function.
    ///
    /// The comparator function should implement an order consistent with the sort order
    /// of the underlying eytzinger slice, returning an order code that indicates whether
    /// its argument is `Less`, `Equal` or `Greater` than the desired target.
    ///
    /// If a matching value is found then `Some` is returned, containing the index of the
    /// matching element; if no match is found then `None` is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// use eytzinger::SliceExt;
    /// let s = [3, 1, 5, 0, 2, 4, 6];
    /// assert_eq!(s.eytzinger_search_by(|x| x.cmp(&5)), Some(2));
    /// assert_eq!(s.eytzinger_search_by(|x| x.cmp(&6)), Some(6));
    /// assert_eq!(s.eytzinger_search_by(|x| x.cmp(&7)), None);
    /// ```
    fn eytzinger_search_by<'a, F>(&'a self, f: F) -> Option<usize>
        where F: FnMut(&'a T) -> Ordering, T: 'a;

    /// Binary searches this sorted slice with a key extraction function.
    ///
    /// Assumes that the slice is eytzinger-sorted by the key, for instance with
    /// `slice::sort_by_key` combined with `eytzinger::eytzingerize` using the
    /// same key extraction function.
    ///
    /// If a matching value is found then `Some` is returned, containing the index of the
    /// matching element; if no match is found then `None` is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// use eytzinger::SliceExt;
    /// let s = [(3, 'd'), (1, 'b'), (5, 'f'), (0, 'a'), (2, 'c'), (4, 'e'), (6, 'g')];
    /// assert_eq!(s.eytzinger_search_by_key(&'f', |&(_, b)| b), Some(2));
    /// assert_eq!(s.eytzinger_search_by_key(&'g', |&(_, b)| b), Some(6));
    /// assert_eq!(s.eytzinger_search_by_key(&'x', |&(_, b)| b), None);
    /// ```
    fn eytzinger_search_by_key<'a, B, F, Q: ?Sized>(&'a self, b: &Q, f: F) -> Option<usize>
        where B: Borrow<Q>, F: FnMut(&'a T) -> B, Q: Ord, T: 'a;
}

/// Binary searches this eytzinger slice with a comparator function.
///
/// The comparator function should implement an order consistent with the sort order
/// of the underlying eytzinger slice, returning an order code that indicates whether
/// its argument is `Less`, `Equal` or `Greater` than the desired target.
///
/// If a matching value is found then `Some` is returned, containing the index of the
/// matching element; if no match is found then `None` is returned.
///
/// # Examples
///
/// ```
/// use eytzinger::eytzinger_search_by;
/// let s = [3, 1, 5, 0, 2, 4, 6];
/// assert_eq!(eytzinger_search_by(&s, |x| x.cmp(&3)), Some(0));
/// assert_eq!(eytzinger_search_by(&s, |x| x.cmp(&5)), Some(2));
/// assert_eq!(eytzinger_search_by(&s, |x| x.cmp(&6)), Some(6));
/// assert_eq!(eytzinger_search_by(&s, |x| x.cmp(&7)), None);
/// ```
#[inline]
pub fn eytzinger_search_by<'a, T: 'a, F>(data: &'a [T], f: F) -> Option<usize>
    where F: FnMut(&'a T) -> Ordering
{
    eytzinger_search_by_impl(data, f)
}

#[inline]
#[cfg(not(feature = "branchless"))]
fn eytzinger_search_by_impl<'a, T: 'a, F>(data: &'a [T], mut f: F) -> Option<usize>
    where F: FnMut(&'a T) -> Ordering
{
    // Walk the implicit tree: children of node i are at 2i+1 / 2i+2;
    // running off the slice means the target is absent.
    let mut i = 0;
    loop {
        match data.get(i) {
            Some(ref v) => {
                match f(v) {
                    Ordering::Equal => return Some(i),
                    o => {
                        // I was hoping the optimizer could handle this but it can't
                        // So here goes the evil hack: Ordering is -1/0/1
                        // So we use this dirty trick to map this to +2/X/+1
                        let o = o as usize;
                        let o = (o >> 1) & 1;
                        i = 2 * i + 1 + o;
                    }
                };
            }
            None => return None,
        }
    }
}

#[inline]
#[cfg(feature = "branchless")]
fn eytzinger_search_by_impl<'a, T: 'a, F>(data: &'a [T], mut f: F) -> Option<usize>
    where F: FnMut(&'a T) -> Ordering
{
    let mut i = 0;
    while i < data.len() {
        let v = &data[i]; // this range check is optimized out :D
        i = match f(v) {
            Ordering::Greater | Ordering::Equal => 2 * i + 1,
            Ordering::Less => 2 * i + 2,
        };
    }

    // magic from the paper to fix up the (incomplete) final tree layer
    // (only difference is that we recheck f() because this is exact search)
    let p = i + 1;
    let j = p >> (1 + (!p).trailing_zeros());
    if j != 0 && (f(&data[j - 1]) == Ordering::Equal) {
        Some(j - 1)
    } else {
        None
    }
}

impl<T> SliceExt<T> for [T] {
    #[inline]
    fn eytzingerize<P: Permutator<T, PermutationGenerator>>(&mut self, permutator: &mut P) {
        eytzingerize(self, permutator)
    }

    #[inline]
    fn eytzinger_search<Q: ?Sized>(&self, x: &Q) -> Option<usize>
        where Q: Ord, T: Borrow<Q>
    {
        self.eytzinger_search_by(|e| e.borrow().cmp(x))
    }

    #[inline]
    fn eytzinger_search_by<'a, F>(&'a self, f: F) -> Option<usize>
        where F: FnMut(&'a T) -> Ordering, T: 'a
    {
        eytzinger_search_by(self, f)
    }

    #[inline]
    fn eytzinger_search_by_key<'a, B, F, Q: ?Sized>(&'a self, b: &Q, mut f: F) -> Option<usize>
        where B: Borrow<Q>, F: FnMut(&'a T) -> B, Q: Ord, T: 'a
    {
        self.eytzinger_search_by(|k| f(k).borrow().cmp(b))
    }
}

#[cfg(test)]
#[macro_use]
extern crate quickcheck;

#[cfg(test)]
mod tests {
    use super::*;
    use super::foundation::*;

    #[test]
    fn magic() {
        for (i, &v) in [0, 1, 1, 2, 3, 3, 3, 4, 5, 6, 7, 7, 7, 7, 7, 8].iter().enumerate() {
            assert_eq!(get_permutation_element_by_node(i + 1, 1, 0), v);
        }
        for (i, &v) in [0, 0, 1, 1, 1, 1, 2, 3, 3].iter().enumerate() {
            assert_eq!(get_permutation_element_by_node(i + 2, 2, 0), v);
        }
        for (i, &v) in [2, 3, 4, 5, 5, 6, 7, 8].iter().enumerate() {
            assert_eq!(get_permutation_element_by_node(i + 3, 2, 1), v);
        }
        for (i, &v) in [0, 0, 0, 0, 1, 1, 1].iter().enumerate() {
            assert_eq!(get_permutation_element_by_node(i + 4, 3, 0), v);
        }
    }

    // Expected eytzinger permutations for the first few array sizes.
    const REF_PERMUTATIONS: &[&'static [usize]] = &[
        &[],
        &[0],
        &[1, 0],
        &[1, 0, 2],
        &[2, 1, 3, 0],
        &[3, 1, 4, 0, 2],
        &[3, 1, 5, 0, 2, 4],
        &[3, 1, 5, 0, 2, 4, 6],
        &[4, 2, 6, 1, 3, 5, 7, 0],
        &[5, 3, 7, 1, 4, 6, 8, 0, 2],
        &[6, 3, 8, 1, 5, 7, 9, 0, 2, 4],
        &[7, 3, 0xb, 1, 5, 9, 0xd, 0, 2, 4, 6, 8, 0xa, 0xc, 0xe],
    ];

    #[test]
    fn reference_permutations() {
        for &array in REF_PERMUTATIONS {
            let permut: Vec<_> = PermutationGenerator::new(array.len()).collect();
            assert_eq!(array, permut.as_slice());
        }
    }

    #[test]
    fn eytzingerize_simple() {
        let mut permutator = InplacePermutator;
        for &array in REF_PERMUTATIONS {
            let mut payload: Vec<_> = (0..array.len()).collect();
            eytzingerize(payload.as_mut_slice(), &mut permutator);
            assert_eq!(payload, array);
        }
    }

    const NODE_INDEXES: &[(usize, usize)] = &[
        (1, 0),
        (2, 0), (2, 1),
        (3, 0), (3, 1), (3, 2), (3, 3),
        (4, 0), (4, 1), (4, 2), (4, 3), (4, 4), (4, 5), (4, 6), (4, 7),
    ];

    #[test]
    fn calc_index() {
        for (i, &x) in NODE_INDEXES.iter().enumerate() {
            assert_eq!(x, index_to_node(i));
        }
    }

    #[test]
    fn simple_inplace_permutation() {
        let permutation: &[usize] = &[4, 2, 3, 0, 1];
        let mut data = [1, 2, 3, 4, 5];
        InplacePermutator.permute(&mut data, &permutation);
        assert_eq!(data, [5, 3, 4, 1, 2]);
    }

    #[test]
    #[cfg(feature = "heap-permutator")]
    fn simple_heap_permutation() {
        let permutation: &[usize] = &[4, 2, 3, 0, 1];
        let mut data = [1, 2, 3, 4, 5];
        HeapPermutator::default().permute(&mut data, &permutation);
        assert_eq!(data, [5, 3, 4, 1, 2]);
    }

    #[test]
    #[cfg(feature = "heap-permutator-sparse")]
    fn simple_heap_permutation_sparse() {
        let permutation: &[usize] = &[4, 2, 3, 0, 1];
        let mut data = [1, 2, 3, 4, 5];
        SparseHeapPermutator::default().permute(&mut data, &permutation);
        assert_eq!(data, [5, 3, 4, 1, 2]);
    }

    #[test]
    fn simple_heap_copy_permutation() {
        let permutation: &[usize] = &[4, 2, 3, 0, 1];
        let mut data = [1, 2, 3, 4, 5];
        HeapCopyPermutator::default().permute(&mut data, &permutation);
        assert_eq!(data, [5, 3, 4, 1, 2]);
    }

    #[test]
    fn search_negative() {
        let data: &[i32] = &[6, 2, 10, 0, 4, 8, 12];
        for i in -10..20 {
            let expected = data.iter().position(|&x| x == i);
            assert_eq!(expected, data.eytzinger_search(&i));
        }
    }

    // Applying a sorting permutation to the identity must reproduce the
    // permutation itself; used by the quickcheck properties below.
    fn test_permutation<P: Default>(junk: Vec<usize>) -> bool
        where for<'a> P: Permutator<usize, &'a [usize]>
    {
        // first create a permutation from the random array
        let mut perm: Vec<_> = (0..junk.len()).collect();
        perm.sort_by_key(|&i| junk[i]);

        // now test
        let mut data: Vec<_> = (0..perm.len()).collect();
        P::default().permute(data.as_mut_slice(), &perm.as_slice());
        data == perm
    }

    quickcheck! {
        fn inplace_permutation(junk: Vec<usize>) -> bool {
            test_permutation::<InplacePermutator>(junk)
        }

        fn stack_permutation(junk: Vec<usize>) -> bool {
            test_permutation::<StackCopyPermutator>(junk)
        }

        #[cfg(feature = "heap-permutator")]
        fn heap_permutation(junk: Vec<usize>) -> bool {
            test_permutation::<HeapPermutator>(junk)
        }

        #[cfg(feature = "heap-permutator-sparse")]
        fn heap_permutation_sparse(junk: Vec<usize>) -> bool {
            test_permutation::<SparseHeapPermutator>(junk)
        }

        fn heap_copy_permutation(junk: Vec<usize>) -> bool {
            test_permutation::<HeapCopyPermutator<_>>(junk)
        }

        fn eytzinger_tree_invariants(length: usize) -> bool {
            let perm: Vec<_> = PermutationGenerator::new(length).collect();

            let mut todo = Vec::new();
            todo.push((0, 0..length));
            let mut checked = 0;
            while let Some((i, range)) = todo.pop() {
                match perm.get(i) {
                    Some(&v) => {
                        if !(range.start <= v && v < range.end) {
                            return false;
                        }
                        todo.push((2 * (i + 1) - 1, range.start..v));
                        todo.push((2 * (i + 1), v..range.end));
                        checked += 1;
                    }
                    None => continue,
                }
            }
            checked == length
        }

        fn search_works(data: Vec<usize>) -> bool {
            let mut data = data;
            data.sort();
            data.dedup();
            data.eytzingerize(&mut InplacePermutator);
            data.iter().enumerate().all(|(i, v)| data.eytzinger_search(v) == Some(i))
        }
    }
}
fn dig_pow(n: i64, p: i32) -> i64 { let mut digits : Vec<i64> = vec![]; let mut num = n; while num > 0 { digits.push(num % (10 as i64)); num = num / 10; } digits.reverse(); let mut sum: i64 = 0; let mut start: u32 = p as u32; for elem in digits{ sum += elem.pow(start); start += 1; } if n == 0 { return -1; } if sum % n == 0 { return sum / n; } else { return -1; } } #[test] fn test0() { assert_eq!(dig_pow(123, 1), -1); } #[test] fn test1() { assert_eq!(dig_pow(1231, 2), -1); } #[test] fn test2() { assert_eq!(dig_pow(11223, 1), -1); } #[test] fn test3() { assert_eq!(dig_pow(142323, 2), -1); } #[test] fn test4() { assert_eq!(dig_pow(121223, 1), -1); } #[test] fn test5() { assert_eq!(dig_pow(11111, 2), -1); } #[test] fn test6() { assert_eq!(dig_pow(89, 1), 1); } #[test] fn test7() { assert_eq!(dig_pow(1, 1), 1); } #[test] fn test8() { assert_eq!(dig_pow(0, 1), -1); } #[test] fn test9() { assert_eq!(dig_pow(2, 1), 1); } fn main() { println!("{}", dig_pow(1, 2)); }
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0

use crate::{chain::BlockChain, chain_metrics::CHAIN_METRICS};
use actix::Addr;
use anyhow::{format_err, Error, Result};
use bus::{Broadcast, BusActor};
use config::NodeConfig;
use crypto::HashValue;
use logger::prelude::*;
use starcoin_statedb::ChainStateDB;
use starcoin_txpool_api::TxPoolSyncService;
use std::sync::Arc;
use storage::Store;
use traits::{ChainReader, ChainService, ChainWriter, ConnectBlockError, ConnectResult, Consensus};
use types::{
    account_address::AccountAddress,
    block::{Block, BlockDetail, BlockHeader, BlockInfo, BlockNumber, BlockState, BlockTemplate},
    startup_info::StartupInfo,
    system_events::NewHeadBlock,
    transaction::{SignedUserTransaction, Transaction, TransactionInfo},
};

/// Chain service implementation: owns the current master chain, tracks side
/// branches via `StartupInfo`, and wires block connection to the transaction
/// pool (via `P`) and the event bus.
pub struct ChainServiceImpl<C, S, P>
where
    C: Consensus,
    P: TxPoolSyncService + 'static,
    S: Store + 'static,
{
    config: Arc<NodeConfig>,
    // Master head plus known branch heads; persisted through `save_startup`.
    startup_info: StartupInfo,
    // The currently-selected best chain.
    master: BlockChain<C, S>,
    storage: Arc<S>,
    txpool: P,
    bus: Addr<BusActor>,
}

impl<C, S, P> ChainServiceImpl<C, S, P>
where
    C: Consensus,
    P: TxPoolSyncService + 'static,
    S: Store + 'static,
{
    /// Builds the service, loading the master chain rooted at
    /// `startup_info.master` from storage.
    pub fn new(
        config: Arc<NodeConfig>,
        startup_info: StartupInfo,
        storage: Arc<S>,
        txpool: P,
        bus: Addr<BusActor>,
    ) -> Result<Self> {
        let master = BlockChain::new(config.clone(), startup_info.master, storage.clone())?;
        Ok(Self {
            config,
            startup_info,
            master,
            storage,
            txpool,
            bus,
        })
    }

    /// Classifies an incoming header. Returns `(already_known, branch)`:
    /// `branch` is `Some` only when the block is new AND its parent is known,
    /// in which case it is a chain rooted at the parent, ready to apply the
    /// block onto. New block with unknown parent yields `(false, None)`.
    pub fn find_or_fork(
        &mut self,
        header: &BlockHeader,
    ) -> Result<(bool, Option<BlockChain<C, S>>)> {
        CHAIN_METRICS.try_connect_count.inc();
        let block_exist = self.block_exist(header.id());
        let block_chain = if !block_exist {
            if self.block_exist(header.parent_hash()) {
                Some(BlockChain::new(
                    self.config.clone(),
                    header.parent_hash(),
                    self.storage.clone(),
                )?)
            } else {
                None
            }
        } else {
            None
        };
        Ok((block_exist, block_chain))
    }

    /// A block counts as "existing" when storage has a `BlockInfo` for it;
    /// storage read errors are treated as "not found".
    pub fn block_exist(&self, block_id: HashValue) -> bool {
        if let Ok(Some(_)) = self.storage.get_block_info(block_id) {
            true
        } else {
            false
        }
    }

    /// Not yet implemented; panics if called.
    pub fn state_at(&self, _root: HashValue) -> ChainStateDB {
        unimplemented!()
    }

    pub fn get_master(&self) -> &BlockChain<C, S> {
        &self.master
    }

    /// Chain selection: if `new_branch` has strictly more total difficulty
    /// than the master, promote it (update master, resync the txpool with the
    /// enacted/retracted sets, broadcast the new head); otherwise record it
    /// as a side branch. Always refreshes the branch metric and persists
    /// startup info.
    fn select_head(&mut self, new_branch: BlockChain<C, S>) -> Result<()> {
        let block = new_branch.head_block();
        let block_header = block.header();
        let total_difficulty = new_branch.get_total_difficulty()?;
        if total_difficulty > self.get_master().get_total_difficulty()? {
            let (enacted_blocks, retracted_blocks) = if block.header().parent_hash()
                == self.startup_info.master
            {
                // Fast path: the new head directly extends the current master,
                // so only this block is enacted and nothing is retracted.
                (vec![block.clone()], vec![])
            } else {
                // TODO: After review the impl of find_common_ancestor in storage.
                // we can just let find_ancestors do it work, no matter whether fork or not.
                self.find_ancestors(&new_branch)?
            };
            debug_assert!(!enacted_blocks.is_empty());
            debug_assert_eq!(enacted_blocks.last().unwrap(), &block);
            self.update_master(new_branch);
            self.commit_2_txpool(enacted_blocks, retracted_blocks);
            CHAIN_METRICS.broadcast_head_count.inc();
            self.broadcast_2_bus(BlockDetail::new(block, total_difficulty));
        } else {
            self.insert_branch(block_header);
        }

        CHAIN_METRICS
            .branch_total_count
            .set(self.startup_info.branches.len() as i64);
        self.save_startup()
    }

    /// Swaps in the new master chain and records its head in startup info.
    fn update_master(&mut self, new_master: BlockChain<C, S>) {
        let header = new_master.current_header();
        self.master = new_master;
        self.startup_info.update_master(&header);
    }

    fn insert_branch(&mut self, new_block_header: &BlockHeader) {
        self.startup_info.insert_branch(new_block_header);
    }

    /// Persists the (cloned) startup info snapshot to storage.
    fn save_startup(&self) -> Result<()> {
        let startup_info = self.startup_info.clone();
        self.storage.save_startup_info(startup_info)
    }

    /// Best-effort txpool resync after a head switch: failures are logged,
    /// never propagated. NOTE(review): assumes `TxPoolSyncService::rollback`
    /// takes `(enacted, retracted)` in that order — confirm against the trait.
    fn commit_2_txpool(&self, enacted: Vec<Block>, retracted: Vec<Block>) {
        if let Err(e) = self.txpool.rollback(enacted, retracted) {
            error!("rollback err : {:?}", e);
        }
    }

    /// Finds the common ancestor of `new_branch`'s head and the master head,
    /// then returns the blocks to enact (ancestor -> new head, oldest first)
    /// and to retract (ancestor -> master head, oldest first).
    fn find_ancestors(&self, new_branch: &BlockChain<C, S>) -> Result<(Vec<Block>, Vec<Block>)> {
        let block_enacted = new_branch.current_header().id();
        let block_retracted = self.get_master().current_header().id();

        let ancestor = self
            .storage
            .get_common_ancestor(block_enacted, block_retracted)?
            .ok_or_else(|| {
                format_err!(
                    "Can not find ancestor with {:?} and {:?}.",
                    block_enacted,
                    block_retracted
                )
            })?;

        let enacted = self.find_blocks_until(block_enacted, ancestor)?;
        let retracted = self.find_blocks_until(block_retracted, ancestor)?;
        debug!(
            "commit block num:{}, rollback block num:{}",
            enacted.len(),
            retracted.len(),
        );
        Ok((enacted, retracted))
    }

    /// Walks parent links from `from` (inclusive) back to `until` (exclusive)
    /// and returns the blocks oldest-first. Errors if a block on the path is
    /// missing from storage; loops forever if `until` is not an ancestor of
    /// `from` until a missing block is hit.
    fn find_blocks_until(&self, from: HashValue, until: HashValue) -> Result<Vec<Block>> {
        let mut blocks: Vec<Block> = Vec::new();
        let mut tmp = from;
        loop {
            if tmp == until {
                break;
            };
            let block = self
                .storage
                .get_block(tmp)?
                .ok_or_else(|| format_err!("Can not find block {:?}.", tmp))?;
            tmp = block.header().parent_hash();
            blocks.push(block);
        }
        // Collected head-first; callers expect oldest-first.
        blocks.reverse();
        Ok(blocks)
    }

    /// Fire-and-forget `NewHeadBlock` notification on the event bus.
    pub fn broadcast_2_bus(&self, block: BlockDetail) {
        let bus = self.bus.clone();
        bus.do_send(Broadcast {
            msg: NewHeadBlock(Arc::new(block)),
        });
    }
}

impl<C, S, P> ChainService for ChainServiceImpl<C, S, P>
where
    C: Consensus,
    P: TxPoolSyncService,
    S: Store,
{
    //TODO define connect result.
    /// Connects a block by fully executing it on the forked branch.
    /// Duplicates and blocks with unknown parents are rejected with the
    /// matching `ConnectBlockError` (the latter as `FutureBlock`).
    fn try_connect(&mut self, block: Block) -> Result<ConnectResult<()>> {
        let (block_exist, fork) = self.find_or_fork(block.header())?;
        if block_exist {
            CHAIN_METRICS.duplicate_conn_count.inc();
            Ok(ConnectResult::Err(ConnectBlockError::DuplicateConn))
        } else if let Some(mut branch) = fork {
            let timer = CHAIN_METRICS
                .exe_block_time
                .with_label_values(&["time"])
                .start_timer();
            let connected = branch.apply(block.clone())?;
            timer.observe_duration();
            if !connected {
                debug!("connected failed {:?}", block.header().id());
                CHAIN_METRICS.verify_fail_count.inc();
                Ok(ConnectResult::Err(ConnectBlockError::VerifyFailed))
            } else {
                self.select_head(branch)?;
                Ok(ConnectResult::Ok(()))
            }
        } else {
            Ok(ConnectResult::Err(ConnectBlockError::FutureBlock))
        }
    }

    /// Like `try_connect`, but trusts the supplied `BlockInfo` instead of
    /// re-executing the block: verifies the header with the consensus, then
    /// commits the block and its accumulator info directly.
    fn try_connect_with_block_info(
        &mut self,
        block: Block,
        block_info: BlockInfo,
    ) -> Result<ConnectResult<()>> {
        let (block_exist, fork) = self.find_or_fork(block.header())?;
        if block_exist {
            CHAIN_METRICS.duplicate_conn_count.inc();
            Ok(ConnectResult::Err(ConnectBlockError::DuplicateConn))
        } else if let Some(mut branch) = fork {
            if C::verify(self.config.clone(), &branch, block.header()).is_ok() {
                // 2. commit block
                branch.append_block(block.id(), block_info.get_block_accumulator_info().clone())?;
                branch.commit(block, block_info, BlockState::Verified)?;
                self.select_head(branch)?;
                Ok(ConnectResult::Ok(()))
            } else {
                Ok(ConnectResult::Err(ConnectBlockError::VerifyFailed))
            }
        } else {
            Ok(ConnectResult::Err(ConnectBlockError::FutureBlock))
        }
    }

    // Thin storage pass-throughs.
    fn get_header_by_hash(&self, hash: HashValue) -> Result<Option<BlockHeader>> {
        self.storage.get_block_header_by_hash(hash)
    }

    fn get_block_by_hash(&self, hash: HashValue) -> Result<Option<Block>> {
        self.storage.get_block_by_hash(hash)
    }

    fn get_block_state_by_hash(&self, hash: HashValue) -> Result<Option<BlockState>> {
        self.storage.get_block_state(hash)
    }

    fn get_block_info_by_hash(&self, hash: HashValue) -> Result<Option<BlockInfo>> {
        self.storage.get_block_info(hash)
    }

    fn get_transaction(&self, txn_hash: HashValue) -> Result<Option<Transaction>, Error> {
        self.storage.get_transaction(txn_hash)
    }

    fn get_block_txn_infos(&self, block_id: HashValue) -> Result<Vec<TransactionInfo>, Error> {
        self.storage.get_block_transaction_infos(block_id)
    }

    fn get_txn_info_by_block_and_index(
        &self,
        block_id: HashValue,
        idx: u64,
    ) -> Result<Option<TransactionInfo>, Error> {
        self.storage
            .get_transaction_info_by_block_and_index(block_id, idx)
    }

    // Master-chain accessors.
    fn master_head_header(&self) -> BlockHeader {
        self.get_master().current_header()
    }

    fn master_head_block(&self) -> Block {
        self.get_master().head_block()
    }

    fn master_block_by_number(&self, number: BlockNumber) -> Result<Option<Block>> {
        self.get_master().get_block_by_number(number)
    }

    fn master_block_header_by_number(&self, number: BlockNumber) -> Result<Option<BlockHeader>> {
        self.get_master().get_header_by_number(number)
    }

    fn master_startup_info(&self) -> StartupInfo {
        self.startup_info.clone()
    }

    fn master_blocks_by_number(
        &self,
        number: Option<BlockNumber>,
        count: u64,
    ) -> Result<Vec<Block>> {
        self.get_master().get_blocks_by_number(number, count)
    }

    /// Builds a block template on top of `parent_hash` (or the master head
    /// when `None`). Errors when the chosen parent is not a known block.
    /// NOTE(review): `.map(|t| t.0)` drops the second element of whatever
    /// `BlockChain::create_block_template` returns — confirm it is safe to
    /// discard.
    fn create_block_template(
        &self,
        author: AccountAddress,
        auth_key_prefix: Option<Vec<u8>>,
        parent_hash: Option<HashValue>,
        user_txns: Vec<SignedUserTransaction>,
    ) -> Result<BlockTemplate> {
        let block_id = match parent_hash {
            Some(hash) => hash,
            None => self.get_master().current_header().id(),
        };

        if let Ok(Some(_)) = self.get_block_by_hash(block_id) {
            //TODO ensure is need create a new chain?
            let block_chain = self.get_master().new_chain(block_id)?;
            block_chain
                .create_block_template(author, auth_key_prefix, Some(block_id), user_txns)
                .map(|t| t.0)
        } else {
            Err(format_err!("Block {:?} not exist.", block_id))
        }
    }
}
use super::User; use crate::utils; use crate::utils::generate_uuid; use actix_web::{error, web, HttpResponse}; use bcrypt::{hash, DEFAULT_COST}; use chrono::prelude::*; use serde::{Deserialize, Serialize}; use std::borrow::Borrow; use std::convert::TryFrom; //#region Event #[derive(Debug, Serialize, Clone)] #[serde(rename_all = "camelCase")] pub struct UserRegisteredData { name: String, start_date: Option<DateTime<Utc>>, address: Option<String>, phone_number: Option<String>, username: String, password: String, email: String, photo: Option<String>, } impl TryFrom<WebModel> for UserRegisteredData { type Error = String; fn try_from(user_model: WebModel) -> Result<Self, Self::Error> { User::validate_field_str("name", user_model.name.as_str())?; if let Some(address) = user_model.address.borrow() { User::validate_field_str("address", address)?; } if let Some(phone_number) = user_model.phone_number.borrow() { User::validate_field_str("phone_number", phone_number)?; } User::validate_field_str("username", user_model.username.as_str())?; User::validate_field_str("email", user_model.email.as_str())?; Ok(Self { name: user_model.name, start_date: user_model.start_date, address: user_model.address, phone_number: user_model.phone_number, username: user_model.username, password: hash(user_model.password, DEFAULT_COST).map_err(|e| e.to_string())?, email: user_model.email, photo: user_model.photo, }) } } //#endregion //#region Web #[derive(Deserialize)] #[serde(rename_all = "camelCase")] pub struct WebModel { name: String, start_date: Option<DateTime<Utc>>, address: Option<String>, phone_number: Option<String>, username: String, password: String, email: String, photo: Option<String>, } pub async fn register_async( user_model: web::Json<WebModel>, ) -> Result<HttpResponse, actix_web::Error> { // Validate and convert user_model to UserRegisteredData. 
let user = UserRegisteredData::try_from(user_model.into_inner()) .map_err(|e| error::ErrorBadRequest(e))?; let user_command_conn = utils::get_user_command_db_connection().map_err(|e| error::ErrorInternalServerError(e))?; user_command_conn .execute( r#"CREATE TABLE IF NOT EXISTS "user" ( id TEXT NOT NULL PRIMARY KEY, username TEXT NOT NULL UNIQUE, password TEXT NOT NULL, email TEXT NOT NULL UNIQUE )"#, &[], ) .map_err(|e| error::ErrorInternalServerError(e))?; // Check for username and email uniqueness. for row in &user_command_conn .query(r#"SELECT username, email FROM "user""#, &[]) .map_err(|e| error::ErrorInternalServerError(e))? { let username: String = row.get(0); let email: String = row.get(1); if username == user.username { return Err(error::ErrorBadRequest("username exists")); } if email == user.email { return Err(error::ErrorBadRequest("email exists")); } } let user_id = generate_uuid(); let username = user.username.clone(); // Save user in Postgres user_command_conn .execute( r#"INSERT INTO "user" (id, username, password, email) VALUES ($1, $2, $3, $4)"#, &[&user_id, &user.username, &user.password, &user.email], ) .map_err(|e| error::ErrorInternalServerError(e))?; // Persist to Event Store. let event_data = &UserRegisteredData::from(user); let event_store_conn = utils::get_event_store_db_connection().map_err(|e| error::ErrorInternalServerError(e))?; event_store_conn .execute( r#"CREATE TABLE IF NOT EXISTS "user" ( id SERIAL PRIMARY KEY, entity_id TEXT NOT NULL, type TEXT NOT NULL, body TEXT NOT NULL, inserted_at TIMESTAMP(6) NOT NULL DEFAULT (statement_timestamp() at time zone 'utc') )"#, &[], ) .map_err(|e| error::ErrorInternalServerError(e))?; event_store_conn .execute( r#"INSERT INTO "user" (entity_id, type, body) VALUES ($1, $2, $3)"#, &[ &user_id, &"UserRegistered", &serde_json::to_string(event_data) .map_err(|e| error::ErrorInternalServerError(e))?, ], ) .map_err(|e| error::ErrorInternalServerError(e))?; // Return successfully. 
Ok(HttpResponse::Created() .header("Location", format!("{}/users/{}", crate::ADDR, &username)) .body("user registered successfully")) } pub fn register(user_model: web::Json<WebModel>) -> Result<HttpResponse, actix_web::Error> { async_std::task::block_on(register_async(user_model)) } //#endregion
// Chapter 4: syntax and semantics.
fn main() {
    // Example kept for reference only (not compiled): a generic function
    // whose two parameters must share one type.
    //
    //     fn takes_two_of_the_same_things<T>(x: T, y: T) {
    //         // ...
    //     }
}

// Expected output: (none)
use crate::vec2::Vec2;

/// Tolerance for floating-point comparisons (normalization and parallelism checks).
const EPSILON: f64 = 0.000001;

/// Returns true when `v` lies in (or on the boundary of) the half plane
/// described by a point `origin` and a `direction` unit vector: the allowed
/// side is where `det(direction, v - origin) >= 0`.
fn is_vector_belonging_to_half_plane(v: &Vec2, (origin, direction): &(Vec2, Vec2)) -> bool {
    Vec2::det(*direction, *v - *origin) >= 0.
}

/// One step of the incremental 2-D linear program: the previous optimum
/// violated half plane `h_i`, so the new optimum (if any) lies on `h_i`'s
/// boundary. Finds the boundary segment that stays within the radius
/// `obj_max_norm` and all `previous_half_planes`, then picks the point on it
/// that best serves the objective. Returns `None` when the constraints are
/// infeasible or the candidate would point against the objective.
pub fn solve_linear_program_step(
    obj_dir: &Vec2,      // The objective direction
    obj_max_norm: f64,   // The objective maximum norm
    (h_i_ori, h_i_dir): &(Vec2, Vec2), // The half plane boundary origin and direction unit vector
    previous_half_planes: &[(Vec2, Vec2)], // The previous half planes constraints
    maximize_norm: bool,
) -> Option<Vec2> {
    debug_assert!(obj_max_norm >= 0.);
    debug_assert!(
        (obj_dir.sqr_norm() - 1.0).abs() < EPSILON,
        "The objective direction, {}, should be normalized, got a norm of {}!",
        obj_dir,
        obj_dir.norm()
    );

    // Let's find the segment of h boundary intersection the circle of radius `obj_max_norm`
    //
    // obj_max_norm = || h_i_ori + t * h_i_dir ||
    // 0 = || h_i_ori + t * h_i_dir || - obj_max_norm
    // 0 = || h_i_ori + t * h_i_dir ||^2 - obj_max_norm^2
    //   = (h_i_ori.x + t * h_i_dir.x)^2 + (h_i_ori.y + t * h_i_dir.y)^2 - obj_max_norm^2
    //   = h_i_ori.x^2 + 2*h_i_ori.x*t*h_i_dir.x + t^2*h_i_dir.x^2
    //   + h_i_ori.y^2 + 2*h_i_ori.y*t*h_i_dir.y + t^2*h_i_dir.y^2
    //   - obj_max_norm^2
    //   = (h_i_dir.x^2 + h_i_dir.y^2) * t^2
    //   + (2*h_i_ori.x*h_i_dir.x + 2*h_i_ori.y*h_i_dir.y) * t
    //   + h_i_ori.x^2 + h_i_ori.y^2 - obj_max_norm^2
    //   = || h_i_dir ||^2 * t^2
    //   + 2 * h_i_ori . h_i_dir * t
    //   + || h_i_ori ||^2 - obj_max_norm^2
    //
    // We can solve this polynom
    //let poly_a = 1.0;
    // NOTE(review): `Vec2 * Vec2` appears to be the dot product and
    // `Vec2 * f64` a scaling, as used throughout — confirm in vec2.rs.
    let poly_b = 2.0 * *h_i_ori * *h_i_dir;
    let poly_c = h_i_ori.sqr_norm() - obj_max_norm.powi(2);
    let poly_det = poly_b.powi(2) - 4.0 /* * poly_a */ * poly_c;
    if poly_det < 0.0 {
        // println!("* Can't belong to Hi while keeping its norm, no solution");
        return None;
    }
    let poly_det_sqrt = poly_det.sqrt();

    // Fold over the previous half planes, shrinking the valid parameter range
    // [t_left, t_right] along h_i's boundary; `try_fold` aborts early with
    // `None` as soon as the range becomes empty.
    match previous_half_planes.iter().try_fold(
        (
            (-poly_b - poly_det_sqrt) / (2.0/* * poly_a */),
            (-poly_b + poly_det_sqrt) / (2.0/* * poly_a */),
        ),
        |(mut t_left, mut t_right), (h_k_ori, h_k_dir)| {
            // println!(
            //     "* Vi belongs to Hi if Vi ∈ [{}, {}] * {} + {}, adding further constraint Hk=({}, {})",
            //     t_left, t_right, h_i_dir, h_i_ori, h_k_ori, h_k_dir
            // );
            // We are looking for intersection betwen h_k and h_i called inter.
            // h_k_dir_perp being the vector perpendicular to h_k_dir, we know that
            // 0 = h_k_dir_perp . (inter - h_k_ori)
            // Because inter belongs to the boundary of h we can define it as inter = h_i_ori + t * h_i_dir
            // We then have
            // 0 = h_k_dir_perp . (h_i_ori + t * h_i_dir - h_k_ori)
            // 0 = h_k_dir_perp . h_i_ori + t * h_k_dir_perp . h_i_dir - h_k_dir_perp . h_k_ori
            // t * h_k_dir_perp . h_i_dir = h_k_dir_perp . h_k_ori - h_k_dir_perp . h_i_ori
            // t * h_k_dir_perp . h_i_dir = h_k_dir_perp . (h_k_ori - h_i_ori)
            // t = (h_k_dir_perp . (h_k_ori - h_i_ori)) / (h_k_dir_perp . h_i_dir)
            // t = det(h_k_dir, h_k_ori - h_i_ori) / det(h_k_dir, h_i_dir)
            //
            // If the denominator is 0 then h_i and h_k are parallel
            let t_numerator = Vec2::det(*h_k_dir, *h_k_ori - *h_i_ori);
            let t_denominator = Vec2::det(*h_k_dir, *h_i_dir);
            if t_denominator.abs() <= EPSILON {
                //println!("** Hk // Hi");
                if t_numerator > 0. {
                    //println!("*** Hk on the right side of Hi => No solution");
                    None
                } else {
                    //println!("*** Hk on the left of Hi, it has no impact");
                    Some((t_left, t_right))
                }
            } else {
                let t = t_numerator / t_denominator;
                //println!("** Hk intersects Hi at {}", t);
                // The sign of the denominator tells which side of the
                // intersection h_k keeps: clamp the matching end of the range.
                if t_denominator < 0.0 {
                    t_right = t_right.min(t);
                } else {
                    t_left = t_left.max(t);
                }
                //println!("*** Vi range updated to [{}, {}]", t_left, t_right);
                if t_left > t_right {
                    //println!("*** No valid segment belonging to Hi => No solution");
                    None
                } else {
                    Some((t_left, t_right))
                }
            }
        },
    ) {
        Some((t_h_i_left, t_h_i_right)) => {
            // println!(
            //     "* Vi belongs to Hi while respecting H[0..i-1] if Vi ∈ [{}, {}] * {} + {}",
            //     t_h_i_left, t_h_i_right, h_i_dir, h_i_ori
            // );
            let t_h_i = {
                // Compute the intersection between the objective and the line
                let t_h_i_numerator = Vec2::det(*obj_dir, -*h_i_ori);
                let t_h_i_denominator = Vec2::det(*obj_dir, *h_i_dir);
                if t_h_i_denominator.abs() <= EPSILON {
                    // println!(
                    //     "** Vi // Hi, taking the rightmost valid intersection at {}",
                    //     t_h_i_right
                    // );
                    t_h_i_right
                } else {
                    let t_h_i = t_h_i_numerator / t_h_i_denominator;
                    if maximize_norm {
                        //println!("** Maximizing Vi norm");
                        // Pick whichever segment end is farther from the
                        // objective line's intersection (larger norm).
                        if (t_h_i - t_h_i_left).abs() < (t_h_i - t_h_i_right).abs() {
                            //println!("** Vi is the leftmost valid Hi bounds at {}", t_h_i_left);
                            t_h_i_left
                        } else {
                            //println!("** Vi is the rightmost valid Hi bounds at {}", t_h_i_right);
                            t_h_i_right
                        }
                    } else {
                        //println!("** Preserve obj direction");
                        // println!(
                        //     "** Vi intersects Hi at {}, which is clamped to {}",
                        //     t_h_i,
                        //     t_h_i.max(t_h_i_left).min(t_h_i_right)
                        // );
                        // Return the point closest to the intersection within the valid segment
                        t_h_i.max(t_h_i_left).min(t_h_i_right)
                    }
                }
            };
            let candidate = *h_i_ori + t_h_i * *h_i_dir;
            //println!("** Candidate Vi = {}", candidate);
            // Reject candidates pointing against the objective direction.
            if candidate * *obj_dir < 0.0 {
                //println!("** Candidate Vi goes backward");
                None
            } else {
                //println!("* Candidate Vi validated");
                Some(candidate)
            }
        }
        None => None,
    }
}

/// Solves the 2-D linear program incrementally: starting from the
/// unconstrained optimum `obj_dir * obj_max_norm`, each half plane that the
/// current optimum violates triggers a `solve_linear_program_step` over the
/// half planes seen so far. Returns `None` when infeasible.
pub fn solve_linear_program(
    obj_dir: &Vec2,
    obj_max_norm: f64,
    half_planes: &[(Vec2, Vec2)],
    maximize_norm: bool,
) -> Option<Vec2> {
    half_planes
        .iter()
        .enumerate()
        .try_fold(*obj_dir * obj_max_norm, |v_im1, (i, h_i)| {
            if is_vector_belonging_to_half_plane(&v_im1, h_i) {
                //println!("Vi-1={} ∈ Hi=({}, {}) => Vi = Vi-1", v_im1, h_i.0, h_i.1);
                Some(v_im1)
            } else {
                //println!("Vi-1={} ∉ Hi=({}, {})", v_im1, h_i.0, h_i.1);
                solve_linear_program_step(
                    obj_dir,
                    obj_max_norm,
                    h_i,
                    &half_planes[0..i],
                    maximize_norm,
                )
            }
        })
}

#[cfg(test)]
mod tests {
    use super::*;
    use approx::assert_relative_eq;

    #[test]
    fn test_is_vector_belonging_to_half_plane() {
        assert_eq!(
            is_vector_belonging_to_half_plane(
                &Vec2::new(1.0, 1.0),
                &(Vec2::new(0.0, 2.0), Vec2::new(1.0, 0.0))
            ),
            false
        );
        assert_eq!(
            is_vector_belonging_to_half_plane(
                &Vec2::new(3.0, 3.0),
                &(Vec2::new(0.0, 2.0), Vec2::new(1.0, 0.0))
            ),
            true
        );
        assert_eq!(
            is_vector_belonging_to_half_plane(
                &Vec2::new(1.0, 0.0),
                &(Vec2::new(-1.0, 0.0), Vec2::new(-1.0, -1.0).normalize())
            ),
            true
        );
    }

    /// Runs the solver and checks the outcome: when a solution is expected it
    /// must satisfy every input half plane and match `expected_solution`;
    /// otherwise the solver must return `None`. Returns the solver's output
    /// so callers can cross-check order-independence.
    /// NOTE(review): the closure binds the tuple as `(h_dir, h_ori)` — the
    /// names are swapped relative to the (origin, direction) convention used
    /// everywhere else, but the tuple is reassembled in the same order, so
    /// behavior is unaffected. Worth renaming in a follow-up.
    fn check_solve_linear_program(
        obj: &Vec2,
        half_planes: &[(Vec2, Vec2)],
        maximize_norm: bool,
        expected_solution: Option<Vec2>,
    ) -> Option<Vec2> {
        let solution =
            solve_linear_program(&obj.normalize(), obj.norm(), &half_planes, maximize_norm);
        match expected_solution {
            Some(expected_valid_solution) => {
                let valid_solution = solution.unwrap();
                half_planes.iter().for_each(|&(h_dir, h_ori)| {
                    assert!(
                        is_vector_belonging_to_half_plane(&valid_solution, &(h_dir, h_ori)),
                        "Computed solution {} ∉ input half-plane ({}, {})",
                        valid_solution,
                        h_dir,
                        h_ori,
                    )
                });
                assert_relative_eq!(valid_solution, expected_valid_solution);
            }
            None => assert_eq!(solution, None),
        };
        solution
    }

    #[test]
    fn linear_program_one_vt_half_plane_1() {
        let half_planes = vec![(Vec2::new(2.0, 0.0), Vec2::new(0.0, -1.0))];
        // (1.0, 0.0) can't be extended to belong to the half plane
        check_solve_linear_program(&Vec2::new(1.0, 0.0), &half_planes, false, None);
        // (3.0, 0.0) already belongs to the half plane it should be returned as-is
        check_solve_linear_program(
            &Vec2::new(3.0, 0.0),
            &half_planes,
            false,
            Some(Vec2::new(3.0, 0.0)),
        );
    }

    #[test]
    fn linear_program_one_vt_half_plane_2() {
        let half_planes = vec![(Vec2::new(3.0, 12.0), Vec2::new(0.0, 1.0))];
        // (1.0, 0.0) already belongs to the half plane it should be returned as-is
        check_solve_linear_program(
            &Vec2::new(1.0, 0.0),
            &half_planes,
            false,
            Some(Vec2::new(1.0, 0.0)),
        );
        // (5.0, 0.0) can be shrinked to belong to the half plane
        check_solve_linear_program(
            &Vec2::new(5.0, 0.0),
            &half_planes,
            false,
            Some(Vec2::new(3.0, 0.0)),
        );
        // (5.0, 0.0) can be turned to maximize its norm and belong to the half plane
        check_solve_linear_program(
            &Vec2::new(5.0, 0.0),
            &half_planes,
            true,
            Some(Vec2::new(3.0, 4.0)),
        );
    }

    #[test]
    fn linear_program_one_hz_half_plane_1() {
        let half_planes = vec![(Vec2::new(12.0, -2.0), Vec2::new(1.0, 0.0))];
        // (1.0, 0.0) already belongs to the half plane it should be returned as-is
        check_solve_linear_program(
            &Vec2::new(1.0, 0.0),
            &half_planes,
            false,
            Some(Vec2::new(1.0, 0.0)),
        );
        // (3.0, 0.0) already belongs to the half plane it should be returned as-is
        check_solve_linear_program(
            &Vec2::new(3.0, 0.0),
            &half_planes,
            false,
            Some(Vec2::new(3.0, 0.0)),
        );
        // (0.0, -3.0) can be shrinked to belong to the half plane
        check_solve_linear_program(
            &Vec2::new(0.0, -3.0),
            &half_planes,
            false,
            Some(Vec2::new(0.0, -2.0)),
        );
        // (2.0, -4.0) can be shrinked to belong to the half plane
        check_solve_linear_program(
            &Vec2::new(2.0, -4.0),
            &half_planes,
            false,
            Some(Vec2::new(1.0, -2.0)),
        );
    }

    #[test]
    fn linear_program_one_half_plane() {
        let half_planes = vec![(Vec2::new(0.5, 0.), Vec2::new(-1.0, -2.0).normalize())];
        // (3.0, 0.0) aleady belong the halfplane
        check_solve_linear_program(
            &Vec2::new(3.0, 0.0),
            &half_planes,
            true,
            Some(Vec2::new(3.0, 0.0)),
        );
        // (-3.0, 0.0) would need to be reversed to belong to the halfplane
        check_solve_linear_program(&Vec2::new(-3.0, 0.0), &half_planes, true, None);
        // (-1.0, -2.0) would need to be reversed to belong to the halfplane
        check_solve_linear_program(
            &Vec2::new(-1.0, -2.0),
            &half_planes,
            true,
            Some(Vec2::new(-0.5797958971132715, -2.159591794226543)),
        );
    }

    #[test]
    fn linear_program_two_half_planes_1() {
        let half_planes = vec![
            (Vec2::new(2.0, -2.0), Vec2::new(1.0, 1.0).normalize()),
            (Vec2::new(12.0, -2.0), Vec2::new(1.0, 0.0)),
        ];
        let mut half_planes_other_order = half_planes.clone();
        half_planes_other_order.reverse();
        // (1.0, 0.0) already belongs to the region it should be returned as-is
        {
            let sol = check_solve_linear_program(
                &Vec2::new(1.0, 0.0),
                &half_planes,
                false,
                Some(Vec2::new(1.0, 0.0)),
            );
            check_solve_linear_program(&Vec2::new(1.0, 0.0), &half_planes_other_order, false, sol);
        }
        // (0.0, -3.0) can be shrinked to belong to the region
        {
            let sol = check_solve_linear_program(
                &Vec2::new(0.0, -3.0),
                &half_planes,
                false,
                Some(Vec2::new(0.0, -2.0)),
            );
            check_solve_linear_program(&Vec2::new(0.0, -3.0), &half_planes_other_order, false, sol);
        }
        // (0.0, -3.0) can be turned to belong to the region
        {
            let sol = check_solve_linear_program(
                &Vec2::new(0.0, -3.0),
                &half_planes,
                true,
                Some(Vec2::new(2.0, -2.0)),
            );
            check_solve_linear_program(&Vec2::new(0.0, -3.0), &half_planes_other_order, true, sol);
        }
        // (1.0, -4.0) can be shrinked to belong to the region
        {
            let sol = check_solve_linear_program(
                &Vec2::new(1.0, -4.0),
                &half_planes,
                false,
                Some(Vec2::new(0.5, -2.0)),
            );
            check_solve_linear_program(&Vec2::new(1.0, -4.0), &half_planes_other_order, false, sol);
        }
    }

    #[test]
    fn linear_program_three_half_planes_2_parallels() {
        let half_planes = vec![
            (Vec2::new(-2., -2.), Vec2::new(2., 2.).normalize()),
            (Vec2::new(0., 1.), Vec2::new(-1., 0.)),
            (Vec2::new(2., 0.), Vec2::new(2., 2.).normalize()),
        ];
        let mut half_planes_other_order = half_planes.clone();
        half_planes_other_order.reverse();
        // (0., 0.5) already belongs to the region it should be returned as-is
        check_solve_linear_program(
            &Vec2::new(0., 0.5),
            &half_planes,
            false,
            Some(Vec2::new(0., 0.5)),
        );
        // (2., 2.) can be shrinked to belong to the region
        check_solve_linear_program(
            &Vec2::new(2., 2.),
            &half_planes,
            false,
            Some(Vec2::new(1., 1.)),
        );
        // (8., 8.) can be shrinked to belong to the region
        check_solve_linear_program(
            &Vec2::new(8., 8.),
            &half_planes_other_order,
            false,
            Some(Vec2::new(1., 1.)),
        );
        // (8., 8.) can be turned to belong to the region
        check_solve_linear_program(
            &Vec2::new(4., 6.),
            &half_planes,
            true,
            Some(Vec2::new(1., 1.)),
        );
        // (8., 8.) can be turned to belong to the region (with half-planes in the reverse order)
        check_solve_linear_program(
            &Vec2::new(4., 6.),
            &half_planes_other_order,
            true,
            Some(Vec2::new(1., 1.)),
        );
    }
}
// NOTE(review): svd2rust-generated accessor for the RCC_MC_AHB5LPENCLRR
// register (AHB5 low-power clock-enable *clear* register). Generated code —
// do not hand-edit bit offsets; regenerate from the SVD instead.

#[doc = "Register `RCC_MC_AHB5LPENCLRR` reader"]
pub type R = crate::R<RCC_MC_AHB5LPENCLRR_SPEC>;
#[doc = "Register `RCC_MC_AHB5LPENCLRR` writer"]
pub type W = crate::W<RCC_MC_AHB5LPENCLRR_SPEC>;
#[doc = "Field `GPIOZLPEN` reader - GPIOZLPEN"]
pub type GPIOZLPEN_R = crate::BitReader;
#[doc = "Field `GPIOZLPEN` writer - GPIOZLPEN"]
pub type GPIOZLPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CRYP1LPEN` reader - CRYP1LPEN"]
pub type CRYP1LPEN_R = crate::BitReader;
#[doc = "Field `CRYP1LPEN` writer - CRYP1LPEN"]
pub type CRYP1LPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `HASH1LPEN` reader - HASH1LPEN"]
pub type HASH1LPEN_R = crate::BitReader;
#[doc = "Field `HASH1LPEN` writer - HASH1LPEN"]
pub type HASH1LPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RNG1LPEN` reader - RNG1LPEN"]
pub type RNG1LPEN_R = crate::BitReader;
#[doc = "Field `RNG1LPEN` writer - RNG1LPEN"]
pub type RNG1LPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BKPSRAMLPEN` reader - BKPSRAMLPEN"]
pub type BKPSRAMLPEN_R = crate::BitReader;
#[doc = "Field `BKPSRAMLPEN` writer - BKPSRAMLPEN"]
pub type BKPSRAMLPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;

// Field read accessors: each extracts one bit at its documented offset.
impl R {
    #[doc = "Bit 0 - GPIOZLPEN"]
    #[inline(always)]
    pub fn gpiozlpen(&self) -> GPIOZLPEN_R {
        GPIOZLPEN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 4 - CRYP1LPEN"]
    #[inline(always)]
    pub fn cryp1lpen(&self) -> CRYP1LPEN_R {
        CRYP1LPEN_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - HASH1LPEN"]
    #[inline(always)]
    pub fn hash1lpen(&self) -> HASH1LPEN_R {
        HASH1LPEN_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - RNG1LPEN"]
    #[inline(always)]
    pub fn rng1lpen(&self) -> RNG1LPEN_R {
        RNG1LPEN_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 8 - BKPSRAMLPEN"]
    #[inline(always)]
    pub fn bkpsramlpen(&self) -> BKPSRAMLPEN_R {
        BKPSRAMLPEN_R::new(((self.bits >> 8) & 1) != 0)
    }
}

// Field write proxies: the bit offset is carried in the const generic.
impl W {
    #[doc = "Bit 0 - GPIOZLPEN"]
    #[inline(always)]
    #[must_use]
    pub fn gpiozlpen(&mut self) -> GPIOZLPEN_W<RCC_MC_AHB5LPENCLRR_SPEC, 0> {
        GPIOZLPEN_W::new(self)
    }
    #[doc = "Bit 4 - CRYP1LPEN"]
    #[inline(always)]
    #[must_use]
    pub fn cryp1lpen(&mut self) -> CRYP1LPEN_W<RCC_MC_AHB5LPENCLRR_SPEC, 4> {
        CRYP1LPEN_W::new(self)
    }
    #[doc = "Bit 5 - HASH1LPEN"]
    #[inline(always)]
    #[must_use]
    pub fn hash1lpen(&mut self) -> HASH1LPEN_W<RCC_MC_AHB5LPENCLRR_SPEC, 5> {
        HASH1LPEN_W::new(self)
    }
    #[doc = "Bit 6 - RNG1LPEN"]
    #[inline(always)]
    #[must_use]
    pub fn rng1lpen(&mut self) -> RNG1LPEN_W<RCC_MC_AHB5LPENCLRR_SPEC, 6> {
        RNG1LPEN_W::new(self)
    }
    #[doc = "Bit 8 - BKPSRAMLPEN"]
    #[inline(always)]
    #[must_use]
    pub fn bkpsramlpen(&mut self) -> BKPSRAMLPEN_W<RCC_MC_AHB5LPENCLRR_SPEC, 8> {
        BKPSRAMLPEN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "This register is used by the MCU in order to clear the PERxLPEN bit If TZEN = , this register can only be modified in secure mode.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rcc_mc_ahb5lpenclrr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rcc_mc_ahb5lpenclrr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RCC_MC_AHB5LPENCLRR_SPEC;
impl crate::RegisterSpec for RCC_MC_AHB5LPENCLRR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`rcc_mc_ahb5lpenclrr::R`](R) reader structure"]
impl crate::Readable for RCC_MC_AHB5LPENCLRR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`rcc_mc_ahb5lpenclrr::W`](W) writer structure"]
impl crate::Writable for RCC_MC_AHB5LPENCLRR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets RCC_MC_AHB5LPENCLRR to value 0x0171"]
impl crate::Resettable for RCC_MC_AHB5LPENCLRR_SPEC {
    const RESET_VALUE: Self::Ux = 0x0171;
}
#[doc = "Register `EXTI_FPR1` reader"] pub type R = crate::R<EXTI_FPR1_SPEC>; #[doc = "Register `EXTI_FPR1` writer"] pub type W = crate::W<EXTI_FPR1_SPEC>; #[doc = "Field `FPIF0` reader - FPIF0"] pub type FPIF0_R = crate::BitReader; #[doc = "Field `FPIF0` writer - FPIF0"] pub type FPIF0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF1` reader - FPIF1"] pub type FPIF1_R = crate::BitReader; #[doc = "Field `FPIF1` writer - FPIF1"] pub type FPIF1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF2` reader - FPIF2"] pub type FPIF2_R = crate::BitReader; #[doc = "Field `FPIF2` writer - FPIF2"] pub type FPIF2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF3` reader - FPIF3"] pub type FPIF3_R = crate::BitReader; #[doc = "Field `FPIF3` writer - FPIF3"] pub type FPIF3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF4` reader - FPIF4"] pub type FPIF4_R = crate::BitReader; #[doc = "Field `FPIF4` writer - FPIF4"] pub type FPIF4_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF5` reader - FPIF5"] pub type FPIF5_R = crate::BitReader; #[doc = "Field `FPIF5` writer - FPIF5"] pub type FPIF5_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF6` reader - FPIF6"] pub type FPIF6_R = crate::BitReader; #[doc = "Field `FPIF6` writer - FPIF6"] pub type FPIF6_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF7` reader - FPIF7"] pub type FPIF7_R = crate::BitReader; #[doc = "Field `FPIF7` writer - FPIF7"] pub type FPIF7_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF8` reader - FPIF8"] pub type FPIF8_R = crate::BitReader; #[doc = "Field `FPIF8` writer - FPIF8"] pub type FPIF8_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF9` reader - FPIF9"] pub type FPIF9_R = crate::BitReader; #[doc = "Field `FPIF9` writer - FPIF9"] pub type FPIF9_W<'a, REG, const O: 
u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF10` reader - FPIF10"] pub type FPIF10_R = crate::BitReader; #[doc = "Field `FPIF10` writer - FPIF10"] pub type FPIF10_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF11` reader - FPIF11"] pub type FPIF11_R = crate::BitReader; #[doc = "Field `FPIF11` writer - FPIF11"] pub type FPIF11_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF12` reader - FPIF12"] pub type FPIF12_R = crate::BitReader; #[doc = "Field `FPIF12` writer - FPIF12"] pub type FPIF12_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF13` reader - FPIF13"] pub type FPIF13_R = crate::BitReader; #[doc = "Field `FPIF13` writer - FPIF13"] pub type FPIF13_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF14` reader - FPIF14"] pub type FPIF14_R = crate::BitReader; #[doc = "Field `FPIF14` writer - FPIF14"] pub type FPIF14_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF15` reader - FPIF15"] pub type FPIF15_R = crate::BitReader; #[doc = "Field `FPIF15` writer - FPIF15"] pub type FPIF15_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FPIF16` reader - FPIF16"] pub type FPIF16_R = crate::BitReader; #[doc = "Field `FPIF16` writer - FPIF16"] pub type FPIF16_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; impl R { #[doc = "Bit 0 - FPIF0"] #[inline(always)] pub fn fpif0(&self) -> FPIF0_R { FPIF0_R::new((self.bits & 1) != 0) } #[doc = "Bit 1 - FPIF1"] #[inline(always)] pub fn fpif1(&self) -> FPIF1_R { FPIF1_R::new(((self.bits >> 1) & 1) != 0) } #[doc = "Bit 2 - FPIF2"] #[inline(always)] pub fn fpif2(&self) -> FPIF2_R { FPIF2_R::new(((self.bits >> 2) & 1) != 0) } #[doc = "Bit 3 - FPIF3"] #[inline(always)] pub fn fpif3(&self) -> FPIF3_R { FPIF3_R::new(((self.bits >> 3) & 1) != 0) } #[doc = "Bit 4 - FPIF4"] #[inline(always)] pub fn fpif4(&self) -> FPIF4_R { FPIF4_R::new(((self.bits >> 4) & 1) != 0) } 
#[doc = "Bit 5 - FPIF5"] #[inline(always)] pub fn fpif5(&self) -> FPIF5_R { FPIF5_R::new(((self.bits >> 5) & 1) != 0) } #[doc = "Bit 6 - FPIF6"] #[inline(always)] pub fn fpif6(&self) -> FPIF6_R { FPIF6_R::new(((self.bits >> 6) & 1) != 0) } #[doc = "Bit 7 - FPIF7"] #[inline(always)] pub fn fpif7(&self) -> FPIF7_R { FPIF7_R::new(((self.bits >> 7) & 1) != 0) } #[doc = "Bit 8 - FPIF8"] #[inline(always)] pub fn fpif8(&self) -> FPIF8_R { FPIF8_R::new(((self.bits >> 8) & 1) != 0) } #[doc = "Bit 9 - FPIF9"] #[inline(always)] pub fn fpif9(&self) -> FPIF9_R { FPIF9_R::new(((self.bits >> 9) & 1) != 0) } #[doc = "Bit 10 - FPIF10"] #[inline(always)] pub fn fpif10(&self) -> FPIF10_R { FPIF10_R::new(((self.bits >> 10) & 1) != 0) } #[doc = "Bit 11 - FPIF11"] #[inline(always)] pub fn fpif11(&self) -> FPIF11_R { FPIF11_R::new(((self.bits >> 11) & 1) != 0) } #[doc = "Bit 12 - FPIF12"] #[inline(always)] pub fn fpif12(&self) -> FPIF12_R { FPIF12_R::new(((self.bits >> 12) & 1) != 0) } #[doc = "Bit 13 - FPIF13"] #[inline(always)] pub fn fpif13(&self) -> FPIF13_R { FPIF13_R::new(((self.bits >> 13) & 1) != 0) } #[doc = "Bit 14 - FPIF14"] #[inline(always)] pub fn fpif14(&self) -> FPIF14_R { FPIF14_R::new(((self.bits >> 14) & 1) != 0) } #[doc = "Bit 15 - FPIF15"] #[inline(always)] pub fn fpif15(&self) -> FPIF15_R { FPIF15_R::new(((self.bits >> 15) & 1) != 0) } #[doc = "Bit 16 - FPIF16"] #[inline(always)] pub fn fpif16(&self) -> FPIF16_R { FPIF16_R::new(((self.bits >> 16) & 1) != 0) } } impl W { #[doc = "Bit 0 - FPIF0"] #[inline(always)] #[must_use] pub fn fpif0(&mut self) -> FPIF0_W<EXTI_FPR1_SPEC, 0> { FPIF0_W::new(self) } #[doc = "Bit 1 - FPIF1"] #[inline(always)] #[must_use] pub fn fpif1(&mut self) -> FPIF1_W<EXTI_FPR1_SPEC, 1> { FPIF1_W::new(self) } #[doc = "Bit 2 - FPIF2"] #[inline(always)] #[must_use] pub fn fpif2(&mut self) -> FPIF2_W<EXTI_FPR1_SPEC, 2> { FPIF2_W::new(self) } #[doc = "Bit 3 - FPIF3"] #[inline(always)] #[must_use] pub fn fpif3(&mut self) -> FPIF3_W<EXTI_FPR1_SPEC, 3> { 
FPIF3_W::new(self) } #[doc = "Bit 4 - FPIF4"] #[inline(always)] #[must_use] pub fn fpif4(&mut self) -> FPIF4_W<EXTI_FPR1_SPEC, 4> { FPIF4_W::new(self) } #[doc = "Bit 5 - FPIF5"] #[inline(always)] #[must_use] pub fn fpif5(&mut self) -> FPIF5_W<EXTI_FPR1_SPEC, 5> { FPIF5_W::new(self) } #[doc = "Bit 6 - FPIF6"] #[inline(always)] #[must_use] pub fn fpif6(&mut self) -> FPIF6_W<EXTI_FPR1_SPEC, 6> { FPIF6_W::new(self) } #[doc = "Bit 7 - FPIF7"] #[inline(always)] #[must_use] pub fn fpif7(&mut self) -> FPIF7_W<EXTI_FPR1_SPEC, 7> { FPIF7_W::new(self) } #[doc = "Bit 8 - FPIF8"] #[inline(always)] #[must_use] pub fn fpif8(&mut self) -> FPIF8_W<EXTI_FPR1_SPEC, 8> { FPIF8_W::new(self) } #[doc = "Bit 9 - FPIF9"] #[inline(always)] #[must_use] pub fn fpif9(&mut self) -> FPIF9_W<EXTI_FPR1_SPEC, 9> { FPIF9_W::new(self) } #[doc = "Bit 10 - FPIF10"] #[inline(always)] #[must_use] pub fn fpif10(&mut self) -> FPIF10_W<EXTI_FPR1_SPEC, 10> { FPIF10_W::new(self) } #[doc = "Bit 11 - FPIF11"] #[inline(always)] #[must_use] pub fn fpif11(&mut self) -> FPIF11_W<EXTI_FPR1_SPEC, 11> { FPIF11_W::new(self) } #[doc = "Bit 12 - FPIF12"] #[inline(always)] #[must_use] pub fn fpif12(&mut self) -> FPIF12_W<EXTI_FPR1_SPEC, 12> { FPIF12_W::new(self) } #[doc = "Bit 13 - FPIF13"] #[inline(always)] #[must_use] pub fn fpif13(&mut self) -> FPIF13_W<EXTI_FPR1_SPEC, 13> { FPIF13_W::new(self) } #[doc = "Bit 14 - FPIF14"] #[inline(always)] #[must_use] pub fn fpif14(&mut self) -> FPIF14_W<EXTI_FPR1_SPEC, 14> { FPIF14_W::new(self) } #[doc = "Bit 15 - FPIF15"] #[inline(always)] #[must_use] pub fn fpif15(&mut self) -> FPIF15_W<EXTI_FPR1_SPEC, 15> { FPIF15_W::new(self) } #[doc = "Bit 16 - FPIF16"] #[inline(always)] #[must_use] pub fn fpif16(&mut self) -> FPIF16_W<EXTI_FPR1_SPEC, 16> { FPIF16_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } } #[doc = "Contains only register bits for configurable events.\n\nYou can 
[`read`](crate::generic::Reg::read) this register and get [`exti_fpr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`exti_fpr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct EXTI_FPR1_SPEC; impl crate::RegisterSpec for EXTI_FPR1_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`exti_fpr1::R`](R) reader structure"] impl crate::Readable for EXTI_FPR1_SPEC {} #[doc = "`write(|w| ..)` method takes [`exti_fpr1::W`](W) writer structure"] impl crate::Writable for EXTI_FPR1_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets EXTI_FPR1 to value 0"] impl crate::Resettable for EXTI_FPR1_SPEC { const RESET_VALUE: Self::Ux = 0; }
extern crate procedure; use procedure::{error, info, proceed, success, warning, Progress}; #[test] fn example() { let a = proceed( "Download", "example_file.jpg", |progress: &mut Progress| -> Result<(&str, &str), &str> { for _ in 0..100 { std::thread::sleep(std::time::Duration::from_millis(10)); progress.increment(1); } Ok(("256KB", "example_file.jpg [256 KB]")) }, ); assert_eq!(a.unwrap(), "256KB"); let b = proceed( "Download", "some_other.zip", |progress: &mut Progress| -> Result<(&str, &str), &str> { let min = 500; let max = 1000; for i in min..max { std::thread::sleep(std::time::Duration::from_millis(5)); progress.set_from(min, max, i); if i == 975 { return Err("some_other.zip [Failed]"); } } Ok(("1MB", "some_other.zip [1 MB]")) }, ); assert_eq!(b.unwrap_err(), "some_other.zip [Failed]"); success("Finished", "The tests have finished correctly"); info("Finished", "The tests have finished correctly"); error("Finished", "The tests have finished correctly"); warning("Finished", "The tests have finished correctly"); }
use std::io::Write; use anyhow::{anyhow, Result}; use crate::command::Command; use crate::ops::io::walk; use crate::ops::path::{path_has_extension, path_is_hidden}; use crate::Recurse; pub(crate) struct WalkCommand {} impl Command for WalkCommand { fn execute(subcmd: Recurse, mut writer: impl Write) -> Result<()> { if let Recurse::Walk { extension, dir_only, hidden, inpath, mindepth, maxdepth, symlinks, } = subcmd { // ------------ // Validations // ------------ // 1) inpath exists, if not bail with error if !inpath.exists() { return Err(anyhow!(format!( "no such file or directory '{}'", inpath.display() ))); } let has_extension_filter = extension.is_some(); // Recursive walk of inpath with user-specified filters for entry in walk(inpath, &mindepth, &maxdepth, &symlinks).filter_map(|f| f.ok()) { let md = entry.metadata().unwrap(); if !dir_only && md.is_file() { // File path listings let filepath = entry.path(); if !hidden && path_is_hidden(filepath) { // if file is in a hidden path, skip it continue; } else if has_extension_filter { // if user requested extension filter, filter on it if path_has_extension(filepath, extension.as_ref().unwrap()) { writeln!(writer, "{}", filepath.display())?; } } else { writeln!(writer, "{}", filepath.display())?; } } else if dir_only && md.is_dir() { // Directory path listings let dirpath = entry.path(); if !hidden && path_is_hidden(dirpath) { continue; } else { writeln!(writer, "{}", dirpath.display())?; } } } Ok(()) } else { Err(anyhow!("failure to parse walk subcommand.")) } } } #[cfg(test)] mod tests { use super::*; use std::path::PathBuf; #[test] fn test_walk_subcmd_invalid_inpath_validation() { let rw = Recurse::Walk { extension: None, dir_only: false, hidden: false, inpath: PathBuf::from("path/to/bogus"), mindepth: None, maxdepth: None, symlinks: false, }; let mut output = Vec::new(); let res = WalkCommand::execute(rw, &mut output); // invalid directory path should raise error assert!(res.is_err()); assert!(res .unwrap_err() 
.to_string() .contains("no such file or directory")); } #[test] fn test_walk_invalid_recurse_enum_arg() { let rw = Recurse::Contains { extension: None, find: "test".to_string(), hidden: false, inpath: PathBuf::from("path/to/bogus"), mindepth: None, maxdepth: None, symlinks: false, }; let mut output = Vec::new(); let res = WalkCommand::execute(rw, &mut output); assert!(res.is_err()); assert!(res .unwrap_err() .to_string() .contains("failure to parse walk subcommand")); } // ============ // File testing // ============ #[test] fn test_walk_subcmd_dir_with_default_depth() { let rw = Recurse::Walk { extension: None, dir_only: false, hidden: false, inpath: PathBuf::from("tests/testfiles/io/stablepaths"), mindepth: None, maxdepth: None, symlinks: false, }; let mut output = Vec::new(); let res = WalkCommand::execute(rw, &mut output); assert!(res.is_ok()); let output_slice = std::str::from_utf8(&output).unwrap(); let output_vec: Vec<&str> = output_slice.split("\n").collect(); // contains three expected file paths, including file path without extension // the path gymnastics are to support cross-platform file path testing let mut output_string = output_slice.replace("/", "_"); output_string = output_string.replace(r"\", "_"); assert!(output_string.contains("tests_testfiles_io_stablepaths_README.md")); assert!(output_string.contains("tests_testfiles_io_stablepaths_test")); assert!(output_string.contains("tests_testfiles_io_stablepaths_test.txt")); // includes total of 4 lines assert!(output_vec.len() == 4); // last line is empty string after newline assert!(output_vec[3] == ""); } #[test] fn test_walk_subcmd_with_dir_set_max_depth_1_level() { let rw = Recurse::Walk { extension: None, dir_only: false, hidden: false, inpath: PathBuf::from("tests/testfiles/io/depthtests"), mindepth: None, maxdepth: Some(1), symlinks: false, }; let mut output = Vec::new(); let res = WalkCommand::execute(rw, &mut output); assert!(res.is_ok()); let output_slice = 
std::str::from_utf8(&output).unwrap(); let output_vec: Vec<&str> = output_slice.split("\n").collect(); let mut output_string = output_slice.replace("/", "_"); output_string = output_string.replace(r"\", "_"); assert!(output_string.contains("tests_testfiles_io_depthtests_test.txt")); // includes total of 2 lines assert!(output_vec.len() == 2); // last line is empty string after newline assert!(output_vec[1] == ""); } #[test] fn test_walk_subcmd_with_dir_set_max_depth_2_levels() { let rw = Recurse::Walk { extension: None, dir_only: false, hidden: false, inpath: PathBuf::from("tests/testfiles/io/depthtests"), mindepth: None, maxdepth: Some(2), symlinks: false, }; let mut output = Vec::new(); let res = WalkCommand::execute(rw, &mut output); assert!(res.is_ok()); let output_slice = std::str::from_utf8(&output).unwrap(); let output_vec: Vec<&str> = output_slice.split("\n").collect(); let mut output_string = output_slice.replace("/", "_"); output_string = output_string.replace(r"\", "_"); assert!(output_string.contains("tests_testfiles_io_depthtests_test.txt")); assert!(output_string.contains("tests_testfiles_io_depthtests_depth2_test2.txt")); // includes total of 3 lines assert!(output_vec.len() == 3); // last line is empty string after newline assert!(output_vec[2] == ""); } #[test] fn test_walk_subcmd_with_dir_set_min_depth_3_levels() { let rw = Recurse::Walk { extension: None, dir_only: false, hidden: false, inpath: PathBuf::from("tests/testfiles/io/depthtests"), mindepth: Some(3), maxdepth: None, symlinks: false, }; let mut output = Vec::new(); let res = WalkCommand::execute(rw, &mut output); assert!(res.is_ok()); let output_slice = std::str::from_utf8(&output).unwrap(); let output_vec: Vec<&str> = output_slice.split("\n").collect(); let mut output_string = output_slice.replace("/", "_"); output_string = output_string.replace(r"\", "_"); assert!(output_string.contains("tests_testfiles_io_depthtests_depth2_depth3_test3.txt")); // includes total of 2 lines 
assert!(output_vec.len() == 2); // last line is empty string after newline assert!(output_vec[1] == ""); } #[test] fn test_walk_subcmd_with_extension_filter() { let rw = Recurse::Walk { extension: Some("txt".to_string()), dir_only: false, hidden: false, inpath: PathBuf::from("tests/testfiles/io/stablepaths"), mindepth: None, maxdepth: None, symlinks: false, }; let mut output = Vec::new(); let res = WalkCommand::execute(rw, &mut output); assert!(res.is_ok()); let output_slice = std::str::from_utf8(&output).unwrap(); let output_vec: Vec<&str> = output_slice.split("\n").collect(); // contains three expected file paths, including file path without extension // the path gymnastics are to support cross-platform file path testing let mut output_string = output_slice.replace("/", "_"); output_string = output_string.replace(r"\", "_"); assert!(output_string.contains("tests_testfiles_io_stablepaths_test.txt")); // includes total of 2 lines assert!(output_vec.len() == 2); // last line is empty string after newline assert!(output_vec[1] == ""); } #[test] fn test_walk_subcmd_with_extension_filter_alt_ext_format() { let rw = Recurse::Walk { extension: Some(".txt".to_string()), dir_only: false, hidden: false, inpath: PathBuf::from("tests/testfiles/io/stablepaths"), mindepth: None, maxdepth: None, symlinks: false, }; let mut output = Vec::new(); let res = WalkCommand::execute(rw, &mut output); assert!(res.is_ok()); let output_slice = std::str::from_utf8(&output).unwrap(); let output_vec: Vec<&str> = output_slice.split("\n").collect(); // contains three expected file paths, including file path without extension // the path gymnastics are to support cross-platform file path testing let mut output_string = output_slice.replace("/", "_"); output_string = output_string.replace(r"\", "_"); assert!(output_string.contains("tests_testfiles_io_stablepaths_test.txt")); // includes total of 2 lines assert!(output_vec.len() == 2); // last line is empty string after newline 
assert!(output_vec[1] == ""); } #[test] fn test_walk_subcmd_with_hidden_filepaths() { let rw = Recurse::Walk { extension: None, dir_only: false, hidden: true, inpath: PathBuf::from("tests/testfiles/.dotdir"), mindepth: None, maxdepth: None, symlinks: false, }; let mut output = Vec::new(); let res = WalkCommand::execute(rw, &mut output); assert!(res.is_ok()); let output_slice = std::str::from_utf8(&output).unwrap(); let output_vec: Vec<&str> = output_slice.split("\n").collect(); let mut output_string = output_slice.replace("/", "_"); output_string = output_string.replace(r"\", "_"); assert!(output_string.contains("tests_testfiles_.dotdir_.testfile")); assert!(output_string.contains("tests_testfiles_.dotdir_testfile")); assert!(output_string.contains("tests_testfiles_.dotdir_.testfile.txt")); // includes total of 4 lines assert!(output_vec.len() == 4); // last line is empty string after newline assert!(output_vec[3] == ""); } #[test] fn test_walk_subcmd_with_hidden_filepaths_and_extension_filter() { let rw = Recurse::Walk { extension: Some("txt".to_string()), dir_only: false, hidden: true, inpath: PathBuf::from("tests/testfiles/.dotdir"), mindepth: None, maxdepth: None, symlinks: false, }; let mut output = Vec::new(); let res = WalkCommand::execute(rw, &mut output); assert!(res.is_ok()); let output_slice = std::str::from_utf8(&output).unwrap(); let output_vec: Vec<&str> = output_slice.split("\n").collect(); let mut output_string = output_slice.replace("/", "_"); output_string = output_string.replace(r"\", "_"); assert!(output_string.contains("tests_testfiles_.dotdir_.testfile.txt")); // includes total of 2 lines assert!(output_vec.len() == 2); // last line is empty string after newline assert!(output_vec[1] == ""); } #[test] fn test_walk_subcmd_with_hidden_filepaths_and_extension_filter_alt_ext_format() { let rw = Recurse::Walk { extension: Some(".txt".to_string()), dir_only: false, hidden: true, inpath: PathBuf::from("tests/testfiles/.dotdir"), mindepth: None, 
maxdepth: None, symlinks: false, }; let mut output = Vec::new(); let res = WalkCommand::execute(rw, &mut output); assert!(res.is_ok()); let output_slice = std::str::from_utf8(&output).unwrap(); let output_vec: Vec<&str> = output_slice.split("\n").collect(); let mut output_string = output_slice.replace("/", "_"); output_string = output_string.replace(r"\", "_"); assert!(output_string.contains("tests_testfiles_.dotdir_.testfile.txt")); // includes total of 2 lines assert!(output_vec.len() == 2); // last line is empty string after newline assert!(output_vec[1] == ""); } #[test] fn test_walk_subcmd_without_hidden_filepaths() { let rw = Recurse::Walk { extension: None, dir_only: false, hidden: false, inpath: PathBuf::from("tests/testfiles/.dotdir"), mindepth: None, maxdepth: None, symlinks: false, }; let mut output = Vec::new(); let res = WalkCommand::execute(rw, &mut output); assert!(res.is_ok()); let output_slice = std::str::from_utf8(&output).unwrap(); let output_vec: Vec<&str> = output_slice.split("\n").collect(); // includes total of 1 lines with no paths assert!(output_vec.len() == 1); // last line is empty string after newline assert!(output_vec[0] == ""); } // ================= // Directory testing // ================= #[test] fn test_walk_subcmd_filter_dirs_only_default_depth() { let rw = Recurse::Walk { extension: None, dir_only: true, hidden: false, inpath: PathBuf::from("tests/testfiles/io/depthtests"), mindepth: None, maxdepth: None, symlinks: false, }; let mut output = Vec::new(); let res = WalkCommand::execute(rw, &mut output); assert!(res.is_ok()); let output_slice = std::str::from_utf8(&output).unwrap(); let output_vec: Vec<&str> = output_slice.split("\n").collect(); let mut output_string = output_slice.replace("/", "_"); output_string = output_string.replace(r"\", "_"); assert!(output_string.contains("tests_testfiles_io_depthtests")); assert!(output_string.contains("tests_testfiles_io_depthtests_depth2")); 
assert!(output_string.contains("tests_testfiles_io_depthtests_depth2_depth3")); // includes total of 4 lines assert!(output_vec.len() == 4); // last line is empty string after newline assert!(output_vec[3] == ""); } #[test] fn test_walk_subcmd_filter_dirs_only_hidden_switch_off() { let rw = Recurse::Walk { extension: None, dir_only: true, hidden: false, inpath: PathBuf::from("tests/testfiles/.dotdir"), mindepth: None, maxdepth: None, symlinks: false, }; let mut output = Vec::new(); let res = WalkCommand::execute(rw, &mut output); assert!(res.is_ok()); let output_slice = std::str::from_utf8(&output).unwrap(); let output_vec: Vec<&str> = output_slice.split("\n").collect(); // includes total of 1 lines assert!(output_vec.len() == 1); // last line is empty string after newline assert!(output_vec[0] == ""); } #[test] fn test_walk_subcmd_filter_dirs_only_hidden_switch_on() { let rw = Recurse::Walk { extension: None, dir_only: true, hidden: true, inpath: PathBuf::from("tests/testfiles/.dotdir"), mindepth: None, maxdepth: None, symlinks: false, }; let mut output = Vec::new(); let res = WalkCommand::execute(rw, &mut output); assert!(res.is_ok()); let output_slice = std::str::from_utf8(&output).unwrap(); let output_vec: Vec<&str> = output_slice.split("\n").collect(); let mut output_string = output_slice.replace("/", "_"); output_string = output_string.replace(r"\", "_"); assert!(output_string.contains("tests_testfiles_.dotdir")); // includes total of 2 lines assert!(output_vec.len() == 2); // last line is empty string after newline assert!(output_vec[1] == ""); } }
mod read_cursor; pub mod multiqueue;
/* * Datadog API V1 Collection * * Collection of all Datadog Public endpoints. * * The version of the OpenAPI document: 1.0 * Contact: support@datadoghq.com * Generated by: https://openapi-generator.tech */ /// SyntheticsTestPauseStatus : Define whether you want to start (`live`) or pause (`paused`) a Synthetic test. /// Define whether you want to start (`live`) or pause (`paused`) a Synthetic test. #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum SyntheticsTestPauseStatus { #[serde(rename = "live")] LIVE, #[serde(rename = "paused")] PAUSED, } impl ToString for SyntheticsTestPauseStatus { fn to_string(&self) -> String { match self { Self::LIVE => String::from("live"), Self::PAUSED => String::from("paused"), } } }
use {AsNative, Backend, ResourceIndex, BufferPtr, SamplerPtr, TexturePtr}; use internal::{Channel, FastStorageMap}; use range_alloc::RangeAllocator; use window::SwapchainImage; use std::borrow::Borrow; use std::cell::RefCell; use std::{fmt, iter}; use std::ops::Range; use std::os::raw::{c_void, c_long}; use std::sync::Arc; use hal::{buffer, image, pso}; use hal::{DescriptorPool as HalDescriptorPool, MemoryTypeId}; use hal::backend::FastHashMap; use hal::command::{ClearColorRaw, ClearValueRaw}; use hal::format::{Aspects, Format, FormatDesc}; use hal::pass::{Attachment, AttachmentLoadOp, AttachmentOps}; use hal::range::RangeArg; use cocoa::foundation::{NSRange, NSUInteger}; use metal; use parking_lot::{Mutex, RwLock}; use smallvec::SmallVec; use spirv_cross::{msl, spirv}; pub type EntryPointMap = FastHashMap<String, spirv::EntryPoint>; /// An index of a resource within descriptor pool. pub type PoolResourceIndex = u32; /// Shader module can be compiled in advance if it's resource bindings do not /// depend on pipeline layout, in which case the value would become `Compiled`. 
pub enum ShaderModule {
    /// Already-translated module: a `metal::Library` plus its entry-point map.
    Compiled(ModuleInfo),
    /// Unprocessed shader bytes, compiled later (presumably SPIR-V given the
    /// `spirv_cross` usage in this file — TODO confirm against the compile path).
    Raw(Vec<u8>),
}

// Hand-written `Debug`: `ModuleInfo` has no `Debug` derive, and this avoids
// dumping the raw byte contents of `Raw` modules into logs.
impl fmt::Debug for ShaderModule {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            ShaderModule::Compiled(_) => {
                write!(formatter, "ShaderModule::Compiled(..)")
            }
            ShaderModule::Raw(ref vec) => {
                write!(formatter, "ShaderModule::Raw(length = {})", vec.len())
            }
        }
    }
}

// NOTE(review): manual Send/Sync — presumably because the wrapped Metal
// objects are not auto-Send/Sync; confirm the thread-safety contract.
unsafe impl Send for ShaderModule {}
unsafe impl Sync for ShaderModule {}

/// Cache key describing how a render pass's attachments are loaded/cleared;
/// built by [`RenderPass::build_key`].
#[derive(Clone, Debug, Default, Hash, PartialEq, Eq)]
pub struct RenderPassKey {
    // enough room for 4 color targets + depth/stencil
    pub clear_data: SmallVec<[u32; 20]>,
    operations: SmallVec<[AttachmentOps; 6]>,
}

#[derive(Debug)]
pub struct RenderPass {
    pub(crate) attachments: Vec<Attachment>,
}

unsafe impl Send for RenderPass {}
unsafe impl Sync for RenderPass {}

impl RenderPass {
    /// Builds a `RenderPassKey` from this pass's attachments and the supplied
    /// clear values, and returns it together with the union of the aspects
    /// (color/depth/stencil) of all attachments that have a known format.
    ///
    /// Clear values are padded with a zeroed dummy so every attachment has a
    /// partner to zip with; only attachments whose (stencil) load op is
    /// `Clear` contribute entries to `clear_data`.
    pub fn build_key<T>(&self, clear_values: T) -> (RenderPassKey, Aspects)
    where
        T: IntoIterator,
        T::Item: Borrow<ClearValueRaw>,
    {
        let mut key = RenderPassKey::default();
        let mut full_aspects = Aspects::empty();

        // Padding value used when fewer clear values than attachments are given.
        let dummy_value = ClearValueRaw {
            color: ClearColorRaw {
                int32: [0; 4],
            },
        };
        let clear_values_iter = clear_values
            .into_iter()
            .map(|c| *c.borrow())
            .chain(iter::repeat(dummy_value));

        for (rat, clear_value) in self.attachments.iter().zip(clear_values_iter) {
            //TODO: avoid calling `surface_desc` as often
            let aspects = match rat.format {
                // Attachments without a format contribute nothing to the key.
                Some(format) => format.surface_desc().aspects,
                None => continue,
            };
            full_aspects |= aspects;
            let cv = clear_value.borrow();

            if aspects.contains(Aspects::COLOR) {
                key.operations.push(rat.ops);
                if rat.ops.load == AttachmentLoadOp::Clear {
                    // `ClearValueRaw` is a raw union: reading `color.uint32`
                    // reinterprets the clear color bits as u32s for hashing.
                    key.clear_data.extend_from_slice(unsafe { &cv.color.uint32 });
                }
            }
            if aspects.contains(Aspects::DEPTH) {
                key.operations.push(rat.ops);
                if rat.ops.load == AttachmentLoadOp::Clear {
                    // Bit-cast the f32 depth clear value to u32 so it can be
                    // stored in the (hashable) u32 clear-data vector.
                    key.clear_data.push(unsafe {
                        *(&cv.depth_stencil.depth as *const _ as *const u32)
                    });
                }
            }
            if aspects.contains(Aspects::STENCIL) {
                // Stencil uses its own ops, pushed separately from depth.
                key.operations.push(rat.stencil_ops);
                if rat.stencil_ops.load == AttachmentLoadOp::Clear {
                    key.clear_data.push(unsafe { cv.depth_stencil.stencil });
                }
            }
        }

        (key, full_aspects)
    }
}

/// A color attachment: its Metal pixel format plus the shader channel kind.
#[derive(Clone, Debug)]
pub struct ColorAttachment {
    pub mtl_format: metal::MTLPixelFormat,
    pub channel: Channel,
}

/// Shared description of a framebuffer's attachments and dimensions.
#[derive(Clone, Debug)]
pub struct FramebufferInner {
    pub extent: image::Extent,
    pub aspects: Aspects,
    pub colors: SmallVec<[ColorAttachment; 4]>,
    pub depth_stencil: Option<metal::MTLPixelFormat>,
}

#[derive(Debug)]
pub struct Framebuffer {
    pub(crate) descriptor: metal::RenderPassDescriptor,
    // Per-RenderPassKey cache of configured render pass descriptors.
    pub(crate) desc_storage: FastStorageMap<RenderPassKey, metal::RenderPassDescriptor>,
    pub(crate) inner: FramebufferInner,
}

unsafe impl Send for Framebuffer {}
unsafe impl Sync for Framebuffer {}

/// A triple of values, one per resource kind (buffers, textures, samplers).
/// `T` is a counter, a range, an allocator, etc. depending on the use site.
#[derive(Clone, Debug)]
pub struct ResourceData<T> {
    pub buffers: T,
    pub textures: T,
    pub samplers: T,
}

impl<T> ResourceData<T> {
    /// Applies `fun` to each of the three fields, producing a new triple.
    pub fn map<V, F: Fn(&T) -> V>(&self, fun: F) -> ResourceData<V> {
        ResourceData {
            buffers: fun(&self.buffers),
            textures: fun(&self.textures),
            samplers: fun(&self.samplers),
        }
    }
}

impl ResourceData<PoolResourceIndex> {
    /// Zero-initialized counters.
    pub fn new() -> Self {
        ResourceData {
            buffers: 0,
            textures: 0,
            samplers: 0,
        }
    }
}
/*
impl ResourceData<ResourceIndex> {
    pub fn new() -> Self {
        ResourceCounters {
            buffers: 0,
            textures: 0,
            samplers: 0,
        }
    }
}
*/
impl ResourceData<PoolResourceIndex> {
    /// Bumps each counter whose resource kind is present in `content` by `count`.
    #[inline]
    pub fn add_many(&mut self, content: DescriptorContent, count: PoolResourceIndex) {
        if content.contains(DescriptorContent::BUFFER) {
            self.buffers += count;
        }
        if content.contains(DescriptorContent::TEXTURE) {
            self.textures += count;
        }
        if content.contains(DescriptorContent::SAMPLER) {
            self.samplers += count;
        }
    }
    /// Convenience for `add_many(content, 1)`.
    #[inline]
    pub fn add(&mut self, content: DescriptorContent) {
        self.add_many(content, 1)
    }
}

/// Per-shader-stage data: vertex (`vs`), fragment/pixel (`ps`), compute (`cs`).
#[derive(Clone, Debug)]
pub struct MultiStageData<T> {
    pub vs: T,
    pub ps: T,
    pub cs: T,
}

pub type MultiStageResourceCounters = MultiStageData<ResourceData<ResourceIndex>>;

impl MultiStageResourceCounters {
    /// Adds `content`'s resource counts to every stage named in `stages`.
    pub fn add(&mut self, stages: pso::ShaderStageFlags, content: DescriptorContent) {
        if stages.contains(pso::ShaderStageFlags::VERTEX) {
            self.vs.add(content);
        }
        if stages.contains(pso::ShaderStageFlags::FRAGMENT) {
            self.ps.add(content);
        }
        if stages.contains(pso::ShaderStageFlags::COMPUTE) {
            self.cs.add(content);
        }
    }
}

/// Resource offsets and dynamic-buffer slots for one descriptor set.
#[derive(Debug)]
pub struct DescriptorSetInfo {
    pub offsets: MultiStageResourceCounters,
    pub dynamic_buffers: Vec<MultiStageData<PoolResourceIndex>>,
}

#[derive(Debug)]
pub struct PipelineLayout {
    pub(crate) shader_compiler_options: msl::CompilerOptions,
    // Separate options variant — name suggests point-primitive handling;
    // TODO confirm at the pipeline-creation site.
    pub(crate) shader_compiler_options_point: msl::CompilerOptions,
    pub(crate) infos: Vec<DescriptorSetInfo>,
    pub(crate) total: MultiStageResourceCounters,
    pub(crate) push_constant_buffer_index: MultiStageData<Option<ResourceIndex>>,
}

impl PipelineLayout {
    /// Get the first vertex buffer index to be used by attributes.
    #[inline(always)]
    pub(crate) fn attribute_buffer_index(&self) -> ResourceIndex {
        // Attribute buffers are bound after all vertex-stage descriptor buffers.
        self.total.vs.buffers as _
    }
}

/// A compiled Metal library together with its entry-point lookup table.
#[derive(Clone)]
pub struct ModuleInfo {
    pub library: metal::Library,
    pub entry_point_map: EntryPointMap,
}

pub struct PipelineCache {
    // Two-level cache: compiler options -> shader bytes -> compiled module.
    pub(crate) modules: FastStorageMap<msl::CompilerOptions, FastStorageMap<Vec<u8>, ModuleInfo>>,
}

// Opaque Debug: the nested cache maps are not Debug-printable.
impl fmt::Debug for PipelineCache {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "PipelineCache")
    }
}

#[derive(Clone, Debug, PartialEq)]
pub struct RasterizerState {
    //TODO: more states
    pub front_winding: metal::MTLWinding,
    pub cull_mode: metal::MTLCullMode,
    pub depth_clip: metal::MTLDepthClipMode,
}

impl Default for RasterizerState {
    /// Clockwise front faces, no culling, depth clipping enabled.
    fn default() -> Self {
        RasterizerState {
            front_winding: metal::MTLWinding::Clockwise,
            cull_mode: metal::MTLCullMode::None,
            depth_clip: metal::MTLDepthClipMode::Clip,
        }
    }
}

/// Front/back stencil reference values and read/write masks.
#[derive(Clone, Debug)]
pub struct StencilState<T> {
    pub front_reference: T,
    pub back_reference: T,
    pub front_read_mask: T,
    pub back_read_mask: T,
    pub front_write_mask: T,
    pub back_write_mask: T,
}

pub type VertexBufferVec = Vec<(pso::VertexBufferDesc, pso::ElemOffset)>;
#[derive(Debug)] pub struct GraphicsPipeline { // we hold the compiled libraries here for now // TODO: move to some cache in `Device` pub(crate) vs_lib: metal::Library, pub(crate) fs_lib: Option<metal::Library>, pub(crate) raw: metal::RenderPipelineState, pub(crate) primitive_type: metal::MTLPrimitiveType, pub(crate) attribute_buffer_index: ResourceIndex, pub(crate) vs_pc_buffer_index: Option<ResourceIndex>, pub(crate) ps_pc_buffer_index: Option<ResourceIndex>, pub(crate) rasterizer_state: Option<RasterizerState>, pub(crate) depth_bias: pso::State<pso::DepthBias>, pub(crate) depth_stencil_desc: pso::DepthStencilDesc, pub(crate) baked_states: pso::BakedStates, /// The mapping from Metal vertex buffers to Vulkan ones. /// This is needed because Vulkan allows attribute offsets to exceed the strides, /// while Metal does not. Thus, we register extra vertex buffer bindings with /// adjusted offsets to cover this use case. pub(crate) vertex_buffers: VertexBufferVec, /// Tracked attachment formats for figuring (roughly) renderpass compatibility. 
pub(crate) attachment_formats: Vec<Option<Format>>, } unsafe impl Send for GraphicsPipeline {} unsafe impl Sync for GraphicsPipeline {} #[derive(Debug)] pub struct ComputePipeline { pub(crate) cs_lib: metal::Library, pub(crate) raw: metal::ComputePipelineState, pub(crate) work_group_size: metal::MTLSize, pub(crate) pc_buffer_index: Option<ResourceIndex>, } unsafe impl Send for ComputePipeline {} unsafe impl Sync for ComputePipeline {} #[derive(Debug)] pub struct Image { pub(crate) raw: metal::Texture, pub(crate) kind: image::Kind, pub(crate) format_desc: FormatDesc, pub(crate) shader_channel: Channel, pub(crate) mtl_format: metal::MTLPixelFormat, pub(crate) mtl_type: metal::MTLTextureType, } impl Image { pub(crate) fn pitches_impl( extent: image::Extent, format_desc: FormatDesc ) -> [buffer::Offset; 3] { let bytes_per_texel = format_desc.bits as image::Size >> 3; let row_pitch = extent.width * bytes_per_texel; let depth_pitch = extent.height * row_pitch; let array_pitch = extent.depth * depth_pitch; [row_pitch as _, depth_pitch as _, array_pitch as _] } pub(crate) fn pitches(&self, level: image::Level) -> [buffer::Offset; 3] { let extent = self.kind.extent().at_level(level); Self::pitches_impl(extent, self.format_desc) } /// View this cube texture as a 2D array. 
pub(crate) fn view_cube_as_2d(&self) -> Option<metal::Texture> { match self.mtl_type { metal::MTLTextureType::Cube | metal::MTLTextureType::CubeArray => { Some(self.raw.new_texture_view_from_slice( self.mtl_format, metal::MTLTextureType::D2Array, NSRange { location: 0, length: self.raw.mipmap_level_count(), }, NSRange { location: 0, length: self.kind.num_layers() as _, }, )) } _ => None, } } } unsafe impl Send for Image {} unsafe impl Sync for Image {} #[derive(Debug)] pub struct BufferView { pub(crate) raw: metal::Texture, } unsafe impl Send for BufferView {} unsafe impl Sync for BufferView {} #[derive(Debug)] pub struct ImageView { pub(crate) raw: metal::Texture, pub(crate) mtl_format: metal::MTLPixelFormat, } unsafe impl Send for ImageView {} unsafe impl Sync for ImageView {} #[derive(Debug)] pub struct Sampler(pub(crate) metal::SamplerState); unsafe impl Send for Sampler {} unsafe impl Sync for Sampler {} #[derive(Debug)] pub struct Semaphore { pub(crate) system: Option<SystemSemaphore>, pub(crate) image_ready: Arc<Mutex<Option<SwapchainImage>>>, } #[derive(Debug)] pub struct Buffer { pub(crate) raw: metal::Buffer, pub(crate) range: Range<u64>, pub(crate) res_options: metal::MTLResourceOptions, } unsafe impl Send for Buffer {} unsafe impl Sync for Buffer {} #[derive(Debug)] pub enum DescriptorPool { Emulated { inner: Arc<RwLock<DescriptorPoolInner>>, allocators: ResourceData<RangeAllocator<PoolResourceIndex>>, }, ArgumentBuffer { raw: metal::Buffer, range_allocator: RangeAllocator<NSUInteger>, }, } //TODO: re-evaluate Send/Sync here unsafe impl Send for DescriptorPool {} unsafe impl Sync for DescriptorPool {} #[derive(Debug)] pub struct DescriptorPoolInner { pub samplers: Vec<Option<SamplerPtr>>, pub textures: Vec<Option<(TexturePtr, image::Layout)>>, pub buffers: Vec<Option<(BufferPtr, buffer::Offset)>>, } impl DescriptorPool { pub(crate) fn new_emulated(counters: ResourceData<PoolResourceIndex>) -> Self { let inner = DescriptorPoolInner { samplers: vec![None; 
counters.samplers as usize], textures: vec![None; counters.textures as usize], buffers: vec![None; counters.buffers as usize], }; DescriptorPool::Emulated { inner: Arc::new(RwLock::new(inner)), allocators: ResourceData { samplers: RangeAllocator::new(0 .. counters.samplers), textures: RangeAllocator::new(0 .. counters.textures), buffers: RangeAllocator::new(0 .. counters.buffers), } } } fn report_available(&self) { match *self { DescriptorPool::Emulated { ref allocators, .. } => { trace!("\tavailable {} samplers, {} textures, and {} buffers", allocators.samplers.total_available(), allocators.textures.total_available(), allocators.buffers.total_available(), ); } DescriptorPool::ArgumentBuffer { .. } => {} } } } impl HalDescriptorPool<Backend> for DescriptorPool { fn allocate_set(&mut self, set_layout: &DescriptorSetLayout) -> Result<DescriptorSet, pso::AllocationError> { self.report_available(); match *self { DescriptorPool::Emulated { ref inner, ref mut allocators } => { debug!("pool: allocate_set"); let (layouts, immutable_samplers) = match set_layout { &DescriptorSetLayout::Emulated(ref layouts, ref samplers) => (layouts, samplers), _ => return Err(pso::AllocationError::IncompatibleLayout), }; // step[1]: count the total number of descriptors needed let mut counters = MultiStageResourceCounters { vs: ResourceData::new(), ps: ResourceData::new(), cs: ResourceData::new(), }; for layout in layouts.iter() { counters.add(layout.stages, layout.content); } debug!("\ttotal {:?}", counters); let total = ResourceData { buffers: counters.vs.buffers + counters.ps.buffers + counters.cs.buffers, textures: counters.vs.textures + counters.ps.textures + counters.cs.textures, samplers: counters.vs.samplers + counters.ps.samplers + counters.cs.samplers, }; // step[2]: try to allocate the ranges from the pool let sampler_range = if total.samplers != 0 { match allocators.samplers.allocate_range(total.samplers as _) { Ok(range) => range, Err(e) => { return Err(if 
e.fragmented_free_length >= total.samplers { pso::AllocationError::FragmentedPool } else { pso::AllocationError::OutOfPoolMemory }); } } } else { 0 .. 0 }; let texture_range = if total.textures != 0 { match allocators.textures.allocate_range(total.textures as _) { Ok(range) => range, Err(e) => { if sampler_range.end != 0 { allocators.samplers.free_range(sampler_range); } return Err(if e.fragmented_free_length >= total.samplers { pso::AllocationError::FragmentedPool } else { pso::AllocationError::OutOfPoolMemory }); } } } else { 0 .. 0 }; let buffer_range = if total.buffers != 0 { match allocators.buffers.allocate_range(total.buffers as _) { Ok(range) => range, Err(e) => { if sampler_range.end != 0 { allocators.samplers.free_range(sampler_range); } if texture_range.end != 0 { allocators.textures.free_range(texture_range); } return Err(if e.fragmented_free_length >= total.samplers { pso::AllocationError::FragmentedPool } else { pso::AllocationError::OutOfPoolMemory }); } } } else { 0 .. 0 }; // step[3]: fill out immutable samplers if !immutable_samplers.is_empty() { let mut data = inner.write(); let mut data_vs_index = sampler_range.start as usize; let mut data_ps_index = data_vs_index + counters.vs.samplers as usize; let mut data_cs_index = data_ps_index + counters.ps.samplers as usize; let mut sampler_iter = immutable_samplers.iter(); for layout in layouts.iter() { if layout.content.contains(DescriptorContent::SAMPLER) { let value = if layout.content.contains(DescriptorContent::IMMUTABLE_SAMPLER) { Some(AsNative::from(sampler_iter.next().unwrap().as_ref())) } else { None }; if layout.stages.contains(pso::ShaderStageFlags::VERTEX) { data.samplers[data_vs_index] = value; data_vs_index += 1; } if layout.stages.contains(pso::ShaderStageFlags::FRAGMENT) { data.samplers[data_ps_index] = value; data_ps_index += 1; } if layout.stages.contains(pso::ShaderStageFlags::COMPUTE) { data.samplers[data_cs_index] = value; data_cs_index += 1; } } } debug!("\tassigning {} 
immutable_samplers", immutable_samplers.len()); } let resources = { let vs = ResourceData { buffers: buffer_range.start .. buffer_range.start + counters.vs.buffers, textures: texture_range.start .. texture_range.start + counters.vs.textures, samplers: sampler_range.start .. sampler_range.start + counters.vs.samplers, }; let ps = ResourceData { buffers: vs.buffers.end .. vs.buffers.end + counters.ps.buffers, textures: vs.textures.end .. vs.textures.end + counters.ps.textures, samplers: vs.samplers.end .. vs.samplers.end + counters.ps.samplers, }; let cs = ResourceData { buffers: ps.buffers.end .. buffer_range.end, textures: ps.textures.end .. texture_range.end, samplers: ps.samplers.end .. sampler_range.end, }; MultiStageData { vs, ps, cs } }; Ok(DescriptorSet::Emulated { pool: Arc::clone(inner), layouts: Arc::clone(layouts), resources, }) } DescriptorPool::ArgumentBuffer { ref raw, ref mut range_allocator, } => { let (encoder, stage_flags) = match set_layout { &DescriptorSetLayout::ArgumentBuffer(ref encoder, stages) => (encoder, stages), _ => return Err(pso::AllocationError::IncompatibleLayout), }; match range_allocator.allocate_range(encoder.encoded_length()) { Ok(range) => Ok(DescriptorSet::ArgumentBuffer { raw: raw.clone(), offset: range.start, encoder: encoder.clone(), stage_flags, }), Err(_) => Err(pso::AllocationError::OutOfPoolMemory), } } } } fn free_sets<I>(&mut self, descriptor_sets: I) where I: IntoIterator<Item = DescriptorSet> { match self { DescriptorPool::Emulated { ref inner, ref mut allocators } => { debug!("pool: free_sets"); let mut data = inner.write(); for descriptor_set in descriptor_sets { match descriptor_set { DescriptorSet::Emulated { resources, .. } => { debug!("\t{:?} resources", resources); let sampler_range = resources.vs.samplers.start .. resources.cs.samplers.end; for sampler in &mut data.samplers[sampler_range.start as usize .. 
sampler_range.end as usize] { *sampler = None; } if sampler_range.start != sampler_range.end { allocators.samplers.free_range(sampler_range); } let texture_range = resources.vs.textures.start .. resources.cs.textures.end; for image in &mut data.textures[texture_range.start as usize .. texture_range.end as usize] { *image = None; } if texture_range.start != texture_range.end { allocators.textures.free_range(texture_range); } let buffer_range = resources.vs.buffers.start .. resources.cs.buffers.end; for buffer in &mut data.buffers[buffer_range.start as usize .. buffer_range.end as usize] { *buffer = None; } if buffer_range.start != buffer_range.end { allocators.buffers.free_range(buffer_range); } } DescriptorSet::ArgumentBuffer{..} => { panic!("Tried to free a DescriptorSet not given out by this DescriptorPool!") } } } } DescriptorPool::ArgumentBuffer { ref mut range_allocator, .. } => { for descriptor_set in descriptor_sets { match descriptor_set { DescriptorSet::Emulated{..} => { panic!("Tried to free a DescriptorSet not given out by this DescriptorPool!") } DescriptorSet::ArgumentBuffer { offset, encoder, .. } => { let handle_range = offset .. offset + encoder.encoded_length(); range_allocator.free_range(handle_range); } } } } } self.report_available(); } fn reset(&mut self) { match *self { DescriptorPool::Emulated { ref inner, ref mut allocators } => { debug!("pool: reset"); if allocators.samplers.is_empty() && allocators.textures.is_empty() && allocators.buffers.is_empty() { return // spare the locking } let mut data = inner.write(); for range in allocators.samplers.allocated_ranges() { for sampler in &mut data.samplers[range.start as usize .. range.end as usize] { *sampler = None; } } for range in allocators.textures.allocated_ranges() { for texture in &mut data.textures[range.start as usize .. range.end as usize] { *texture = None; } } for range in allocators.buffers.allocated_ranges() { for buffer in &mut data.buffers[range.start as usize .. 
range.end as usize] { *buffer = None; } } allocators.samplers.reset(); allocators.textures.reset(); allocators.buffers.reset(); } DescriptorPool::ArgumentBuffer { ref mut range_allocator, .. } => { range_allocator.reset(); } } } } bitflags! { /// Descriptor content flags. pub struct DescriptorContent: u8 { const BUFFER = 1<<0; const DYNAMIC_BUFFER = 1<<1; const TEXTURE = 1<<2; const SAMPLER = 1<<3; const IMMUTABLE_SAMPLER = 1<<4; } } impl From<pso::DescriptorType> for DescriptorContent { fn from(ty: pso::DescriptorType) -> Self { match ty { pso::DescriptorType::Sampler => { DescriptorContent::SAMPLER } pso::DescriptorType::CombinedImageSampler => { DescriptorContent::TEXTURE | DescriptorContent::SAMPLER } pso::DescriptorType::SampledImage | pso::DescriptorType::StorageImage | pso::DescriptorType::UniformTexelBuffer | pso::DescriptorType::StorageTexelBuffer | pso::DescriptorType::InputAttachment => { DescriptorContent::TEXTURE } pso::DescriptorType::UniformBuffer | pso::DescriptorType::StorageBuffer => { DescriptorContent::BUFFER } pso::DescriptorType::UniformBufferDynamic | pso::DescriptorType::StorageBufferDynamic => { DescriptorContent::BUFFER | DescriptorContent::DYNAMIC_BUFFER } } } } // Note: this structure is iterated often, so it makes sense to keep it dense #[derive(Debug)] pub struct DescriptorLayout { pub content: DescriptorContent, pub stages: pso::ShaderStageFlags, pub binding: pso::DescriptorBinding, pub array_index: pso::DescriptorArrayIndex, } #[derive(Debug)] pub enum DescriptorSetLayout { Emulated(Arc<Vec<DescriptorLayout>>, Vec<metal::SamplerState>), ArgumentBuffer(metal::ArgumentEncoder, pso::ShaderStageFlags), } unsafe impl Send for DescriptorSetLayout {} unsafe impl Sync for DescriptorSetLayout {} #[derive(Debug)] pub enum DescriptorSet { Emulated { pool: Arc<RwLock<DescriptorPoolInner>>, layouts: Arc<Vec<DescriptorLayout>>, resources: MultiStageData<ResourceData<Range<PoolResourceIndex>>>, }, ArgumentBuffer { raw: metal::Buffer, offset: 
NSUInteger, encoder: metal::ArgumentEncoder, stage_flags: pso::ShaderStageFlags, }, } unsafe impl Send for DescriptorSet {} unsafe impl Sync for DescriptorSet {} #[derive(Debug)] pub struct Memory { pub(crate) heap: MemoryHeap, pub(crate) size: u64, } impl Memory { pub(crate) fn new(heap: MemoryHeap, size: u64) -> Self { Memory { heap, size, } } pub(crate) fn resolve<R: RangeArg<u64>>(&self, range: &R) -> Range<u64> { *range.start().unwrap_or(&0) .. *range.end().unwrap_or(&self.size) } } unsafe impl Send for Memory {} unsafe impl Sync for Memory {} #[derive(Debug)] pub(crate) enum MemoryHeap { Private, Public(MemoryTypeId, metal::Buffer), Native(metal::Heap), } #[derive(Debug)] pub struct UnboundBuffer { pub(crate) size: u64, pub(crate) usage: buffer::Usage, } unsafe impl Send for UnboundBuffer {} unsafe impl Sync for UnboundBuffer {} #[derive(Debug)] pub struct UnboundImage { pub(crate) texture_desc: metal::TextureDescriptor, pub(crate) format: Format, pub(crate) kind: image::Kind, pub(crate) mip_sizes: Vec<u64>, pub(crate) host_visible: bool, } unsafe impl Send for UnboundImage {} unsafe impl Sync for UnboundImage {} #[derive(Debug)] pub enum QueryPool { Occlusion(Range<u32>), } #[derive(Debug)] pub enum FenceInner { Idle { signaled: bool }, Pending(metal::CommandBuffer), } #[derive(Debug)] pub struct Fence(pub(crate) RefCell<FenceInner>); unsafe impl Send for Fence {} unsafe impl Sync for Fence {} extern "C" { fn dispatch_semaphore_wait( semaphore: *mut c_void, timeout: u64, ) -> c_long; fn dispatch_semaphore_signal( semaphore: *mut c_void, ) -> c_long; fn dispatch_semaphore_create( value: c_long, ) -> *mut c_void; fn dispatch_release( object: *mut c_void, ); } #[derive(Clone, Debug)] pub struct SystemSemaphore(*mut c_void); unsafe impl Send for SystemSemaphore {} unsafe impl Sync for SystemSemaphore {} impl Drop for SystemSemaphore { fn drop(&mut self) { unsafe { dispatch_release(self.0) } } } impl SystemSemaphore { pub(crate) fn new() -> Self { 
SystemSemaphore(unsafe { dispatch_semaphore_create(1) }) } pub(crate) fn signal(&self) { unsafe { dispatch_semaphore_signal(self.0); } } pub(crate) fn wait(&self, timeout: u64) { unsafe { dispatch_semaphore_wait(self.0, timeout); } } }
extern crate pocket_prover;
extern crate pocket_prover_set;

use pocket_prover::*;
use pocket_prover_set::*;

/// Proves that, for `Set`, "finitely many" implies "not infinitely many".
fn main() {
    let proof = Set::imply(|set| set.fin_many, |set| not(set.inf_many));
    println!("Result {}", proof);
}
#![deny(warnings)] extern crate warp; extern crate hyper; extern crate handlebars; #[macro_use] extern crate serde_json; use warp::Filter; use handlebars::Handlebars; use std::sync::Arc; fn main() { let template = "<!DOCTYPE html> <html> <head> <title>Warp Handlebars template example</title> </head> <body> <h1>Hello {{user}}!</h1> </body> </html>"; let mut hb = Handlebars::new(); // register the template hb.register_template_string("template.html", template).unwrap(); // Turn Handlebars instance into a Filter so we can combine it // easily with others... let hb = Arc::new(hb); let hb = warp::any().map(move || hb.clone()); //GET / let route = warp::get2() .and(warp::index()) .and(hb) .map(render_index); warp::serve(route).run(([127, 0, 0, 1], 3030)); } //GET / handler fn render_index(hb: Arc<Handlebars>) -> impl warp::Reply { hb.render("template.html", &json!({"user": "Warp"})).unwrap() }
//! Contains all types used by soft use error::*; use std::fmt; /// All soft commands #[derive(Clone, Debug, PartialEq)] pub enum Command { /// Login to server Login(String, String), /// Get a file Get(String), /// Put a file Put(String), /// List directory List(String), /// Get current working dir Cwd, /// Change directory Cd(String), /// Make directory Mkdir(String), /// Remove file Rm(String), /// Remove directory Rmdir(String, bool), /// Presence check Presence, /// Exit Exit, } impl Command { /// Try converting string to command pub fn try_from<S: AsRef<str>>(s: S) -> Result<Command> { let s = s.as_ref().to_string(); let splitted = s.split_whitespace().map(|s| s.to_owned()).collect::<Vec<String>>(); match splitted[0].as_str() { "LOGIN" => { if splitted.len() != 3 { bail!(ErrorKind::InvalidCommand(s)); } Ok(Command::Login(splitted[1].clone(), splitted[2].clone())) } "GET" => { if splitted.len() != 2 { bail!(ErrorKind::InvalidCommand(s)); } Ok(Command::Get(splitted[1].clone())) } "PUT" => { if splitted.len() != 2 { bail!(ErrorKind::InvalidCommand(s)); } Ok(Command::Put(splitted[1].clone())) } "LIST" => { if splitted.len() != 2 { bail!(ErrorKind::InvalidCommand(s)); } Ok(Command::List(splitted[1].clone())) } "CWD" => Ok(Command::Cwd), "CD" => { if splitted.len() != 2 { bail!(ErrorKind::InvalidCommand(s)); } Ok(Command::Cd(splitted[1].clone())) } "MKDIR" => { if splitted.len() != 2 { bail!(ErrorKind::InvalidCommand(s)); } Ok(Command::Mkdir(splitted[1].clone())) } "RM" => { if splitted.len() != 2 { bail!(ErrorKind::InvalidCommand(s)); } Ok(Command::Rm(splitted[1].clone())) } "RMDIR" => { if splitted.len() != 3 { bail!(ErrorKind::InvalidCommand(s)); } Ok(Command::Rmdir(splitted[1].clone(), splitted[2].clone().parse::<bool>().unwrap())) } "PRESENCE" => Ok(Command::Presence), "EXIT" => Ok(Command::Exit), _ => bail!(ErrorKind::InvalidCommand(s)), } } /// Get login username and password /// Only work for Login, else it will panic pub fn unwrap_login(self) -> (String, 
String) { match self { Command::Login(u, p) => (u, p), c => panic!("Command \'{}\' doesn't contain login information", c), } } /// Get the path from command, /// Work for Get, Put and List, else it will panic pub fn unwrap_path(self) -> String { match self { Command::Get(s) | Command::Put(s) | Command::List(s) | Command::Cd(s) | Command::Rm(s) | Command::Rmdir(s, _) | Command::Mkdir(s) => s, c => panic!("Command \'{}\' doesn't contain path", c), } } } impl fmt::Display for Command { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Command::Login(ref u, ref p) => write!(f, "LOGIN {} {}", u, p), Command::Get(ref p) => write!(f, "GET {}", p), Command::Put(ref p) => write!(f, "PUT {}", p), Command::List(ref p) => write!(f, "LIST {}", p), Command::Cwd => write!(f, "CWD"), Command::Cd(ref p) => write!(f, "CD {}", p), Command::Mkdir(ref p) => write!(f, "MKDIR {}", p), Command::Rm(ref p) => write!(f, "RM {}", p), Command::Rmdir(ref p, ref r) => write!(f, "RMDIR {} {}", p, r), Command::Presence => write!(f, "PRESENCE"), Command::Exit => write!(f, "EXIT"), } } } /// Status of command pub enum Status { /// Connected to server Connected = 1, /// Disconnected from server Disconnected = 2, /// Wrong login credential WrongLogin = 3, /// Not connected to server NotConnected = 4, /// Okay Okay = 5, /// Not a file NotFile = 6, /// Not a directory NotDir = 7, /// Unknown path PathUnknown = 8, /// Unknown error UnkownError = 255, } impl Status { /// Check if status is positive pub fn is_positive(&self) -> bool { match *self { Status::Connected | Status::Disconnected | Status::Okay => true, _ => false, } } /// Check if status is negative pub fn is_negative(&self) -> bool { !self.is_positive() } } impl From<u8> for Status { fn from(from: u8) -> Status { match from { 1 => Status::Connected, 2 => Status::Disconnected, 3 => Status::WrongLogin, 4 => Status::NotConnected, 5 => Status::Okay, 6 => Status::NotFile, 7 => Status::NotDir, 8 => Status::PathUnknown, _ => 
Status::UnkownError, } } }
use crate::data_io::AlephDataFor;
use core::result::Result;
use log::{debug, error, warn};
use sc_client_api::Backend;
use sp_api::{BlockId, NumberFor};
use sp_runtime::{
    traits::{Block, Header},
    Justification,
};
use std::sync::Arc;

/// Finalizes the block with the given hash/number in the client, optionally
/// attaching a justification. Runs under the client's import lock.
// NOTE(review): when the block is not above the current finalized number this
// only warns and still proceeds to call `apply_finality` — confirm this is
// intentional rather than a missing early return.
pub(crate) fn finalize_block<BE, B, C>(
    client: Arc<C>,
    hash: B::Hash,
    block_number: NumberFor<B>,
    justification: Option<Justification>,
) -> Result<(), sp_blockchain::Error>
where
    B: Block,
    BE: Backend<B>,
    C: crate::ClientForAleph<B, BE>,
{
    let status = client.info();
    if status.finalized_number >= block_number {
        warn!(target: "afa", "trying to finalize a block with hash {} and number {} that is not greater than already finalized {}", hash, block_number, status.finalized_number);
    }

    debug!(target: "afa", "Finalizing block with hash {:?} and number {:?}. Previous best: #{:?}.", hash, block_number, status.finalized_number);

    let update_res = client.lock_import_and_run(|import_op| {
        // NOTE: all other finalization logic should come here, inside the lock
        client.apply_finality(import_op, BlockId::Hash(hash), justification, true)
    });

    let status = client.info();
    debug!(target: "afa", "Attempted to finalize block with hash {:?}. Current best: #{:?}.", hash, status.finalized_number);

    update_res
}

/// Given hash `last_finalized` and `AlephDataFor` `new_data` of two blocks, returns
/// Some(new_data) if the block hash represented by new_data is a descendant of last_finalized
/// (and the new_data.number is correct). Otherwise it outputs None.
pub(crate) fn should_finalize<BE, B, C>(
    last_finalized: B::Hash,
    new_data: AlephDataFor<B>,
    client: &C,
    last_block_in_session: NumberFor<B>,
) -> Option<AlephDataFor<B>>
where
    B: Block,
    BE: Backend<B>,
    C: crate::ClientForAleph<B, BE>,
{
    // this early return is for optimization reasons only.
    if new_data.hash == last_finalized {
        return None;
    }

    // Never finalize past the end of the current session.
    if new_data.number > last_block_in_session {
        return None;
    }

    let last_finalized_number = match client.number(last_finalized) {
        Ok(Some(number)) => number,
        _ => {
            error!(target: "afa", "No block number for {}", last_finalized);
            return None;
        }
    };

    // Sanity-check that the claimed number matches the header on chain.
    if let Ok(Some(header)) = client.header(BlockId::Hash(new_data.hash)) {
        if *header.number() != new_data.number {
            warn!(target: "afa", "Incorrect number for hash {}. Got {}, should be {}", new_data.hash, new_data.number, header.number());
            return None;
        }
    } else {
        warn!(target: "afa", "No header for hash {}", new_data.hash);
        return None;
    }

    // Iterate ancestors of `new_hash` until reaching a block with number <= last_finalized_number
    // in order to check that `last_finalized` is an ancestor of `new_data.hash`.
    let mut hash = new_data.hash;
    loop {
        let header = match client.header(BlockId::Hash(hash)) {
            Ok(Some(header)) => header,
            _ => {
                error!(target: "afa", "No header for hash {}", hash);
                return None;
            }
        };

        if header.number() <= &last_finalized_number {
            if hash != last_finalized {
                // `new_hash` is not a descendant of `last_finalized`
                return None;
            }
            break;
        }
        hash = *header.parent_hash();
    }
    Some(new_data)
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::data_io::AlephData;
    use sc_block_builder::BlockBuilderProvider;
    use sp_consensus::BlockOrigin;
    use substrate_test_runtime::Extrinsic;
    use substrate_test_runtime_client::{
        ClientBlockImportExt, ClientExt, DefaultTestClientBuilderExt, TestClient,
        TestClientBuilder, TestClientBuilderExt,
    };

    /// Builds a linear chain of `n` blocks on top of genesis and returns all
    /// block hashes, genesis included.
    fn create_chain(client: &mut Arc<TestClient>, n: u64) -> Vec<sp_core::H256> {
        let mut blocks = vec![client.genesis_hash()];

        for _ in 1..=n {
            let block = client
                .new_block(Default::default())
                .unwrap()
                .build()
                .unwrap()
                .block;
            blocks.push(block.header.hash());
            futures::executor::block_on(client.import(BlockOrigin::Own, block)).unwrap();
        }

        blocks
    }

    #[test]
    fn should_finalize_for_descendant() {
        let mut client = Arc::new(TestClientBuilder::new().build());
        let n = 5;
        let blocks = create_chain(&mut client, n as u64);
        for i in 0..n {
            for j in i..n {
                let maybe_data = should_finalize(
                    blocks[i],
                    AlephData::new(blocks[j], j as u64),
                    client.as_ref(),
                    100u64,
                );
                // i == j hits the equal-hash early return and yields None.
                let correct_result = if i == j {
                    None
                } else {
                    Some(AlephData::new(blocks[j], j as u64))
                };
                assert!(maybe_data == correct_result);
            }
        }
    }

    #[test]
    fn should_finalize_for_non_descendant() {
        let mut client = Arc::new(TestClientBuilder::new().build());
        let n = 5;
        let blocks = create_chain(&mut client, n as u64);
        // Ancestors of the "last finalized" block must never be finalized.
        for i in 0..=n {
            for j in 0..i {
                let maybe_data = should_finalize(
                    blocks[i],
                    AlephData::new(blocks[j], j as u64),
                    client.as_ref(),
                    100u64,
                );
                assert!(maybe_data.is_none());
            }
        }

        // Fork off one extra child per block; siblings/cousins are not
        // descendants of each other.
        let extra_children: Vec<_> = blocks
            .iter()
            .map(|hash| {
                let mut builder = client
                    .new_block_at(&BlockId::Hash(*hash), Default::default(), false)
                    .unwrap();
                // Add a dummy extrinsic to make the block distinct from the one on chain
                builder
                    .push(Extrinsic::AuthoritiesChange(Vec::new()))
                    .unwrap();
                let block = builder.build().unwrap().block;
                let hash = block.header.hash();
                futures::executor::block_on(client.import(BlockOrigin::Own, block)).unwrap();
                hash
            })
            .collect();

        for i in 0..=n {
            for j in 0..=n {
                if i != j {
                    let maybe_data = should_finalize(
                        extra_children[i],
                        AlephData::new(extra_children[j], j as u64),
                        client.as_ref(),
                        100u64,
                    );
                    assert!(maybe_data.is_none());
                }
            }
        }
    }

    #[test]
    fn should_finalize_for_incorrect_aleph_data() {
        let mut client = Arc::new(TestClientBuilder::new().build());
        let n = 5;
        let blocks = create_chain(&mut client, n as u64);
        // A number that disagrees with the on-chain header must be rejected.
        for i in 0..n {
            for j in i..n {
                let maybe_data = should_finalize(
                    blocks[i],
                    AlephData::new(blocks[j], (j + 1) as u64),
                    client.as_ref(),
                    100u64,
                );
                assert!(maybe_data.is_none());
            }
        }
    }
}
#![allow(unused)]
mod wago;
use wago::WagoModule::*;
use wago::*;

/// Hardware smoke test for a Wago 750 fieldbus coupler and several I/O
/// modules over a serial Modbus link.
// NOTE(review): the meaning of the four AddrIO fields is defined in the
// `wago` module and not visible here — presumably per-kind address/size
// reservations; confirm against `wago.rs`.
fn main() {
    let mut w315 = Wago750315 {
        res_io: AddrIO(0, 0, 0, 0),
        io: AddrIO(0, 0, 0, 0),
    };
    let mut w430 = Wago750430 {
        res_io: AddrIO(0, 8, 0, 0),
        io: AddrIO(0, 0, 0, 0),
    };
    let mut w530 = Wago750530 {
        res_io: AddrIO(0, 0, 0, 8),
        io: AddrIO(0, 0, 0, 0),
    };
    let mut w468 = Wago750468 {
        res_io: AddrIO(2, 0, 0, 0),
        io: AddrIO(0, 0, 0, 0),
    };
    let mut w515 = Wago750515 {
        res_io: AddrIO(0, 0, 0, 4),
        io: AddrIO(0, 0, 0, 0),
    };
    // NOTE(review): w559 is constructed but never pushed into `wago.mods`
    // below — confirm whether it was meant to be part of the module list.
    let mut w559 = WagoModule::Wago750559 {
        res_io: AddrIO(0, 0, 2, 0),
        io: AddrIO(0, 0, 0, 0),
    };

    let mut wago = Wago {
        init: false,
        mods: vec![w315, w430, w530, w468, w515],
        modbus_address: 1,
        port: None,
    };
    Wago::init(&mut wago);
    wago.print();

    // Copy the (possibly updated) module descriptors back out after init.
    w315 = wago.mods[0];
    w430 = wago.mods[1];
    w530 = wago.mods[2];
    w468 = wago.mods[3];
    w515 = wago.mods[4];

    if wago.init_serial(27) {
        // interface ideas
        // wago530.at(wago).set(0xFF);
        // wago.set(wago530).set(0xFF);

        // test 530: ramp all 256 output patterns, then clear
        for v in 0..256 {
            wago.set(w530, v as u8);
            std::thread::sleep(std::time::Duration::from_millis(50));
        }
        wago.set(w530, 0x00);

        // test 515: ramp the 16 relay patterns, then clear
        for v in 0..16 {
            wago.set(w515, v as u8);
            std::thread::sleep(std::time::Duration::from_millis(250));
        }
        wago.set(w515, 0x00);

        // test 430: poll the digital inputs until any bit goes high
        loop {
            let v = wago.get(w430);
            println!("w430=0b{0:08b}", v);
            std::thread::sleep(std::time::Duration::from_millis(100));
            if v != 0 {
                break;
            }
        }

        // test 468: poll the analog input forever. Note this loop never
        // terminates, and the per-channel read is commented out, so channel 0
        // is read four times per pass.
        loop {
            for channel in 0..4 {
                // let volt: f64 = wago.get_volt(w468, channel);
                // println!("w468[{}]={:.2}", channel, volt);
                let volt: f64 = wago.get_volt(w468, 0);
                println!("w468[{}]={:.2}", 0, volt);
            }
            std::thread::sleep(std::time::Duration::from_millis(100));
        }
        //
    } else {
        println!("init_serial: failed");
    }
}
use std::cell::RefCell; use std::fmt; use std::rc::Rc; use crate::environment::Environment; use crate::object::Object; use crate::parser::Node; #[derive(Debug, Clone)] pub enum Value { Nothing, Number(i32), String(String), Boolean(bool), NativeFunction(fn(Option<Value>, Vec<Value>) -> Value), Function(Vec<String>, Box<Node>, Rc<RefCell<Environment>>), Array(Rc<RefCell<Vec<Value>>>), Object(Rc<RefCell<Object>>), } impl fmt::Display for Value { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Value::Nothing => write!(f, "<nothing>"), Value::Number(n) => write!(f, "<number: {}>", n), Value::String(ref string) => write!(f, "<string: {}>", string), Value::Boolean(b) => write!(f, "<boolean: {}>", b), Value::NativeFunction(_) => write!(f, "<native function>"), Value::Function(_, _, _) => write!(f, "<function>"), Value::Array(ref array) => write!(f, "<array: {}>", array.borrow().len()), Value::Object(_) => write!(f, "<object>"), } } } impl Drop for Value { fn drop(&mut self) { // println!("dropping {}", self); } }
//! Graph data structure building blocks.

// Edge and node representations are private implementation details;
// only the `graph` module is part of the public API.
mod edge;
pub mod graph;
mod node;
use crate::error::*;
use crate::*;
use std::io::Write;
use tempfile::Builder;

/// Marker type for Windows `.exe` based installers.
pub struct Exe;

// Installer aliases for the supported editor/module + option combinations.
pub type EditorExeInstaller = Installer<UnityEditor, Exe, InstallerWithDestinationAndOptionalCommand>;
pub type ModuleExeTargetInstaller = Installer<UnityModule, Exe, InstallerWithDestinationAndOptionalCommand>;
pub type ModuleExeInstaller = Installer<UnityModule, Exe, InstallerWithOptionalCommand>;

impl<V> InstallHandler for Installer<V, Exe, InstallerWithDestinationAndOptionalCommand> {
    /// Installs by writing a temporary `.cmd` helper script that CALLs the
    /// `.exe` with the configured parameters and an explicit destination,
    /// then executing the script.
    fn install_handler(&self) -> Result<()> {
        let installer = self.installer();
        let destination = self.destination();
        debug!("install unity from installer exe");
        // Randomly named helper with a `.cmd` suffix so Windows executes it.
        let mut install_helper = Builder::new().suffix(".cmd").rand_bytes(20).tempfile()?;
        info!(
            "create install helper script {}",
            install_helper.path().display()
        );
        {
            let script = install_helper.as_file_mut();
            // Fall back to NSIS silent mode when no custom parameters are set.
            let parameter_option = match self.cmd() {
                Some(parameters) => parameters,
                _ => "/S",
            };
            let destination_option = format!("/D={}", destination.display());
            let install_command = format!(
                r#"CALL "{installer}" {parameters} {destination}"#,
                installer = installer.display(),
                parameters = parameter_option,
                destination = destination_option,
            );
            trace!("install helper script content:");
            writeln!(script, "ECHO OFF")?;
            trace!("{}", &install_command);
            writeln!(script, "{}", install_command)?;
        }
        info!("install {}", installer.display());
        info!("to {}", destination.display());
        // Keep only the path alive so the script file is not deleted while
        // the command runs; explicitly clean it up afterwards.
        let installer_script = install_helper.into_temp_path();
        self.install_from_temp_command(&installer_script)?;
        installer_script.close()?;
        Ok(())
    }

    /// Optionally renames the installed payload when a rename mapping is
    /// configured for this installer.
    fn after_install(&self) -> Result<()> {
        if let Some((from, to)) = &self.rename() {
            uvm_move_dir::move_dir(from, to).chain_err(|| "failed to rename installed module")?;
        }
        Ok(())
    }
}

impl InstallHandler for ModuleExeInstaller {
    /// Same helper-script flow as above, but without a destination option:
    /// the installer's own default location is used.
    fn install_handler(&self) -> Result<()> {
        let installer = self.installer();
        debug!("install unity from installer exe");
        let mut install_helper = Builder::new().suffix(".cmd").rand_bytes(20).tempfile()?;
        info!(
            "create install helper script {}",
            install_helper.path().display()
        );
        {
            let script = install_helper.as_file_mut();
            // Fall back to NSIS silent mode when no custom parameters are set.
            let parameter_option = match self.cmd() {
                Some(parameters) => parameters,
                _ => "/S",
            };
            let install_command = format!(
                r#"CALL "{installer}" {parameters}"#,
                installer = installer.display(),
                parameters = parameter_option,
            );
            trace!("install helper script content:");
            writeln!(script, "ECHO OFF")?;
            trace!("{}", &install_command);
            writeln!(script, "{}", install_command)?;
        }
        info!("install {}", installer.display());
        let installer_script = install_helper.into_temp_path();
        self.install_from_temp_command(&installer_script)?;
        installer_script.close()?;
        Ok(())
    }

    /// Optionally renames the installed payload when a rename mapping is
    /// configured for this installer.
    fn after_install(&self) -> Result<()> {
        if let Some((from, to)) = &self.rename() {
            uvm_move_dir::move_dir(from, to).chain_err(|| "failed to rename installed module")?;
        }
        Ok(())
    }
}
use std::process;
use std::collections::HashMap;
use std::net::{TcpStream, TcpListener, SocketAddr, IpAddr};
use std::io::Write;
use std::path::Path;
use std::{env, fs, thread};
use std::str::FromStr;
use std::collections::VecDeque;
use std::sync::{Mutex, Arc, Barrier};

use local_ipaddress;
use pnet::datalink;

use heimdallr::DaemonConfig;
use heimdallr::networking::*;

/// The daemon process: listens for client connections and advertises itself
/// through a config file in the user's partition directory.
struct Daemon {
    name: String,
    partition: String,
    client_listener_addr: SocketAddr,
    client_listener: TcpListener,
}

impl Daemon {
    /// Binds the client listener on this node's IP (port 4664) and writes the
    /// partition config file so clients can discover the daemon.
    fn new(name: &str, partition: &str, interface: &str) -> std::io::Result<Daemon> {
        // Get IP of this node
        let mut ip = match local_ipaddress::get() {
            Some(i) => IpAddr::from_str(&i).unwrap(),
            None => IpAddr::from_str("0.0.0.0").unwrap(),
        };

        // Use the manually specified network interface
        if !interface.is_empty() {
            let interfaces = datalink::interfaces();
            for i in interfaces {
                if i.name == interface {
                    println!("Using specified network interface {} with ip {}", i.name, i.ips[0]);
                    ip = i.ips[0].ip();
                }
            }
        }

        let client_listener_addr = SocketAddr::new(ip, 4664);
        let client_listener = heimdallr::networking::bind_listener(&client_listener_addr)?;

        let daemon = Daemon{name: name.to_string(), partition: partition.to_string(),
            client_listener_addr, client_listener};
        // NOTE(review): unwrap turns a config-write failure into a panic
        // rather than an Err — confirm this is the intended startup behavior.
        daemon.create_partition_file().unwrap();

        Ok(daemon)
    }

    /// Serializes this daemon's `DaemonConfig` to
    /// `$XDG_CONFIG_HOME/heimdallr/<partition>/<name>` (or `~/.config/...`).
    fn create_partition_file(&self) -> std::io::Result<()> {
        let config_home = match env::var("XDG_CONFIG_HOME") {
            Ok(path) => path,
            Err(_) => {
                eprintln!("XDG_CONFIG_HOME is not set. Falling back to default path: ~/.config");
                let home = env::var("HOME").expect("HOME environment variable is not set");
                format!("{}/.config", home)
            },
        };
        let path = format!("{}/heimdallr/{}", config_home, &self.partition);
        if Path::new(&path).exists() == false {
            fs::create_dir_all(&path)?;
        }

        // NOTE(review): the same listener address is passed for both config
        // slots — verify the second slot wasn't meant to hold another address.
        let daemon_config = DaemonConfig::new(&self.name, &self.partition,
            self.client_listener_addr.clone(), self.client_listener_addr.clone());

        let file_path = format!("{}/{}", path, self.name);
        let serialized = serde_json::to_string(&daemon_config)
            .expect("Could not serialize DaemonConfig");
        fs::write(&file_path, serialized)?;
        println!("Writing heimdallr daemon config to: {}", file_path);

        Ok(())
    }
}

/// Per-job shared state: named mutexes plus barrier/finalization trackers,
/// each behind its own lock so client threads can update them independently.
struct Job {
    size: u32,
    barrier: Mutex<DaemonBarrier>,
    finalize: Mutex<JobFinalization>,
    mutexes: Mutex<HashMap<String, HeimdallrDaemonMutex>>
}

impl Job {
    fn new(size: u32) -> std::io::Result<Job> {
        // let clients = Vec::<TcpStream>::new();
        // let client_listeners = Vec::<SocketAddr>::new();
        let mutexes = Mutex::new(HashMap::<String, HeimdallrDaemonMutex>::new());
        let barrier = Mutex::new(DaemonBarrier::new(size));
        let finalize = Mutex::new(JobFinalization::new(size));

        // Ok(Job {name: name.to_string(), size, clients, client_listeners,
        //     mutexes, barrier, finalize})
        Ok(Job{size, barrier, finalize, mutexes})
    }
}

/// A daemon-side distributed mutex: serves lock requests in FIFO order and
/// ships the protected byte buffer to whichever client holds the lock.
struct HeimdallrDaemonMutex {
    name: String,
    streams: Vec<Option<TcpStream>>,
    constructed: bool,
    data: Vec<u8>,
    access_queue: VecDeque<u32>,
    locked: bool,
    current_owner: Option<u32>,
}

impl HeimdallrDaemonMutex {
    fn new(name: &str, size: u32, start_data: Vec<u8>) -> Self {
        let mut streams = Vec::<Option<TcpStream>>::new();
        streams.resize_with(size as usize, || None);
        let access_queue = VecDeque::<u32>::new();
        Self {name: name.to_string(), streams, constructed: false, data: start_data,
            access_queue, locked: false, current_owner: None}
    }

    /// Stores the stream for `id`; `constructed` flips once every client slot
    /// has registered.
    fn register_client(&mut self, id: u32, stream: TcpStream) {
        self.streams[id as usize] = Some(stream);
        self.constructed = !self.streams.iter().any(|x| x.is_none());
    }

    /// Queues a lock request and grants it immediately if the mutex is free.
    fn access_request(&mut self, client_id: u32) {
        self.access_queue.push_back(client_id);
        self.grant_next_lock();
    }

    /// Unlocks and hands the lock to the next waiter, if any.
    fn release_request(&mut self) {
        if self.locked {
            self.locked = false;
            self.current_owner = None;
            self.grant_next_lock();
        } else {
            eprintln!("Error: Release Request on Mutex that was not locked");
        }
    }

    fn grant_next_lock(&mut self) {
        if (!self.locked) & (!self.access_queue.is_empty()) {
            self.current_owner = self.access_queue.pop_front();
            self.locked = true;
            // Granting the lock means sending the current data to the owner.
            self.send_data();
        }
    }

    fn send_data(&mut self) {
        match self.current_owner {
            Some(id) => {
                let stream = self.streams.get_mut(id as usize).unwrap();
                match stream {
                    Some(s) => {
                        s.write(self.data.as_slice()).unwrap();
                        s.flush().unwrap();
                    },
                    None => eprintln!("Error: No valid TcpStream found for client"),
                }
            },
            None => eprintln!("Error: Mutex has no current owner to send data"),
        }
    }
}

/// Tracks which clients have reached the current barrier; `finished` flips
/// once all `size` clients have registered.
struct DaemonBarrier {
    size: u32,
    streams: Vec<Option<TcpStream>>,
    finished: bool,
}

impl DaemonBarrier {
    fn new(size: u32) -> Self {
        let mut streams = Vec::<Option<TcpStream>>::new();
        streams.resize_with(size as usize, || None);
        Self {size, streams, finished: false}
    }

    fn register_client(&mut self, id: u32, stream: TcpStream) {
        self.streams[id as usize] = Some(stream);
        self.finished = !self.streams.iter().any(|x| x.is_none());
    }

    /// Clears all registrations so the barrier can be reused.
    fn reset(&mut self) {
        self.streams = Vec::<Option<TcpStream>>::new();
        self.streams.resize_with(self.size as usize, || None);
        self.finished = false;
    }
}

/// Tracks which clients have requested job finalization.
struct JobFinalization {
    streams: Vec<Option<TcpStream>>,
    finished: bool,
}

impl JobFinalization {
    fn new(size: u32) -> Self {
        let mut streams = Vec::<Option<TcpStream>>::new();
        streams.resize_with(size as usize, || None);
        Self {streams, finished: false}
    }

    fn register_client(&mut self, id: u32, stream: TcpStream) {
        self.streams[id as usize] = Some(stream);
        self.finished = !self.streams.iter().any(|x| x.is_none());
    }
}

/// Per-client worker loop: dispatches incoming daemon packets until the job
/// is finalized. `thread_barrier` synchronizes all worker threads of a job so
/// that shared state is fully populated before replies are sent.
fn handle_client(mut stream: TcpStream, job: Arc<Job>, thread_barrier: Arc<Barrier>) {
    // println!("thread spawned for job: {}", job.name);
    loop {
        let pkt = DaemonPkt::receive(&stream);
        // println!("Received DaemonPkt: {:?}", pkt);

        match pkt.pkt {
            DaemonPktType::MutexCreation(mutex_pkt) => {
                let mut mutexes = job.mutexes.lock().unwrap();
                let mutex = mutexes.entry(mutex_pkt.name.clone())
                    .or_insert(HeimdallrDaemonMutex::new(&mutex_pkt.name, job.size,
                        mutex_pkt.start_data));
                mutex.register_client(mutex_pkt.client_id, stream.try_clone().unwrap());
                // Release the map lock before blocking on the barrier, or the
                // other worker threads could never register.
                drop(mutexes);
                thread_barrier.wait();
                let mut mutexes = job.mutexes.lock().unwrap();
                let mutex = mutexes.get_mut(&mutex_pkt.name).unwrap();
                if mutex.constructed {
                    let reply = MutexCreationReplyPkt::new(&mutex.name);
                    reply.send(&mut stream).expect("Could not send MutexCreationReplyPkt");
                } else {
                    eprintln!("Expected Mutex to be constructed at this point");
                }
            },
            DaemonPktType::MutexLockReq(mutex_pkt) => {
                let mut mutexes = job.mutexes.lock().unwrap();
                let mutex = mutexes.get_mut(&mutex_pkt.name)
                    .expect("Mutex for MutexLockReq does not exist");
                mutex.access_request(mutex_pkt.id);
            },
            DaemonPktType::MutexWriteAndRelease(mutex_pkt) => {
                // TODO check for correct client id?
                let mut mutexes = job.mutexes.lock().unwrap();
                let mutex = mutexes.get_mut(&mutex_pkt.mutex_name)
                    .expect("Mutex for MutexLockReq does not exist");
                mutex.data = mutex_pkt.data;
                mutex.release_request();
            },
            DaemonPktType::Barrier(barrier_pkt) => {
                let mut barrier = job.barrier.lock().unwrap();
                barrier.register_client(barrier_pkt.id, stream.try_clone().unwrap());
                drop(barrier);
                thread_barrier.wait();
                let barrier = job.barrier.lock().unwrap();
                if barrier.finished {
                    let reply = BarrierReplyPkt::new(job.size);
                    reply.send(&mut stream).expect("Could not send BarrierReplyPkt");
                } else {
                    eprintln!("Expected all client to have participated in barrier already")
                }
                drop(barrier);
                // Exactly one thread (the leader) resets the barrier for reuse;
                // the final wait keeps others from racing ahead into the reset.
                let b_res = thread_barrier.wait();
                if b_res.is_leader() {
                    let mut barrier = job.barrier.lock().unwrap();
                    barrier.reset();
                }
                thread_barrier.wait();
            },
            //TODO Maybe use RwLock instead of mutex
            DaemonPktType::Finalize(finalize_pkt) => {
                // TODO Cleanup
                let mut fini = job.finalize.lock().unwrap();
                fini.register_client(finalize_pkt.id, stream.try_clone().unwrap());
                drop(fini);
                thread_barrier.wait();
                let fini = job.finalize.lock().unwrap();
                if fini.finished {
                    let reply = FinalizeReplyPkt::new(job.size);
                    reply.send(&mut stream).expect("Could not send FinalizeReplyPkt");
                } else {
                    eprintln!("Expected to have already received all FinalizePkts")
                }
                drop(fini);
                thread_barrier.wait();
                // Job done: terminate this worker thread.
                return ()
            },
            _ => (),
        }
    }
}

// NOTE(review): `run` continues past the end of this chunk.
fn run(daemon: Daemon) -> std::io::Result<()> {
    let mut job_name = "".to_string();
    let mut job_size = 0;
    let mut clients = Vec::<TcpStream>::new();
    let mut client_listeners = Vec::<SocketAddr>::new();

    for stream in daemon.client_listener.incoming() {
        match stream {
            Ok(stream) => {
                let pkt = DaemonPkt::receive(&stream);
                match pkt.pkt {
                    DaemonPktType::ClientRegistration(client_reg) => {
                        // println!("Received ClientRegistrationPkt: {:?}", client_reg);
                        // The first registration fixes the job's name and size.
                        if job_name.is_empty() {
                            job_name = client_reg.job.clone();
                            job_size = client_reg.size;
                        }
                        clients.push(stream);
                        client_listeners.push(client_reg.listener_addr);
                    }
_ => eprintln!("Unknown Packet type"), } }, Err(e) => { eprintln!("Error in daemon listening to incoming connections: {}", e); }, } if clients.len() as u32 == job_size { break; } } println!("All clients for job have connected"); let mut job_threads = Vec::<thread::JoinHandle<()>>::new(); let job_arc = Arc::new(Job::new(job_size).unwrap()); let thread_barrier = Arc::new(Barrier::new(job_size as usize)); for id in 0..clients.len() { let mut stream = clients.remove(0); let reply = ClientRegistrationReplyPkt::new(id as u32, &client_listeners); reply.send(&mut stream)?; let job = Arc::clone(&job_arc); let barrier = Arc::clone(&thread_barrier); let t = thread::spawn(move|| { handle_client(stream, job, barrier); }); job_threads.push(t); } for t in job_threads { t.join().unwrap(); println!("All job threads joined"); process::exit(0); } Ok(()) } fn parse_args(mut args: std::env::Args) -> Result<(String, String, String), &'static str> { args.next(); let mut partition = String::new(); let mut name = String::new(); let mut interface = String::new(); while let Some(arg) = args.next() { match arg.as_str() { "-p" | "--partition" => { partition = match args.next() { Some(p) => p.to_string(), None => return Err("No valid partition name given."), }; }, "-n" | "--name" => { name = match args.next() { Some(n) => n.to_string(), None => return Err("No valid daemon name given."), }; }, "--interface" => { interface = match args.next() { Some(i) => i.to_string(), None => return Err("No valid network interface name given."), } }, _ => return Err("Unknown argument error."), }; } Ok((name, partition, interface)) } fn main() { let (name, partition, interface) = parse_args(env::args()).unwrap_or_else(|err| { eprintln!("Error: Problem parsing arguments: {}", err); process::exit(1); }); let daemon = Daemon::new(&name, &partition, &interface).unwrap_or_else(|err| { eprintln!("Error: Could not start daemon correctly: {} \n Shutting down.", err); process::exit(1); }); println!("Daemon running under 
name: {} and address: {}", daemon.name, daemon.client_listener_addr); run(daemon).unwrap_or_else(|err| { eprintln!("Error in running daemon: {}", err); }); println!("Daemon shutting down."); }
use std::cell::RefCell;
use std::fs::File;
use std::io::{self, BufReader, Read, Write};

use serde_derive::{Deserialize, Serialize};

use crate::{
    input::{Lexer, Parser},
    player::Player,
    types::{CmdResult, ItemMap},
    util::read_line,
    world::World,
};

/// A command line interface for controlling interactions between objects in a game
//
// `player` and `world` live in `RefCell`s because `ask`/`combat` take `&self`
// but must hand out `&mut` borrows of both to the parser and combat loop.
#[derive(Debug, Serialize, Deserialize)]
pub struct Cli {
    lexer: Lexer,
    player: RefCell<Player>,
    world: RefCell<World>,
}

impl Cli {
    /// Create a Cli from a JSON file
    pub fn from_json_file(path: &str) -> Self {
        Self {
            lexer: Lexer::new(),
            player: RefCell::new(Player::new()),
            world: Cli::get_world_json(path),
        }
    }

    /// Create a Cli from a string containing JSON
    pub fn from_json_str(json: &str) -> Self {
        Self {
            lexer: Lexer::new(),
            player: RefCell::new(Player::new()),
            world: Cli::get_world_json_str(json),
        }
    }

    // Read the whole world file into memory and deserialize it.
    // Panics (expect) on I/O or JSON errors — world files are trusted game assets.
    fn get_world_json(path: &str) -> RefCell<World> {
        let world_file = File::open(path).expect("Unable to open world file");
        let mut world_file_reader = BufReader::new(world_file);
        let mut data = String::new();
        world_file_reader
            .read_to_string(&mut data)
            .expect("Unable to read string from world file");

        serde_json::from_str(&data).expect("Error creating world from JSON file.")
    }

    // Deserialize a world directly from an in-memory JSON string.
    fn get_world_json_str(json: &str) -> RefCell<World> {
        serde_json::from_str(json).expect("Error creating world from string.")
    }

    /// Prompts the user for input with stdin
    //
    // Loops until a non-empty line is entered; flushes stdout so the "> "
    // prompt appears before blocking on input.
    pub fn prompt() -> String {
        loop {
            print!("\n> ");
            io::stdout().flush().expect("Error flushing stdout");
            let input = read_line();
            if !input.is_empty() {
                return input;
            } else {
                println!("Excuse me?");
            }
        }
    }

    /// Returns a helpful list of game commands
    pub fn help() -> CmdResult {
        CmdResult::new(
            false,
            "Some available commands:
   go, enter <direction>\tmove through a listed entrance
 \tshort directions: n, s, e, w, ne, nw, se, sw, u, d
 \tlong directions:
 \t   north, south, east, west,
 \t   northeast, northwest, southeast, southwest,
 \t   up, down, (other listed entrance)\n
   take\t\tput an item from the room into your inventory
   drop\t\tdrop an item from your inventory into the room
   l, look\t\tlook around the room
   i, inventory\tprint the contents of your inventory
   x, examine\t\tshow additional information about an item
   draw, equip\t\tuse an item from your inventory as your default weapon
   don, put on\tdon a set of armor to increase your armor class
   kill\t\tattack an enemy with your main hand or a chosen weapon
   open | close\topen/close a container or pathway
   heal\t\treplenish some HP
   increase\tincrease a chosen ability score by 1 if stat points are available
   status\t\tdisplay information on the state of your character"
                .to_owned(),
        )
    }

    /// Start a basic Kingslayer game for the command line
    //
    // Runs the REPL until the player dies. The initial "l" (look) shows the
    // starting room before the first prompt.
    pub fn start(&self) {
        println!("type \"help\" to learn some common commands.\n");
        println!("Use \"increase\" to use your initial stat points.\n");
        println!("{}", self.ask("l"));
        while self.player.borrow().is_alive() {
            println!("{}", self.ask(&Cli::prompt()));
        }
    }

    /// Handle user input and return the result of commands and events
    //
    // NOTE: `world.borrow_mut()` is taken twice here, but sequentially — the
    // first mutable borrow ends when `Parser::parse` returns, before `combat`
    // re-borrows. Reordering these calls could cause a RefCell panic.
    pub fn ask(&self, input: &str) -> String {
        let command = self.lexer.lex(input);
        if !command.verb().is_empty() {
            let res = Parser::parse(
                command,
                &mut self.world.borrow_mut(),
                &mut self.player.borrow_mut(),
            );
            if res.is_action() {
                // Player actions give hostile enemies a combat turn.
                format!(
                    "{}{}",
                    res.output(),
                    self.combat(&mut self.world.borrow_mut())
                )
            } else {
                res.output().to_owned()
            }
        } else {
            "I do not understand that phrase.".to_owned()
        }
    }

    // manages actions taken by Enemies in the current room
    //
    // Each angry, living enemy damages the player; enemies killed this turn
    // award XP, drop loot into the room, and are removed afterwards.
    fn combat(&self, world: &mut World) -> String {
        let mut events_str = String::new();
        // Loot is collected first and only merged into the room after the
        // enemy iteration, since the room is mutably borrowed by the loop.
        let mut loot: ItemMap = ItemMap::new();
        for enemy in world.get_curr_room_mut().enemies_mut().values_mut() {
            if enemy.is_angry() && enemy.is_alive() {
                let enemy_damage = enemy.damage();

                events_str.push_str(
                    &self
                        .player
                        .borrow_mut()
                        .take_damage(enemy.name(), enemy_damage),
                );
                self.player.borrow_mut().engage_combat();
            }
            if !enemy.is_alive() {
                events_str.push_str(&format!("\nYou gained {} XP.\n", enemy.xp()));
                self.player.borrow_mut().disengage_combat();
                self.player.borrow_mut().gain_xp(enemy.xp());
                loot.extend(enemy.drop_loot());
            }
        }
        world.get_curr_room_mut().items_mut().extend(loot);
        // Remove enemies that died this turn.
        world
            .get_curr_room_mut()
            .enemies_mut()
            .retain(|_, e| e.is_alive());

        if !self.player.borrow().is_alive() {
            events_str.push_str("\nYou died.");
        } else {
            events_str.push_str(&self.player.borrow_mut().level_up());
        }
        events_str
    }
}
// Copyright 2018-2020 Parity Technologies (UK) Ltd.
// This file is part of Substrate.

// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.

use crate::{
    gas::{Gas, GasMeter, Token},
    rent, storage, BalanceOf, CodeHash, Config, ContractAddressFor, ContractInfo, ContractInfoOf,
    Error, Event, RawEvent, Trait, TrieId, TrieIdGenerator,
};
use bitflags::bitflags;
use frame_support::{
    dispatch::DispatchError,
    ensure,
    traits::{Currency, ExistenceRequirement, Randomness, Time},
    weights::Weight,
    StorageMap,
};
use sp_runtime::traits::{Bounded, Convert, Saturating, Zero};
use sp_std::prelude::*;

// Convenience aliases resolving associated types of the runtime `Trait`/`frame_system`.
pub type AccountIdOf<T> = <T as frame_system::Trait>::AccountId;
pub type MomentOf<T> = <<T as Trait>::Time as Time>::Moment;
pub type SeedOf<T> = <T as frame_system::Trait>::Hash;
pub type BlockNumberOf<T> = <T as frame_system::Trait>::BlockNumber;
// Contract storage keys are fixed-width 32-byte arrays.
pub type StorageKey = [u8; 32];

/// A type that represents a topic of an event. At the moment a hash is used.
pub type TopicOf<T> = <T as frame_system::Trait>::Hash;

bitflags! {
    /// Flags used by a contract to customize exit behaviour.
    pub struct ReturnFlags: u32 {
        /// If this bit is set all changes made by the contract execution are rolled back.
        const REVERT = 0x0000_0001;
    }
}

/// Describes whether we deal with a contract or a plain account.
pub enum TransactorKind {
    /// Transaction was initiated from a plain account. That can be either be through a
    /// signed transaction or through RPC.
    PlainAccount,
    /// The call was initiated by a contract account.
    Contract,
}

/// Output of a contract call or instantiation which ran to completion.
#[cfg_attr(test, derive(PartialEq, Eq, Debug))]
pub struct ExecReturnValue {
    /// Flags passed along by `seal_return`. Empty when `seal_return` was never called.
    pub flags: ReturnFlags,
    /// Buffer passed along by `seal_return`. Empty when `seal_return` was never called.
    pub data: Vec<u8>,
}

impl ExecReturnValue {
    /// We understand the absence of a revert flag as success.
    pub fn is_success(&self) -> bool {
        !self.flags.contains(ReturnFlags::REVERT)
    }
}

/// Call or instantiate both call into other contracts and pass through errors happening
/// in those to the caller. This enum is for the caller to distinguish whether the error
/// happened during the execution of the callee or in the current execution context.
#[cfg_attr(test, derive(PartialEq, Eq, Debug))]
pub enum ErrorOrigin {
    /// The error happened in the current execution context rather than in the one
    /// of the contract that is called into.
    Caller,
    /// The error happened during execution of the called contract.
    Callee,
}

/// Error returned by contract execution.
#[cfg_attr(test, derive(PartialEq, Eq, Debug))]
pub struct ExecError {
    /// The reason why the execution failed.
    pub error: DispatchError,
    /// Origin of the error.
    pub origin: ErrorOrigin,
}

// Blanket conversion: anything convertible into `DispatchError` becomes an
// `ExecError` attributed to the *caller* side by default.
impl<T: Into<DispatchError>> From<T> for ExecError {
    fn from(error: T) -> Self {
        Self { error: error.into(), origin: ErrorOrigin::Caller }
    }
}

/// The result that is returned from contract execution. It either contains the output
/// buffer or an error describing the reason for failure.
pub type ExecResult = Result<ExecReturnValue, ExecError>;

/// An interface that provides access to the external environment in which the
/// smart-contract is executed.
///
/// This interface is specialized to an account of the executing code, so all
/// operations are implicitly performed on that account.
pub trait Ext {
    type T: Trait;

    /// Returns the storage entry of the executing account by the given `key`.
    ///
    /// Returns `None` if the `key` wasn't previously set by `set_storage` or
    /// was deleted.
    fn get_storage(&self, key: &StorageKey) -> Option<Vec<u8>>;

    /// Sets the storage entry by the given key to the specified value. If `value` is `None` then
    /// the storage entry is deleted.
    fn set_storage(&mut self, key: StorageKey, value: Option<Vec<u8>>);

    /// Instantiate a contract from the given code.
    ///
    /// The newly created account will be associated with `code`. `value` specifies the amount of
    /// value transferred from this to the newly created account (also known as endowment).
    fn instantiate(
        &mut self,
        code: &CodeHash<Self::T>,
        value: BalanceOf<Self::T>,
        gas_meter: &mut GasMeter<Self::T>,
        input_data: Vec<u8>,
    ) -> Result<(AccountIdOf<Self::T>, ExecReturnValue), ExecError>;

    /// Transfer some amount of funds into the specified account.
    fn transfer(
        &mut self,
        to: &AccountIdOf<Self::T>,
        value: BalanceOf<Self::T>,
        gas_meter: &mut GasMeter<Self::T>,
    ) -> Result<(), DispatchError>;

    /// Transfer all funds to `beneficiary` and delete the contract.
    ///
    /// Since this function removes the self contract eagerly, if succeeded, no further actions
    /// should be performed on this `Ext` instance.
    ///
    /// This function will fail if the same contract is present on the contract
    /// call stack.
    fn terminate(
        &mut self,
        beneficiary: &AccountIdOf<Self::T>,
        gas_meter: &mut GasMeter<Self::T>,
    ) -> Result<(), DispatchError>;

    /// Call (possibly transferring some amount of funds) into the specified account.
    fn call(
        &mut self,
        to: &AccountIdOf<Self::T>,
        value: BalanceOf<Self::T>,
        gas_meter: &mut GasMeter<Self::T>,
        input_data: Vec<u8>,
    ) -> ExecResult;

    /// Restores the given destination contract sacrificing the current one.
    ///
    /// Since this function removes the self contract eagerly, if succeeded, no further actions
    /// should be performed on this `Ext` instance.
    ///
    /// This function will fail if the same contract is present
    /// on the contract call stack.
    fn restore_to(
        &mut self,
        dest: AccountIdOf<Self::T>,
        code_hash: CodeHash<Self::T>,
        rent_allowance: BalanceOf<Self::T>,
        delta: Vec<StorageKey>,
    ) -> Result<(), &'static str>;

    /// Returns a reference to the account id of the caller.
    fn caller(&self) -> &AccountIdOf<Self::T>;

    /// Returns a reference to the account id of the current contract.
    fn address(&self) -> &AccountIdOf<Self::T>;

    /// Returns the balance of the current contract.
    ///
    /// The `value_transferred` is already added.
    fn balance(&self) -> BalanceOf<Self::T>;

    /// Returns the value transferred along with this call or as endowment.
    fn value_transferred(&self) -> BalanceOf<Self::T>;

    /// Returns a reference to the timestamp of the current block
    fn now(&self) -> &MomentOf<Self::T>;

    /// Returns the minimum balance that is required for creating an account.
    fn minimum_balance(&self) -> BalanceOf<Self::T>;

    /// Returns the deposit required to create a tombstone upon contract eviction.
    fn tombstone_deposit(&self) -> BalanceOf<Self::T>;

    /// Returns a random number for the current block with the given subject.
    fn random(&self, subject: &[u8]) -> SeedOf<Self::T>;

    /// Deposit an event with the given topics.
    ///
    /// There should not be any duplicates in `topics`.
    fn deposit_event(&mut self, topics: Vec<TopicOf<Self::T>>, data: Vec<u8>);

    /// Set rent allowance of the contract
    fn set_rent_allowance(&mut self, rent_allowance: BalanceOf<Self::T>);

    /// Rent allowance of the contract
    fn rent_allowance(&self) -> BalanceOf<Self::T>;

    /// Returns the current block number.
    fn block_number(&self) -> BlockNumberOf<Self::T>;

    /// Returns the maximum allowed size of a storage item.
    fn max_value_size(&self) -> u32;

    /// Returns the price for the specified amount of weight.
    fn get_weight_price(&self, weight: Weight) -> BalanceOf<Self::T>;
}

/// Loader is a companion of the `Vm` trait. It loads an appropriate abstract
/// executable to be executed by an accompanying `Vm` implementation.
pub trait Loader<T: Trait> {
    type Executable;

    /// Load the initializer portion of the code specified by the `code_hash`. This
    /// executable is called upon instantiation.
    fn load_init(&self, code_hash: &CodeHash<T>) -> Result<Self::Executable, &'static str>;
    /// Load the main portion of the code specified by the `code_hash`. This executable
    /// is called for each call to a contract.
    fn load_main(&self, code_hash: &CodeHash<T>) -> Result<Self::Executable, &'static str>;
}

/// A trait that represent a virtual machine.
///
/// You can view a virtual machine as something that takes code, an input data buffer,
/// queries it and/or performs actions on the given `Ext` and optionally
/// returns an output data buffer. The type of code depends on the particular virtual machine.
///
/// Execution of code can end by either implicit termination (that is, reached the end of
/// executable), explicit termination via returning a buffer or termination due to a trap.
pub trait Vm<T: Trait> {
    type Executable;

    fn execute<E: Ext<T = T>>(
        &self,
        exec: &Self::Executable,
        ext: E,
        input_data: Vec<u8>,
        gas_meter: &mut GasMeter<T>,
    ) -> ExecResult;
}

// Token describing the flat base fee charged per executive operation.
#[cfg_attr(test, derive(Debug, PartialEq, Eq))]
#[derive(Copy, Clone)]
pub enum ExecFeeToken {
    /// Base fee charged for a call.
    Call,
    /// Base fee charged for an instantiate.
    Instantiate,
}

impl<T: Trait> Token<T> for ExecFeeToken {
    type Metadata = Config<T>;
    #[inline]
    fn calculate_amount(&self, metadata: &Config<T>) -> Gas {
        // Base costs come straight from the runtime schedule.
        match *self {
            ExecFeeToken::Call => metadata.schedule.call_base_cost,
            ExecFeeToken::Instantiate => metadata.schedule.instantiate_base_cost,
        }
    }
}

// One frame of the contract call stack. Nested frames link back to their
// caller via `caller`, forming a singly linked list up to the top level.
pub struct ExecutionContext<'a, T: Trait + 'a, V, L> {
    pub caller: Option<&'a ExecutionContext<'a, T, V, L>>,
    pub self_account: T::AccountId,
    // `None` only at the top level (plain account); `Some` for contract frames.
    pub self_trie_id: Option<TrieId>,
    pub depth: usize,
    pub config: &'a Config<T>,
    pub vm: &'a V,
    pub loader: &'a L,
    pub timestamp: MomentOf<T>,
    pub block_number: T::BlockNumber,
}

impl<'a, T, E, V, L> ExecutionContext<'a, T, V, L>
where
    T: Trait,
    L: Loader<T, Executable = E>,
    V: Vm<T, Executable = E>,
{
    /// Create the top level execution context.
    ///
    /// The specified `origin` address will be used as `sender` for. The `origin` must be a regular
    /// account (not a contract).
    pub fn top_level(origin: T::AccountId, cfg: &'a Config<T>, vm: &'a V, loader: &'a L) -> Self {
        ExecutionContext {
            caller: None,
            self_trie_id: None,
            self_account: origin,
            depth: 0,
            config: &cfg,
            vm: &vm,
            loader: &loader,
            timestamp: T::Time::now(),
            block_number: <frame_system::Module<T>>::block_number(),
        }
    }

    // Build the child frame for a call/instantiation into `dest`.
    fn nested<'b, 'c: 'b>(
        &'c self,
        dest: T::AccountId,
        trie_id: TrieId,
    ) -> ExecutionContext<'b, T, V, L> {
        ExecutionContext {
            caller: Some(self),
            self_trie_id: Some(trie_id),
            self_account: dest,
            depth: self.depth + 1,
            config: self.config,
            vm: self.vm,
            loader: self.loader,
            timestamp: self.timestamp.clone(),
            block_number: self.block_number.clone(),
        }
    }

    /// Make a call to the specified address, optionally transferring some funds.
    pub fn call(
        &mut self,
        dest: T::AccountId,
        value: BalanceOf<T>,
        gas_meter: &mut GasMeter<T>,
        input_data: Vec<u8>,
    ) -> ExecResult {
        if self.depth == self.config.max_depth as usize {
            Err(Error::<T>::MaxCallDepthReached)?
        }

        // Charge the flat base fee up front, before any other work.
        if gas_meter.charge(self.config, ExecFeeToken::Call).is_out_of_gas() {
            Err(Error::<T>::OutOfGas)?
        }

        // Assumption: `collect_rent` doesn't collide with overlay because
        // `collect_rent` will be done on first call and destination contract and balance
        // cannot be changed before the first call
        // We do not allow 'calling' plain accounts. For transfering value
        // `seal_transfer` must be used.
        let contract = if let Some(ContractInfo::Alive(info)) = rent::collect_rent::<T>(&dest) {
            info
        } else {
            Err(Error::<T>::NotCallable)?
        };

        let transactor_kind = self.transactor_kind();
        let caller = self.self_account.clone();

        self.with_nested_context(dest.clone(), contract.trie_id.clone(), |nested| {
            if value > BalanceOf::<T>::zero() {
                transfer(
                    gas_meter,
                    TransferCause::Call,
                    transactor_kind,
                    &caller,
                    &dest,
                    value,
                    nested,
                )?
            }

            let executable = nested
                .loader
                .load_main(&contract.code_hash)
                .map_err(|_| Error::<T>::CodeNotFound)?;
            // Errors raised while executing the callee are re-attributed to `Callee`.
            let output = nested
                .vm
                .execute(&executable, nested.new_call_context(caller, value), input_data, gas_meter)
                .map_err(|e| ExecError { error: e.error, origin: ErrorOrigin::Callee })?;
            Ok(output)
        })
    }

    pub fn instantiate(
        &mut self,
        endowment: BalanceOf<T>,
        gas_meter: &mut GasMeter<T>,
        code_hash: &CodeHash<T>,
        input_data: Vec<u8>,
    ) -> Result<(T::AccountId, ExecReturnValue), ExecError> {
        if self.depth == self.config.max_depth as usize {
            Err(Error::<T>::MaxCallDepthReached)?
        }

        if gas_meter.charge(self.config, ExecFeeToken::Instantiate).is_out_of_gas() {
            Err(Error::<T>::OutOfGas)?
        }

        let transactor_kind = self.transactor_kind();
        let caller = self.self_account.clone();
        // The new contract address is derived deterministically from code hash,
        // input data and instantiator.
        let dest = T::DetermineContractAddress::contract_address_for(code_hash, &input_data, &caller);

        // TrieId has not been generated yet and storage is empty since contract is new.
        //
        // Generate it now.
        let dest_trie_id = <T as Trait>::TrieIdGenerator::trie_id(&dest);

        let output = self.with_nested_context(dest.clone(), dest_trie_id, |nested| {
            storage::place_contract::<T>(
                &dest,
                nested
                    .self_trie_id
                    .clone()
                    .expect("the nested context always has to have self_trie_id"),
                code_hash.clone(),
            )?;

            // Send funds unconditionally here. If the `endowment` is below existential_deposit
            // then error will be returned here.
            transfer(
                gas_meter,
                TransferCause::Instantiate,
                transactor_kind,
                &caller,
                &dest,
                endowment,
                nested,
            )?;

            let executable = nested.loader.load_init(&code_hash).map_err(|_| Error::<T>::CodeNotFound)?;
            let output = nested
                .vm
                .execute(
                    &executable,
                    nested.new_call_context(caller.clone(), endowment),
                    input_data,
                    gas_meter,
                )
                .map_err(|e| ExecError { error: e.error, origin: ErrorOrigin::Callee })?;

            // We need each contract that exists to be above the subsistence threshold
            // in order to keep up the guarantuee that we always leave a tombstone behind
            // with the exception of a contract that called `seal_terminate`.
            if T::Currency::total_balance(&dest) < nested.config.subsistence_threshold() {
                Err(Error::<T>::NewContractNotFunded)?
            }

            // Deposit an instantiation event.
            deposit_event::<T>(vec![], RawEvent::Instantiated(caller.clone(), dest.clone()));

            Ok(output)
        })?;

        Ok((dest, output))
    }

    fn new_call_context<'b>(
        &'b mut self,
        caller: T::AccountId,
        value: BalanceOf<T>,
    ) -> CallContext<'b, 'a, T, V, L> {
        let timestamp = self.timestamp.clone();
        let block_number = self.block_number.clone();
        CallContext { ctx: self, caller, value_transferred: value, timestamp, block_number }
    }

    /// Execute the given closure within a nested execution context.
    fn with_nested_context<F>(&mut self, dest: T::AccountId, trie_id: TrieId, func: F) -> ExecResult
    where
        F: FnOnce(&mut ExecutionContext<T, V, L>) -> ExecResult,
    {
        use frame_support::storage::TransactionOutcome::*;
        let mut nested = self.nested(dest, trie_id);
        // All storage changes of the nested frame are transactional: they are
        // committed only on success without the REVERT flag, otherwise rolled back.
        frame_support::storage::with_transaction(|| {
            let output = func(&mut nested);
            match output {
                Ok(ref rv) if !rv.flags.contains(ReturnFlags::REVERT) => Commit(output),
                _ => Rollback(output),
            }
        })
    }

    /// Returns whether a contract, identified by address, is currently live in the execution
    /// stack, meaning it is in the middle of an execution.
    //
    // Walks the caller chain recursively; depth is bounded by `max_depth`.
    fn is_live(&self, account: &T::AccountId) -> bool {
        &self.self_account == account ||
            self.caller.map_or(false, |caller| caller.is_live(account))
    }

    // Only the top-level frame can be a plain account; every nested frame
    // originates from a contract.
    fn transactor_kind(&self) -> TransactorKind {
        if self.depth == 0 {
            debug_assert!(self.self_trie_id.is_none());
            debug_assert!(self.caller.is_none());
            debug_assert!(ContractInfoOf::<T>::get(&self.self_account).is_none());
            TransactorKind::PlainAccount
        } else {
            TransactorKind::Contract
        }
    }
}

#[cfg_attr(test, derive(Debug, PartialEq, Eq))]
#[derive(Copy, Clone)]
pub enum TransferFeeKind {
    ContractInstantiate,
    Transfer,
}

#[cfg_attr(test, derive(Debug, PartialEq, Eq))]
#[derive(Copy, Clone)]
pub struct TransferFeeToken {
    kind: TransferFeeKind,
}

impl<T: Trait> Token<T> for TransferFeeToken {
    type Metadata = Config<T>;

    #[inline]
    fn calculate_amount(&self, metadata: &Config<T>) -> Gas {
        match self.kind {
            TransferFeeKind::ContractInstantiate => metadata.schedule.instantiate_cost,
            TransferFeeKind::Transfer => metadata.schedule.transfer_cost,
        }
    }
}

/// Describes possible transfer causes.
enum TransferCause {
    Call,
    Instantiate,
    Terminate,
}

/// Transfer some funds from `transactor` to `dest`.
///
/// All balance changes are performed in the `overlay`.
///
/// This function also handles charging the fee. The fee depends
/// on whether the transfer happening because of contract instantiation
/// (transferring endowment) or because of a transfer via `call`. This
/// is specified using the `cause` parameter.
///
/// NOTE: that the fee is denominated in `BalanceOf<T>` units, but
/// charged in `Gas` from the provided `gas_meter`. This means
/// that the actual amount charged might differ.
///
/// NOTE: that we allow for draining all funds of the contract so it
/// can go below existential deposit, essentially giving a contract
/// the chance to give up it's life.
fn transfer<'a, T: Trait, V: Vm<T>, L: Loader<T>>(
    gas_meter: &mut GasMeter<T>,
    cause: TransferCause,
    origin: TransactorKind,
    transactor: &T::AccountId,
    dest: &T::AccountId,
    value: BalanceOf<T>,
    ctx: &mut ExecutionContext<'a, T, V, L>,
) -> Result<(), DispatchError> {
    use self::TransactorKind::*;
    use self::TransferCause::*;
    use self::TransferFeeKind::*;

    let token = {
        let kind: TransferFeeKind = match cause {
            // If this function is called from `Instantiate` routine, then we always
            // charge contract account creation fee.
            Instantiate => ContractInstantiate,
            // Otherwise the fee is to transfer to an account.
            Call | Terminate => TransferFeeKind::Transfer,
        };
        TransferFeeToken { kind }
    };

    if gas_meter.charge(ctx.config, token).is_out_of_gas() {
        Err(Error::<T>::OutOfGas)?
    }

    // Only seal_terminate is allowed to bring the sender below the subsistence
    // threshold or even existential deposit.
    let existence_requirement = match (cause, origin) {
        (Terminate, _) => ExistenceRequirement::AllowDeath,
        (_, Contract) => {
            ensure!(
                T::Currency::total_balance(transactor).saturating_sub(value) >=
                    ctx.config.subsistence_threshold(),
                Error::<T>::BelowSubsistenceThreshold,
            );
            ExistenceRequirement::KeepAlive
        },
        (_, PlainAccount) => ExistenceRequirement::KeepAlive,
    };

    T::Currency::transfer(transactor, dest, value, existence_requirement)
        .map_err(|_| Error::<T>::TransferFailed)?;

    Ok(())
}

/// A context that is active within a call.
///
/// This context has some invariants that must be held at all times. Specifically:
///`ctx` always points to a context of an alive contract. That implies that it has an existent
/// `self_trie_id`.
///
/// Be advised that there are brief time spans where these invariants could be invalidated.
/// For example, when a contract requests self-termination the contract is removed eagerly. That
/// implies that the control won't be returned to the contract anymore, but there is still some code
/// on the path of the return from that call context. Therefore, care must be taken in these
/// situations.
struct CallContext<'a, 'b: 'a, T: Trait + 'b, V: Vm<T> + 'b, L: Loader<T>> {
    ctx: &'a mut ExecutionContext<'b, T, V, L>,
    caller: T::AccountId,
    value_transferred: BalanceOf<T>,
    timestamp: MomentOf<T>,
    block_number: T::BlockNumber,
}

impl<'a, 'b: 'a, T, E, V, L> Ext for CallContext<'a, 'b, T, V, L>
where
    T: Trait + 'b,
    V: Vm<T, Executable = E>,
    L: Loader<T, Executable = E>,
{
    type T = T;

    fn get_storage(&self, key: &StorageKey) -> Option<Vec<u8>> {
        let trie_id = self.ctx.self_trie_id.as_ref().expect(
            "`ctx.self_trie_id` points to an alive contract within the `CallContext`;\
                it cannot be `None`;\
                expect can't fail;\
                qed",
        );
        storage::read_contract_storage(trie_id, key)
    }

    fn set_storage(&mut self, key: StorageKey, value: Option<Vec<u8>>) {
        let trie_id = self.ctx.self_trie_id.as_ref().expect(
            "`ctx.self_trie_id` points to an alive contract within the `CallContext`;\
                it cannot be `None`;\
                expect can't fail;\
                qed",
        );
        // A write can only fail if the contract is absent, which would violate
        // the `CallContext` invariant — hence the panic below.
        if let Err(storage::ContractAbsentError) =
            storage::write_contract_storage::<T>(&self.ctx.self_account, trie_id, &key, value)
        {
            panic!(
                "the contract must be in the alive state within the `CallContext`;\
                    the contract cannot be absent in storage;
                write_contract_storage cannot return `None`;
                qed"
            );
        }
    }

    // Delegates to the execution context, which handles fees, depth and events.
    fn instantiate(
        &mut self,
        code_hash: &CodeHash<T>,
        endowment: BalanceOf<T>,
        gas_meter: &mut GasMeter<T>,
        input_data: Vec<u8>,
    ) -> Result<(AccountIdOf<T>, ExecReturnValue), ExecError> {
        self.ctx.instantiate(endowment, gas_meter, code_hash, input_data)
    }

    fn transfer(
        &mut self,
        to: &T::AccountId,
        value: BalanceOf<T>,
        gas_meter: &mut GasMeter<T>,
    ) -> Result<(), DispatchError> {
        transfer(
            gas_meter,
            TransferCause::Call,
            TransactorKind::Contract,
            &self.ctx.self_account.clone(),
            &to,
            value,
            self.ctx,
        )
    }

    fn terminate(
        &mut self,
        beneficiary: &AccountIdOf<Self::T>,
        gas_meter: &mut GasMeter<Self::T>,
    ) -> Result<(), DispatchError> {
        let self_id = self.ctx.self_account.clone();
        let value = T::Currency::free_balance(&self_id);
        // Refuse to terminate a contract that is re-entered somewhere up the stack:
        // its frames would otherwise operate on a destroyed contract.
        if let Some(caller_ctx) = self.ctx.caller {
            if caller_ctx.is_live(&self_id) {
                return Err(DispatchError::Other(
                    "Cannot terminate a contract that is present on the call stack",
                ))
            }
        }
        transfer(
            gas_meter,
            TransferCause::Terminate,
            TransactorKind::Contract,
            &self_id,
            beneficiary,
            value,
            self.ctx,
        )?;
        let self_trie_id = self.ctx.self_trie_id.as_ref().expect(
            "this function is only invoked by in the context of a contract;\
                a contract has a trie id;\
                this can't be None;
            qed",
        );
        storage::destroy_contract::<T>(&self_id, self_trie_id);
        Ok(())
    }

    fn call(
        &mut self,
        to: &T::AccountId,
        value: BalanceOf<T>,
        gas_meter: &mut GasMeter<T>,
        input_data: Vec<u8>,
    ) -> ExecResult {
        self.ctx.call(to.clone(), value, gas_meter, input_data)
    }

    fn restore_to(
        &mut self,
        dest: AccountIdOf<Self::T>,
        code_hash: CodeHash<Self::T>,
        rent_allowance: BalanceOf<Self::T>,
        delta: Vec<StorageKey>,
    ) -> Result<(), &'static str> {
        // Same re-entrancy guard as `terminate`: restoration removes self eagerly.
        if let Some(caller_ctx) = self.ctx.caller {
            if caller_ctx.is_live(&self.ctx.self_account) {
                return Err(
                    "Cannot perform restoration of a contract that is present on the call stack",
                )
            }
        }

        let result = crate::rent::restore_to::<T>(
            self.ctx.self_account.clone(),
            dest.clone(),
            code_hash.clone(),
            rent_allowance,
            delta,
        );
        if let Ok(_) = result {
            deposit_event::<Self::T>(
                vec![],
                RawEvent::Restored(self.ctx.self_account.clone(), dest, code_hash, rent_allowance),
            );
        }
        result
    }

    fn address(&self) -> &T::AccountId {
        &self.ctx.self_account
    }

    fn caller(&self) -> &T::AccountId {
        &self.caller
    }

    fn balance(&self) -> BalanceOf<T> {
        T::Currency::free_balance(&self.ctx.self_account)
    }

    fn value_transferred(&self) -> BalanceOf<T> {
        self.value_transferred
    }

    fn random(&self, subject: &[u8]) -> SeedOf<T> {
        T::Randomness::random(subject)
    }

    fn now(&self) -> &MomentOf<T> {
        &self.timestamp
    }

    fn minimum_balance(&self) -> BalanceOf<T> {
        self.ctx.config.existential_deposit
    }

    fn tombstone_deposit(&self) -> BalanceOf<T> {
        self.ctx.config.tombstone_deposit
    }

    fn deposit_event(&mut self, topics: Vec<T::Hash>, data: Vec<u8>) {
        deposit_event::<Self::T>(
            topics,
            RawEvent::ContractExecution(self.ctx.self_account.clone(), data),
        );
    }

    fn set_rent_allowance(&mut self, rent_allowance: BalanceOf<T>) {
        if let Err(storage::ContractAbsentError) =
            storage::set_rent_allowance::<T>(&self.ctx.self_account, rent_allowance)
        {
            panic!(
                "`self_account` points to an alive contract within the `CallContext`;
                    set_rent_allowance cannot return `Err`; qed"
            );
        }
    }

    fn rent_allowance(&self) -> BalanceOf<T> {
        storage::rent_allowance::<T>(&self.ctx.self_account)
            .unwrap_or_else(|_| <BalanceOf<T>>::max_value()) // Must never be triggered actually
    }

    fn block_number(&self) -> T::BlockNumber { self.block_number }

    fn max_value_size(&self) -> u32 { self.ctx.config.max_value_size }

    fn get_weight_price(&self, weight: Weight) -> BalanceOf<Self::T> {
        T::WeightPrice::convert(weight)
    }
}

// Forwards a contracts-pallet event (with indexed topics) to frame_system.
fn deposit_event<T: Trait>(topics: Vec<T::Hash>, event: Event<T>) {
    <frame_system::Module<T>>::deposit_event_indexed(
        &*topics,
        <T as Trait>::Event::from(event).into(),
    )
}

/// These tests exercise the executive layer.
///
/// In these tests the VM/loader are mocked. Instead of dealing with wasm bytecode they use simple
/// closures. This allows you to tackle executive logic more thoroughly without writing a
/// wasm VM code.
#[cfg(test)] mod tests { use super::{ BalanceOf, ErrorOrigin, Event, ExecError, ExecFeeToken, ExecResult, ExecutionContext, Ext, Loader, RawEvent, ReturnFlags, TransferFeeKind, TransferFeeToken, Vm, }; use crate::tests::test_utils::{get_balance, place_contract, set_balance}; use crate::{ exec::ExecReturnValue, gas::Gas, gas::GasMeter, storage, tests::{ExtBuilder, MetaEvent, Test}, CodeHash, Config, Error, }; use assert_matches::assert_matches; use sp_runtime::DispatchError; use std::{cell::RefCell, collections::HashMap, marker::PhantomData, rc::Rc}; const ALICE: u64 = 1; const BOB: u64 = 2; const CHARLIE: u64 = 3; const GAS_LIMIT: Gas = 10_000_000_000; fn events() -> Vec<Event<Test>> { <frame_system::Module<Test>>::events() .into_iter() .filter_map(|meta| match meta.event { MetaEvent::contracts(contract_event) => Some(contract_event), _ => None, }) .collect() } struct MockCtx<'a> { ext: &'a mut dyn Ext<T = Test>, input_data: Vec<u8>, gas_meter: &'a mut GasMeter<Test>, } #[derive(Clone)] struct MockExecutable<'a>(Rc<dyn Fn(MockCtx) -> ExecResult + 'a>); impl<'a> MockExecutable<'a> { fn new(f: impl Fn(MockCtx) -> ExecResult + 'a) -> Self { MockExecutable(Rc::new(f)) } } struct MockLoader<'a> { map: HashMap<CodeHash<Test>, MockExecutable<'a>>, counter: u64, } impl<'a> MockLoader<'a> { fn empty() -> Self { MockLoader { map: HashMap::new(), counter: 0 } } fn insert(&mut self, f: impl Fn(MockCtx) -> ExecResult + 'a) -> CodeHash<Test> { // Generate code hashes as monotonically increasing values. 
let code_hash = <Test as frame_system::Trait>::Hash::from_low_u64_be(self.counter); self.counter += 1; self.map.insert(code_hash, MockExecutable::new(f)); code_hash } } struct MockVm<'a> { _marker: PhantomData<&'a ()>, } impl<'a> MockVm<'a> { fn new() -> Self { MockVm { _marker: PhantomData } } } impl<'a> Loader<Test> for MockLoader<'a> { type Executable = MockExecutable<'a>; fn load_init(&self, code_hash: &CodeHash<Test>) -> Result<Self::Executable, &'static str> { self.map.get(code_hash).cloned().ok_or_else(|| "code not found") } fn load_main(&self, code_hash: &CodeHash<Test>) -> Result<Self::Executable, &'static str> { self.map.get(code_hash).cloned().ok_or_else(|| "code not found") } } impl<'a> Vm<Test> for MockVm<'a> { type Executable = MockExecutable<'a>; fn execute<E: Ext<T = Test>>( &self, exec: &MockExecutable, mut ext: E, input_data: Vec<u8>, gas_meter: &mut GasMeter<Test>, ) -> ExecResult { (exec.0)(MockCtx { ext: &mut ext, input_data, gas_meter }) } } fn exec_success() -> ExecResult { Ok(ExecReturnValue { flags: ReturnFlags::empty(), data: Vec::new() }) } #[test] fn it_works() { let value = Default::default(); let mut gas_meter = GasMeter::<Test>::new(GAS_LIMIT); let data = vec![]; let vm = MockVm::new(); let test_data = Rc::new(RefCell::new(vec![0usize])); let mut loader = MockLoader::empty(); let exec_ch = loader.insert(|_ctx| { test_data.borrow_mut().push(1); exec_success() }); ExtBuilder::default().build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(ALICE, &cfg, &vm, &loader); place_contract(&BOB, exec_ch); assert_matches!(ctx.call(BOB, value, &mut gas_meter, data), Ok(_)); }); assert_eq!(&*test_data.borrow(), &vec![0, 1]); } #[test] fn base_fees() { let origin = ALICE; let dest = BOB; // This test verifies that base fee for call is taken. 
ExtBuilder::default().build().execute_with(|| { let vm = MockVm::new(); let loader = MockLoader::empty(); let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(origin, &cfg, &vm, &loader); set_balance(&origin, 100); set_balance(&dest, 0); let mut gas_meter = GasMeter::<Test>::new(GAS_LIMIT); let result = super::transfer( &mut gas_meter, super::TransferCause::Call, super::TransactorKind::PlainAccount, &origin, &dest, 0, &mut ctx, ); assert_matches!(result, Ok(_)); let mut toks = gas_meter.tokens().iter(); match_tokens!(toks, TransferFeeToken { kind: TransferFeeKind::Transfer },); }); // This test verifies that base fee for instantiation is taken. ExtBuilder::default().build().execute_with(|| { let mut loader = MockLoader::empty(); let code = loader.insert(|_| exec_success()); let vm = MockVm::new(); let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(origin, &cfg, &vm, &loader); set_balance(&origin, 100); let mut gas_meter = GasMeter::<Test>::new(GAS_LIMIT); let result = ctx.instantiate(cfg.subsistence_threshold(), &mut gas_meter, &code, vec![]); assert_matches!(result, Ok(_)); let mut toks = gas_meter.tokens().iter(); match_tokens!(toks, ExecFeeToken::Instantiate,); }); } #[test] fn transfer_works() { // This test verifies that a contract is able to transfer // some funds to another account. 
let origin = ALICE; let dest = BOB; let vm = MockVm::new(); let loader = MockLoader::empty(); ExtBuilder::default().build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(origin, &cfg, &vm, &loader); set_balance(&origin, 100); set_balance(&dest, 0); let mut gas_meter = GasMeter::<Test>::new(GAS_LIMIT); super::transfer( &mut gas_meter, super::TransferCause::Call, super::TransactorKind::PlainAccount, &origin, &dest, 55, &mut ctx, ) .unwrap(); assert_eq!(get_balance(&origin), 45); assert_eq!(get_balance(&dest), 55); }); } #[test] fn changes_are_reverted_on_failing_call() { // This test verifies that changes are reverted on a call which fails (or equally, returns // a non-zero status code). let origin = ALICE; let dest = BOB; let vm = MockVm::new(); let mut loader = MockLoader::empty(); let return_ch = loader.insert(|_| Ok(ExecReturnValue { flags: ReturnFlags::REVERT, data: Vec::new() })); ExtBuilder::default().build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(origin, &cfg, &vm, &loader); place_contract(&BOB, return_ch); set_balance(&origin, 100); set_balance(&dest, 0); let output = ctx.call(dest, 55, &mut GasMeter::<Test>::new(GAS_LIMIT), vec![]).unwrap(); assert!(!output.is_success()); assert_eq!(get_balance(&origin), 100); assert_eq!(get_balance(&dest), 0); }); } #[test] fn transfer_fees() { let origin = ALICE; let dest = BOB; // This test sends 50 units of currency to a non-existent account. // This should lead to creation of a new account thus // a fee should be charged. 
ExtBuilder::default().existential_deposit(15).build().execute_with(|| { let vm = MockVm::new(); let loader = MockLoader::empty(); let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(origin, &cfg, &vm, &loader); set_balance(&origin, 100); set_balance(&dest, 0); let mut gas_meter = GasMeter::<Test>::new(GAS_LIMIT); let result = super::transfer( &mut gas_meter, super::TransferCause::Call, super::TransactorKind::PlainAccount, &origin, &dest, 50, &mut ctx, ); assert_matches!(result, Ok(_)); let mut toks = gas_meter.tokens().iter(); match_tokens!(toks, TransferFeeToken { kind: TransferFeeKind::Transfer },); }); // This one is similar to the previous one but transfer to an existing account. // In this test we expect that a regular transfer fee is charged. ExtBuilder::default().existential_deposit(15).build().execute_with(|| { let vm = MockVm::new(); let loader = MockLoader::empty(); let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(origin, &cfg, &vm, &loader); set_balance(&origin, 100); set_balance(&dest, 15); let mut gas_meter = GasMeter::<Test>::new(GAS_LIMIT); let result = super::transfer( &mut gas_meter, super::TransferCause::Call, super::TransactorKind::PlainAccount, &origin, &dest, 50, &mut ctx, ); assert_matches!(result, Ok(_)); let mut toks = gas_meter.tokens().iter(); match_tokens!(toks, TransferFeeToken { kind: TransferFeeKind::Transfer },); }); // This test sends 50 units of currency as an endowment to a newly // instantiated contract. 
ExtBuilder::default().existential_deposit(15).build().execute_with(|| { let mut loader = MockLoader::empty(); let code = loader.insert(|_| exec_success()); let vm = MockVm::new(); let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(origin, &cfg, &vm, &loader); set_balance(&origin, 100); set_balance(&dest, 15); let mut gas_meter = GasMeter::<Test>::new(GAS_LIMIT); let result = ctx.instantiate(50, &mut gas_meter, &code, vec![]); assert_matches!(result, Ok(_)); let mut toks = gas_meter.tokens().iter(); match_tokens!( toks, ExecFeeToken::Instantiate, TransferFeeToken { kind: TransferFeeKind::ContractInstantiate }, ); }); } #[test] fn balance_too_low() { // This test verifies that a contract can't send value if it's // balance is too low. let origin = ALICE; let dest = BOB; let vm = MockVm::new(); let loader = MockLoader::empty(); ExtBuilder::default().build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(origin, &cfg, &vm, &loader); set_balance(&origin, 0); let result = super::transfer( &mut GasMeter::<Test>::new(GAS_LIMIT), super::TransferCause::Call, super::TransactorKind::PlainAccount, &origin, &dest, 100, &mut ctx, ); assert_eq!(result, Err(Error::<Test>::TransferFailed.into())); assert_eq!(get_balance(&origin), 0); assert_eq!(get_balance(&dest), 0); }); } #[test] fn output_is_returned_on_success() { // Verifies that if a contract returns data with a successful exit status, this data // is returned from the execution context. 
let origin = ALICE; let dest = BOB; let vm = MockVm::new(); let mut loader = MockLoader::empty(); let return_ch = loader.insert(|_| { Ok(ExecReturnValue { flags: ReturnFlags::empty(), data: vec![1, 2, 3, 4] }) }); ExtBuilder::default().build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(origin, &cfg, &vm, &loader); place_contract(&BOB, return_ch); let result = ctx.call(dest, 0, &mut GasMeter::<Test>::new(GAS_LIMIT), vec![]); let output = result.unwrap(); assert!(output.is_success()); assert_eq!(output.data, vec![1, 2, 3, 4]); }); } #[test] fn output_is_returned_on_failure() { // Verifies that if a contract returns data with a failing exit status, this data // is returned from the execution context. let origin = ALICE; let dest = BOB; let vm = MockVm::new(); let mut loader = MockLoader::empty(); let return_ch = loader .insert(|_| Ok(ExecReturnValue { flags: ReturnFlags::REVERT, data: vec![1, 2, 3, 4] })); ExtBuilder::default().build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(origin, &cfg, &vm, &loader); place_contract(&BOB, return_ch); let result = ctx.call(dest, 0, &mut GasMeter::<Test>::new(GAS_LIMIT), vec![]); let output = result.unwrap(); assert!(!output.is_success()); assert_eq!(output.data, vec![1, 2, 3, 4]); }); } #[test] fn input_data_to_call() { let vm = MockVm::new(); let mut loader = MockLoader::empty(); let input_data_ch = loader.insert(|ctx| { assert_eq!(ctx.input_data, &[1, 2, 3, 4]); exec_success() }); // This one tests passing the input data into a contract via call. 
ExtBuilder::default().build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(ALICE, &cfg, &vm, &loader); place_contract(&BOB, input_data_ch); let result = ctx.call(BOB, 0, &mut GasMeter::<Test>::new(GAS_LIMIT), vec![1, 2, 3, 4]); assert_matches!(result, Ok(_)); }); } #[test] fn input_data_to_instantiate() { let vm = MockVm::new(); let mut loader = MockLoader::empty(); let input_data_ch = loader.insert(|ctx| { assert_eq!(ctx.input_data, &[1, 2, 3, 4]); exec_success() }); // This one tests passing the input data into a contract via instantiate. ExtBuilder::default().build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(ALICE, &cfg, &vm, &loader); set_balance(&ALICE, 100); let result = ctx.instantiate( cfg.subsistence_threshold(), &mut GasMeter::<Test>::new(GAS_LIMIT), &input_data_ch, vec![1, 2, 3, 4], ); assert_matches!(result, Ok(_)); }); } #[test] fn max_depth() { // This test verifies that when we reach the maximal depth creation of an // yet another context fails. let value = Default::default(); let reached_bottom = RefCell::new(false); let vm = MockVm::new(); let mut loader = MockLoader::empty(); let recurse_ch = loader.insert(|ctx| { // Try to call into yourself. let r = ctx.ext.call(&BOB, 0, ctx.gas_meter, vec![]); let mut reached_bottom = reached_bottom.borrow_mut(); if !*reached_bottom { // We are first time here, it means we just reached bottom. // Verify that we've got proper error and set `reached_bottom`. assert_eq!(r, Err(Error::<Test>::MaxCallDepthReached.into())); *reached_bottom = true; } else { // We just unwinding stack here. 
assert_matches!(r, Ok(_)); } exec_success() }); ExtBuilder::default().build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(ALICE, &cfg, &vm, &loader); set_balance(&BOB, 1); place_contract(&BOB, recurse_ch); let result = ctx.call(BOB, value, &mut GasMeter::<Test>::new(GAS_LIMIT), vec![]); assert_matches!(result, Ok(_)); }); } #[test] fn caller_returns_proper_values() { let origin = ALICE; let dest = BOB; let vm = MockVm::new(); let witnessed_caller_bob = RefCell::new(None::<u64>); let witnessed_caller_charlie = RefCell::new(None::<u64>); let mut loader = MockLoader::empty(); let bob_ch = loader.insert(|ctx| { // Record the caller for bob. *witnessed_caller_bob.borrow_mut() = Some(*ctx.ext.caller()); // Call into CHARLIE contract. assert_matches!(ctx.ext.call(&CHARLIE, 0, ctx.gas_meter, vec![]), Ok(_)); exec_success() }); let charlie_ch = loader.insert(|ctx| { // Record the caller for charlie. *witnessed_caller_charlie.borrow_mut() = Some(*ctx.ext.caller()); exec_success() }); ExtBuilder::default().build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(origin, &cfg, &vm, &loader); place_contract(&dest, bob_ch); place_contract(&CHARLIE, charlie_ch); let result = ctx.call(dest, 0, &mut GasMeter::<Test>::new(GAS_LIMIT), vec![]); assert_matches!(result, Ok(_)); }); assert_eq!(&*witnessed_caller_bob.borrow(), &Some(origin)); assert_eq!(&*witnessed_caller_charlie.borrow(), &Some(dest)); } #[test] fn address_returns_proper_values() { let vm = MockVm::new(); let mut loader = MockLoader::empty(); let bob_ch = loader.insert(|ctx| { // Verify that address matches BOB. assert_eq!(*ctx.ext.address(), BOB); // Call into charlie contract. 
assert_matches!(ctx.ext.call(&CHARLIE, 0, ctx.gas_meter, vec![]), Ok(_)); exec_success() }); let charlie_ch = loader.insert(|ctx| { assert_eq!(*ctx.ext.address(), CHARLIE); exec_success() }); ExtBuilder::default().build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(ALICE, &cfg, &vm, &loader); place_contract(&BOB, bob_ch); place_contract(&CHARLIE, charlie_ch); let result = ctx.call(BOB, 0, &mut GasMeter::<Test>::new(GAS_LIMIT), vec![]); assert_matches!(result, Ok(_)); }); } #[test] fn refuse_instantiate_with_value_below_existential_deposit() { let vm = MockVm::new(); let mut loader = MockLoader::empty(); let dummy_ch = loader.insert(|_| exec_success()); ExtBuilder::default().existential_deposit(15).build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(ALICE, &cfg, &vm, &loader); assert_matches!( ctx.instantiate( 0, // <- zero endowment &mut GasMeter::<Test>::new(GAS_LIMIT), &dummy_ch, vec![], ), Err(_) ); }); } #[test] fn instantiation_work_with_success_output() { let vm = MockVm::new(); let mut loader = MockLoader::empty(); let dummy_ch = loader.insert(|_| { Ok(ExecReturnValue { flags: ReturnFlags::empty(), data: vec![80, 65, 83, 83] }) }); ExtBuilder::default().existential_deposit(15).build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(ALICE, &cfg, &vm, &loader); set_balance(&ALICE, 1000); let instantiated_contract_address = assert_matches!( ctx.instantiate( 100, &mut GasMeter::<Test>::new(GAS_LIMIT), &dummy_ch, vec![], ), Ok((address, ref output)) if output.data == vec![80, 65, 83, 83] => address ); // Check that the newly created account has the expected code hash and // there are instantiation event. 
assert_eq!( storage::code_hash::<Test>(&instantiated_contract_address).unwrap(), dummy_ch ); assert_eq!(&events(), &[RawEvent::Instantiated(ALICE, instantiated_contract_address)]); }); } #[test] fn instantiation_fails_with_failing_output() { let vm = MockVm::new(); let mut loader = MockLoader::empty(); let dummy_ch = loader.insert(|_| { Ok(ExecReturnValue { flags: ReturnFlags::REVERT, data: vec![70, 65, 73, 76] }) }); ExtBuilder::default().existential_deposit(15).build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(ALICE, &cfg, &vm, &loader); set_balance(&ALICE, 1000); let instantiated_contract_address = assert_matches!( ctx.instantiate( 100, &mut GasMeter::<Test>::new(GAS_LIMIT), &dummy_ch, vec![], ), Ok((address, ref output)) if output.data == vec![70, 65, 73, 76] => address ); // Check that the account has not been created. assert!(storage::code_hash::<Test>(&instantiated_contract_address).is_err()); assert!(events().is_empty()); }); } #[test] fn instantiation_from_contract() { let vm = MockVm::new(); let mut loader = MockLoader::empty(); let dummy_ch = loader.insert(|_| exec_success()); let instantiated_contract_address = Rc::new(RefCell::new(None::<u64>)); let instantiator_ch = loader.insert({ let dummy_ch = dummy_ch.clone(); let instantiated_contract_address = Rc::clone(&instantiated_contract_address); move |ctx| { // Instantiate a contract and save it's address in `instantiated_contract_address`. 
let (address, output) = ctx .ext .instantiate( &dummy_ch, Config::<Test>::subsistence_threshold_uncached(), ctx.gas_meter, vec![], ) .unwrap(); *instantiated_contract_address.borrow_mut() = address.into(); Ok(output) } }); ExtBuilder::default().existential_deposit(15).build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(ALICE, &cfg, &vm, &loader); set_balance(&ALICE, 1000); set_balance(&BOB, 100); place_contract(&BOB, instantiator_ch); assert_matches!( ctx.call(BOB, 20, &mut GasMeter::<Test>::new(GAS_LIMIT), vec![]), Ok(_) ); let instantiated_contract_address = instantiated_contract_address.borrow().as_ref().unwrap().clone(); // Check that the newly created account has the expected code hash and // there are instantiation event. assert_eq!( storage::code_hash::<Test>(&instantiated_contract_address).unwrap(), dummy_ch ); assert_eq!(&events(), &[RawEvent::Instantiated(BOB, instantiated_contract_address)]); }); } #[test] fn instantiation_traps() { let vm = MockVm::new(); let mut loader = MockLoader::empty(); let dummy_ch = loader.insert(|_| Err("It's a trap!".into())); let instantiator_ch = loader.insert({ let dummy_ch = dummy_ch.clone(); move |ctx| { // Instantiate a contract and save it's address in `instantiated_contract_address`. assert_matches!( ctx.ext.instantiate(&dummy_ch, 15u64, ctx.gas_meter, vec![]), Err(ExecError { error: DispatchError::Other("It's a trap!"), origin: ErrorOrigin::Callee, }) ); exec_success() } }); ExtBuilder::default().existential_deposit(15).build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(ALICE, &cfg, &vm, &loader); set_balance(&ALICE, 1000); set_balance(&BOB, 100); place_contract(&BOB, instantiator_ch); assert_matches!( ctx.call(BOB, 20, &mut GasMeter::<Test>::new(GAS_LIMIT), vec![]), Ok(_) ); // The contract wasn't instantiated so we don't expect to see an instantiation // event here. 
assert_eq!(&events(), &[]); }); } #[test] fn termination_from_instantiate_fails() { let vm = MockVm::new(); let mut loader = MockLoader::empty(); let terminate_ch = loader.insert(|mut ctx| { ctx.ext.terminate(&ALICE, &mut ctx.gas_meter).unwrap(); exec_success() }); ExtBuilder::default().existential_deposit(15).build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(ALICE, &cfg, &vm, &loader); set_balance(&ALICE, 1000); assert_eq!( ctx.instantiate(100, &mut GasMeter::<Test>::new(GAS_LIMIT), &terminate_ch, vec![],), Err(Error::<Test>::NewContractNotFunded.into()) ); assert_eq!(&events(), &[]); }); } #[test] fn rent_allowance() { let vm = MockVm::new(); let mut loader = MockLoader::empty(); let rent_allowance_ch = loader.insert(|ctx| { assert_eq!(ctx.ext.rent_allowance(), <BalanceOf<Test>>::max_value()); ctx.ext.set_rent_allowance(10); assert_eq!(ctx.ext.rent_allowance(), 10); exec_success() }); ExtBuilder::default().build().execute_with(|| { let cfg = Config::preload(); let mut ctx = ExecutionContext::top_level(ALICE, &cfg, &vm, &loader); set_balance(&ALICE, 100); let result = ctx.instantiate( cfg.subsistence_threshold(), &mut GasMeter::<Test>::new(GAS_LIMIT), &rent_allowance_ch, vec![], ); assert_matches!(result, Ok(_)); }); } }
/// Finds all unique triplets in `nums` that sum to zero (classic 3-sum).
///
/// Sorts the input, then for each anchor index `a` scans the remainder with
/// a forward pointer `b` and a backward pointer `c`, skipping repeated
/// values so every triplet is reported exactly once. Triplets are emitted
/// in the sorted order of their elements.
///
/// Runs in O(n^2) after an O(n log n) sort; returns an empty vector for
/// inputs with fewer than three usable elements.
pub fn three_sum(mut nums: Vec<i32>) -> Vec<Vec<i32>> {
    let mut res = Vec::new();
    // Sorting groups duplicates together and enables the two-pointer scan.
    // `sort_unstable` avoids the allocation of stable sort; stability is
    // irrelevant for plain integers.
    nums.sort_unstable();
    let n = nums.len();
    for a in 0..n {
        // Skip anchors equal to the previous one to avoid duplicate triplets.
        if a != 0 && nums[a] == nums[a - 1] {
            continue;
        }
        let mut c = n - 1;
        let target = -nums[a];
        for b in a + 1..n {
            // Likewise skip repeated values of the middle element.
            if b != a + 1 && nums[b] == nums[b - 1] {
                continue;
            }
            // Retreat `c` while the pair sum overshoots the target; `c` is
            // monotonically non-increasing across the whole `b` loop, which
            // is what keeps the scan O(n) per anchor.
            while b < c && nums[b] + nums[c] > target {
                c -= 1;
            }
            // Pointers met: no valid pair remains for this anchor.
            if b == c {
                break;
            }
            if nums[b] + nums[c] == target {
                res.push(vec![nums[a], nums[b], nums[c]]);
            }
        }
    }
    res
}
extern crate presentrs;

use presentrs::Server;

/// Entry point: serve the presentation bundle found in `target/deploy`
/// on every interface, port 8080. Panics if the server fails to start.
fn main() {
    let deploy_dir = "target/deploy";
    let bind_addr = "0.0.0.0:8080";

    let mut server = Server::new(deploy_dir);
    server.run(bind_addr).unwrap();
}
#[cfg(test)] mod tests { use goban::pieces::goban::Goban; use goban::pieces::stones::Color; use goban::pieces::stones::Color::Black; use goban::pieces::stones::Stone; use goban::pieces::uint; use goban::pieces::util::coord::Order; use goban::pieces::zobrist::ZOBRIST; use goban::rules::game::Game; use goban::rules::Move::Play; use goban::rules::Rule; use goban::rules::{EndGame, GobanSizes, Move, Player}; use rand::seq::SliceRandom; use std::collections::HashSet; #[test] fn goban() { let mut g = Goban::new(GobanSizes::Nineteen.into()); g.push((1, 2), Color::White); println!("{}", g.pretty_string()); assert!(true) } #[test] fn goban_new_array() { let mut g = Goban::new(GobanSizes::Nineteen.into()); g.push((1, 2), Color::White); g.push((1, 3), Color::Black); let tab = g.raw(); let g2 = Goban::from_array(&tab, Order::RowMajor); assert_eq!(g, g2) } #[test] fn passes() { let mut g = Game::new(GobanSizes::Nine, Rule::Chinese); g.play(Move::Play(3, 3)); g.play(Move::Pass); g.play(Move::Play(4, 3)); let goban: &Goban = g.goban(); assert_eq!(goban.get_stone((4, 3)), Color::Black); } #[test] fn get_all_stones() { let mut g = Goban::new(GobanSizes::Nineteen.into()); g.push((1, 2), Color::White); g.push((0, 0), Color::Black); let expected = vec![ Stone { coordinates: (0, 0), color: Color::Black, }, Stone { coordinates: (1, 2), color: Color::White, }, ]; let vec: Vec<Stone> = g.get_stones().collect(); assert_eq!(expected, vec) } #[test] fn some_plays() { let mut g = Game::new(GobanSizes::Nineteen, Rule::Chinese); let mut i = 300; while !g.is_over() && i != 0 { g.play( *g.legals() .map(|coord| Move::Play(coord.0, coord.1)) .collect::<Vec<Move>>() .choose(&mut rand::thread_rng()) .unwrap(), ); i -= 1; println!("{}", g.goban().pretty_string()); } } #[test] fn some_plays_from_sgf() { let moves_sgf = vec![ Move::Play(16, 13), Move::Play(16, 11), Move::Play(14, 12), Move::Play(14, 11), Move::Play(13, 11), Move::Play(13, 12), Move::Play(14, 13), Move::Play(14, 10), Move::Play(12, 12), 
Move::Play(13, 13), Move::Play(14, 14), Move::Play(12, 13), Move::Play(14, 15), Move::Play(11, 12), Move::Play(12, 11), Move::Play(11, 13), Move::Play(12, 9), Move::Play(13, 8), Move::Play(13, 9), Move::Play(14, 9), Move::Play(14, 8), Move::Play(12, 8), Move::Play(11, 11), Move::Play(15, 8), Move::Play(14, 7), Move::Play(13, 6), Move::Play(15, 7), Move::Play(16, 8), Move::Play(16, 7), Move::Play(17, 7), Move::Play(17, 6), Move::Play(17, 8), Move::Play(14, 5), Move::Play(16, 4), Move::Play(12, 6), Move::Play(11, 8), Move::Play(13, 5), Move::Play(10, 11), Move::Play(10, 10), Move::Play(11, 10), Move::Play(13, 2), Move::Play(11, 9), Move::Play(16, 2), Move::Play(15, 2), Move::Play(15, 1), Move::Play(14, 1), Move::Play(16, 1), Move::Play(13, 3), Move::Play(12, 3), Move::Play(12, 2), Move::Play(13, 1), Move::Play(14, 2), Move::Play(13, 4), Move::Play(12, 1), Move::Play(13, 0), Move::Play(14, 0), Move::Play(14, 3), Move::Play(17, 4), Move::Play(17, 5), Move::Play(18, 1), Move::Play(2, 13), Move::Play(2, 11), Move::Play(4, 12), Move::Play(3, 13), Move::Play(3, 12), Move::Play(2, 12), Move::Play(3, 14), Move::Play(4, 13), Move::Play(4, 14), Move::Play(5, 13), Move::Play(2, 14), Move::Play(5, 14), Move::Play(4, 15), Move::Play(2, 6), Move::Play(5, 15), Move::Play(6, 15), Move::Play(6, 16), Move::Play(7, 15), Move::Play(7, 16), Move::Play(8, 16), Move::Play(6, 14), Move::Play(5, 12), Move::Play(3, 10), Move::Play(2, 10), Move::Play(4, 10), Move::Play(6, 10), Move::Play(5, 11), Move::Play(7, 13), Move::Play(6, 11), Move::Play(7, 12), Move::Play(7, 11), Move::Play(3, 8), Move::Play(7, 14), Move::Play(8, 15), Move::Play(8, 14), Move::Play(9, 13), Move::Play(9, 12), Move::Play(8, 13), Move::Play(10, 14), Move::Play(9, 14), Move::Play(10, 12), Move::Play(9, 11), Move::Play(8, 11), Move::Play(9, 10), Move::Play(10, 13), Move::Play(12, 15), Move::Play(12, 14), Move::Play(13, 14), Move::Play(13, 15), Move::Play(11, 14), Move::Play(11, 15), Move::Play(8, 12), Move::Play(12, 14), 
Move::Play(8, 9), Move::Play(4, 8), Move::Play(3, 9), Move::Play(4, 9), Move::Play(6, 8), Move::Play(4, 6), Move::Play(3, 7), Move::Play(4, 7), Move::Play(6, 6), Move::Play(2, 5), Move::Play(3, 5), Move::Play(3, 6), Move::Play(1, 7), Move::Play(7, 9), Move::Play(7, 8), Move::Play(1, 6), Move::Play(2, 7), Move::Play(5, 5), Move::Play(2, 4), Move::Play(6, 5), Move::Play(7, 5), Move::Play(6, 9), Move::Play(5, 9), Move::Play(7, 10), Move::Play(5, 8), Move::Play(7, 4), Move::Play(8, 5), Move::Play(5, 2), Move::Play(7, 2), Move::Play(7, 3), Move::Play(6, 2), Move::Play(6, 3), Move::Play(5, 1), Move::Play(4, 2), Move::Play(4, 1), Move::Play(3, 2), Move::Play(2, 2), Move::Play(3, 1), Move::Play(2, 1), Move::Play(8, 2), Move::Play(8, 1), Move::Play(9, 4), Move::Play(9, 2), Move::Play(9, 9), Move::Play(10, 9), Move::Play(9, 8), Move::Play(8, 8), Move::Play(8, 10), Move::Play(10, 10), Move::Play(10, 7), Move::Play(11, 6), Move::Play(10, 6), Move::Play(12, 5), Move::Play(11, 5), Move::Play(12, 7), Move::Play(11, 4), Move::Play(8, 4), Move::Play(15, 4), Move::Play(15, 5), Move::Play(14, 6), Move::Play(14, 4), Move::Play(13, 3), Move::Play(16, 6), Move::Play(15, 4), Move::Play(16, 3), Move::Play(16, 5), Move::Play(14, 4), Move::Play(15, 6), Move::Play(15, 4), Move::Play(17, 2), Move::Play(18, 4), Move::Play(18, 5), Move::Play(18, 2), Move::Play(18, 3), Move::Play(17, 3), Move::Play(15, 0), Move::Play(16, 16), Move::Play(17, 15), Move::Play(17, 16), Move::Play(16, 15), Move::Play(14, 17), Move::Play(13, 17), Move::Play(14, 16), Move::Play(15, 16), Move::Play(15, 17), Move::Play(17, 17), Move::Play(18, 17), Move::Play(17, 18), Move::Play(12, 17), Move::Play(13, 16), Move::Play(13, 18), Move::Play(11, 17), Move::Play(10, 17), Move::Play(11, 18), Move::Play(10, 16), Move::Play(11, 16), Move::Play(8, 17), Move::Play(5, 10), Move::Play(3, 11), Move::Play(4, 0), Move::Play(6, 1), Move::Play(17, 12), Move::Play(17, 11), Move::Play(4, 5), Move::Play(3, 4), Move::Play(8, 7), Move::Play(7, 
7), Move::Play(7, 18), Move::Play(7, 17), Move::Play(6, 17), Move::Play(4, 17), Move::Play(5, 17), Move::Play(4, 16), Move::Play(5, 16), Move::Play(2, 16), Move::Play(1, 16), Move::Play(1, 17), Move::Play(1, 15), Move::Play(0, 17), Move::Play(2, 15), Move::Play(3, 16), Move::Play(3, 18), Move::Play(2, 17), Move::Play(4, 18), Move::Play(1, 13), Move::Play(1, 18), Move::Play(2, 18), Move::Play(5, 18), Move::Play(1, 14), Move::Play(0, 16), Move::Play(8, 18), Move::Play(3, 17), Move::Play(6, 18), Move::Play(0, 18), Move::Play(8, 3), Move::Play(4, 3), Move::Play(4, 4), Move::Play(5, 4), Move::Play(2, 0), Move::Play(3, 0), Move::Play(5, 0), Move::Play(5, 3), Move::Play(4, 11), Move::Play(8, 6), Move::Play(7, 6), Move::Play(9, 5), Move::Play(11, 2), Move::Play(12, 0), Move::Play(11, 0), Move::Play(10, 1), Move::Play(10, 2), Move::Play(11, 1), Move::Play(9, 1), Move::Play(10, 0), Move::Play(9, 0), Move::Play(11, 0), Move::Play(16, 12), Move::Play(17, 13), Move::Play(15, 12), Move::Play(18, 11), Move::Play(18, 10), Move::Play(18, 12), Move::Play(17, 10), Move::Play(0, 14), Move::Play(1, 12), Move::Play(10, 18), Move::Play(9, 18), Move::Play(18, 7), Move::Play(18, 8), Move::Play(15, 13), Move::Play(13, 10), Move::Play(12, 10), Move::Play(5, 7), Move::Play(11, 3), Move::Play(10, 3), Move::Play(10, 4), Move::Play(0, 13), Move::Play(6, 12), Move::Play(6, 13), Move::Play(18, 3), Move::Play(7, 18), Move::Play(18, 6), Move::Play(0, 15), Move::Play(1, 5), Move::Play(1, 4), Move::Play(0, 14), Move::Play(5, 6), Move::Play(0, 15), Move::Play(2, 3), Move::Pass, Move::Play(15, 14), Move::Pass, Move::Play(16, 14), Move::Play(17, 14), Move::Play(15, 16), Move::Play(18, 15), Move::Play(18, 16), Move::Play(14, 18), Move::Play(15, 18), Move::Play(12, 18), Move::Play(14, 18), Move::Play(16, 17), Move::Play(10, 15), Move::Pass, Move::Pass, ]; let handicap = vec![(3, 3), (3, 15), (9, 3), (9, 15), (15, 3), (15, 15)]; let mut g = Game::new(GobanSizes::Nineteen, Rule::Chinese); let inv_coord: 
Vec<usize> = (0..19).rev().collect(); g.put_handicap(&handicap); for m in moves_sgf { let to_play = match m { Play(x, y) => { let x = x as usize; let y = y as usize; println!("({},{})", x, y); println!("({},{})", inv_coord[x], y); println!("({},{})", inv_coord[x] + 1, y + 1); if inv_coord[x] == 6 && y == 14 && g.turn() == Player::White { println!("bug") } Play(inv_coord[x] as uint, y as uint) } m => m, }; g.try_play(to_play).unwrap(); println!("prisoners: {:?}", g.prisoners()); g.display_goban() } assert!(g.is_over()); let (black_score, white_score) = g.calculate_score(); let (b_prisoners, w_prisoners) = g.prisoners(); println!("score b:{} w:{}", black_score, white_score); assert_eq!(w_prisoners, 35); assert_eq!(b_prisoners, 16); assert_eq!(w_prisoners, 35); } #[test] fn atari() { let mut goban = Goban::new((9, 9)); let s = Stone { coordinates: (4, 4), color: Color::Black, }; goban.push_stone(s); println!("{}", goban.pretty_string()); let cl = goban.clone(); let x = cl.get_liberties(s.coordinates); x.for_each(|s| { println!("{:?}", s.coordinates); goban.push_stone(Stone { coordinates: s.coordinates, color: Color::White, }); }); println!("{}", goban.pretty_string()); assert_eq!(goban.get_liberties(s.coordinates).count(), 0); } #[test] fn atari_2() { let mut g = Game::new(GobanSizes::Nine, Rule::Chinese); g.play(Move::Play(1, 0)); // B println!("{}", g.goban().pretty_string()); g.play(Move::Play(0, 0)); // W println!("{}", g.goban().pretty_string()); g.play(Move::Play(0, 1)); // B println!("{}", g.goban().pretty_string()); // Atari assert_eq!(g.goban().get_stone((0, 0)), Color::None); } #[test] fn game_finished() { let mut g = Game::new(GobanSizes::Nine, Rule::Chinese); g.play(Move::Pass); g.play(Move::Pass); assert_eq!(g.is_over(), true) } #[test] fn score_calcul() { let mut g = Game::new(GobanSizes::Nine, Rule::Japanese); g.play(Move::Play(4, 4)); g.play(Move::Pass); g.play(Move::Pass); let score = g.calculate_score(); assert_eq!(score.0, 80.); //Black 
assert_eq!(score.1, Rule::Japanese.komi()); //White } #[test] fn score_calcul2() { let mut g = Game::new(GobanSizes::Nineteen, Rule::Chinese); g.set_komi(0.); (0..38).for_each(|x| { g.try_play(Play(if x % 2 == 0 { 9 } else { 8 }, x / 2)) .unwrap(); }); g.display_goban(); let score = g.calculate_score(); assert_eq!(score, (10. * 19., 9. * 19.)); let mut goban: Goban = g.goban().clone(); goban.push_many( &{ let mut vec = vec![]; (10..19).for_each(|x| vec.push((x, 3))); vec }, Color::Black, ); goban.push_many( &vec![ (11, 6), (11, 7), (11, 8), (12, 6), (12, 8), (13, 6), (13, 7), (13, 8), ], Color::White, ); let terr = goban.calculate_territories(); assert_eq!(terr, (27, 8 * 19 + 1)); goban.push_many( &vec![(17, 18), (18, 17), (18, 15), (17, 16), (16, 17), (15, 18)], Black, ); let terr = goban.calculate_territories(); println!("{}", goban); assert_eq!(terr, (27 + 4, 8 * 19 + 1)); } #[test] fn score_calcul_chinese() { let mut g = Game::new(GobanSizes::Nine, Rule::Chinese); g.play(Move::Play(4, 4)); g.play(Move::Pass); g.play(Move::Pass); let outcome = match g.outcome() { Some(endgame) => Ok(endgame), _ => Err("Game not finished"), } .expect("Game finished"); let (black, white) = g.calculate_score(); assert_eq!(black, 81.); assert_eq!(white, g.komi()); assert_eq!( outcome, EndGame::WinnerByScore(Player::Black, 81. 
- g.komi()) ) } #[test] fn zobrist_test() { let mut set = HashSet::new(); for i in 0..19 { for j in 0..19 { for c in vec![Color::Black, Color::White] { let x = ZOBRIST[((i, j), c)]; assert!(!set.contains(&x)); set.insert(x); } } } } #[test] fn ko_test() { let mut game: Game = Default::default(); game.play(Move::Play(0, 3)); // black game.display_goban(); game.play(Move::Play(0, 2)); // white game.display_goban(); game.play(Move::Play(1, 4)); // black game.display_goban(); game.play(Move::Play(2, 2)); // white game.display_goban(); game.play(Move::Play(2, 3)); // black game.display_goban(); game.play(Move::Play(1, 1)); // white game.display_goban(); game.play(Move::Play(1, 2)); // black game.display_goban(); game.play(Move::Play(1, 3)); // white takes game.display_goban(); //game.play(Move::Play(1, 2)); // black takes back //println!("{}", game); // ko assert!(game.check_ko(Stone { coordinates: (1, 2), color: Color::Black, })); assert!(!game.legals().any(|m| m == (1, 2))); assert!(game.try_play(Move::Play(1, 2)).is_err()); assert!(game.check_superko(Stone { coordinates: (1, 2), color: Color::Black, })); } #[test] fn suicide_test() { let mut game: Game = Default::default(); game.play(Move::Play(0, 2)); // black game.display_goban(); game.play(Move::Play(0, 0)); // white game.display_goban(); game.play(Move::Play(1, 1)); // black game.display_goban(); game.play(Move::Play(1, 0)); // white game.display_goban(); game.play(Move::Play(2, 0)); // black game.display_goban(); //game.play(Move::Play(0, 1)); // white suicide whith //println!("{}", game); // suicide assert!(game.check_suicide(Stone { coordinates: (0, 1), color: Color::White, })); assert!(!game.legals().any(|m| m == (0, 1))); assert!(game.try_play(Move::Play(0, 1)).is_err()); } #[test] fn sgf_test() { let game = Game::from_sgf(include_str!("ShusakuvsInseki.sgf")).unwrap(); println!("score : {:?}", game.calculate_score()); assert_eq!( EndGame::WinnerByScore(Player::Black, 2.0), game.outcome().unwrap() ); 
assert_eq!(game.prisoners(), (29, 31)); } #[test] fn sgf_test_2_2ha() { let game = Game::from_sgf(include_str!("sgf_2_2ha.sgf")).unwrap(); println!("score : {:?}", game.calculate_score()); assert_eq!(game.prisoners(), (25, 26)); assert_eq!( EndGame::WinnerByScore(Player::Black, 1.0), game.outcome().unwrap() ); } #[test] fn sgf_test_1() { let game = Game::from_sgf(include_str!("sgf_1.sgf")).unwrap(); println!("score : {:?}", game.calculate_score()); println!("prisoners : {:?}", game.prisoners()); assert_eq!(game.prisoners(), (9, 2)); assert_eq!( EndGame::WinnerByResign(Player::White), game.outcome().unwrap() ) } }
use std::process;

/// Fetches `url` by shelling out to `curl -L` and returns the response body.
///
/// Returns `None` when curl cannot be spawned (e.g. not installed) or when
/// curl exits with a non-zero status; in the failure cases the error (curl's
/// stderr, or the spawn error) is echoed to stdout for diagnosis.
pub fn get(url: &str) -> Option<Vec<u8>> {
    // Backslash-escape brackets so curl's URL "globbing" does not expand
    // `[a-z]`-style ranges embedded in the URL.
    let url = url.replace('[', "\\[").replace(']', "\\]");

    // Spawning can fail (curl missing, permission denied, ...); report the
    // error and return None instead of panicking.
    let output = match process::Command::new("curl").arg("-L").arg(url).output() {
        Ok(output) => output,
        Err(err) => {
            println!("CURL ERROR:\n{}", err);
            return None;
        }
    };

    if output.status.success() {
        Some(output.stdout)
    } else {
        // curl's stderr need not be valid UTF-8; decode lossily rather than
        // panicking on a malformed byte.
        println!("CURL ERROR:\n{}", String::from_utf8_lossy(&output.stderr));
        None
    }
}
// Copyright (C) 2015-2021 Swift Navigation Inc. // Contact: https://support.swiftnav.com // // This source is subject to the license found in the file 'LICENSE' which must // be be distributed together with this source. All other rights reserved. // // THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND, // EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE. //**************************************************************************** // Automatically generated from yaml/swiftnav/sbp/ssr.yaml // with generate.py. Please do not hand edit! //****************************************************************************/ //! Precise State Space Representation (SSR) corrections format #[allow(unused_imports)] use std::convert::TryFrom; #[allow(unused_imports)] use byteorder::{LittleEndian, ReadBytesExt}; use super::gnss::*; #[allow(unused_imports)] use crate::serialize::SbpSerialize; #[allow(unused_imports)] use crate::SbpString; /// SSR code biases corrections for a particular satellite /// /// Code biases are to be added to pseudorange. The corrections conform with /// RTCMv3 MT 1059 / 1065. /// #[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))] #[derive(Debug, Clone)] #[allow(non_snake_case)] pub struct CodeBiasesContent { /// Signal encoded following RTCM specifications (DF380, DF381, DF382 and /// DF467). 
pub code: u8, /// Code bias value pub value: i16, } impl CodeBiasesContent { #[rustfmt::skip] pub fn parse(_buf: &mut &[u8]) -> Result<CodeBiasesContent, crate::Error> { Ok( CodeBiasesContent{ code: _buf.read_u8()?, value: _buf.read_i16::<LittleEndian>()?, } ) } pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<CodeBiasesContent>, crate::Error> { let mut v = Vec::new(); while buf.len() > 0 { v.push(CodeBiasesContent::parse(buf)?); } Ok(v) } pub fn parse_array_limit( buf: &mut &[u8], n: usize, ) -> Result<Vec<CodeBiasesContent>, crate::Error> { let mut v = Vec::new(); for _ in 0..n { v.push(CodeBiasesContent::parse(buf)?); } Ok(v) } } impl crate::serialize::SbpSerialize for CodeBiasesContent { #[allow(unused_variables)] fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) { self.code.append_to_sbp_buffer(buf); self.value.append_to_sbp_buffer(buf); } fn sbp_size(&self) -> usize { let mut size = 0; size += self.code.sbp_size(); size += self.value.sbp_size(); size } } /// Defines the grid for MSG_SSR_GRIDDED_CORRECTION messages /// /// Defines the grid for MSG_SSR_GRIDDED_CORRECTION messages. Also includes an /// RLE encoded validity list. /// #[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))] #[derive(Debug, Clone)] #[allow(non_snake_case)] pub struct GridDefinitionHeaderDepA { /// region_size (deg) = 10 / region_size_inverse 0 is an invalid value. pub region_size_inverse: u8, /// grid height (deg) = grid width (deg) = area_width / region_size 0 is an /// invalid value. 
pub area_width: u16, /// North-West corner latitude (deg) = region_size * lat_nw_corner_enc - 90 pub lat_nw_corner_enc: u16, /// North-West corner longitude (deg) = region_size * lon_nw_corner_enc - /// 180 pub lon_nw_corner_enc: u16, /// Number of messages in the dataset pub num_msgs: u8, /// Position of this message in the dataset pub seq_num: u8, } impl GridDefinitionHeaderDepA { #[rustfmt::skip] pub fn parse(_buf: &mut &[u8]) -> Result<GridDefinitionHeaderDepA, crate::Error> { Ok( GridDefinitionHeaderDepA{ region_size_inverse: _buf.read_u8()?, area_width: _buf.read_u16::<LittleEndian>()?, lat_nw_corner_enc: _buf.read_u16::<LittleEndian>()?, lon_nw_corner_enc: _buf.read_u16::<LittleEndian>()?, num_msgs: _buf.read_u8()?, seq_num: _buf.read_u8()?, } ) } pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<GridDefinitionHeaderDepA>, crate::Error> { let mut v = Vec::new(); while buf.len() > 0 { v.push(GridDefinitionHeaderDepA::parse(buf)?); } Ok(v) } pub fn parse_array_limit( buf: &mut &[u8], n: usize, ) -> Result<Vec<GridDefinitionHeaderDepA>, crate::Error> { let mut v = Vec::new(); for _ in 0..n { v.push(GridDefinitionHeaderDepA::parse(buf)?); } Ok(v) } } impl crate::serialize::SbpSerialize for GridDefinitionHeaderDepA { #[allow(unused_variables)] fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) { self.region_size_inverse.append_to_sbp_buffer(buf); self.area_width.append_to_sbp_buffer(buf); self.lat_nw_corner_enc.append_to_sbp_buffer(buf); self.lon_nw_corner_enc.append_to_sbp_buffer(buf); self.num_msgs.append_to_sbp_buffer(buf); self.seq_num.append_to_sbp_buffer(buf); } fn sbp_size(&self) -> usize { let mut size = 0; size += self.region_size_inverse.sbp_size(); size += self.area_width.sbp_size(); size += self.lat_nw_corner_enc.sbp_size(); size += self.lon_nw_corner_enc.sbp_size(); size += self.num_msgs.sbp_size(); size += self.seq_num.sbp_size(); size } } /// Header for the MSG_SSR_GRIDDED_CORRECTION message /// /// The LPP message contains nested variable length 
arrays which are not /// supported in SBP, so each grid point will be identified by the index. /// #[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))] #[derive(Debug, Clone)] #[allow(non_snake_case)] pub struct GriddedCorrectionHeader { /// Unique identifier of the tile set this tile belongs to. pub tile_set_id: u16, /// Unique identifier of this tile in the tile set. pub tile_id: u16, /// GNSS reference time of the correction pub time: GPSTimeSec, /// Number of messages in the dataset pub num_msgs: u16, /// Position of this message in the dataset pub seq_num: u16, /// Update interval between consecutive corrections. Encoded following RTCM /// DF391 specification. pub update_interval: u8, /// IOD of the SSR atmospheric correction pub iod_atmo: u8, /// Quality of the troposphere data. Encoded following RTCM DF389 /// specification in units of m. pub tropo_quality_indicator: u8, } impl GriddedCorrectionHeader { #[rustfmt::skip] pub fn parse(_buf: &mut &[u8]) -> Result<GriddedCorrectionHeader, crate::Error> { Ok( GriddedCorrectionHeader{ tile_set_id: _buf.read_u16::<LittleEndian>()?, tile_id: _buf.read_u16::<LittleEndian>()?, time: GPSTimeSec::parse(_buf)?, num_msgs: _buf.read_u16::<LittleEndian>()?, seq_num: _buf.read_u16::<LittleEndian>()?, update_interval: _buf.read_u8()?, iod_atmo: _buf.read_u8()?, tropo_quality_indicator: _buf.read_u8()?, } ) } pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<GriddedCorrectionHeader>, crate::Error> { let mut v = Vec::new(); while buf.len() > 0 { v.push(GriddedCorrectionHeader::parse(buf)?); } Ok(v) } pub fn parse_array_limit( buf: &mut &[u8], n: usize, ) -> Result<Vec<GriddedCorrectionHeader>, crate::Error> { let mut v = Vec::new(); for _ in 0..n { v.push(GriddedCorrectionHeader::parse(buf)?); } Ok(v) } } impl crate::serialize::SbpSerialize for GriddedCorrectionHeader { #[allow(unused_variables)] fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) { self.tile_set_id.append_to_sbp_buffer(buf); 
self.tile_id.append_to_sbp_buffer(buf); self.time.append_to_sbp_buffer(buf); self.num_msgs.append_to_sbp_buffer(buf); self.seq_num.append_to_sbp_buffer(buf); self.update_interval.append_to_sbp_buffer(buf); self.iod_atmo.append_to_sbp_buffer(buf); self.tropo_quality_indicator.append_to_sbp_buffer(buf); } fn sbp_size(&self) -> usize { let mut size = 0; size += self.tile_set_id.sbp_size(); size += self.tile_id.sbp_size(); size += self.time.sbp_size(); size += self.num_msgs.sbp_size(); size += self.seq_num.sbp_size(); size += self.update_interval.sbp_size(); size += self.iod_atmo.sbp_size(); size += self.tropo_quality_indicator.sbp_size(); size } } /// Header for MSG_SSR_GRIDDED_CORRECTION_DEP /// /// The 3GPP message contains nested variable length arrays which are not /// supported in SBP, so each grid point will be identified by the index. /// #[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))] #[derive(Debug, Clone)] #[allow(non_snake_case)] pub struct GriddedCorrectionHeaderDepA { /// GNSS reference time of the correction pub time: GPSTimeSec, /// Number of messages in the dataset pub num_msgs: u16, /// Position of this message in the dataset pub seq_num: u16, /// Update interval between consecutive corrections. Encoded following RTCM /// DF391 specification. pub update_interval: u8, /// IOD of the SSR atmospheric correction pub iod_atmo: u8, /// Quality of the troposphere data. Encoded following RTCM DF389 /// specification in units of m. 
pub tropo_quality_indicator: u8, } impl GriddedCorrectionHeaderDepA { #[rustfmt::skip] pub fn parse(_buf: &mut &[u8]) -> Result<GriddedCorrectionHeaderDepA, crate::Error> { Ok( GriddedCorrectionHeaderDepA{ time: GPSTimeSec::parse(_buf)?, num_msgs: _buf.read_u16::<LittleEndian>()?, seq_num: _buf.read_u16::<LittleEndian>()?, update_interval: _buf.read_u8()?, iod_atmo: _buf.read_u8()?, tropo_quality_indicator: _buf.read_u8()?, } ) } pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<GriddedCorrectionHeaderDepA>, crate::Error> { let mut v = Vec::new(); while buf.len() > 0 { v.push(GriddedCorrectionHeaderDepA::parse(buf)?); } Ok(v) } pub fn parse_array_limit( buf: &mut &[u8], n: usize, ) -> Result<Vec<GriddedCorrectionHeaderDepA>, crate::Error> { let mut v = Vec::new(); for _ in 0..n { v.push(GriddedCorrectionHeaderDepA::parse(buf)?); } Ok(v) } } impl crate::serialize::SbpSerialize for GriddedCorrectionHeaderDepA { #[allow(unused_variables)] fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) { self.time.append_to_sbp_buffer(buf); self.num_msgs.append_to_sbp_buffer(buf); self.seq_num.append_to_sbp_buffer(buf); self.update_interval.append_to_sbp_buffer(buf); self.iod_atmo.append_to_sbp_buffer(buf); self.tropo_quality_indicator.append_to_sbp_buffer(buf); } fn sbp_size(&self) -> usize { let mut size = 0; size += self.time.sbp_size(); size += self.num_msgs.sbp_size(); size += self.seq_num.sbp_size(); size += self.update_interval.sbp_size(); size += self.iod_atmo.sbp_size(); size += self.tropo_quality_indicator.sbp_size(); size } } /// Precise code biases correction /// /// The precise code biases message is to be added to the pseudorange of the /// corresponding signal to get corrected pseudorange. It is an equivalent to /// the 1059 / 1065 RTCM message types. 
/// #[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))] #[derive(Debug, Clone)] #[allow(non_snake_case)] pub struct MsgSsrCodeBiases { #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))] pub sender_id: Option<u16>, /// GNSS reference time of the correction pub time: GPSTimeSec, /// GNSS signal identifier (16 bit) pub sid: GnssSignal, /// Update interval between consecutive corrections. Encoded following RTCM /// DF391 specification. pub update_interval: u8, /// IOD of the SSR correction. A change of Issue Of Data SSR is used to /// indicate a change in the SSR generating configuration pub iod_ssr: u8, /// Code biases for the different satellite signals pub biases: Vec<CodeBiasesContent>, } impl MsgSsrCodeBiases { #[rustfmt::skip] pub fn parse(_buf: &mut &[u8]) -> Result<MsgSsrCodeBiases, crate::Error> { Ok( MsgSsrCodeBiases{ sender_id: None, time: GPSTimeSec::parse(_buf)?, sid: GnssSignal::parse(_buf)?, update_interval: _buf.read_u8()?, iod_ssr: _buf.read_u8()?, biases: CodeBiasesContent::parse_array(_buf)?, } ) } } impl super::SBPMessage for MsgSsrCodeBiases { fn get_message_name(&self) -> &'static str { "MSG_SSR_CODE_BIASES" } fn get_message_type(&self) -> u16 { 1505 } fn get_sender_id(&self) -> Option<u16> { self.sender_id } fn set_sender_id(&mut self, new_id: u16) { self.sender_id = Some(new_id); } fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> { let mut frame = Vec::new(); self.write_frame(&mut frame)?; Ok(frame) } fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> { crate::write_frame(self, frame) } } impl super::ConcreteMessage for MsgSsrCodeBiases { const MESSAGE_TYPE: u16 = 1505; const MESSAGE_NAME: &'static str = "MSG_SSR_CODE_BIASES"; } impl TryFrom<super::SBP> for MsgSsrCodeBiases { type Error = super::TryFromSBPError; fn try_from(msg: super::SBP) -> Result<Self, Self::Error> { match msg { super::SBP::MsgSsrCodeBiases(m) => Ok(m), _ => Err(super::TryFromSBPError), } } } impl 
crate::serialize::SbpSerialize for MsgSsrCodeBiases { #[allow(unused_variables)] fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) { self.time.append_to_sbp_buffer(buf); self.sid.append_to_sbp_buffer(buf); self.update_interval.append_to_sbp_buffer(buf); self.iod_ssr.append_to_sbp_buffer(buf); self.biases.append_to_sbp_buffer(buf); } fn sbp_size(&self) -> usize { let mut size = 0; size += self.time.sbp_size(); size += self.sid.sbp_size(); size += self.update_interval.sbp_size(); size += self.iod_ssr.sbp_size(); size += self.biases.sbp_size(); size } } /// Gridded troposphere and STEC correction residuals /// /// STEC residuals are per space vehicle, troposphere is not. /// /// It is typically equivalent to the QZSS CLAS Sub Type 9 messages. /// #[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))] #[derive(Debug, Clone)] #[allow(non_snake_case)] pub struct MsgSsrGriddedCorrection { #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))] pub sender_id: Option<u16>, /// Header of a gridded correction message pub header: GriddedCorrectionHeader, /// Index of the grid point. pub index: u16, /// Wet and hydrostatic vertical delays (mean, stddev). pub tropo_delay_correction: TroposphericDelayCorrection, /// STEC residuals for each satellite (mean, stddev). 
pub stec_residuals: Vec<STECResidual>, } impl MsgSsrGriddedCorrection { #[rustfmt::skip] pub fn parse(_buf: &mut &[u8]) -> Result<MsgSsrGriddedCorrection, crate::Error> { Ok( MsgSsrGriddedCorrection{ sender_id: None, header: GriddedCorrectionHeader::parse(_buf)?, index: _buf.read_u16::<LittleEndian>()?, tropo_delay_correction: TroposphericDelayCorrection::parse(_buf)?, stec_residuals: STECResidual::parse_array(_buf)?, } ) } } impl super::SBPMessage for MsgSsrGriddedCorrection { fn get_message_name(&self) -> &'static str { "MSG_SSR_GRIDDED_CORRECTION" } fn get_message_type(&self) -> u16 { 1532 } fn get_sender_id(&self) -> Option<u16> { self.sender_id } fn set_sender_id(&mut self, new_id: u16) { self.sender_id = Some(new_id); } fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> { let mut frame = Vec::new(); self.write_frame(&mut frame)?; Ok(frame) } fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> { crate::write_frame(self, frame) } } impl super::ConcreteMessage for MsgSsrGriddedCorrection { const MESSAGE_TYPE: u16 = 1532; const MESSAGE_NAME: &'static str = "MSG_SSR_GRIDDED_CORRECTION"; } impl TryFrom<super::SBP> for MsgSsrGriddedCorrection { type Error = super::TryFromSBPError; fn try_from(msg: super::SBP) -> Result<Self, Self::Error> { match msg { super::SBP::MsgSsrGriddedCorrection(m) => Ok(m), _ => Err(super::TryFromSBPError), } } } impl crate::serialize::SbpSerialize for MsgSsrGriddedCorrection { #[allow(unused_variables)] fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) { self.header.append_to_sbp_buffer(buf); self.index.append_to_sbp_buffer(buf); self.tropo_delay_correction.append_to_sbp_buffer(buf); self.stec_residuals.append_to_sbp_buffer(buf); } fn sbp_size(&self) -> usize { let mut size = 0; size += self.header.sbp_size(); size += self.index.sbp_size(); size += self.tropo_delay_correction.sbp_size(); size += self.stec_residuals.sbp_size(); size } } #[cfg_attr(feature = "sbp_serde", 
derive(serde::Serialize))] #[derive(Debug, Clone)] #[allow(non_snake_case)] pub struct MsgSsrGriddedCorrectionDepA { #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))] pub sender_id: Option<u16>, /// Header of a Gridded Correction message pub header: GriddedCorrectionHeaderDepA, /// Index of the grid point pub index: u16, /// Wet and hydrostatic vertical delays (mean, stddev) pub tropo_delay_correction: TroposphericDelayCorrection, /// STEC residuals for each satellite (mean, stddev) pub stec_residuals: Vec<STECResidual>, } impl MsgSsrGriddedCorrectionDepA { #[rustfmt::skip] pub fn parse(_buf: &mut &[u8]) -> Result<MsgSsrGriddedCorrectionDepA, crate::Error> { Ok( MsgSsrGriddedCorrectionDepA{ sender_id: None, header: GriddedCorrectionHeaderDepA::parse(_buf)?, index: _buf.read_u16::<LittleEndian>()?, tropo_delay_correction: TroposphericDelayCorrection::parse(_buf)?, stec_residuals: STECResidual::parse_array(_buf)?, } ) } } impl super::SBPMessage for MsgSsrGriddedCorrectionDepA { fn get_message_name(&self) -> &'static str { "MSG_SSR_GRIDDED_CORRECTION_DEP_A" } fn get_message_type(&self) -> u16 { 1530 } fn get_sender_id(&self) -> Option<u16> { self.sender_id } fn set_sender_id(&mut self, new_id: u16) { self.sender_id = Some(new_id); } fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> { let mut frame = Vec::new(); self.write_frame(&mut frame)?; Ok(frame) } fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> { crate::write_frame(self, frame) } } impl super::ConcreteMessage for MsgSsrGriddedCorrectionDepA { const MESSAGE_TYPE: u16 = 1530; const MESSAGE_NAME: &'static str = "MSG_SSR_GRIDDED_CORRECTION_DEP_A"; } impl TryFrom<super::SBP> for MsgSsrGriddedCorrectionDepA { type Error = super::TryFromSBPError; fn try_from(msg: super::SBP) -> Result<Self, Self::Error> { match msg { super::SBP::MsgSsrGriddedCorrectionDepA(m) => Ok(m), _ => Err(super::TryFromSBPError), } } } impl crate::serialize::SbpSerialize for 
MsgSsrGriddedCorrectionDepA { #[allow(unused_variables)] fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) { self.header.append_to_sbp_buffer(buf); self.index.append_to_sbp_buffer(buf); self.tropo_delay_correction.append_to_sbp_buffer(buf); self.stec_residuals.append_to_sbp_buffer(buf); } fn sbp_size(&self) -> usize { let mut size = 0; size += self.header.sbp_size(); size += self.index.sbp_size(); size += self.tropo_delay_correction.sbp_size(); size += self.stec_residuals.sbp_size(); size } } #[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))] #[derive(Debug, Clone)] #[allow(non_snake_case)] pub struct MsgSsrGriddedCorrectionNoStdDepA { #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))] pub sender_id: Option<u16>, /// Header of a Gridded Correction message pub header: GriddedCorrectionHeaderDepA, /// Index of the grid point pub index: u16, /// Wet and hydrostatic vertical delays pub tropo_delay_correction: TroposphericDelayCorrectionNoStd, /// STEC residuals for each satellite pub stec_residuals: Vec<STECResidualNoStd>, } impl MsgSsrGriddedCorrectionNoStdDepA { #[rustfmt::skip] pub fn parse(_buf: &mut &[u8]) -> Result<MsgSsrGriddedCorrectionNoStdDepA, crate::Error> { Ok( MsgSsrGriddedCorrectionNoStdDepA{ sender_id: None, header: GriddedCorrectionHeaderDepA::parse(_buf)?, index: _buf.read_u16::<LittleEndian>()?, tropo_delay_correction: TroposphericDelayCorrectionNoStd::parse(_buf)?, stec_residuals: STECResidualNoStd::parse_array(_buf)?, } ) } } impl super::SBPMessage for MsgSsrGriddedCorrectionNoStdDepA { fn get_message_name(&self) -> &'static str { "MSG_SSR_GRIDDED_CORRECTION_NO_STD_DEP_A" } fn get_message_type(&self) -> u16 { 1520 } fn get_sender_id(&self) -> Option<u16> { self.sender_id } fn set_sender_id(&mut self, new_id: u16) { self.sender_id = Some(new_id); } fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> { let mut frame = Vec::new(); self.write_frame(&mut frame)?; Ok(frame) } fn write_frame(&self, frame: &mut 
Vec<u8>) -> std::result::Result<(), crate::FramerError> { crate::write_frame(self, frame) } } impl super::ConcreteMessage for MsgSsrGriddedCorrectionNoStdDepA { const MESSAGE_TYPE: u16 = 1520; const MESSAGE_NAME: &'static str = "MSG_SSR_GRIDDED_CORRECTION_NO_STD_DEP_A"; } impl TryFrom<super::SBP> for MsgSsrGriddedCorrectionNoStdDepA { type Error = super::TryFromSBPError; fn try_from(msg: super::SBP) -> Result<Self, Self::Error> { match msg { super::SBP::MsgSsrGriddedCorrectionNoStdDepA(m) => Ok(m), _ => Err(super::TryFromSBPError), } } } impl crate::serialize::SbpSerialize for MsgSsrGriddedCorrectionNoStdDepA { #[allow(unused_variables)] fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) { self.header.append_to_sbp_buffer(buf); self.index.append_to_sbp_buffer(buf); self.tropo_delay_correction.append_to_sbp_buffer(buf); self.stec_residuals.append_to_sbp_buffer(buf); } fn sbp_size(&self) -> usize { let mut size = 0; size += self.header.sbp_size(); size += self.index.sbp_size(); size += self.tropo_delay_correction.sbp_size(); size += self.stec_residuals.sbp_size(); size } } #[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))] #[derive(Debug, Clone)] #[allow(non_snake_case)] pub struct MsgSsrGridDefinitionDepA { #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))] pub sender_id: Option<u16>, /// Header of a Gridded Correction message pub header: GridDefinitionHeaderDepA, /// Run Length Encode list of quadrants that contain valid data. The spec /// describes the encoding scheme in detail, but essentially the index of /// the quadrants that contain transitions between valid and invalid (and /// vice versa) are encoded as u8 integers. 
pub rle_list: Vec<u8>, } impl MsgSsrGridDefinitionDepA { #[rustfmt::skip] pub fn parse(_buf: &mut &[u8]) -> Result<MsgSsrGridDefinitionDepA, crate::Error> { Ok( MsgSsrGridDefinitionDepA{ sender_id: None, header: GridDefinitionHeaderDepA::parse(_buf)?, rle_list: crate::parser::read_u8_array(_buf)?, } ) } } impl super::SBPMessage for MsgSsrGridDefinitionDepA { fn get_message_name(&self) -> &'static str { "MSG_SSR_GRID_DEFINITION_DEP_A" } fn get_message_type(&self) -> u16 { 1525 } fn get_sender_id(&self) -> Option<u16> { self.sender_id } fn set_sender_id(&mut self, new_id: u16) { self.sender_id = Some(new_id); } fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> { let mut frame = Vec::new(); self.write_frame(&mut frame)?; Ok(frame) } fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> { crate::write_frame(self, frame) } } impl super::ConcreteMessage for MsgSsrGridDefinitionDepA { const MESSAGE_TYPE: u16 = 1525; const MESSAGE_NAME: &'static str = "MSG_SSR_GRID_DEFINITION_DEP_A"; } impl TryFrom<super::SBP> for MsgSsrGridDefinitionDepA { type Error = super::TryFromSBPError; fn try_from(msg: super::SBP) -> Result<Self, Self::Error> { match msg { super::SBP::MsgSsrGridDefinitionDepA(m) => Ok(m), _ => Err(super::TryFromSBPError), } } } impl crate::serialize::SbpSerialize for MsgSsrGridDefinitionDepA { #[allow(unused_variables)] fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) { self.header.append_to_sbp_buffer(buf); self.rle_list.append_to_sbp_buffer(buf); } fn sbp_size(&self) -> usize { let mut size = 0; size += self.header.sbp_size(); size += self.rle_list.sbp_size(); size } } /// Precise orbit and clock correction /// /// The precise orbit and clock correction message is to be applied as a delta /// correction to broadcast ephemeris and is an equivalent to the 1060 /1066 /// RTCM message types. 
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct MsgSsrOrbitClock {
    #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))]
    pub sender_id: Option<u16>,
    /// GNSS reference time of the correction
    pub time: GPSTimeSec,
    /// GNSS signal identifier (16 bit)
    pub sid: GnssSignal,
    /// Update interval between consecutive corrections. Encoded following RTCM
    /// DF391 specification.
    pub update_interval: u8,
    /// IOD of the SSR correction. A change of Issue Of Data SSR is used to
    /// indicate a change in the SSR generating configuration
    pub iod_ssr: u8,
    /// Issue of broadcast ephemeris data or IODCRC (Beidou)
    pub iod: u32,
    /// Orbit radial delta correction
    pub radial: i32,
    /// Orbit along delta correction
    pub along: i32,
    /// Orbit cross delta correction
    pub cross: i32,
    /// Velocity of orbit radial delta correction
    pub dot_radial: i32,
    /// Velocity of orbit along delta correction
    pub dot_along: i32,
    /// Velocity of orbit cross delta correction
    pub dot_cross: i32,
    /// C0 polynomial coefficient for correction of broadcast satellite clock
    pub c0: i32,
    /// C1 polynomial coefficient for correction of broadcast satellite clock
    pub c1: i32,
    /// C2 polynomial coefficient for correction of broadcast satellite clock
    pub c2: i32,
}

impl MsgSsrOrbitClock {
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<MsgSsrOrbitClock, crate::Error> {
        Ok( MsgSsrOrbitClock{
            sender_id: None,
            time: GPSTimeSec::parse(_buf)?,
            sid: GnssSignal::parse(_buf)?,
            update_interval: _buf.read_u8()?,
            iod_ssr: _buf.read_u8()?,
            iod: _buf.read_u32::<LittleEndian>()?,
            radial: _buf.read_i32::<LittleEndian>()?,
            along: _buf.read_i32::<LittleEndian>()?,
            cross: _buf.read_i32::<LittleEndian>()?,
            dot_radial: _buf.read_i32::<LittleEndian>()?,
            dot_along: _buf.read_i32::<LittleEndian>()?,
            dot_cross: _buf.read_i32::<LittleEndian>()?,
            c0: _buf.read_i32::<LittleEndian>()?,
            c1: _buf.read_i32::<LittleEndian>()?,
            c2: _buf.read_i32::<LittleEndian>()?,
        }
) } } impl super::SBPMessage for MsgSsrOrbitClock { fn get_message_name(&self) -> &'static str { "MSG_SSR_ORBIT_CLOCK" } fn get_message_type(&self) -> u16 { 1501 } fn get_sender_id(&self) -> Option<u16> { self.sender_id } fn set_sender_id(&mut self, new_id: u16) { self.sender_id = Some(new_id); } fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> { let mut frame = Vec::new(); self.write_frame(&mut frame)?; Ok(frame) } fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> { crate::write_frame(self, frame) } } impl super::ConcreteMessage for MsgSsrOrbitClock { const MESSAGE_TYPE: u16 = 1501; const MESSAGE_NAME: &'static str = "MSG_SSR_ORBIT_CLOCK"; } impl TryFrom<super::SBP> for MsgSsrOrbitClock { type Error = super::TryFromSBPError; fn try_from(msg: super::SBP) -> Result<Self, Self::Error> { match msg { super::SBP::MsgSsrOrbitClock(m) => Ok(m), _ => Err(super::TryFromSBPError), } } } impl crate::serialize::SbpSerialize for MsgSsrOrbitClock { #[allow(unused_variables)] fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) { self.time.append_to_sbp_buffer(buf); self.sid.append_to_sbp_buffer(buf); self.update_interval.append_to_sbp_buffer(buf); self.iod_ssr.append_to_sbp_buffer(buf); self.iod.append_to_sbp_buffer(buf); self.radial.append_to_sbp_buffer(buf); self.along.append_to_sbp_buffer(buf); self.cross.append_to_sbp_buffer(buf); self.dot_radial.append_to_sbp_buffer(buf); self.dot_along.append_to_sbp_buffer(buf); self.dot_cross.append_to_sbp_buffer(buf); self.c0.append_to_sbp_buffer(buf); self.c1.append_to_sbp_buffer(buf); self.c2.append_to_sbp_buffer(buf); } fn sbp_size(&self) -> usize { let mut size = 0; size += self.time.sbp_size(); size += self.sid.sbp_size(); size += self.update_interval.sbp_size(); size += self.iod_ssr.sbp_size(); size += self.iod.sbp_size(); size += self.radial.sbp_size(); size += self.along.sbp_size(); size += self.cross.sbp_size(); size += self.dot_radial.sbp_size(); size += 
self.dot_along.sbp_size();
        size += self.dot_cross.sbp_size();
        size += self.c0.sbp_size();
        size += self.c1.sbp_size();
        size += self.c2.sbp_size();
        size
    }
}

#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct MsgSsrOrbitClockDepA {
    #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))]
    pub sender_id: Option<u16>,
    /// GNSS reference time of the correction
    pub time: GPSTimeSec,
    /// GNSS signal identifier (16 bit)
    pub sid: GnssSignal,
    /// Update interval between consecutive corrections. Encoded following RTCM
    /// DF391 specification.
    pub update_interval: u8,
    /// IOD of the SSR correction. A change of Issue Of Data SSR is used to
    /// indicate a change in the SSR generating configuration
    pub iod_ssr: u8,
    /// Issue of broadcast ephemeris data
    pub iod: u8,
    /// Orbit radial delta correction
    pub radial: i32,
    /// Orbit along delta correction
    pub along: i32,
    /// Orbit cross delta correction
    pub cross: i32,
    /// Velocity of orbit radial delta correction
    pub dot_radial: i32,
    /// Velocity of orbit along delta correction
    pub dot_along: i32,
    /// Velocity of orbit cross delta correction
    pub dot_cross: i32,
    /// C0 polynomial coefficient for correction of broadcast satellite clock
    pub c0: i32,
    /// C1 polynomial coefficient for correction of broadcast satellite clock
    pub c1: i32,
    /// C2 polynomial coefficient for correction of broadcast satellite clock
    pub c2: i32,
}

impl MsgSsrOrbitClockDepA {
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<MsgSsrOrbitClockDepA, crate::Error> {
        Ok( MsgSsrOrbitClockDepA{
            sender_id: None,
            time: GPSTimeSec::parse(_buf)?,
            sid: GnssSignal::parse(_buf)?,
            update_interval: _buf.read_u8()?,
            iod_ssr: _buf.read_u8()?,
            iod: _buf.read_u8()?,
            radial: _buf.read_i32::<LittleEndian>()?,
            along: _buf.read_i32::<LittleEndian>()?,
            cross: _buf.read_i32::<LittleEndian>()?,
            dot_radial: _buf.read_i32::<LittleEndian>()?,
            dot_along: _buf.read_i32::<LittleEndian>()?,
            dot_cross:
_buf.read_i32::<LittleEndian>()?, c0: _buf.read_i32::<LittleEndian>()?, c1: _buf.read_i32::<LittleEndian>()?, c2: _buf.read_i32::<LittleEndian>()?, } ) } } impl super::SBPMessage for MsgSsrOrbitClockDepA { fn get_message_name(&self) -> &'static str { "MSG_SSR_ORBIT_CLOCK_DEP_A" } fn get_message_type(&self) -> u16 { 1500 } fn get_sender_id(&self) -> Option<u16> { self.sender_id } fn set_sender_id(&mut self, new_id: u16) { self.sender_id = Some(new_id); } fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> { let mut frame = Vec::new(); self.write_frame(&mut frame)?; Ok(frame) } fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> { crate::write_frame(self, frame) } } impl super::ConcreteMessage for MsgSsrOrbitClockDepA { const MESSAGE_TYPE: u16 = 1500; const MESSAGE_NAME: &'static str = "MSG_SSR_ORBIT_CLOCK_DEP_A"; } impl TryFrom<super::SBP> for MsgSsrOrbitClockDepA { type Error = super::TryFromSBPError; fn try_from(msg: super::SBP) -> Result<Self, Self::Error> { match msg { super::SBP::MsgSsrOrbitClockDepA(m) => Ok(m), _ => Err(super::TryFromSBPError), } } } impl crate::serialize::SbpSerialize for MsgSsrOrbitClockDepA { #[allow(unused_variables)] fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) { self.time.append_to_sbp_buffer(buf); self.sid.append_to_sbp_buffer(buf); self.update_interval.append_to_sbp_buffer(buf); self.iod_ssr.append_to_sbp_buffer(buf); self.iod.append_to_sbp_buffer(buf); self.radial.append_to_sbp_buffer(buf); self.along.append_to_sbp_buffer(buf); self.cross.append_to_sbp_buffer(buf); self.dot_radial.append_to_sbp_buffer(buf); self.dot_along.append_to_sbp_buffer(buf); self.dot_cross.append_to_sbp_buffer(buf); self.c0.append_to_sbp_buffer(buf); self.c1.append_to_sbp_buffer(buf); self.c2.append_to_sbp_buffer(buf); } fn sbp_size(&self) -> usize { let mut size = 0; size += self.time.sbp_size(); size += self.sid.sbp_size(); size += self.update_interval.sbp_size(); size += self.iod_ssr.sbp_size(); 
size += self.iod.sbp_size(); size += self.radial.sbp_size(); size += self.along.sbp_size(); size += self.cross.sbp_size(); size += self.dot_radial.sbp_size(); size += self.dot_along.sbp_size(); size += self.dot_cross.sbp_size(); size += self.c0.sbp_size(); size += self.c1.sbp_size(); size += self.c2.sbp_size(); size } } /// Precise phase biases correction /// /// The precise phase biases message contains the biases to be added to the /// carrier phase of the corresponding signal to get corrected carrier phase /// measurement, as well as the satellite yaw angle to be applied to compute /// the phase wind-up correction. It is typically an equivalent to the 1265 /// RTCM message types. /// #[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))] #[derive(Debug, Clone)] #[allow(non_snake_case)] pub struct MsgSsrPhaseBiases { #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))] pub sender_id: Option<u16>, /// GNSS reference time of the correction pub time: GPSTimeSec, /// GNSS signal identifier (16 bit) pub sid: GnssSignal, /// Update interval between consecutive corrections. Encoded following RTCM /// DF391 specification. pub update_interval: u8, /// IOD of the SSR correction. A change of Issue Of Data SSR is used to /// indicate a change in the SSR generating configuration pub iod_ssr: u8, /// Indicator for the dispersive phase biases property. pub dispersive_bias: u8, /// Consistency indicator for Melbourne-Wubbena linear combinations pub mw_consistency: u8, /// Satellite yaw angle pub yaw: u16, /// Satellite yaw angle rate pub yaw_rate: i8, /// Phase biases corrections for a satellite being tracked. 
pub biases: Vec<PhaseBiasesContent>,
}

impl MsgSsrPhaseBiases {
    // Parses the payload fields in wire order, advancing `_buf` past the
    // consumed bytes. Multi-byte integers are little-endian.
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<MsgSsrPhaseBiases, crate::Error> {
        Ok( MsgSsrPhaseBiases{
            sender_id: None,
            time: GPSTimeSec::parse(_buf)?,
            sid: GnssSignal::parse(_buf)?,
            update_interval: _buf.read_u8()?,
            iod_ssr: _buf.read_u8()?,
            dispersive_bias: _buf.read_u8()?,
            mw_consistency: _buf.read_u8()?,
            yaw: _buf.read_u16::<LittleEndian>()?,
            yaw_rate: _buf.read_i8()?,
            biases: PhaseBiasesContent::parse_array(_buf)?,
        } )
    }
}

impl super::SBPMessage for MsgSsrPhaseBiases {
    fn get_message_name(&self) -> &'static str {
        "MSG_SSR_PHASE_BIASES"
    }

    fn get_message_type(&self) -> u16 {
        1510
    }

    fn get_sender_id(&self) -> Option<u16> {
        self.sender_id
    }

    fn set_sender_id(&mut self, new_id: u16) {
        self.sender_id = Some(new_id);
    }

    fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> {
        let mut frame = Vec::new();
        self.write_frame(&mut frame)?;
        Ok(frame)
    }

    fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> {
        crate::write_frame(self, frame)
    }
}

impl super::ConcreteMessage for MsgSsrPhaseBiases {
    const MESSAGE_TYPE: u16 = 1510;
    const MESSAGE_NAME: &'static str = "MSG_SSR_PHASE_BIASES";
}

impl TryFrom<super::SBP> for MsgSsrPhaseBiases {
    type Error = super::TryFromSBPError;
    fn try_from(msg: super::SBP) -> Result<Self, Self::Error> {
        match msg {
            super::SBP::MsgSsrPhaseBiases(m) => Ok(m),
            _ => Err(super::TryFromSBPError),
        }
    }
}

// Serialization mirrors the field order used by `parse`; `sender_id` is
// frame metadata and is not written.
impl crate::serialize::SbpSerialize for MsgSsrPhaseBiases {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.time.append_to_sbp_buffer(buf);
        self.sid.append_to_sbp_buffer(buf);
        self.update_interval.append_to_sbp_buffer(buf);
        self.iod_ssr.append_to_sbp_buffer(buf);
        self.dispersive_bias.append_to_sbp_buffer(buf);
        self.mw_consistency.append_to_sbp_buffer(buf);
        self.yaw.append_to_sbp_buffer(buf);
        self.yaw_rate.append_to_sbp_buffer(buf);
        self.biases.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) ->
usize {
        let mut size = 0;
        size += self.time.sbp_size();
        size += self.sid.sbp_size();
        size += self.update_interval.sbp_size();
        size += self.iod_ssr.sbp_size();
        size += self.dispersive_bias.sbp_size();
        size += self.mw_consistency.sbp_size();
        size += self.yaw.sbp_size();
        size += self.yaw_rate.sbp_size();
        size += self.biases.sbp_size();
        size
    }
}

#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct MsgSsrSatelliteApc {
    // Frame metadata, not part of the encoded payload.
    #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))]
    pub sender_id: Option<u16>,
    /// Satellite antenna phase center corrections
    pub apc: Vec<SatelliteAPC>,
}

impl MsgSsrSatelliteApc {
    // Consumes the remainder of `_buf` as a list of SatelliteAPC entries.
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<MsgSsrSatelliteApc, crate::Error> {
        Ok( MsgSsrSatelliteApc{
            sender_id: None,
            apc: SatelliteAPC::parse_array(_buf)?,
        } )
    }
}

impl super::SBPMessage for MsgSsrSatelliteApc {
    fn get_message_name(&self) -> &'static str {
        "MSG_SSR_SATELLITE_APC"
    }

    fn get_message_type(&self) -> u16 {
        1540
    }

    fn get_sender_id(&self) -> Option<u16> {
        self.sender_id
    }

    fn set_sender_id(&mut self, new_id: u16) {
        self.sender_id = Some(new_id);
    }

    fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> {
        let mut frame = Vec::new();
        self.write_frame(&mut frame)?;
        Ok(frame)
    }

    fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> {
        crate::write_frame(self, frame)
    }
}

impl super::ConcreteMessage for MsgSsrSatelliteApc {
    const MESSAGE_TYPE: u16 = 1540;
    const MESSAGE_NAME: &'static str = "MSG_SSR_SATELLITE_APC";
}

impl TryFrom<super::SBP> for MsgSsrSatelliteApc {
    type Error = super::TryFromSBPError;
    fn try_from(msg: super::SBP) -> Result<Self, Self::Error> {
        match msg {
            super::SBP::MsgSsrSatelliteApc(m) => Ok(m),
            _ => Err(super::TryFromSBPError),
        }
    }
}

impl crate::serialize::SbpSerialize for MsgSsrSatelliteApc {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.apc.append_to_sbp_buffer(buf);
    }

    fn
sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.apc.sbp_size();
        size
    }
}

/// STEC correction polynomial coefficients
///
/// The Slant Total Electron Content per space vehicle, given as polynomial
/// approximation for a given tile. This should be combined with the
/// MSG_SSR_GRIDDED_CORRECTION message to get the state space representation
/// of the atmospheric delay.
///
/// It is typically equivalent to the QZSS CLAS Sub Type 8 messages.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct MsgSsrStecCorrection {
    // Frame metadata, not part of the encoded payload.
    #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))]
    pub sender_id: Option<u16>,
    /// Header of a STEC polynomial coefficient message.
    pub header: STECHeader,
    /// Array of STEC polynomial coefficients for each space vehicle.
    pub stec_sat_list: Vec<STECSatElement>,
}

impl MsgSsrStecCorrection {
    // Parses the header, then consumes the rest of `_buf` as satellite
    // elements.
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<MsgSsrStecCorrection, crate::Error> {
        Ok( MsgSsrStecCorrection{
            sender_id: None,
            header: STECHeader::parse(_buf)?,
            stec_sat_list: STECSatElement::parse_array(_buf)?,
        } )
    }
}

impl super::SBPMessage for MsgSsrStecCorrection {
    fn get_message_name(&self) -> &'static str {
        "MSG_SSR_STEC_CORRECTION"
    }

    fn get_message_type(&self) -> u16 {
        1531
    }

    fn get_sender_id(&self) -> Option<u16> {
        self.sender_id
    }

    fn set_sender_id(&mut self, new_id: u16) {
        self.sender_id = Some(new_id);
    }

    fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> {
        let mut frame = Vec::new();
        self.write_frame(&mut frame)?;
        Ok(frame)
    }

    fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> {
        crate::write_frame(self, frame)
    }
}

impl super::ConcreteMessage for MsgSsrStecCorrection {
    const MESSAGE_TYPE: u16 = 1531;
    const MESSAGE_NAME: &'static str = "MSG_SSR_STEC_CORRECTION";
}

impl TryFrom<super::SBP> for MsgSsrStecCorrection {
    type Error = super::TryFromSBPError;
    fn try_from(msg: super::SBP) -> Result<Self,
Self::Error> {
        match msg {
            super::SBP::MsgSsrStecCorrection(m) => Ok(m),
            _ => Err(super::TryFromSBPError),
        }
    }
}

impl crate::serialize::SbpSerialize for MsgSsrStecCorrection {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.header.append_to_sbp_buffer(buf);
        self.stec_sat_list.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.header.sbp_size();
        size += self.stec_sat_list.sbp_size();
        size
    }
}

// DEP_A variant of the STEC correction message: same field layout as
// MsgSsrStecCorrection but carries the older STECHeaderDepA header.
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct MsgSsrStecCorrectionDepA {
    // Frame metadata, not part of the encoded payload.
    #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))]
    pub sender_id: Option<u16>,
    /// Header of a STEC message
    pub header: STECHeaderDepA,
    /// Array of STEC information for each space vehicle
    pub stec_sat_list: Vec<STECSatElement>,
}

impl MsgSsrStecCorrectionDepA {
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<MsgSsrStecCorrectionDepA, crate::Error> {
        Ok( MsgSsrStecCorrectionDepA{
            sender_id: None,
            header: STECHeaderDepA::parse(_buf)?,
            stec_sat_list: STECSatElement::parse_array(_buf)?,
        } )
    }
}

impl super::SBPMessage for MsgSsrStecCorrectionDepA {
    fn get_message_name(&self) -> &'static str {
        "MSG_SSR_STEC_CORRECTION_DEP_A"
    }

    fn get_message_type(&self) -> u16 {
        1515
    }

    fn get_sender_id(&self) -> Option<u16> {
        self.sender_id
    }

    fn set_sender_id(&mut self, new_id: u16) {
        self.sender_id = Some(new_id);
    }

    fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> {
        let mut frame = Vec::new();
        self.write_frame(&mut frame)?;
        Ok(frame)
    }

    fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> {
        crate::write_frame(self, frame)
    }
}

impl super::ConcreteMessage for MsgSsrStecCorrectionDepA {
    const MESSAGE_TYPE: u16 = 1515;
    const MESSAGE_NAME: &'static str = "MSG_SSR_STEC_CORRECTION_DEP_A";
}

impl TryFrom<super::SBP> for MsgSsrStecCorrectionDepA {
    type Error = super::TryFromSBPError;
    fn try_from(msg: super::SBP)
-> Result<Self, Self::Error> {
        match msg {
            super::SBP::MsgSsrStecCorrectionDepA(m) => Ok(m),
            _ => Err(super::TryFromSBPError),
        }
    }
}

impl crate::serialize::SbpSerialize for MsgSsrStecCorrectionDepA {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.header.append_to_sbp_buffer(buf);
        self.stec_sat_list.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.header.sbp_size();
        size += self.stec_sat_list.sbp_size();
        size
    }
}

/// Definition of a SSR atmospheric correction tile.
///
/// Provides the correction point coordinates for the atmospheric correction
/// values in the MSG_SSR_STEC_CORRECTION and MSG_SSR_GRIDDED_CORRECTION
/// messages.
///
/// Based on ETSI TS 137 355 V16.1.0 (LTE Positioning Protocol) information
/// element GNSS-SSR-CorrectionPoints. SBP only supports gridded arrays of
/// correction points, not lists of points.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct MsgSsrTileDefinition {
    // Frame metadata, not part of the encoded payload.
    #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))]
    pub sender_id: Option<u16>,
    /// Unique identifier of the tile set this tile belongs to.
    pub tile_set_id: u16,
    /// Unique identifier of this tile in the tile set.
    /// See GNSS-SSR-ArrayOfCorrectionPoints field correctionPointSetID.
    pub tile_id: u16,
    /// North-West corner correction point latitude.
    ///
    /// The relation between the latitude X in the range [-90, 90] and the coded
    /// number N is:
    ///
    /// N = floor((X / 90) * 2^14)
    ///
    /// See GNSS-SSR-ArrayOfCorrectionPoints field referencePointLatitude.
    pub corner_nw_lat: i16,
    /// North-West corner correction point longitude.
    ///
    /// The relation between the longitude X in the range [-180, 180] and the
    /// coded number N is:
    ///
    /// N = floor((X / 180) * 2^15)
    ///
    /// See GNSS-SSR-ArrayOfCorrectionPoints field referencePointLongitude.
pub corner_nw_lon: i16,
    /// Spacing of the correction points in the latitude direction.
    ///
    /// See GNSS-SSR-ArrayOfCorrectionPoints field stepOfLatitude.
    pub spacing_lat: u16,
    /// Spacing of the correction points in the longitude direction.
    ///
    /// See GNSS-SSR-ArrayOfCorrectionPoints field stepOfLongitude.
    pub spacing_lon: u16,
    /// Number of steps in the latitude direction.
    ///
    /// See GNSS-SSR-ArrayOfCorrectionPoints field numberOfStepsLatitude.
    pub rows: u16,
    /// Number of steps in the longitude direction.
    ///
    /// See GNSS-SSR-ArrayOfCorrectionPoints field numberOfStepsLongitude.
    pub cols: u16,
    /// Specifies the availability of correction data at the correction points
    /// in the array.
    ///
    /// If a specific bit is enabled (set to 1), the correction is not
    /// available. Only the first rows * cols bits are used, the remainder are
    /// set to 0. If there are more than 64 correction points the remaining
    /// corrections are always available.
    ///
    /// Starting with the northwest corner of the array (top left on a north
    /// oriented map) the correction points are enumerated with row precedence -
    /// first row west to east, second row west to east, until last row west to
    /// east - ending with the southeast corner of the array.
    ///
    /// See GNSS-SSR-ArrayOfCorrectionPoints field bitmaskOfGrids but note the
    /// definition of the bits is inverted.
pub bitmask: u64,
}

impl MsgSsrTileDefinition {
    // Fixed-size payload: all fields are little-endian integers read in
    // wire order.
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<MsgSsrTileDefinition, crate::Error> {
        Ok( MsgSsrTileDefinition{
            sender_id: None,
            tile_set_id: _buf.read_u16::<LittleEndian>()?,
            tile_id: _buf.read_u16::<LittleEndian>()?,
            corner_nw_lat: _buf.read_i16::<LittleEndian>()?,
            corner_nw_lon: _buf.read_i16::<LittleEndian>()?,
            spacing_lat: _buf.read_u16::<LittleEndian>()?,
            spacing_lon: _buf.read_u16::<LittleEndian>()?,
            rows: _buf.read_u16::<LittleEndian>()?,
            cols: _buf.read_u16::<LittleEndian>()?,
            bitmask: _buf.read_u64::<LittleEndian>()?,
        } )
    }
}

impl super::SBPMessage for MsgSsrTileDefinition {
    fn get_message_name(&self) -> &'static str {
        "MSG_SSR_TILE_DEFINITION"
    }

    fn get_message_type(&self) -> u16 {
        1526
    }

    fn get_sender_id(&self) -> Option<u16> {
        self.sender_id
    }

    fn set_sender_id(&mut self, new_id: u16) {
        self.sender_id = Some(new_id);
    }

    fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> {
        let mut frame = Vec::new();
        self.write_frame(&mut frame)?;
        Ok(frame)
    }

    fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> {
        crate::write_frame(self, frame)
    }
}

impl super::ConcreteMessage for MsgSsrTileDefinition {
    const MESSAGE_TYPE: u16 = 1526;
    const MESSAGE_NAME: &'static str = "MSG_SSR_TILE_DEFINITION";
}

impl TryFrom<super::SBP> for MsgSsrTileDefinition {
    type Error = super::TryFromSBPError;
    fn try_from(msg: super::SBP) -> Result<Self, Self::Error> {
        match msg {
            super::SBP::MsgSsrTileDefinition(m) => Ok(m),
            _ => Err(super::TryFromSBPError),
        }
    }
}

// Serialization mirrors the field order used by `parse`.
impl crate::serialize::SbpSerialize for MsgSsrTileDefinition {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.tile_set_id.append_to_sbp_buffer(buf);
        self.tile_id.append_to_sbp_buffer(buf);
        self.corner_nw_lat.append_to_sbp_buffer(buf);
        self.corner_nw_lon.append_to_sbp_buffer(buf);
        self.spacing_lat.append_to_sbp_buffer(buf);
        self.spacing_lon.append_to_sbp_buffer(buf);
self.rows.append_to_sbp_buffer(buf);
        self.cols.append_to_sbp_buffer(buf);
        self.bitmask.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.tile_set_id.sbp_size();
        size += self.tile_id.sbp_size();
        size += self.corner_nw_lat.sbp_size();
        size += self.corner_nw_lon.sbp_size();
        size += self.spacing_lat.sbp_size();
        size += self.spacing_lon.sbp_size();
        size += self.rows.sbp_size();
        size += self.cols.sbp_size();
        size += self.bitmask.sbp_size();
        size
    }
}

/// SSR phase biases corrections for a particular satellite
///
/// Phase biases are to be added to carrier phase measurements.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct PhaseBiasesContent {
    /// Signal encoded following RTCM specifications (DF380, DF381, DF382 and
    /// DF467)
    pub code: u8,
    /// Indicator for integer property
    pub integer_indicator: u8,
    /// Indicator for two groups of Wide-Lane(s) integer property
    pub widelane_integer_indicator: u8,
    /// Signal phase discontinuity counter. Increased for every discontinuity in
    /// phase.
pub discontinuity_counter: u8,
    /// Phase bias for specified signal
    pub bias: i32,
}

impl PhaseBiasesContent {
    // Parses one element in wire order; multi-byte integers are
    // little-endian.
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<PhaseBiasesContent, crate::Error> {
        Ok( PhaseBiasesContent{
            code: _buf.read_u8()?,
            integer_indicator: _buf.read_u8()?,
            widelane_integer_indicator: _buf.read_u8()?,
            discontinuity_counter: _buf.read_u8()?,
            bias: _buf.read_i32::<LittleEndian>()?,
        } )
    }

    // Greedy variant: consumes elements until `buf` is exhausted.
    pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<PhaseBiasesContent>, crate::Error> {
        let mut v = Vec::new();
        while buf.len() > 0 {
            v.push(PhaseBiasesContent::parse(buf)?);
        }
        Ok(v)
    }

    // Bounded variant: reads exactly `n` elements.
    pub fn parse_array_limit(
        buf: &mut &[u8],
        n: usize,
    ) -> Result<Vec<PhaseBiasesContent>, crate::Error> {
        let mut v = Vec::new();
        for _ in 0..n {
            v.push(PhaseBiasesContent::parse(buf)?);
        }
        Ok(v)
    }
}

impl crate::serialize::SbpSerialize for PhaseBiasesContent {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.code.append_to_sbp_buffer(buf);
        self.integer_indicator.append_to_sbp_buffer(buf);
        self.widelane_integer_indicator.append_to_sbp_buffer(buf);
        self.discontinuity_counter.append_to_sbp_buffer(buf);
        self.bias.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.code.sbp_size();
        size += self.integer_indicator.sbp_size();
        size += self.widelane_integer_indicator.sbp_size();
        size += self.discontinuity_counter.sbp_size();
        size += self.bias.sbp_size();
        size
    }
}

/// Header for the MSG_SSR_STEC_CORRECTION message
///
/// A full set of STEC information will likely span multiple SBP messages,
/// since SBP messages are limited to 255 bytes. The header is used to tie
/// multiple SBP messages into a sequence.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct STECHeader {
    /// Unique identifier of the tile set this tile belongs to.
    pub tile_set_id: u16,
    /// Unique identifier of this tile in the tile set.
pub tile_id: u16,
    /// GNSS reference time of the correction
    pub time: GPSTimeSec,
    /// Number of messages in the dataset
    pub num_msgs: u8,
    /// Position of this message in the dataset
    pub seq_num: u8,
    /// Update interval between consecutive corrections. Encoded following RTCM
    /// DF391 specification.
    pub update_interval: u8,
    /// IOD of the SSR atmospheric correction
    pub iod_atmo: u8,
}

impl STECHeader {
    // Parses the header fields in wire order; multi-byte integers are
    // little-endian.
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<STECHeader, crate::Error> {
        Ok( STECHeader{
            tile_set_id: _buf.read_u16::<LittleEndian>()?,
            tile_id: _buf.read_u16::<LittleEndian>()?,
            time: GPSTimeSec::parse(_buf)?,
            num_msgs: _buf.read_u8()?,
            seq_num: _buf.read_u8()?,
            update_interval: _buf.read_u8()?,
            iod_atmo: _buf.read_u8()?,
        } )
    }

    // Greedy variant: consumes elements until `buf` is exhausted.
    pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<STECHeader>, crate::Error> {
        let mut v = Vec::new();
        while buf.len() > 0 {
            v.push(STECHeader::parse(buf)?);
        }
        Ok(v)
    }

    // Bounded variant: reads exactly `n` elements.
    pub fn parse_array_limit(buf: &mut &[u8], n: usize) -> Result<Vec<STECHeader>, crate::Error> {
        let mut v = Vec::new();
        for _ in 0..n {
            v.push(STECHeader::parse(buf)?);
        }
        Ok(v)
    }
}

impl crate::serialize::SbpSerialize for STECHeader {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.tile_set_id.append_to_sbp_buffer(buf);
        self.tile_id.append_to_sbp_buffer(buf);
        self.time.append_to_sbp_buffer(buf);
        self.num_msgs.append_to_sbp_buffer(buf);
        self.seq_num.append_to_sbp_buffer(buf);
        self.update_interval.append_to_sbp_buffer(buf);
        self.iod_atmo.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.tile_set_id.sbp_size();
        size += self.tile_id.sbp_size();
        size += self.time.sbp_size();
        size += self.num_msgs.sbp_size();
        size += self.seq_num.sbp_size();
        size += self.update_interval.sbp_size();
        size += self.iod_atmo.sbp_size();
        size
    }
}

/// Header for MSG_SSR_STEC_CORRECTION_DEP message
///
/// A full set of STEC information will likely span multiple SBP messages,
/// since SBP messages are limited to 255 bytes. The header is used to tie
/// multiple SBP messages into a sequence.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct STECHeaderDepA {
    /// GNSS reference time of the correction
    pub time: GPSTimeSec,
    /// Number of messages in the dataset
    pub num_msgs: u8,
    /// Position of this message in the dataset
    pub seq_num: u8,
    /// Update interval between consecutive corrections. Encoded following RTCM
    /// DF391 specification.
    pub update_interval: u8,
    /// IOD of the SSR atmospheric correction
    pub iod_atmo: u8,
}

impl STECHeaderDepA {
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<STECHeaderDepA, crate::Error> {
        Ok( STECHeaderDepA{
            time: GPSTimeSec::parse(_buf)?,
            num_msgs: _buf.read_u8()?,
            seq_num: _buf.read_u8()?,
            update_interval: _buf.read_u8()?,
            iod_atmo: _buf.read_u8()?,
        } )
    }

    // Greedy variant: consumes elements until `buf` is exhausted.
    pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<STECHeaderDepA>, crate::Error> {
        let mut v = Vec::new();
        while buf.len() > 0 {
            v.push(STECHeaderDepA::parse(buf)?);
        }
        Ok(v)
    }

    // Bounded variant: reads exactly `n` elements.
    pub fn parse_array_limit(
        buf: &mut &[u8],
        n: usize,
    ) -> Result<Vec<STECHeaderDepA>, crate::Error> {
        let mut v = Vec::new();
        for _ in 0..n {
            v.push(STECHeaderDepA::parse(buf)?);
        }
        Ok(v)
    }
}

impl crate::serialize::SbpSerialize for STECHeaderDepA {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.time.append_to_sbp_buffer(buf);
        self.num_msgs.append_to_sbp_buffer(buf);
        self.seq_num.append_to_sbp_buffer(buf);
        self.update_interval.append_to_sbp_buffer(buf);
        self.iod_atmo.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.time.sbp_size();
        size += self.num_msgs.sbp_size();
        size += self.seq_num.sbp_size();
        size += self.update_interval.sbp_size();
        size += self.iod_atmo.sbp_size();
        size
    }
}

/// None
///
/// STEC residual (mean and standard deviation) for the given satellite at the
/// grid point.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct STECResidual {
    /// space vehicle identifier
    pub sv_id: SvId,
    /// STEC residual
    pub residual: i16,
    /// stddev
    pub stddev: u8,
}

impl STECResidual {
    // Parses one element in wire order; `residual` is little-endian.
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<STECResidual, crate::Error> {
        Ok( STECResidual{
            sv_id: SvId::parse(_buf)?,
            residual: _buf.read_i16::<LittleEndian>()?,
            stddev: _buf.read_u8()?,
        } )
    }

    // Greedy variant: consumes elements until `buf` is exhausted.
    pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<STECResidual>, crate::Error> {
        let mut v = Vec::new();
        while buf.len() > 0 {
            v.push(STECResidual::parse(buf)?);
        }
        Ok(v)
    }

    // Bounded variant: reads exactly `n` elements.
    pub fn parse_array_limit(buf: &mut &[u8], n: usize) -> Result<Vec<STECResidual>, crate::Error> {
        let mut v = Vec::new();
        for _ in 0..n {
            v.push(STECResidual::parse(buf)?);
        }
        Ok(v)
    }
}

impl crate::serialize::SbpSerialize for STECResidual {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.sv_id.append_to_sbp_buffer(buf);
        self.residual.append_to_sbp_buffer(buf);
        self.stddev.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.sv_id.sbp_size();
        size += self.residual.sbp_size();
        size += self.stddev.sbp_size();
        size
    }
}

/// None
///
/// STEC residual for the given satellite at the grid point.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct STECResidualNoStd {
    /// space vehicle identifier
    pub sv_id: SvId,
    /// STEC residual
    pub residual: i16,
}

impl STECResidualNoStd {
    // Parses one element in wire order; `residual` is little-endian.
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<STECResidualNoStd, crate::Error> {
        Ok( STECResidualNoStd{
            sv_id: SvId::parse(_buf)?,
            residual: _buf.read_i16::<LittleEndian>()?,
        } )
    }

    // Greedy variant: consumes elements until `buf` is exhausted.
    pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<STECResidualNoStd>, crate::Error> {
        let mut v = Vec::new();
        while buf.len() > 0 {
            v.push(STECResidualNoStd::parse(buf)?);
        }
        Ok(v)
    }

    // Bounded variant: reads exactly `n` elements.
    pub fn parse_array_limit(
        buf: &mut &[u8],
        n: usize,
    ) -> Result<Vec<STECResidualNoStd>, crate::Error> {
        let mut v = Vec::new();
        for _ in 0..n {
            v.push(STECResidualNoStd::parse(buf)?);
        }
        Ok(v)
    }
}

impl crate::serialize::SbpSerialize for STECResidualNoStd {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.sv_id.append_to_sbp_buffer(buf);
        self.residual.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.sv_id.sbp_size();
        size += self.residual.sbp_size();
        size
    }
}

/// None
///
/// STEC polynomial for the given satellite.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct STECSatElement {
    /// Unique space vehicle identifier
    pub sv_id: SvId,
    /// Quality of the STEC data. Encoded following RTCM DF389 specification but
    /// in units of TECU instead of m.
pub stec_quality_indicator: u8,
    /// Coefficients of the STEC polynomial in the order of C00, C01, C10, C11
    pub stec_coeff: Vec<i16>,
}

impl STECSatElement {
    // The coefficient list has a fixed wire length of 4 (C00..C11).
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<STECSatElement, crate::Error> {
        Ok( STECSatElement{
            sv_id: SvId::parse(_buf)?,
            stec_quality_indicator: _buf.read_u8()?,
            stec_coeff: crate::parser::read_s16_array_limit(_buf, 4)?,
        } )
    }

    // Greedy variant: consumes elements until `buf` is exhausted.
    pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<STECSatElement>, crate::Error> {
        let mut v = Vec::new();
        while buf.len() > 0 {
            v.push(STECSatElement::parse(buf)?);
        }
        Ok(v)
    }

    // Bounded variant: reads exactly `n` elements.
    pub fn parse_array_limit(
        buf: &mut &[u8],
        n: usize,
    ) -> Result<Vec<STECSatElement>, crate::Error> {
        let mut v = Vec::new();
        for _ in 0..n {
            v.push(STECSatElement::parse(buf)?);
        }
        Ok(v)
    }
}

impl crate::serialize::SbpSerialize for STECSatElement {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.sv_id.append_to_sbp_buffer(buf);
        self.stec_quality_indicator.append_to_sbp_buffer(buf);
        self.stec_coeff.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.sv_id.sbp_size();
        size += self.stec_quality_indicator.sbp_size();
        size += self.stec_coeff.sbp_size();
        size
    }
}

/// Antenna phase center correction
///
/// Contains phase center offset and elevation variation corrections for one
/// signal on a satellite.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct SatelliteAPC {
    /// GNSS signal identifier (16 bit)
    pub sid: GnssSignal,
    /// Additional satellite information
    pub sat_info: u8,
    /// Satellite Code, as defined by IGS. Typically the space vehicle number.
    pub svn: u16,
    /// Mean phase center offset, X Y and Z axes. See IGS ANTEX file format
    /// description for coordinate system definition.
    pub pco: Vec<i16>,
    /// Elevation dependent phase center variations. First element is 0 degrees
    /// separation from the Z axis, subsequent elements represent elevation
    /// variations in 1 degree increments.
    pub pcv: Vec<i8>,
}

impl SatelliteAPC {
    // Fixed wire lengths: `pco` is 3 x i16 (X/Y/Z), `pcv` is 21 x i8.
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<SatelliteAPC, crate::Error> {
        Ok( SatelliteAPC{
            sid: GnssSignal::parse(_buf)?,
            sat_info: _buf.read_u8()?,
            svn: _buf.read_u16::<LittleEndian>()?,
            pco: crate::parser::read_s16_array_limit(_buf, 3)?,
            pcv: crate::parser::read_s8_array_limit(_buf, 21)?,
        } )
    }

    // Greedy variant: consumes elements until `buf` is exhausted.
    pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<SatelliteAPC>, crate::Error> {
        let mut v = Vec::new();
        while buf.len() > 0 {
            v.push(SatelliteAPC::parse(buf)?);
        }
        Ok(v)
    }

    // Bounded variant: reads exactly `n` elements.
    pub fn parse_array_limit(buf: &mut &[u8], n: usize) -> Result<Vec<SatelliteAPC>, crate::Error> {
        let mut v = Vec::new();
        for _ in 0..n {
            v.push(SatelliteAPC::parse(buf)?);
        }
        Ok(v)
    }
}

impl crate::serialize::SbpSerialize for SatelliteAPC {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.sid.append_to_sbp_buffer(buf);
        self.sat_info.append_to_sbp_buffer(buf);
        self.svn.append_to_sbp_buffer(buf);
        self.pco.append_to_sbp_buffer(buf);
        self.pcv.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.sid.sbp_size();
        size += self.sat_info.sbp_size();
        size += self.svn.sbp_size();
        size += self.pco.sbp_size();
        size += self.pcv.sbp_size();
        size
    }
}

/// None
///
/// Troposphere vertical delays (mean and standard deviation) at the grid
/// point.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct TroposphericDelayCorrection {
    /// Hydrostatic vertical delay
    pub hydro: i16,
    /// Wet vertical delay
    pub wet: i8,
    /// stddev
    pub stddev: u8,
}

impl TroposphericDelayCorrection {
    // Parses one element in wire order; `hydro` is little-endian.
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<TroposphericDelayCorrection, crate::Error> {
        Ok( TroposphericDelayCorrection{
            hydro: _buf.read_i16::<LittleEndian>()?,
            wet: _buf.read_i8()?,
            stddev: _buf.read_u8()?,
        } )
    }

    // Greedy variant: consumes elements until `buf` is exhausted.
    pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<TroposphericDelayCorrection>, crate::Error> {
        let mut v = Vec::new();
        while buf.len() > 0 {
            v.push(TroposphericDelayCorrection::parse(buf)?);
        }
        Ok(v)
    }

    // Bounded variant: reads exactly `n` elements.
    pub fn parse_array_limit(
        buf: &mut &[u8],
        n: usize,
    ) -> Result<Vec<TroposphericDelayCorrection>, crate::Error> {
        let mut v = Vec::new();
        for _ in 0..n {
            v.push(TroposphericDelayCorrection::parse(buf)?);
        }
        Ok(v)
    }
}

impl crate::serialize::SbpSerialize for TroposphericDelayCorrection {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.hydro.append_to_sbp_buffer(buf);
        self.wet.append_to_sbp_buffer(buf);
        self.stddev.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.hydro.sbp_size();
        size += self.wet.sbp_size();
        size += self.stddev.sbp_size();
        size
    }
}

/// None
///
/// Troposphere vertical delays at the grid point.
/// #[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))] #[derive(Debug, Clone)] #[allow(non_snake_case)] pub struct TroposphericDelayCorrectionNoStd { /// Hydrostatic vertical delay pub hydro: i16, /// Wet vertical delay pub wet: i8, } impl TroposphericDelayCorrectionNoStd { #[rustfmt::skip] pub fn parse(_buf: &mut &[u8]) -> Result<TroposphericDelayCorrectionNoStd, crate::Error> { Ok( TroposphericDelayCorrectionNoStd{ hydro: _buf.read_i16::<LittleEndian>()?, wet: _buf.read_i8()?, } ) } pub fn parse_array( buf: &mut &[u8], ) -> Result<Vec<TroposphericDelayCorrectionNoStd>, crate::Error> { let mut v = Vec::new(); while buf.len() > 0 { v.push(TroposphericDelayCorrectionNoStd::parse(buf)?); } Ok(v) } pub fn parse_array_limit( buf: &mut &[u8], n: usize, ) -> Result<Vec<TroposphericDelayCorrectionNoStd>, crate::Error> { let mut v = Vec::new(); for _ in 0..n { v.push(TroposphericDelayCorrectionNoStd::parse(buf)?); } Ok(v) } } impl crate::serialize::SbpSerialize for TroposphericDelayCorrectionNoStd { #[allow(unused_variables)] fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) { self.hydro.append_to_sbp_buffer(buf); self.wet.append_to_sbp_buffer(buf); } fn sbp_size(&self) -> usize { let mut size = 0; size += self.hydro.sbp_size(); size += self.wet.sbp_size(); size } }
// NOTE(review): svd2rust-generated LTDC peripheral memory map. Field order and the
// `_reservedN` padding arrays encode the hardware register offsets (given in each
// `#[doc]` string) — never reorder, resize, or remove fields by hand.
#[doc = r"Register block"] #[repr(C)] pub struct RegisterBlock { #[doc = "0x00 - LTDC identification register"] pub ltdc_idr: LTDC_IDR, #[doc = "0x04 - LDTC layer count register"] pub ltdc_lcr: LTDC_LCR, #[doc = "0x08 - This register defines the number of horizontal synchronization pixels minus 1 and the number of vertical synchronization lines minus 1. Refer to Figure120 and Section19.4: LTDC programmable parameters for an example of configuration."] pub ltdc_sscr: LTDC_SSCR, #[doc = "0x0c - This register defines the accumulated number of horizontal synchronization and back porch pixels minus 1 (HSYNCwidth+HBP-1) and the accumulated number of vertical synchronization and back porch lines minus 1 (VSYNCheight+VBP-1). Refer to Figure120 and Section19.4: LTDC programmable parameters for an example of configuration."] pub ltdc_bpcr: LTDC_BPCR, #[doc = "0x10 - This register defines the accumulated number of horizontal synchronization, back porch and active pixels minus 1 (HSYNC width+HBP+activewidth-1) and the accumulated number of vertical synchronization, back porch lines and active lines minus 1 (VSYNCheight+BVBP+activeheight-1). Refer to Figure120 and Section19.4: LTDC programmable parameters for an example of configuration."] pub ltdc_awcr: LTDC_AWCR, #[doc = "0x14 - This register defines the accumulated number of horizontal synchronization, back porch, active and front porch pixels minus 1 (HSYNCwidth+HBP+activewidth+HFP-1) and the accumulated number of vertical synchronization, back porch lines, active and front lines minus 1 (VSYNCheight+BVBP+activeheight+VFP-1). 
Refer to Figure120 and Section19.4: LTDC programmable parameters for an example of configuration."] pub ltdc_twcr: LTDC_TWCR, #[doc = "0x18 - This register defines the global configuration of the LCD-TFT controller."] pub ltdc_gcr: LTDC_GCR, #[doc = "0x1c - LTDC global configuration 1 register"] pub ltdc_gc1r: LTDC_GC1R, #[doc = "0x20 - LTDC global configuration 2 register"] pub ltdc_gc2r: LTDC_GC2R, #[doc = "0x24 - This register allows to reload either immediately or during the vertical blanking period, the shadow registers values to the active registers. The shadow registers are all Layer1 and Layer2 registers except the LTDC_L1CLUTWR and the LTDC_L2CLUTWR."] pub ltdc_srcr: LTDC_SRCR, _reserved10: [u8; 4usize], #[doc = "0x2c - This register defines the background color (RGB888)."] pub ltdc_bccr: LTDC_BCCR, _reserved11: [u8; 4usize], #[doc = "0x34 - This register determines which status flags generate an interrupt request by setting the corresponding bit to 1."] pub ltdc_ier: LTDC_IER, #[doc = "0x38 - This register returns the interrupt status flag."] pub ltdc_isr: LTDC_ISR, #[doc = "0x3c - LTDC Interrupt Clear Register"] pub ltdc_icr: LTDC_ICR, #[doc = "0x40 - This register defines the position of the line interrupt. The line value to be programmed depends on the timings parameters. Refer to Figure120."] pub ltdc_lipcr: LTDC_LIPCR, #[doc = "0x44 - LTDC current position status register"] pub ltdc_cpsr: LTDC_CPSR, #[doc = "0x48 - This register returns the status of the current display phase which is controlled by the HSYNC, VSYNC, and horizontal/vertical DE signals. Example: if the current display phase is the vertical synchronization, the VSYNCS bit is set (active high). 
If the current display phase is the horizontal synchronization, the HSYNCS bit is active high."] pub ltdc_cdsr: LTDC_CDSR, _reserved17: [u8; 56usize], #[doc = "0x84 - LTDC layer 1 control register"] pub ltdc_l1cr: LTDC_L1CR, #[doc = "0x88 - This register defines the horizontal position (first and last pixel) of the layer 1 or 2 window. The first visible pixel of a line is the programmed value of AHBP\\[11:0\\] bits + 1 in the LTDC_BPCR register. The last visible pixel of a line is the programmed value of AAW\\[10:0\\] bits in the LTDC_AWCR register."] pub ltdc_l1whpcr: LTDC_L1WHPCR, #[doc = "0x8c - This register defines the vertical position (first and last line) of the layer1 or 2 window. The first visible line of a frame is the programmed value of AVBP\\[11:0\\] bits + 1 in the register LTDC_BPCR register. The last visible line of a frame is the programmed value of AAH\\[11:0\\] bits in the LTDC_AWCR register."] pub ltdc_l1wvpcr: LTDC_L1WVPCR, #[doc = "0x90 - This register defines the color key value (RGB), that is used by the color keying."] pub ltdc_l1ckcr: LTDC_L1CKCR, #[doc = "0x94 - This register defines the pixel format that is used for the stored data in the frame buffer of a layer. The pixel data is read from the frame buffer and then transformed to the internal format 8888 (ARGB)."] pub ltdc_l1pfcr: LTDC_L1PFCR, #[doc = "0x98 - This register defines the constant alpha value (divided by 255 by hardware), that is used in the alpha blending. Refer to LTDC_LxBFCR register."] pub ltdc_l1cacr: LTDC_L1CACR, #[doc = "0x9c - This register defines the default color of a layer in the format ARGB. The default color is used outside the defined layer window or when a layer is disabled. The reset value of 0x00000000 defines a transparent black color."] pub ltdc_l1dccr: LTDC_L1DCCR, #[doc = "0xa0 - This register defines the blending factors F1 and F2. 
The general blending formula is: BC = BF1 x C + BF2 x Cs BC = blended color BF1 = blend factor 1 C = current layer color BF2 = blend factor 2 Cs = subjacent layers blended color"] pub ltdc_l1bfcr: LTDC_L1BFCR, _reserved25: [u8; 8usize], #[doc = "0xac - This register defines the color frame buffer start address which has to point to the address where the pixel data of the top left pixel of a layer is stored in the frame buffer."] pub ltdc_l1cfbar: LTDC_L1CFBAR, #[doc = "0xb0 - This register defines the color frame buffer line length and pitch."] pub ltdc_l1cfblr: LTDC_L1CFBLR, #[doc = "0xb4 - This register defines the number of lines in the color frame buffer."] pub ltdc_l1cfblnr: LTDC_L1CFBLNR, _reserved28: [u8; 12usize], #[doc = "0xc4 - This register defines the CLUT address and the RGB value."] pub ltdc_l1clutwr: LTDC_L1CLUTWR, _reserved29: [u8; 60usize], #[doc = "0x104 - LTDC layer 2 control register"] pub ltdc_l2cr: LTDC_L2CR, #[doc = "0x108 - This register defines the horizontal position (first and last pixel) of the layer 1 or 2 window. The first visible pixel of a line is the programmed value of AHBP\\[11:0\\] bits + 1 in the LTDC_BPCR register. The last visible pixel of a line is the programmed value of AAW\\[10:0\\] bits in the LTDC_AWCR register."] pub ltdc_l2whpcr: LTDC_L2WHPCR, #[doc = "0x10c - This register defines the vertical position (first and last line) of the layer1 or 2 window. The first visible line of a frame is the programmed value of AVBP\\[11:0\\] bits + 1 in the register LTDC_BPCR register. The last visible line of a frame is the programmed value of AAH\\[11:0\\] bits in the LTDC_AWCR register."] pub ltdc_l2wvpcr: LTDC_L2WVPCR, #[doc = "0x110 - This register defines the color key value (RGB), that is used by the color keying."] pub ltdc_l2ckcr: LTDC_L2CKCR, #[doc = "0x114 - This register defines the pixel format that is used for the stored data in the frame buffer of a layer. 
The pixel data is read from the frame buffer and then transformed to the internal format 8888 (ARGB)."] pub ltdc_l2pfcr: LTDC_L2PFCR, #[doc = "0x118 - This register defines the constant alpha value (divided by 255 by hardware), that is used in the alpha blending. Refer to LTDC_LxBFCR register."] pub ltdc_l2cacr: LTDC_L2CACR, #[doc = "0x11c - This register defines the default color of a layer in the format ARGB. The default color is used outside the defined layer window or when a layer is disabled. The reset value of 0x00000000 defines a transparent black color."] pub ltdc_l2dccr: LTDC_L2DCCR, #[doc = "0x120 - This register defines the blending factors F1 and F2. The general blending formula is: BC = BF1 x C + BF2 x Cs BC = blended color BF1 = blend factor 1 C = current layer color BF2 = blend factor 2 Cs = subjacent layers blended color"] pub ltdc_l2bfcr: LTDC_L2BFCR, _reserved37: [u8; 8usize], #[doc = "0x12c - This register defines the color frame buffer start address which has to point to the address where the pixel data of the top left pixel of a layer is stored in the frame buffer."] pub ltdc_l2cfbar: LTDC_L2CFBAR, #[doc = "0x130 - This register defines the color frame buffer line length and pitch."] pub ltdc_l2cfblr: LTDC_L2CFBLR, #[doc = "0x134 - This register defines the number of lines in the color frame buffer."] pub ltdc_l2cfblnr: LTDC_L2CFBLNR, _reserved40: [u8; 12usize], #[doc = "0x144 - This register defines the CLUT address and the RGB value."] pub ltdc_l2clutwr: LTDC_L2CLUTWR, } #[doc = "LTDC identification register\n\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_idr](ltdc_idr) module"]
// NOTE(review): svd2rust-generated accessor pattern — each register is a
// `crate::Reg<u32, _MARKER>` alias; `Readable`/`Writable` marker impls select the
// available access methods, and the companion `pub mod` holds the field API.
pub type LTDC_IDR = crate::Reg<u32, _LTDC_IDR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_IDR; #[doc = "`read()` method returns [ltdc_idr::R](ltdc_idr::R) reader structure"] impl crate::Readable for LTDC_IDR {} #[doc = "LTDC identification register"] pub mod ltdc_idr; #[doc = "LDTC layer count register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_lcr](ltdc_lcr) module"] pub type LTDC_LCR = crate::Reg<u32, _LTDC_LCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_LCR; #[doc = "`read()` method returns [ltdc_lcr::R](ltdc_lcr::R) reader structure"] impl crate::Readable for LTDC_LCR {} #[doc = "LDTC layer count register"] pub mod ltdc_lcr; #[doc = "This register defines the number of horizontal synchronization pixels minus 1 and the number of vertical synchronization lines minus 1. Refer to Figure120 and Section19.4: LTDC programmable parameters for an example of configuration.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_sscr](ltdc_sscr) module"] pub type LTDC_SSCR = crate::Reg<u32, _LTDC_SSCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_SSCR; #[doc = "`read()` method returns [ltdc_sscr::R](ltdc_sscr::R) reader structure"] impl crate::Readable for LTDC_SSCR {} #[doc = "`write(|w| ..)` method takes [ltdc_sscr::W](ltdc_sscr::W) writer structure"] impl crate::Writable for LTDC_SSCR {} #[doc = "This register defines the number of horizontal synchronization pixels minus 1 and the number of vertical synchronization lines minus 1. Refer to Figure120 and Section19.4: LTDC programmable parameters for an example of configuration."] pub mod ltdc_sscr; #[doc = "This register defines the accumulated number of horizontal synchronization and back porch pixels minus 1 (HSYNCwidth+HBP-1) and the accumulated number of vertical synchronization and back porch lines minus 1 (VSYNCheight+VBP-1). Refer to Figure120 and Section19.4: LTDC programmable parameters for an example of configuration.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_bpcr](ltdc_bpcr) module"] pub type LTDC_BPCR = crate::Reg<u32, _LTDC_BPCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_BPCR; #[doc = "`read()` method returns [ltdc_bpcr::R](ltdc_bpcr::R) reader structure"] impl crate::Readable for LTDC_BPCR {} #[doc = "`write(|w| ..)` method takes [ltdc_bpcr::W](ltdc_bpcr::W) writer structure"] impl crate::Writable for LTDC_BPCR {} #[doc = "This register defines the accumulated number of horizontal synchronization and back porch pixels minus 1 (HSYNCwidth+HBP-1) and the accumulated number of vertical synchronization and back porch lines minus 1 (VSYNCheight+VBP-1). Refer to Figure120 and Section19.4: LTDC programmable parameters for an example of configuration."] pub mod ltdc_bpcr; #[doc = "This register defines the accumulated number of horizontal synchronization, back porch and active pixels minus 1 (HSYNC width+HBP+activewidth-1) and the accumulated number of vertical synchronization, back porch lines and active lines minus 1 (VSYNCheight+BVBP+activeheight-1). Refer to Figure120 and Section19.4: LTDC programmable parameters for an example of configuration.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_awcr](ltdc_awcr) module"] pub type LTDC_AWCR = crate::Reg<u32, _LTDC_AWCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_AWCR; #[doc = "`read()` method returns [ltdc_awcr::R](ltdc_awcr::R) reader structure"] impl crate::Readable for LTDC_AWCR {} #[doc = "`write(|w| ..)` method takes [ltdc_awcr::W](ltdc_awcr::W) writer structure"] impl crate::Writable for LTDC_AWCR {} #[doc = "This register defines the accumulated number of horizontal synchronization, back porch and active pixels minus 1 (HSYNC width+HBP+activewidth-1) and the accumulated number of vertical synchronization, back porch lines and active lines minus 1 (VSYNCheight+BVBP+activeheight-1). Refer to Figure120 and Section19.4: LTDC programmable parameters for an example of configuration."] pub mod ltdc_awcr; #[doc = "This register defines the accumulated number of horizontal synchronization, back porch, active and front porch pixels minus 1 (HSYNCwidth+HBP+activewidth+HFP-1) and the accumulated number of vertical synchronization, back porch lines, active and front lines minus 1 (VSYNCheight+BVBP+activeheight+VFP-1). Refer to Figure120 and Section19.4: LTDC programmable parameters for an example of configuration.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_twcr](ltdc_twcr) module"] pub type LTDC_TWCR = crate::Reg<u32, _LTDC_TWCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_TWCR; #[doc = "`read()` method returns [ltdc_twcr::R](ltdc_twcr::R) reader structure"] impl crate::Readable for LTDC_TWCR {} #[doc = "`write(|w| ..)` method takes [ltdc_twcr::W](ltdc_twcr::W) writer structure"] impl crate::Writable for LTDC_TWCR {} #[doc = "This register defines the accumulated number of horizontal synchronization, back porch, active and front porch pixels minus 1 (HSYNCwidth+HBP+activewidth+HFP-1) and the accumulated number of vertical synchronization, back porch lines, active and front lines minus 1 (VSYNCheight+BVBP+activeheight+VFP-1). Refer to Figure120 and Section19.4: LTDC programmable parameters for an example of configuration."] pub mod ltdc_twcr; #[doc = "This register defines the global configuration of the LCD-TFT controller.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_gcr](ltdc_gcr) module"] pub type LTDC_GCR = crate::Reg<u32, _LTDC_GCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_GCR; #[doc = "`read()` method returns [ltdc_gcr::R](ltdc_gcr::R) reader structure"] impl crate::Readable for LTDC_GCR {} #[doc = "`write(|w| ..)` method takes [ltdc_gcr::W](ltdc_gcr::W) writer structure"] impl crate::Writable for LTDC_GCR {} #[doc = "This register defines the global configuration of the LCD-TFT controller."] pub mod ltdc_gcr; #[doc = "LTDC global configuration 1 register\n\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_gc1r](ltdc_gc1r) module"]
// NOTE(review): generated by svd2rust. Status/ID registers (GC1R, GC2R, ISR, CPSR)
// implement only `Readable`; configuration registers also implement `Writable`.
pub type LTDC_GC1R = crate::Reg<u32, _LTDC_GC1R>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_GC1R; #[doc = "`read()` method returns [ltdc_gc1r::R](ltdc_gc1r::R) reader structure"] impl crate::Readable for LTDC_GC1R {} #[doc = "LTDC global configuration 1 register"] pub mod ltdc_gc1r; #[doc = "LTDC global configuration 2 register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_gc2r](ltdc_gc2r) module"] pub type LTDC_GC2R = crate::Reg<u32, _LTDC_GC2R>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_GC2R; #[doc = "`read()` method returns [ltdc_gc2r::R](ltdc_gc2r::R) reader structure"] impl crate::Readable for LTDC_GC2R {} #[doc = "LTDC global configuration 2 register"] pub mod ltdc_gc2r; #[doc = "This register allows to reload either immediately or during the vertical blanking period, the shadow registers values to the active registers. The shadow registers are all Layer1 and Layer2 registers except the LTDC_L1CLUTWR and the LTDC_L2CLUTWR.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_srcr](ltdc_srcr) module"] pub type LTDC_SRCR = crate::Reg<u32, _LTDC_SRCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_SRCR; #[doc = "`read()` method returns [ltdc_srcr::R](ltdc_srcr::R) reader structure"] impl crate::Readable for LTDC_SRCR {} #[doc = "`write(|w| ..)` method takes [ltdc_srcr::W](ltdc_srcr::W) writer structure"] impl crate::Writable for LTDC_SRCR {} #[doc = "This register allows to reload either immediately or during the vertical blanking period, the shadow registers values to the active registers. The shadow registers are all Layer1 and Layer2 registers except the LTDC_L1CLUTWR and the LTDC_L2CLUTWR."] pub mod ltdc_srcr; #[doc = "This register defines the background color (RGB888).\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_bccr](ltdc_bccr) module"] pub type LTDC_BCCR = crate::Reg<u32, _LTDC_BCCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_BCCR; #[doc = "`read()` method returns [ltdc_bccr::R](ltdc_bccr::R) reader structure"] impl crate::Readable for LTDC_BCCR {} #[doc = "`write(|w| ..)` method takes [ltdc_bccr::W](ltdc_bccr::W) writer structure"] impl crate::Writable for LTDC_BCCR {} #[doc = "This register defines the background color (RGB888)."] pub mod ltdc_bccr; #[doc = "This register determines which status flags generate an interrupt request by setting the corresponding bit to 1.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_ier](ltdc_ier) module"] pub type LTDC_IER = crate::Reg<u32, _LTDC_IER>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_IER; #[doc = "`read()` method returns [ltdc_ier::R](ltdc_ier::R) reader structure"] impl crate::Readable for LTDC_IER {} #[doc = "`write(|w| ..)` method takes [ltdc_ier::W](ltdc_ier::W) writer structure"] impl crate::Writable for LTDC_IER {} #[doc = "This register determines which status flags generate an interrupt request by setting the corresponding bit to 1."] pub mod ltdc_ier; #[doc = "This register returns the interrupt status flag.\n\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_isr](ltdc_isr) module"] pub type LTDC_ISR = crate::Reg<u32, _LTDC_ISR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_ISR; #[doc = "`read()` method returns [ltdc_isr::R](ltdc_isr::R) reader structure"] impl crate::Readable for LTDC_ISR {} #[doc = "This register returns the interrupt status flag."] pub mod ltdc_isr; #[doc = "LTDC Interrupt Clear Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_icr](ltdc_icr) module"] pub type LTDC_ICR = crate::Reg<u32, _LTDC_ICR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_ICR; #[doc = "`read()` method returns [ltdc_icr::R](ltdc_icr::R) reader structure"] impl crate::Readable for LTDC_ICR {} #[doc = "`write(|w| ..)` method takes [ltdc_icr::W](ltdc_icr::W) writer structure"] impl crate::Writable for LTDC_ICR {} #[doc = "LTDC Interrupt Clear Register"] pub mod ltdc_icr; #[doc = "This register defines the position of the line interrupt. The line value to be programmed depends on the timings parameters. Refer to Figure120.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_lipcr](ltdc_lipcr) module"] pub type LTDC_LIPCR = crate::Reg<u32, _LTDC_LIPCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_LIPCR; #[doc = "`read()` method returns [ltdc_lipcr::R](ltdc_lipcr::R) reader structure"] impl crate::Readable for LTDC_LIPCR {} #[doc = "`write(|w| ..)` method takes [ltdc_lipcr::W](ltdc_lipcr::W) writer structure"] impl crate::Writable for LTDC_LIPCR {} #[doc = "This register defines the position of the line interrupt. The line value to be programmed depends on the timings parameters. Refer to Figure120."] pub mod ltdc_lipcr; #[doc = "LTDC current position status register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_cpsr](ltdc_cpsr) module"] pub type LTDC_CPSR = crate::Reg<u32, _LTDC_CPSR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_CPSR; #[doc = "`read()` method returns [ltdc_cpsr::R](ltdc_cpsr::R) reader structure"] impl crate::Readable for LTDC_CPSR {} #[doc = "LTDC current position status register"] pub mod ltdc_cpsr; #[doc = "This register returns the status of the current display phase which is controlled by the HSYNC, VSYNC, and horizontal/vertical DE signals. Example: if the current display phase is the vertical synchronization, the VSYNCS bit is set (active high). If the current display phase is the horizontal synchronization, the HSYNCS bit is active high.\n\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_cdsr](ltdc_cdsr) module"]
// NOTE(review): svd2rust-generated Layer-1 window/format/blending register
// accessors; the `Reg<u32, _MARKER>` alias + marker-impl pattern continues below.
pub type LTDC_CDSR = crate::Reg<u32, _LTDC_CDSR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_CDSR; #[doc = "`read()` method returns [ltdc_cdsr::R](ltdc_cdsr::R) reader structure"] impl crate::Readable for LTDC_CDSR {} #[doc = "This register returns the status of the current display phase which is controlled by the HSYNC, VSYNC, and horizontal/vertical DE signals. Example: if the current display phase is the vertical synchronization, the VSYNCS bit is set (active high). If the current display phase is the horizontal synchronization, the HSYNCS bit is active high."] pub mod ltdc_cdsr; #[doc = "LTDC layer 1 control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l1cr](ltdc_l1cr) module"] pub type LTDC_L1CR = crate::Reg<u32, _LTDC_L1CR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L1CR; #[doc = "`read()` method returns [ltdc_l1cr::R](ltdc_l1cr::R) reader structure"] impl crate::Readable for LTDC_L1CR {} #[doc = "`write(|w| ..)` method takes [ltdc_l1cr::W](ltdc_l1cr::W) writer structure"] impl crate::Writable for LTDC_L1CR {} #[doc = "LTDC layer 1 control register"] pub mod ltdc_l1cr; #[doc = "This register defines the horizontal position (first and last pixel) of the layer 1 or 2 window. The first visible pixel of a line is the programmed value of AHBP\\[11:0\\] bits + 1 in the LTDC_BPCR register. 
The last visible pixel of a line is the programmed value of AAW\\[10:0\\] bits in the LTDC_AWCR register.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l1whpcr](ltdc_l1whpcr) module"] pub type LTDC_L1WHPCR = crate::Reg<u32, _LTDC_L1WHPCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L1WHPCR; #[doc = "`read()` method returns [ltdc_l1whpcr::R](ltdc_l1whpcr::R) reader structure"] impl crate::Readable for LTDC_L1WHPCR {} #[doc = "`write(|w| ..)` method takes [ltdc_l1whpcr::W](ltdc_l1whpcr::W) writer structure"] impl crate::Writable for LTDC_L1WHPCR {} #[doc = "This register defines the horizontal position (first and last pixel) of the layer 1 or 2 window. The first visible pixel of a line is the programmed value of AHBP\\[11:0\\] bits + 1 in the LTDC_BPCR register. The last visible pixel of a line is the programmed value of AAW\\[10:0\\] bits in the LTDC_AWCR register."] pub mod ltdc_l1whpcr; #[doc = "This register defines the vertical position (first and last line) of the layer1 or 2 window. The first visible line of a frame is the programmed value of AVBP\\[11:0\\] bits + 1 in the register LTDC_BPCR register. The last visible line of a frame is the programmed value of AAH\\[11:0\\] bits in the LTDC_AWCR register.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l1wvpcr](ltdc_l1wvpcr) module"] pub type LTDC_L1WVPCR = crate::Reg<u32, _LTDC_L1WVPCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L1WVPCR; #[doc = "`read()` method returns [ltdc_l1wvpcr::R](ltdc_l1wvpcr::R) reader structure"] impl crate::Readable for LTDC_L1WVPCR {} #[doc = "`write(|w| ..)` method takes [ltdc_l1wvpcr::W](ltdc_l1wvpcr::W) writer structure"] impl crate::Writable for LTDC_L1WVPCR {} #[doc = "This register defines the vertical position (first and last line) of the layer1 or 2 window. The first visible line of a frame is the programmed value of AVBP\\[11:0\\] bits + 1 in the register LTDC_BPCR register. The last visible line of a frame is the programmed value of AAH\\[11:0\\] bits in the LTDC_AWCR register."] pub mod ltdc_l1wvpcr; #[doc = "This register defines the color key value (RGB), that is used by the color keying.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l1ckcr](ltdc_l1ckcr) module"] pub type LTDC_L1CKCR = crate::Reg<u32, _LTDC_L1CKCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L1CKCR; #[doc = "`read()` method returns [ltdc_l1ckcr::R](ltdc_l1ckcr::R) reader structure"] impl crate::Readable for LTDC_L1CKCR {} #[doc = "`write(|w| ..)` method takes [ltdc_l1ckcr::W](ltdc_l1ckcr::W) writer structure"] impl crate::Writable for LTDC_L1CKCR {} #[doc = "This register defines the color key value (RGB), that is used by the color keying."] pub mod ltdc_l1ckcr; #[doc = "This register defines the pixel format that is used for the stored data in the frame buffer of a layer. 
The pixel data is read from the frame buffer and then transformed to the internal format 8888 (ARGB).\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l1pfcr](ltdc_l1pfcr) module"] pub type LTDC_L1PFCR = crate::Reg<u32, _LTDC_L1PFCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L1PFCR; #[doc = "`read()` method returns [ltdc_l1pfcr::R](ltdc_l1pfcr::R) reader structure"] impl crate::Readable for LTDC_L1PFCR {} #[doc = "`write(|w| ..)` method takes [ltdc_l1pfcr::W](ltdc_l1pfcr::W) writer structure"] impl crate::Writable for LTDC_L1PFCR {} #[doc = "This register defines the pixel format that is used for the stored data in the frame buffer of a layer. The pixel data is read from the frame buffer and then transformed to the internal format 8888 (ARGB)."] pub mod ltdc_l1pfcr; #[doc = "This register defines the constant alpha value (divided by 255 by hardware), that is used in the alpha blending. Refer to LTDC_LxBFCR register.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l1cacr](ltdc_l1cacr) module"] pub type LTDC_L1CACR = crate::Reg<u32, _LTDC_L1CACR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L1CACR; #[doc = "`read()` method returns [ltdc_l1cacr::R](ltdc_l1cacr::R) reader structure"] impl crate::Readable for LTDC_L1CACR {} #[doc = "`write(|w| ..)` method takes [ltdc_l1cacr::W](ltdc_l1cacr::W) writer structure"] impl crate::Writable for LTDC_L1CACR {} #[doc = "This register defines the constant alpha value (divided by 255 by hardware), that is used in the alpha blending. Refer to LTDC_LxBFCR register."] pub mod ltdc_l1cacr; #[doc = "This register defines the default color of a layer in the format ARGB. The default color is used outside the defined layer window or when a layer is disabled. The reset value of 0x00000000 defines a transparent black color.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l1dccr](ltdc_l1dccr) module"] pub type LTDC_L1DCCR = crate::Reg<u32, _LTDC_L1DCCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L1DCCR; #[doc = "`read()` method returns [ltdc_l1dccr::R](ltdc_l1dccr::R) reader structure"] impl crate::Readable for LTDC_L1DCCR {} #[doc = "`write(|w| ..)` method takes [ltdc_l1dccr::W](ltdc_l1dccr::W) writer structure"] impl crate::Writable for LTDC_L1DCCR {} #[doc = "This register defines the default color of a layer in the format ARGB. The default color is used outside the defined layer window or when a layer is disabled. The reset value of 0x00000000 defines a transparent black color."] pub mod ltdc_l1dccr; #[doc = "This register defines the blending factors F1 and F2. 
The general blending formula is: BC = BF1 x C + BF2 x Cs BC = blended color BF1 = blend factor 1 C = current layer color BF2 = blend factor 2 Cs = subjacent layers blended color\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l1bfcr](ltdc_l1bfcr) module"] pub type LTDC_L1BFCR = crate::Reg<u32, _LTDC_L1BFCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L1BFCR; #[doc = "`read()` method returns [ltdc_l1bfcr::R](ltdc_l1bfcr::R) reader structure"] impl crate::Readable for LTDC_L1BFCR {} #[doc = "`write(|w| ..)` method takes [ltdc_l1bfcr::W](ltdc_l1bfcr::W) writer structure"] impl crate::Writable for LTDC_L1BFCR {} #[doc = "This register defines the blending factors F1 and F2. The general blending formula is: BC = BF1 x C + BF2 x Cs BC = blended color BF1 = blend factor 1 C = current layer color BF2 = blend factor 2 Cs = subjacent layers blended color"] pub mod ltdc_l1bfcr; #[doc = "This register defines the color frame buffer start address which has to point to the address where the pixel data of the top left pixel of a layer is stored in the frame buffer.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l1cfbar](ltdc_l1cfbar) module"] pub type LTDC_L1CFBAR = crate::Reg<u32, _LTDC_L1CFBAR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L1CFBAR; #[doc = "`read()` method returns [ltdc_l1cfbar::R](ltdc_l1cfbar::R) reader structure"] impl crate::Readable for LTDC_L1CFBAR {} #[doc = "`write(|w| ..)` method takes [ltdc_l1cfbar::W](ltdc_l1cfbar::W) writer structure"] impl crate::Writable for LTDC_L1CFBAR {} #[doc = "This register defines the color frame buffer start address which has to point to the address where the pixel data of the top left pixel of a layer is stored in the frame buffer."] pub mod ltdc_l1cfbar; #[doc = "This register defines the color frame buffer line length and pitch.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l1cfblr](ltdc_l1cfblr) module"] pub type LTDC_L1CFBLR = crate::Reg<u32, _LTDC_L1CFBLR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L1CFBLR; #[doc = "`read()` method returns [ltdc_l1cfblr::R](ltdc_l1cfblr::R) reader structure"] impl crate::Readable for LTDC_L1CFBLR {} #[doc = "`write(|w| ..)` method takes [ltdc_l1cfblr::W](ltdc_l1cfblr::W) writer structure"] impl crate::Writable for LTDC_L1CFBLR {} #[doc = "This register defines the color frame buffer line length and pitch."] pub mod ltdc_l1cfblr; #[doc = "This register defines the number of lines in the color frame buffer.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l1cfblnr](ltdc_l1cfblnr) module"] pub type LTDC_L1CFBLNR = crate::Reg<u32, _LTDC_L1CFBLNR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L1CFBLNR; #[doc = "`read()` method returns [ltdc_l1cfblnr::R](ltdc_l1cfblnr::R) reader structure"] impl crate::Readable for LTDC_L1CFBLNR {} #[doc = "`write(|w| ..)` method takes [ltdc_l1cfblnr::W](ltdc_l1cfblnr::W) writer structure"] impl crate::Writable for LTDC_L1CFBLNR {} #[doc = "This register defines the number of lines in the color frame buffer."] pub mod ltdc_l1cfblnr; #[doc = "This register defines the CLUT address and the RGB value.\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l1clutwr](ltdc_l1clutwr) module"] pub type LTDC_L1CLUTWR = crate::Reg<u32, _LTDC_L1CLUTWR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L1CLUTWR; #[doc = "`write(|w| ..)` method takes [ltdc_l1clutwr::W](ltdc_l1clutwr::W) writer structure"] impl crate::Writable for LTDC_L1CLUTWR {} #[doc = "This register defines the CLUT address and the RGB value."] pub mod ltdc_l1clutwr; #[doc = "LTDC layer 2 control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l2cr](ltdc_l2cr) module"] pub type LTDC_L2CR = crate::Reg<u32, _LTDC_L2CR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L2CR; #[doc = "`read()` method returns [ltdc_l2cr::R](ltdc_l2cr::R) reader structure"] impl crate::Readable for LTDC_L2CR {} #[doc = "`write(|w| ..)` method takes [ltdc_l2cr::W](ltdc_l2cr::W) writer structure"] impl crate::Writable for LTDC_L2CR {} #[doc = "LTDC layer 2 control register"] pub mod ltdc_l2cr; #[doc = "This register defines the horizontal position (first and last pixel) of the layer 1 or 2 window. The first visible pixel of a line is the programmed value of AHBP\\[11:0\\] bits + 1 in the LTDC_BPCR register. The last visible pixel of a line is the programmed value of AAW\\[10:0\\] bits in the LTDC_AWCR register.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l2whpcr](ltdc_l2whpcr) module"] pub type LTDC_L2WHPCR = crate::Reg<u32, _LTDC_L2WHPCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L2WHPCR; #[doc = "`read()` method returns [ltdc_l2whpcr::R](ltdc_l2whpcr::R) reader structure"] impl crate::Readable for LTDC_L2WHPCR {} #[doc = "`write(|w| ..)` method takes [ltdc_l2whpcr::W](ltdc_l2whpcr::W) writer structure"] impl crate::Writable for LTDC_L2WHPCR {} #[doc = "This register defines the horizontal position (first and last pixel) of the layer 1 or 2 window. The first visible pixel of a line is the programmed value of AHBP\\[11:0\\] bits + 1 in the LTDC_BPCR register. The last visible pixel of a line is the programmed value of AAW\\[10:0\\] bits in the LTDC_AWCR register."] pub mod ltdc_l2whpcr; #[doc = "This register defines the vertical position (first and last line) of the layer1 or 2 window. The first visible line of a frame is the programmed value of AVBP\\[11:0\\] bits + 1 in the register LTDC_BPCR register. The last visible line of a frame is the programmed value of AAH\\[11:0\\] bits in the LTDC_AWCR register.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l2wvpcr](ltdc_l2wvpcr) module"] pub type LTDC_L2WVPCR = crate::Reg<u32, _LTDC_L2WVPCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L2WVPCR; #[doc = "`read()` method returns [ltdc_l2wvpcr::R](ltdc_l2wvpcr::R) reader structure"] impl crate::Readable for LTDC_L2WVPCR {} #[doc = "`write(|w| ..)` method takes [ltdc_l2wvpcr::W](ltdc_l2wvpcr::W) writer structure"] impl crate::Writable for LTDC_L2WVPCR {} #[doc = "This register defines the vertical position (first and last line) of the layer1 or 2 window. The first visible line of a frame is the programmed value of AVBP\\[11:0\\] bits + 1 in the register LTDC_BPCR register. The last visible line of a frame is the programmed value of AAH\\[11:0\\] bits in the LTDC_AWCR register."] pub mod ltdc_l2wvpcr; #[doc = "This register defines the color key value (RGB), that is used by the color keying.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l2ckcr](ltdc_l2ckcr) module"] pub type LTDC_L2CKCR = crate::Reg<u32, _LTDC_L2CKCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L2CKCR; #[doc = "`read()` method returns [ltdc_l2ckcr::R](ltdc_l2ckcr::R) reader structure"] impl crate::Readable for LTDC_L2CKCR {} #[doc = "`write(|w| ..)` method takes [ltdc_l2ckcr::W](ltdc_l2ckcr::W) writer structure"] impl crate::Writable for LTDC_L2CKCR {} #[doc = "This register defines the color key value (RGB), that is used by the color keying."] pub mod ltdc_l2ckcr; #[doc = "This register defines the pixel format that is used for the stored data in the frame buffer of a layer. 
The pixel data is read from the frame buffer and then transformed to the internal format 8888 (ARGB).\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l2pfcr](ltdc_l2pfcr) module"] pub type LTDC_L2PFCR = crate::Reg<u32, _LTDC_L2PFCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L2PFCR; #[doc = "`read()` method returns [ltdc_l2pfcr::R](ltdc_l2pfcr::R) reader structure"] impl crate::Readable for LTDC_L2PFCR {} #[doc = "`write(|w| ..)` method takes [ltdc_l2pfcr::W](ltdc_l2pfcr::W) writer structure"] impl crate::Writable for LTDC_L2PFCR {} #[doc = "This register defines the pixel format that is used for the stored data in the frame buffer of a layer. The pixel data is read from the frame buffer and then transformed to the internal format 8888 (ARGB)."] pub mod ltdc_l2pfcr; #[doc = "This register defines the constant alpha value (divided by 255 by hardware), that is used in the alpha blending. Refer to LTDC_LxBFCR register.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l2cacr](ltdc_l2cacr) module"] pub type LTDC_L2CACR = crate::Reg<u32, _LTDC_L2CACR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L2CACR; #[doc = "`read()` method returns [ltdc_l2cacr::R](ltdc_l2cacr::R) reader structure"] impl crate::Readable for LTDC_L2CACR {} #[doc = "`write(|w| ..)` method takes [ltdc_l2cacr::W](ltdc_l2cacr::W) writer structure"] impl crate::Writable for LTDC_L2CACR {} #[doc = "This register defines the constant alpha value (divided by 255 by hardware), that is used in the alpha blending. Refer to LTDC_LxBFCR register."] pub mod ltdc_l2cacr; #[doc = "This register defines the default color of a layer in the format ARGB. The default color is used outside the defined layer window or when a layer is disabled. The reset value of 0x00000000 defines a transparent black color.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l2dccr](ltdc_l2dccr) module"] pub type LTDC_L2DCCR = crate::Reg<u32, _LTDC_L2DCCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L2DCCR; #[doc = "`read()` method returns [ltdc_l2dccr::R](ltdc_l2dccr::R) reader structure"] impl crate::Readable for LTDC_L2DCCR {} #[doc = "`write(|w| ..)` method takes [ltdc_l2dccr::W](ltdc_l2dccr::W) writer structure"] impl crate::Writable for LTDC_L2DCCR {} #[doc = "This register defines the default color of a layer in the format ARGB. The default color is used outside the defined layer window or when a layer is disabled. The reset value of 0x00000000 defines a transparent black color."] pub mod ltdc_l2dccr; #[doc = "This register defines the blending factors F1 and F2. 
The general blending formula is: BC = BF1 x C + BF2 x Cs BC = blended color BF1 = blend factor 1 C = current layer color BF2 = blend factor 2 Cs = subjacent layers blended color\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l2bfcr](ltdc_l2bfcr) module"] pub type LTDC_L2BFCR = crate::Reg<u32, _LTDC_L2BFCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L2BFCR; #[doc = "`read()` method returns [ltdc_l2bfcr::R](ltdc_l2bfcr::R) reader structure"] impl crate::Readable for LTDC_L2BFCR {} #[doc = "`write(|w| ..)` method takes [ltdc_l2bfcr::W](ltdc_l2bfcr::W) writer structure"] impl crate::Writable for LTDC_L2BFCR {} #[doc = "This register defines the blending factors F1 and F2. The general blending formula is: BC = BF1 x C + BF2 x Cs BC = blended color BF1 = blend factor 1 C = current layer color BF2 = blend factor 2 Cs = subjacent layers blended color"] pub mod ltdc_l2bfcr; #[doc = "This register defines the color frame buffer start address which has to point to the address where the pixel data of the top left pixel of a layer is stored in the frame buffer.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l2cfbar](ltdc_l2cfbar) module"] pub type LTDC_L2CFBAR = crate::Reg<u32, _LTDC_L2CFBAR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L2CFBAR; #[doc = "`read()` method returns [ltdc_l2cfbar::R](ltdc_l2cfbar::R) reader structure"] impl crate::Readable for LTDC_L2CFBAR {} #[doc = "`write(|w| ..)` method takes [ltdc_l2cfbar::W](ltdc_l2cfbar::W) writer structure"] impl crate::Writable for LTDC_L2CFBAR {} #[doc = "This register defines the color frame buffer start address which has to point to the address where the pixel data of the top left pixel of a layer is stored in the frame buffer."] pub mod ltdc_l2cfbar; #[doc = "This register defines the color frame buffer line length and pitch.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l2cfblr](ltdc_l2cfblr) module"] pub type LTDC_L2CFBLR = crate::Reg<u32, _LTDC_L2CFBLR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L2CFBLR; #[doc = "`read()` method returns [ltdc_l2cfblr::R](ltdc_l2cfblr::R) reader structure"] impl crate::Readable for LTDC_L2CFBLR {} #[doc = "`write(|w| ..)` method takes [ltdc_l2cfblr::W](ltdc_l2cfblr::W) writer structure"] impl crate::Writable for LTDC_L2CFBLR {} #[doc = "This register defines the color frame buffer line length and pitch."] pub mod ltdc_l2cfblr; #[doc = "This register defines the number of lines in the color frame buffer.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l2cfblnr](ltdc_l2cfblnr) module"] pub type LTDC_L2CFBLNR = crate::Reg<u32, _LTDC_L2CFBLNR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L2CFBLNR; #[doc = "`read()` method returns [ltdc_l2cfblnr::R](ltdc_l2cfblnr::R) reader structure"] impl crate::Readable for LTDC_L2CFBLNR {} #[doc = "`write(|w| ..)` method takes [ltdc_l2cfblnr::W](ltdc_l2cfblnr::W) writer structure"] impl crate::Writable for LTDC_L2CFBLNR {} #[doc = "This register defines the number of lines in the color frame buffer."] pub mod ltdc_l2cfblnr; #[doc = "This register defines the CLUT address and the RGB value.\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ltdc_l2clutwr](ltdc_l2clutwr) module"] pub type LTDC_L2CLUTWR = crate::Reg<u32, _LTDC_L2CLUTWR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LTDC_L2CLUTWR; #[doc = "`write(|w| ..)` method takes [ltdc_l2clutwr::W](ltdc_l2clutwr::W) writer structure"] impl crate::Writable for LTDC_L2CLUTWR {} #[doc = "This register defines the CLUT address and the RGB value."] pub mod ltdc_l2clutwr;
/// Reader proxy for register `DFSDM_FLT2CR1`.
pub type R = crate::R<u32, super::DFSDM_FLT2CR1>;
/// Writer proxy for register `DFSDM_FLT2CR1`.
pub type W = crate::W<u32, super::DFSDM_FLT2CR1>;
/// `reset()` restores `DFSDM_FLT2CR1` to its reset value, 0.
impl crate::ResetValue for super::DFSDM_FLT2CR1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
/// DFEN — DFSDM_FLTx enable (bit 0). Value on reset: 0.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DFEN_A {
    /// 0: DFSDM_FLTx is disabled; all of its conversions stop immediately
    /// and all DFSDM_FLTx functions are stopped.
    B_0X0 = 0,
    /// 1: DFSDM_FLTx is enabled and starts operating according to its
    /// settings.
    B_0X1 = 1,
}
impl From<DFEN_A> for bool {
    #[inline(always)]
    fn from(variant: DFEN_A) -> Self {
        variant as u8 != 0
    }
}
/// Reader of field `DFEN`.
pub type DFEN_R = crate::R<bool, DFEN_A>;
impl DFEN_R {
    /// Returns the enumerated value of the field.
    #[inline(always)]
    pub fn variant(&self) -> DFEN_A {
        if self.bits {
            DFEN_A::B_0X1
        } else {
            DFEN_A::B_0X0
        }
    }
    /// Checks if the value of the field is `B_0X0`.
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == DFEN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`.
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == DFEN_A::B_0X1
    }
}
/// Write proxy for field `DFEN`.
pub struct DFEN_W<'a> {
    w: &'a mut W,
}
impl<'a> DFEN_W<'a> {
    /// Writes `variant` to the field.
    #[inline(always)]
    pub fn variant(self, variant: DFEN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// DFSDM_FLTx is disabled; all of its conversions and functions stop
    /// immediately.
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(DFEN_A::B_0X0)
    }
    /// DFSDM_FLTx is enabled and starts operating according to its settings.
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(DFEN_A::B_0X1)
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes the raw bit to the field (bit 0).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
/// JSWSTART — start of injected conversions, write-only (bit 1).
/// Value on reset: 0.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum JSWSTART_AW {
    /// 0: Writing 0 has no effect.
    B_0X0 = 0,
    /// 1: Writing 1 requests conversion of the injected channel group,
    /// making JCIP become 1 at the same time. Writing has no effect if
    /// JCIP is already 1 or if JSYNC=1.
    B_0X1 = 1,
}
impl From<JSWSTART_AW> for bool {
    #[inline(always)]
    fn from(variant: JSWSTART_AW) -> Self {
        variant as u8 != 0
    }
}
/// Write proxy for field `JSWSTART`.
pub struct JSWSTART_W<'a> {
    w: &'a mut W,
}
impl<'a> JSWSTART_W<'a> {
    /// Writes `variant` to the field.
    #[inline(always)]
    pub fn variant(self, variant: JSWSTART_AW) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Writing 0 has no effect.
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(JSWSTART_AW::B_0X0)
    }
    /// Requests conversion of the injected channel group (JCIP becomes 1);
    /// no effect if JCIP=1 already or if JSYNC=1.
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(JSWSTART_AW::B_0X1)
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes the raw bit to the field (bit 1).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
/// JSYNC — launch injected conversion synchronously with DFSDM_FLT0
/// (bit 3). Value on reset: 0.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum JSYNC_A {
    /// 0: Do not launch an injected conversion synchronously with
    /// DFSDM_FLT0.
    B_0X0 = 0,
    /// 1: Launch an injected conversion in this DFSDM_FLTx at the very
    /// moment one is launched in DFSDM_FLT0 by its JSWSTART trigger.
    B_0X1 = 1,
}
impl From<JSYNC_A> for bool {
    #[inline(always)]
    fn from(variant: JSYNC_A) -> Self {
        variant as u8 != 0
    }
}
/// Reader of field `JSYNC`.
pub type JSYNC_R = crate::R<bool, JSYNC_A>;
impl JSYNC_R {
    /// Returns the enumerated value of the field.
    #[inline(always)]
    pub fn variant(&self) -> JSYNC_A {
        if self.bits {
            JSYNC_A::B_0X1
        } else {
            JSYNC_A::B_0X0
        }
    }
    /// Checks if the value of the field is `B_0X0`.
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == JSYNC_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`.
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == JSYNC_A::B_0X1
    }
}
/// Write proxy for field `JSYNC`.
pub struct JSYNC_W<'a> {
    w: &'a mut W,
}
impl<'a> JSYNC_W<'a> {
    /// Writes `variant` to the field.
    #[inline(always)]
    pub fn variant(self, variant: JSYNC_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Do not launch an injected conversion synchronously with DFSDM_FLT0.
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(JSYNC_A::B_0X0)
    }
    /// Launch an injected conversion in this DFSDM_FLTx when one is launched
    /// in DFSDM_FLT0 by its JSWSTART trigger.
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(JSYNC_A::B_0X1)
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes the raw bit to the field (bit 3).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
/// JSCAN — scan mode for injected conversions (bit 4). Value on reset: 0.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum JSCAN_A {
    /// 0: One channel conversion is performed from the injected channel
    /// group, then the next selected channel from this group is selected.
    B_0X0 = 0,
    /// 1: The series of conversions for the injected group channels is
    /// executed, starting over with the lowest selected channel.
    B_0X1 = 1,
}
impl From<JSCAN_A> for bool {
    #[inline(always)]
    fn from(variant: JSCAN_A) -> Self {
        variant as u8 != 0
    }
}
/// Reader of field `JSCAN`.
pub type JSCAN_R = crate::R<bool, JSCAN_A>;
impl JSCAN_R {
    /// Returns the enumerated value of the field.
    #[inline(always)]
    pub fn variant(&self) -> JSCAN_A {
        if self.bits {
            JSCAN_A::B_0X1
        } else {
            JSCAN_A::B_0X0
        }
    }
    /// Checks if the value of the field is `B_0X0`.
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == JSCAN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`.
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == JSCAN_A::B_0X1
    }
}
/// Write proxy for field `JSCAN`.
pub struct JSCAN_W<'a> {
    w: &'a mut W,
}
impl<'a> JSCAN_W<'a> {
    /// Writes `variant` to the field.
    #[inline(always)]
    pub fn variant(self, variant: JSCAN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// One channel conversion is performed from the injected channel group,
    /// then the next selected channel from this group is selected.
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(JSCAN_A::B_0X0)
    }
    /// The series of conversions for the injected group channels is
    /// executed, starting over with the lowest selected channel.
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(JSCAN_A::B_0X1)
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes the raw bit to the field (bit 4).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
/// JDMAEN — DMA channel enabled to read injected data (bit 5).
/// Value on reset: 0.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum JDMAEN_A {
    /// 0: The DMA channel is not enabled to read injected data.
    B_0X0 = 0,
    /// 1: The DMA channel is enabled to read injected data.
    B_0X1 = 1,
}
impl From<JDMAEN_A> for bool {
    #[inline(always)]
    fn from(variant: JDMAEN_A) -> Self {
        variant as u8 != 0
    }
}
/// Reader of field `JDMAEN`.
pub type JDMAEN_R = crate::R<bool, JDMAEN_A>;
impl JDMAEN_R {
    /// Returns the enumerated value of the field.
    #[inline(always)]
    pub fn variant(&self) -> JDMAEN_A {
        if self.bits {
            JDMAEN_A::B_0X1
        } else {
            JDMAEN_A::B_0X0
        }
    }
    /// Checks if the value of the field is `B_0X0`.
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == JDMAEN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`.
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == JDMAEN_A::B_0X1
    }
}
/// Write proxy for field `JDMAEN`.
pub struct JDMAEN_W<'a> {
    w: &'a mut W,
}
impl<'a> JDMAEN_W<'a> {
    /// Writes `variant` to the field.
    #[inline(always)]
    pub fn variant(self, variant: JDMAEN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// The DMA channel is not enabled to read injected data.
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(JDMAEN_A::B_0X0)
    }
    /// The DMA channel is enabled to read injected data.
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(JDMAEN_A::B_0X1)
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes the raw bit to the field (bit 5).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
/// Reader of field `JEXTSEL` (bits 8..=10, plain 3-bit value).
pub type JEXTSEL_R = crate::R<u8, u8>;
/// Write proxy for field `JEXTSEL`.
pub struct JEXTSEL_W<'a> {
    w: &'a mut W,
}
impl<'a> JEXTSEL_W<'a> {
    /// Writes raw bits to the field (bits 8..=10).
    ///
    /// # Safety
    ///
    /// The field has no enumerated values; the caller is responsible for
    /// writing a value that is meaningful for the hardware.
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x07 << 8)) | (((value as u32) & 0x07) << 8);
        self.w
    }
}
/// JEXTEN — trigger edge selection for injected conversions
/// (bits 13..=14). Value on reset: 0.
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum JEXTEN_A {
    /// 0: Trigger detection is disabled.
    B_0X0 = 0,
    /// 1: Each rising edge on the selected trigger requests an injected
    /// conversion.
    B_0X1 = 1,
    /// 2: Each falling edge on the selected trigger requests an injected
    /// conversion.
    B_0X2 = 2,
    /// 3: Both rising and falling edges on the selected trigger request
    /// injected conversions.
    B_0X3 = 3,
}
impl From<JEXTEN_A> for u8 {
    #[inline(always)]
    fn from(variant: JEXTEN_A) -> Self {
        variant as _
    }
}
/// Reader of field `JEXTEN`.
pub type JEXTEN_R = crate::R<u8, JEXTEN_A>;
impl JEXTEN_R {
    /// Returns the enumerated value of the field.
    #[inline(always)]
    pub fn variant(&self) -> JEXTEN_A {
        match self.bits {
            0 => JEXTEN_A::B_0X0,
            1 => JEXTEN_A::B_0X1,
            2 => JEXTEN_A::B_0X2,
            3 => JEXTEN_A::B_0X3,
            // The field is masked to two bits, so 0..=3 covers all values.
            _ => unreachable!(),
        }
    }
    /// Checks if the value of the field is `B_0X0`.
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == JEXTEN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`.
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == JEXTEN_A::B_0X1
    }
    /// Checks if the value of the field is `B_0X2`.
    #[inline(always)]
    pub fn is_b_0x2(&self) -> bool {
        *self == JEXTEN_A::B_0X2
    }
    /// Checks if the value of the field is `B_0X3`.
    #[inline(always)]
    pub fn is_b_0x3(&self) -> bool {
        *self == JEXTEN_A::B_0X3
    }
}
/// Write proxy for field `JEXTEN`.
pub struct JEXTEN_W<'a> {
    w: &'a mut W,
}
impl<'a> JEXTEN_W<'a> {
    /// Writes `variant` to the field.
    #[inline(always)]
    pub fn variant(self, variant: JEXTEN_A) -> &'a mut W {
        self.bits(variant.into())
    }
    /// Trigger detection is disabled.
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(JEXTEN_A::B_0X0)
    }
    /// Each rising edge on the selected trigger requests an injected
    /// conversion.
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(JEXTEN_A::B_0X1)
    }
    /// Each falling edge on the selected trigger requests an injected
    /// conversion.
    #[inline(always)]
    pub fn b_0x2(self) -> &'a mut W {
        self.variant(JEXTEN_A::B_0X2)
    }
    /// Both rising and falling edges on the selected trigger request
    /// injected conversions.
    #[inline(always)]
    pub fn b_0x3(self) -> &'a mut W {
        self.variant(JEXTEN_A::B_0X3)
    }
    /// Writes raw bits to the field (bits 13..=14; safe because every 2-bit
    /// value has an enumerated meaning).
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x03 << 13)) | (((value as u32) & 0x03) << 13);
        self.w
    }
}
/// RSWSTART — software start of a regular conversion, write-only (bit 17).
/// Value on reset: 0.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RSWSTART_AW {
    /// 0: Writing 0 has no effect.
    B_0X0 = 0,
    /// 1: Writing 1 requests a conversion on the regular channel and makes
    /// RCIP become 1. Writing has no effect if RCIP is already 1 or if
    /// RSYNC=1.
    B_0X1 = 1,
}
impl From<RSWSTART_AW> for bool {
    #[inline(always)]
    fn from(variant: RSWSTART_AW) -> Self {
        variant as u8 != 0
    }
}
/// Write proxy for field `RSWSTART`.
pub struct RSWSTART_W<'a> {
    w: &'a mut W,
}
impl<'a> RSWSTART_W<'a> {
    /// Writes `variant` to the field.
    #[inline(always)]
    pub fn variant(self, variant: RSWSTART_AW) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Writing 0 has no effect.
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(RSWSTART_AW::B_0X0)
    }
    /// Requests a conversion on the regular channel (RCIP becomes 1); no
    /// effect if RCIP=1 already or if RSYNC=1.
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(RSWSTART_AW::B_0X1)
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes the raw bit to the field (bit 17).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
        self.w
    }
}
/// RCONT — continuous mode for the regular channel (bit 18).
/// Value on reset: 0.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RCONT_A {
    /// 0: The regular channel is converted just once for each conversion
    /// request.
    B_0X0 = 0,
    /// 1: The regular channel is converted repeatedly after each conversion
    /// request.
    B_0X1 = 1,
}
impl From<RCONT_A> for bool {
    #[inline(always)]
    fn from(variant: RCONT_A) -> Self {
        variant as u8 != 0
    }
}
/// Reader of field `RCONT`.
pub type RCONT_R = crate::R<bool, RCONT_A>;
impl RCONT_R {
    /// Returns the enumerated value of the field.
    #[inline(always)]
    pub fn variant(&self) -> RCONT_A {
        if self.bits {
            RCONT_A::B_0X1
        } else {
            RCONT_A::B_0X0
        }
    }
    /// Checks if the value of the field is `B_0X0`.
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == RCONT_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`.
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == RCONT_A::B_0X1
    }
}
/// Write proxy for field `RCONT`.
pub struct RCONT_W<'a> {
    w: &'a mut W,
}
impl<'a> RCONT_W<'a> {
    /// Writes `variant` to the field.
    #[inline(always)]
    pub fn variant(self, variant: RCONT_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// The regular channel is converted just once for each conversion
    /// request.
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(RCONT_A::B_0X0)
    }
    /// The regular channel is converted repeatedly after each conversion
    /// request.
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(RCONT_A::B_0X1)
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes the raw bit to the field (bit 18).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);
        self.w
    }
}
/// RSYNC — launch a regular conversion synchronously with DFSDM_FLT0.
/// Value on reset: 0.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RSYNC_A {
    /// 0: Do not launch a regular conversion synchronously with DFSDM_FLT0.
    B_0X0 = 0,
    /// 1: Launch a regular conversion in this DFSDM_FLTx at the very moment
    /// a regular conversion is launched in DFSDM_FLT0.
    B_0X1 = 1,
}
impl From<RSYNC_A> for bool {
    #[inline(always)]
    fn from(variant: RSYNC_A) -> Self {
        variant as u8 != 0
    }
}
/// Reader of field `RSYNC`.
pub type RSYNC_R = crate::R<bool, RSYNC_A>;
impl RSYNC_R {
    /// Returns the enumerated value of the field.
    #[inline(always)]
    pub fn variant(&self) -> RSYNC_A {
        if self.bits {
            RSYNC_A::B_0X1
        } else {
            RSYNC_A::B_0X0
        }
    }
    /// Checks if the value of the field is `B_0X0`.
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool
{ *self == RSYNC_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == RSYNC_A::B_0X1 } } #[doc = "Write proxy for field `RSYNC`"] pub struct RSYNC_W<'a> { w: &'a mut W, } impl<'a> RSYNC_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: RSYNC_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Do not launch a regular conversion synchronously with DFSDM_FLT0"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(RSYNC_A::B_0X0) } #[doc = "Launch a regular conversion in this DFSDM_FLTx at the very moment when a regular conversion is launched in DFSDM_FLT0"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(RSYNC_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19); self.w } } #[doc = "RDMAEN\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum RDMAEN_A { #[doc = "0: The DMA channel is not enabled to\r\n read regular data"] B_0X0 = 0, #[doc = "1: The DMA channel is enabled to read\r\n regular data"] B_0X1 = 1, } impl From<RDMAEN_A> for bool { #[inline(always)] fn from(variant: RDMAEN_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `RDMAEN`"] pub type RDMAEN_R = crate::R<bool, RDMAEN_A>; impl RDMAEN_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> RDMAEN_A { match self.bits { false => RDMAEN_A::B_0X0, true => RDMAEN_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == RDMAEN_A::B_0X0 } #[doc = "Checks if the value of the field is 
`B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == RDMAEN_A::B_0X1 } } #[doc = "Write proxy for field `RDMAEN`"] pub struct RDMAEN_W<'a> { w: &'a mut W, } impl<'a> RDMAEN_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: RDMAEN_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "The DMA channel is not enabled to read regular data"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(RDMAEN_A::B_0X0) } #[doc = "The DMA channel is enabled to read regular data"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(RDMAEN_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21); self.w } } #[doc = "RCH\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum RCH_A { #[doc = "0: Channel 0 is selected as the regular\r\n channel"] B_0X0 = 0, #[doc = "1: Channel 1 is selected as the regular\r\n channel"] B_0X1 = 1, #[doc = "7: Channel 7 is selected as the regular\r\n channel"] B_0X7 = 7, } impl From<RCH_A> for u8 { #[inline(always)] fn from(variant: RCH_A) -> Self { variant as _ } } #[doc = "Reader of field `RCH`"] pub type RCH_R = crate::R<u8, RCH_A>; impl RCH_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> crate::Variant<u8, RCH_A> { use crate::Variant::*; match self.bits { 0 => Val(RCH_A::B_0X0), 1 => Val(RCH_A::B_0X1), 7 => Val(RCH_A::B_0X7), i => Res(i), } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == RCH_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn 
is_b_0x1(&self) -> bool { *self == RCH_A::B_0X1 } #[doc = "Checks if the value of the field is `B_0X7`"] #[inline(always)] pub fn is_b_0x7(&self) -> bool { *self == RCH_A::B_0X7 } } #[doc = "Write proxy for field `RCH`"] pub struct RCH_W<'a> { w: &'a mut W, } impl<'a> RCH_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: RCH_A) -> &'a mut W { unsafe { self.bits(variant.into()) } } #[doc = "Channel 0 is selected as the regular channel"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(RCH_A::B_0X0) } #[doc = "Channel 1 is selected as the regular channel"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(RCH_A::B_0X1) } #[doc = "Channel 7 is selected as the regular channel"] #[inline(always)] pub fn b_0x7(self) -> &'a mut W { self.variant(RCH_A::B_0X7) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 24)) | (((value as u32) & 0x07) << 24); self.w } } #[doc = "FAST\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum FAST_A { #[doc = "0: Fast conversion mode\r\n disabled"] B_0X0 = 0, #[doc = "1: Fast conversion mode\r\n enabled"] B_0X1 = 1, } impl From<FAST_A> for bool { #[inline(always)] fn from(variant: FAST_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `FAST`"] pub type FAST_R = crate::R<bool, FAST_A>; impl FAST_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> FAST_A { match self.bits { false => FAST_A::B_0X0, true => FAST_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == FAST_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == FAST_A::B_0X1 } } #[doc = "Write proxy for field `FAST`"] pub struct FAST_W<'a> { w: &'a mut W, } impl<'a> FAST_W<'a> { #[doc = r"Writes 
`variant` to the field"] #[inline(always)] pub fn variant(self, variant: FAST_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Fast conversion mode disabled"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(FAST_A::B_0X0) } #[doc = "Fast conversion mode enabled"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(FAST_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29); self.w } } #[doc = "AWFSEL\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum AWFSEL_A { #[doc = "0: Analog watchdog on data output value\r\n (after the digital filter). The comparison is\r\n done after offset correction and\r\n shift"] B_0X0 = 0, #[doc = "1: Analog watchdog on channel\r\n transceivers value (after watchdog\r\n filter)"] B_0X1 = 1, } impl From<AWFSEL_A> for bool { #[inline(always)] fn from(variant: AWFSEL_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `AWFSEL`"] pub type AWFSEL_R = crate::R<bool, AWFSEL_A>; impl AWFSEL_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> AWFSEL_A { match self.bits { false => AWFSEL_A::B_0X0, true => AWFSEL_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == AWFSEL_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == AWFSEL_A::B_0X1 } } #[doc = "Write proxy for field `AWFSEL`"] pub struct AWFSEL_W<'a> { w: &'a mut W, } impl<'a> AWFSEL_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: AWFSEL_A) -> &'a 
mut W { { self.bit(variant.into()) } } #[doc = "Analog watchdog on data output value (after the digital filter). The comparison is done after offset correction and shift"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(AWFSEL_A::B_0X0) } #[doc = "Analog watchdog on channel transceivers value (after watchdog filter)"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(AWFSEL_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30); self.w } } impl R { #[doc = "Bit 0 - DFEN"] #[inline(always)] pub fn dfen(&self) -> DFEN_R { DFEN_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 3 - JSYNC"] #[inline(always)] pub fn jsync(&self) -> JSYNC_R { JSYNC_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - JSCAN"] #[inline(always)] pub fn jscan(&self) -> JSCAN_R { JSCAN_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 5 - JDMAEN"] #[inline(always)] pub fn jdmaen(&self) -> JDMAEN_R { JDMAEN_R::new(((self.bits >> 5) & 0x01) != 0) } #[doc = "Bits 8:10 - JEXTSEL"] #[inline(always)] pub fn jextsel(&self) -> JEXTSEL_R { JEXTSEL_R::new(((self.bits >> 8) & 0x07) as u8) } #[doc = "Bits 13:14 - JEXTEN"] #[inline(always)] pub fn jexten(&self) -> JEXTEN_R { JEXTEN_R::new(((self.bits >> 13) & 0x03) as u8) } #[doc = "Bit 18 - RCONT"] #[inline(always)] pub fn rcont(&self) -> RCONT_R { RCONT_R::new(((self.bits >> 18) & 0x01) != 0) } #[doc = "Bit 19 - RSYNC"] #[inline(always)] pub fn rsync(&self) -> RSYNC_R { RSYNC_R::new(((self.bits >> 19) & 0x01) != 0) } #[doc = "Bit 21 - RDMAEN"] #[inline(always)] pub fn rdmaen(&self) -> RDMAEN_R { RDMAEN_R::new(((self.bits >> 21) & 0x01) != 0) } #[doc = "Bits 24:26 - RCH"] 
#[inline(always)] pub fn rch(&self) -> RCH_R { RCH_R::new(((self.bits >> 24) & 0x07) as u8) } #[doc = "Bit 29 - FAST"] #[inline(always)] pub fn fast(&self) -> FAST_R { FAST_R::new(((self.bits >> 29) & 0x01) != 0) } #[doc = "Bit 30 - AWFSEL"] #[inline(always)] pub fn awfsel(&self) -> AWFSEL_R { AWFSEL_R::new(((self.bits >> 30) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - DFEN"] #[inline(always)] pub fn dfen(&mut self) -> DFEN_W { DFEN_W { w: self } } #[doc = "Bit 1 - JSWSTART"] #[inline(always)] pub fn jswstart(&mut self) -> JSWSTART_W { JSWSTART_W { w: self } } #[doc = "Bit 3 - JSYNC"] #[inline(always)] pub fn jsync(&mut self) -> JSYNC_W { JSYNC_W { w: self } } #[doc = "Bit 4 - JSCAN"] #[inline(always)] pub fn jscan(&mut self) -> JSCAN_W { JSCAN_W { w: self } } #[doc = "Bit 5 - JDMAEN"] #[inline(always)] pub fn jdmaen(&mut self) -> JDMAEN_W { JDMAEN_W { w: self } } #[doc = "Bits 8:10 - JEXTSEL"] #[inline(always)] pub fn jextsel(&mut self) -> JEXTSEL_W { JEXTSEL_W { w: self } } #[doc = "Bits 13:14 - JEXTEN"] #[inline(always)] pub fn jexten(&mut self) -> JEXTEN_W { JEXTEN_W { w: self } } #[doc = "Bit 17 - RSWSTART"] #[inline(always)] pub fn rswstart(&mut self) -> RSWSTART_W { RSWSTART_W { w: self } } #[doc = "Bit 18 - RCONT"] #[inline(always)] pub fn rcont(&mut self) -> RCONT_W { RCONT_W { w: self } } #[doc = "Bit 19 - RSYNC"] #[inline(always)] pub fn rsync(&mut self) -> RSYNC_W { RSYNC_W { w: self } } #[doc = "Bit 21 - RDMAEN"] #[inline(always)] pub fn rdmaen(&mut self) -> RDMAEN_W { RDMAEN_W { w: self } } #[doc = "Bits 24:26 - RCH"] #[inline(always)] pub fn rch(&mut self) -> RCH_W { RCH_W { w: self } } #[doc = "Bit 29 - FAST"] #[inline(always)] pub fn fast(&mut self) -> FAST_W { FAST_W { w: self } } #[doc = "Bit 30 - AWFSEL"] #[inline(always)] pub fn awfsel(&mut self) -> AWFSEL_W { AWFSEL_W { w: self } } }
mod evenger;
mod evdev;
mod foreign;
mod muxer;

use evenger::Evenger;

/// Entry point: initialises the application core, registers the
/// physical input devices, then blocks inside the event loop until
/// it terminates.
fn main() {
    // Nothing works without the core, so fail loudly right away.
    let mut evenger = Evenger::new()
        .expect("app init failed");

    // The devices we multiplex: (logical name, device node, error text).
    let devices = [
        ("mouse", "/dev/input/event2", "can't open mouse"),
        ("keyboard", "/dev/input/event4", "can't open keyboard"),
    ];
    for &(name, path, err) in &devices {
        evenger.open_device(name, path).expect(err);
    }

    // Hand control to the event loop; any runtime failure surfaces here.
    evenger.run()
        .expect("error during runtime");
}
extern crate pancurses; use std::env; use std::fs; use std::collections::{HashMap, VecDeque}; use std::thread; use std::time; use pancurses::{Window, Input}; const TILE_WALL: u8 = 1; const TILE_BLOCK: u8 = 2; const TILE_PADDLE: u8 = 3; const TILE_BALL: u8 = 4; const MODE_POS: i64 = 0; const MODE_IMM: i64 = 1; const MODE_REL: i64 = 2; const OP_HALT: i64 = 99; const OP_ADD: i64 = 1; const OP_MULTIPLY: i64 = 2; const OP_INPUT: i64 = 3; const OP_OUTPUT: i64 = 4; const OP_JUMP_IF_TRUE: i64 = 5; const OP_JUMP_IF_FALSE: i64 = 6; const OP_LESS_THAN: i64 = 7; const OP_EQUALS: i64 = 8; const OP_REL_BASE_OFFSET: i64 = 9; const EXIT_ERROR: i64 = -1; const EXIT_HALT: i64 = 0; const EXIT_NEED_INPUT: i64 = 1; const EXIT_OUTPUT: i64 = 2; struct Program { code: Vec<i64>, ip: i64, memory: HashMap::<i64, i64>, rel_base: i64, data: VecDeque<i64> } impl Program { pub fn new(code: Vec<i64>) -> Program { Program { code: code, ip: 0, memory: HashMap::new(), rel_base: 0, data: VecDeque::new() } } } impl Clone for Program { fn clone(&self) -> Program { Program { code: self.code.clone(), ip: self.ip.clone(), memory: self.memory.clone(), rel_base: self.rel_base, data: self.data.clone() } } } fn load_program(path: &str) -> Program { let code = fs::read_to_string(path) .unwrap() .trim() .split(",") .map(|x| x.parse::<i64>().unwrap()) .collect::<Vec<i64>>(); return Program::new(code); } fn init_program(program: &mut Program) { program.ip = 0; program.memory.clear(); program.rel_base = 0; program.data.clear(); for i in 0..program.code.len() { program.memory.insert(i as i64, program.code[i]); } } fn run_program(program: &mut Program) -> i64 { let code = &mut program.code; let memory = &mut program.memory; let mut ip = program.ip; let mut rel_base = program.rel_base; let data = &mut program.data; while ip < code.len() as i64 { let instr = memory[&ip]; let opcode = read_opcode(instr); ip += 1; match opcode { OP_HALT => { return EXIT_HALT; }, OP_ADD | OP_MULTIPLY | OP_LESS_THAN | OP_EQUALS => { let 
param1 = read_param_value(ip, &memory, rel_base, 0); let param2 = read_param_value(ip, &memory, rel_base, 1); let result_address = read_param_value_out(ip, &memory, rel_base, 2); ip += 3; match opcode { OP_ADD => { memory.insert(result_address, param1 + param2); }, OP_MULTIPLY => { memory.insert(result_address, param1 * param2); }, OP_LESS_THAN => { memory.insert(result_address, if param1 < param2 {1} else {0}); }, OP_EQUALS => { memory.insert(result_address, if param1 == param2 {1} else {0}); } _ => panic!() } }, OP_INPUT => { let param = read_param_value_out(ip, &memory, rel_base, 0); ip += 1; match data.pop_front() { Some(value) => { memory.insert(param, value); }, None => return EXIT_NEED_INPUT } }, OP_OUTPUT => { let param = read_param_value(ip, &memory, rel_base, 0); ip += 1; data.push_back(param); program.ip = ip; return EXIT_OUTPUT; }, OP_JUMP_IF_TRUE | OP_JUMP_IF_FALSE => { let param1 = read_param_value(ip, &memory, rel_base, 0); let param2 = read_param_value(ip, &memory, rel_base, 1); ip += 2; if (opcode == OP_JUMP_IF_TRUE && param1 != 0) || (opcode == OP_JUMP_IF_FALSE && param1 == 0) { ip = param2; } }, OP_REL_BASE_OFFSET => { rel_base += read_param_value(ip, &memory, rel_base, 0); ip += 1; } _ => { println!("Invalid opcoe at address {}: {}", ip - 1, instr); return EXIT_ERROR; } } program.ip = ip; program.rel_base = rel_base; } fn read_opcode(instr: i64) -> i64 { instr % 100 } fn read_param_mode(instr: i64, index: u32) -> i64 { instr % 10_i64.pow(index + 3) / 10_i64.pow(index + 2) } fn read_param_value(start: i64, memory: &HashMap::<i64, i64>, rel_base: i64, index: u32) -> i64 { let mode = read_param_mode(memory[&(start - 1)], index); let param = *memory.get(&(start + index as i64)).unwrap_or(&0); match mode { MODE_POS => *memory.get(&param).unwrap_or(&0), MODE_IMM => param, MODE_REL => { if rel_base + param < 0 { panic!("Attempt to read memory at invalid address {}", rel_base + param); } *memory.get(&(param + rel_base)).unwrap_or(&0) } _ => panic!() } } 
fn read_param_value_out(start: i64, memory: &HashMap::<i64, i64>, rel_base: i64, index: u32) -> i64 { let mode = read_param_mode(memory[&(start - 1)], index); let param = *memory.get(&(start + index as i64)).unwrap_or(&0); match mode { MODE_POS => param, MODE_REL => param + rel_base, _ => panic!() } } return EXIT_ERROR; } struct GameState { grid: Vec<Vec<u8>>, score: i64, steps: Vec<i32>, steps_to_replay: VecDeque<i32> } impl GameState { pub fn new() -> GameState { GameState { grid: vec![vec![0; 80]; 60], score: 0, steps: Vec::new(), steps_to_replay: VecDeque::new() } } } fn run_game_manual(program: &mut Program, game: &mut GameState, window: &Window) { init_program(program); program.memory.insert(0, 2); // insert 2 quarters window.refresh(); window.keypad(true); 'game: loop { match run_program(program) { EXIT_NEED_INPUT => { window.clear(); window.printw(format!("Score: {}\n\n", game.score)); for i in 0..game.grid.len() { for j in 0..game.grid[i].len() { let tile = game.grid[i][j] as u8; let symbol = match tile { TILE_WALL => "#", TILE_BLOCK => "*", TILE_PADDLE => "_", TILE_BALL => "o", _ => " " }; window.printw(symbol); } window.printw("\n"); } match game.steps_to_replay.pop_front() { Some(v) => { program.data.push_back(v as i64); continue; } None => {} } loop { let key = window.getch(); match key { Some(Input::KeyLeft) => { program.data.push_back(-1); break; }, Some(Input::KeyRight) => { program.data.push_back(1); break; }, Some(Input::Character(' ')) => { program.data.push_back(0); break; }, Some(Input::Character('l')) => { game.steps = fs::read_to_string("save.txt").unwrap() .split(",") .map(|x| x.parse::<i32>().unwrap()) .collect(); game.steps_to_replay = game.steps.clone().into_iter().collect(); continue 'game; }, Some(Input::Character('s')) => { fs::write("save.txt", game.steps.iter() .map(|x| x.to_string()) .collect::<Vec<String>>().join(",")).unwrap(); break; }, Some(Input::Character('q')) => { println!("Goodbye!"); return; }, _ => {} } } if 
!program.data.is_empty() { game.steps.push(*program.data.back().unwrap() as i32); } }, EXIT_OUTPUT => { if program.data.len() == 3 { let x = program.data.pop_front().unwrap(); let y = program.data.pop_front().unwrap(); let tile = program.data.pop_front().unwrap(); match (x, y) { (-1, 0) => { if tile != 0 { game.score = tile; } }, _ => { game.grid[y as usize][x as usize] = tile as u8; } } } }, EXIT_HALT => { break; } _ => { println!("Oops, something went wrong"); break; } } } } fn run_game_auto(program: &mut Program, window: &Window) { init_program(program); program.memory.insert(0, 2); // insert 2 quarters let mut grid: Vec<Vec<u8>> = vec![vec![0; 50]; 25]; let mut score: i64 = 0; let mut ball_x = 0 ; let mut paddle_x = 0; loop { match run_program(program) { EXIT_NEED_INPUT => { thread::sleep(time::Duration::from_millis(50)); for i in 0..grid.len() { for j in 0..grid[i].len() { let tile = grid[i][j] as u8; match tile { TILE_BALL => { ball_x = j; }, TILE_PADDLE => { paddle_x = j; }, _ => {} } } } if paddle_x < ball_x { program.data.push_back(1); } else if paddle_x > ball_x { program.data.push_back(-1); } else { program.data.push_back(0); } }, EXIT_OUTPUT => { if program.data.len() == 3 { let x = program.data.pop_front().unwrap(); let y = program.data.pop_front().unwrap(); let tile = program.data.pop_front().unwrap(); match (x, y) { (-1, 0) => { score = tile; println!("Score: {}", score); } _ => { grid[y as usize][x as usize] = tile as u8; } } window.clear(); window.printw(format!("Score: {}\n", score)); window.printw("\n"); for i in 0..grid.len() { for j in 0..grid[i].len() { let tile = grid[i][j] as u8; let symbol = match tile { TILE_WALL => "#", TILE_BLOCK => "*", TILE_PADDLE => "_", TILE_BALL => "o", _ => " " }; window.printw(symbol); } window.printw("\n"); } window.refresh(); } }, EXIT_HALT => { window.clear(); window.printw(format!("You score: {}\n", score)); window.printw("Press any key to exit...\n"); window.getch(); break; } _ => { println!("Oops, something 
went wrong"); break; } } } } fn main() { let window = pancurses::initscr(); pancurses::noecho(); pancurses::cbreak(); let mut program = load_program("input.txt"); let args: Vec<String> = env::args().collect(); if args.len() > 1 && args[1] == "play" { let mut game = GameState::new(); run_game_manual(&mut program, &mut game, &window); println!("Your score was: {}", game.score); } else { run_game_auto(&mut program, &window); } // pancurses::endwin(); }