text
stringlengths
8
4.13M
use anyhow::Result;
use maud::{html, Markup};
use rustimate_service::{RequestContext, Router};

/// Renders the "Project Administration" page: a card containing links to the
/// connection-list and settings admin pages, wrapped in the app's standard
/// section layout via `crate::section`.
///
/// Errors propagate from `route_simple` when a named route is unknown.
pub fn list(ctx: &RequestContext, router: &dyn Router) -> Result<Markup> {
    let content = crate::components::card::card(
        ctx,
        &html!(
            h3 { "Project Administration" }
            ul {
                li {
                    // Link styling comes from the current user's profile theme.
                    a.(ctx.user_profile().link_class()) href=(router.route_simple("admin.connections")?) { "Connection List" }
                }
                li {
                    a.(ctx.user_profile().link_class()) href=(router.route_simple("admin.settings")?) { "Edit Settings" }
                }
            }
        )
    );
    crate::section(ctx, router, "Project Administration", &content)
}
use super::{Chatbot, ChatbotError, CompiledChatbot};

/// Builds and compiles a small ELIZA-style chatbot: a table of
/// (regex pattern, candidate responses) pairs, a fallback response, and the
/// default pronoun reflections.
///
/// NOTE(review): `%1` in some responses presumably refers back to a capture
/// group — the substitution semantics live in `Chatbot::compile`; confirm
/// there.
///
/// Cleanup: the pair table is a single `vec![]` literal instead of a mutable
/// `Vec` built with repeated `push`, and the trailing
/// `let eliza = …; return eliza;` is collapsed into a tail expression.
pub fn eliza() -> Result<CompiledChatbot, ChatbotError> {
    let pairs: Vec<(String, Vec<String>)> = vec![
        (
            String::from(r"Hello(.*)"),
            vec![
                String::from("Hello... I'm glad you could drop by today."),
                String::from("Hi there... how are you today?"),
                String::from("Hello, how are you feeling today?"),
            ],
        ),
        (
            String::from(r"(.*) sorry (.*)"),
            vec![
                String::from("There are many times when no apology is needed."),
                String::from("What feelings do you have when you apologize?"),
            ],
        ),
        (
            String::from(r"I think (.*)"),
            vec![
                String::from("Do you doubt \"%1?\""),
                String::from("Do you really think so?"),
                String::from("But you're not sure \"%1\"?"),
            ],
        ),
        (
            String::from(r"How (.*)"),
            vec![
                String::from("How do you suppose?"),
                String::from("Perhaps you can answer your own question."),
                String::from("What is it you're really asking?"),
            ],
        ),
    ];
    let fallback = vec![String::from("Sorry I didn't understand")];
    let reflections = Chatbot::default_reflections();
    Chatbot {
        pairs,
        fallback,
        reflections,
    }
    .compile()
}
use crate::lib::environment::Environment;
use crate::lib::error::DfxResult;
use clap::Clap;

mod delete;
mod install;
mod list;
mod show;

/// Manages the dfx version cache.
#[derive(Clap)]
#[clap(name("cache"))]
pub struct CacheOpts {
    #[clap(subcommand)]
    subcmd: SubCommand,
}

/// Subcommands of `dfx cache`; each variant carries its own parsed options.
#[derive(Clap)]
pub enum SubCommand {
    Delete(delete::CacheDeleteOpts),
    Install(install::CacheInstall),
    List(list::CacheListOpts),
    Show(show::CacheShowOpts),
}

/// Dispatches the parsed `cache` subcommand to the matching submodule handler.
pub fn exec(env: &dyn Environment, opts: CacheOpts) -> DfxResult {
    match opts.subcmd {
        SubCommand::Delete(v) => delete::exec(env, v),
        SubCommand::Install(v) => install::exec(env, v),
        SubCommand::List(v) => list::exec(env, v),
        SubCommand::Show(v) => show::exec(env, v),
    }
}
//! Implements [this //! protocol](https://github.com/fluent/fluentd/wiki/Forward-Protocol-Specification-v1). use std::{collections::HashMap, io::Write}; use rand::{distributions::Standard, prelude::Distribution, Rng}; use serde_tuple::Serialize_tuple; use super::{common::AsciiString, Generator}; use crate::payload::{Error, Serialize}; #[derive(Debug, Default, Clone, Copy)] #[cfg_attr(test, derive(proptest_derive::Arbitrary))] pub(crate) struct Fluent {} pub(crate) type Object = HashMap<String, u8>; #[derive(serde::Serialize)] #[serde(untagged)] enum RecordValue { String(String), Object(Object), } impl Distribution<RecordValue> for Standard { fn sample<R>(&self, rng: &mut R) -> RecordValue where R: Rng + ?Sized, { match rng.gen_range(0..2) { 0 => RecordValue::String(AsciiString::default().generate(rng)), 1 => { let mut obj = HashMap::new(); for _ in 0..rng.gen_range(0..128) { let key = AsciiString::default().generate(rng); let val = rng.gen(); obj.insert(key, val); } RecordValue::Object(obj) } _ => unreachable!(), } } } #[derive(Serialize_tuple)] struct Entry { time: u32, record: HashMap<String, RecordValue>, // always contains 'message' and 'event' -> object key } impl Distribution<Entry> for Standard { fn sample<R>(&self, rng: &mut R) -> Entry where R: Rng + ?Sized, { let mut rec = HashMap::new(); rec.insert(String::from("message"), rng.gen()); rec.insert(String::from("event"), rng.gen()); for _ in 0..rng.gen_range(0..128) { let key = AsciiString::default().generate(rng); let val = rng.gen(); rec.insert(key, val); } Entry { time: rng.gen(), record: rec, } } } #[derive(Serialize_tuple)] struct FluentForward { tag: String, entries: Vec<Entry>, } impl Distribution<FluentForward> for Standard { fn sample<R>(&self, rng: &mut R) -> FluentForward where R: Rng + ?Sized, { let total_entries = rng.gen_range(0..32); FluentForward { tag: AsciiString::default().generate(rng), entries: rng.sample_iter(Standard).take(total_entries).collect(), } } } #[derive(serde::Serialize)] 
struct FluentMessage {
    tag: String,
    time: u32,
    record: HashMap<String, RecordValue>, // always contains 'message' key
}

impl Distribution<FluentMessage> for Standard {
    fn sample<R>(&self, rng: &mut R) -> FluentMessage
    where
        R: Rng + ?Sized,
    {
        let mut rec = HashMap::new();
        rec.insert(String::from("message"), rng.gen());
        for _ in 0..rng.gen_range(0..128) {
            let key = AsciiString::default().generate(rng);
            let val = rng.gen();
            rec.insert(key, val);
        }
        FluentMessage {
            tag: AsciiString::default().generate(rng),
            time: rng.gen(),
            record: rec,
        }
    }
}

/// A protocol member: either a single Message or a Forward batch.
#[derive(serde::Serialize)]
#[serde(untagged)]
enum Member {
    Message(FluentMessage),
    Forward(FluentForward),
}

impl Distribution<Member> for Standard {
    fn sample<R>(&self, rng: &mut R) -> Member
    where
        R: Rng + ?Sized,
    {
        match rng.gen_range(0..2) {
            0 => Member::Message(rng.gen()),
            1 => Member::Forward(rng.gen()),
            // NOTE(review): the sibling RecordValue impl uses `unreachable!()`
            // for this same impossible arm — consider matching it for
            // consistency.
            _ => unimplemented!(),
        }
    }
}

impl Serialize for Fluent {
    /// Writes msgpack-encoded Members into `writer`, keeping the total
    /// serialized size at or below `max_bytes` (writes nothing when
    /// `max_bytes < 16`).
    fn to_bytes<W, R>(&self, mut rng: R, max_bytes: usize, writer: &mut W) -> Result<(), Error>
    where
        W: Write,
        R: Rng + Sized,
    {
        if max_bytes < 16 {
            // 16 is just an arbitrarily big constant
            return Ok(());
        }
        // We will arbitrarily generate 1_000 Member instances and then
        // serialize. If this is below `max_bytes` we'll add more until we're
        // over. Once we are we'll start removing instances until we're back
        // below the limit.
        //
        // NOTE(review): the batch size actually used below is 10, not 1_000 —
        // the comment above is stale; update one or the other.
        let mut members: Vec<Vec<u8>> = Standard
            .sample_iter(&mut rng)
            .take(10)
            .map(|m: Member| rmp_serde::to_vec(&m).unwrap())
            .collect();
        // Search for too many Member instances.
        loop {
            let encoding_len = members[0..].iter().fold(0, |acc, m| acc + m.len());
            if encoding_len > max_bytes {
                break;
            }
            members.extend(
                Standard
                    .sample_iter(&mut rng)
                    .take(10)
                    .map(|m: Member| rmp_serde::to_vec(&m).unwrap()),
            );
        }
        // Search for an encoding that's just right.
let mut high = members.len();
        // Halve the candidate prefix until it fits in `max_bytes`, then emit
        // it. NOTE(review): halving can overshoot — a linear walk back from
        // the failing size would pack closer to the limit; confirm the loose
        // fit is acceptable for this generator.
        loop {
            let encoding_len = members[0..high].iter().fold(0, |acc, m| acc + m.len());
            if encoding_len > max_bytes {
                high /= 2;
            } else {
                for m in &members[0..high] {
                    writer.write_all(m)?;
                }
                break;
            }
        }
        Ok(())
    }
}

#[cfg(test)]
mod test {
    use proptest::prelude::*;
    use rand::{rngs::SmallRng, SeedableRng};

    use crate::payload::{Fluent, Serialize};

    // We want to be sure that the serialized size of the payload does not
    // exceed `max_bytes`.
    proptest! {
        #[test]
        fn payload_not_exceed_max_bytes(seed: u64, max_bytes: u16) {
            let max_bytes = max_bytes as usize;
            let rng = SmallRng::seed_from_u64(seed);
            let fluent = Fluent::default();

            let mut bytes = Vec::with_capacity(max_bytes);
            fluent.to_bytes(rng, max_bytes, &mut bytes).unwrap();
            // NOTE(review): msgpack output is not guaranteed to be valid
            // UTF-8, so the diagnostic `from_utf8(...).unwrap()` can itself
            // panic when the assertion fires — consider `{:x?}` on the raw
            // bytes instead.
            debug_assert!(
                bytes.len() <= max_bytes,
                "{:?}",
                std::str::from_utf8(&bytes).unwrap()
            );
        }
    }
}
use std::{ borrow::BorrowMut, cell::RefCell, ops::{Deref, DerefMut}, }; use azuki_tac::{Branch, FunctionCall, Inst, TacFunc}; use crate::Frame; /// Trait for collecting data about the code running. pub trait Inspector { /// Called before every instruction is runned. fn before_inst(&mut self, inst: &Inst, frame: &Frame); /// Called before every instruction is runned. fn before_branch(&mut self, inst: &Branch, frame: &Frame); /// Called before every function call. fn before_call(&mut self, params: &[i64], func: &TacFunc); /// Called after every function is returned fn before_ret(&mut self, frame: &Frame); } impl<R, T> Inspector for R where R: DerefMut<Target = T>, T: Inspector, { fn before_inst(&mut self, inst: &Inst, frame: &Frame) { self.borrow_mut().before_inst(inst, frame) } fn before_branch(&mut self, inst: &Branch, frame: &Frame) { self.borrow_mut().before_branch(inst, frame) } fn before_call(&mut self, params: &[i64], func: &TacFunc) { self.borrow_mut().before_call(params, func) } fn before_ret(&mut self, frame: &Frame) { self.borrow_mut().before_ret(frame) } }
use core::{alloc::Layout, cell::RefCell, ptr::NonNull};
use cortex_m::interrupt::Mutex;
use rlsf::Tlsf;

extern crate alloc;

/// A global allocator for Cortex-M targets: an rlsf TLSF heap guarded by a
/// critical-section `Mutex` + `RefCell`, so it is safe to use from thread
/// mode and interrupt handlers alike.
pub struct MyAllocator(Mutex<RefCell<Tlsf<'static, u8, u8, 8, 8>>>);

impl MyAllocator {
    /// Creates an empty allocator; call `set_pool` before any allocation.
    pub const fn init() -> Self {
        Self(Mutex::new(RefCell::new(Tlsf::INIT)))
    }

    /// Donates the memory region `[pool, pool+len)` to the heap.
    ///
    /// Returns the number of bytes that could NOT be used (`len` minus the
    /// bytes actually absorbed by the TLSF pool).
    ///
    /// # Safety
    /// `pool` must point to writable memory of at least `len` bytes that is
    /// never touched by anything else for the allocator's lifetime.
    pub unsafe fn set_pool(&self, pool: *mut u8, len: usize) -> usize {
        cortex_m::interrupt::free(|cs| {
            let mut tlsf = self.0.borrow(cs).borrow_mut();
            len - tlsf.append_free_block_ptr(
                NonNull::new(core::ptr::slice_from_raw_parts_mut(pool, len))
                    .expect("a null pointer was supplied"),
            )
        })
    }
}

unsafe impl alloc::alloc::GlobalAlloc for MyAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        cortex_m::interrupt::free(|cs| {
            let mut tlsf = self.0.borrow(cs).borrow_mut();
            // GlobalAlloc signals failure with a null pointer.
            tlsf.allocate(layout)
                .map_or(core::ptr::null_mut(), |p| p.as_ptr())
        })
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        let ptr = NonNull::new(ptr);
        // Freeing null is a no-op, mirroring C `free`.
        if ptr.is_none() {
            return;
        }
        cortex_m::interrupt::free(|cs| {
            let mut tlsf = self.0.borrow(cs).borrow_mut();
            // rlsf's deallocate takes the alignment, not the full Layout.
            tlsf.deallocate(ptr.unwrap(), layout.align())
        })
    }
}
use wooting_sdk::Key;

/// Every key on a full-size Wooting keyboard, listed in physical layout
/// order (function row first, then each row left to right).
///
/// Cleanup: `&'static [Key]` became `&[Key]` — `'static` is implied for
/// references in `const`/`static` items, so the explicit lifetime was
/// redundant (clippy::redundant_static_lifetimes).
pub const ALL_KEYS: &[Key] = &[
    // Function row and indicator/mode keys.
    Key::Escape, Key::F1, Key::F2, Key::F3, Key::F4, Key::F5, Key::F6, Key::F7, Key::F8,
    Key::F9, Key::F10, Key::F11, Key::F12, Key::PrintScreen, Key::Pause, Key::ScrollLock,
    Key::A1, Key::A2, Key::A3, Key::Mode,
    // Number row and navigation/numpad cluster.
    Key::Tilde, Key::One, Key::Two, Key::Three, Key::Four, Key::Five, Key::Six, Key::Seven,
    Key::Eight, Key::Nine, Key::Zero, Key::Dash, Key::Equals, Key::Backspace, Key::Insert,
    Key::Home, Key::PageUp, Key::NumLock, Key::NumDivide, Key::NumMultiply, Key::NumSubtract,
    // Top letter row.
    Key::Tab, Key::Q, Key::W, Key::E, Key::R, Key::T, Key::Y, Key::U, Key::I, Key::O, Key::P,
    Key::LeftBracket, Key::RightBracket, Key::Backslash, Key::Delete, Key::End, Key::PageDown,
    Key::NumSeven, Key::NumEight, Key::NumNine, Key::NumAddition,
    // Home row.
    Key::CapsLock, Key::A, Key::S, Key::D, Key::F, Key::G, Key::H, Key::J, Key::K, Key::L,
    Key::SemiColon, Key::Apostrophe, Key::ISO1, Key::Return, Key::NumFour, Key::NumFive,
    Key::NumSix,
    // Bottom letter row.
    Key::LeftShift, Key::ISO2, Key::Z, Key::X, Key::C, Key::V, Key::B, Key::N, Key::M,
    Key::Comma, Key::Period, Key::ForwardSlash, Key::RightShift, Key::UpArrow, Key::NumOne,
    Key::NumTwo, Key::NumThree, Key::NumReturn,
    // Modifier row and arrows.
    Key::LeftControl, Key::LeftMod, Key::LeftAlt, Key::Space, Key::RightAlt, Key::RightMod,
    Key::Fn, Key::RightControl, Key::LeftArrow, Key::DownArrow, Key::RightArrow, Key::NumZero,
    Key::NumDelete,
];
use actix_web::{
    dev::{Payload, PayloadStream},
    error, FromRequest, HttpRequest,
};
use futures_util::future::{ready, Ready};

/// The client's certificate chain as raw DER-encoded certificates.
#[derive(Clone, Debug)]
pub struct ClientCertificateChain(pub Vec<Vec<u8>>);

// Expands to the `ClientCertificateRetriever` trait definition; emitted as a
// macro so downstream crates (actix or ntex flavored) can instantiate it in
// their own module.
#[macro_export]
macro_rules! retriever {
    () => {
        /// Retrieve client certificates, possibly from a TLS stream.
        ///
        /// This trait can be implemented for underlying transport mechanisms to hand over the client
        /// certificates.
        ///
        /// There are default implementations for OpenSSL and RusTLS. Works with ntex and actix.
        pub trait ClientCertificateRetriever {
            fn client_certs(&self) -> Option<$crate::x509::ClientCertificateChain>;
        }
    };
}

// Retriever impl for a rustls-backed stream. NOTE(review): `impl<T>` assumes
// `$name` mentions the generic parameter `T` (e.g. `TlsStream<T>`) — confirm
// at each call site.
#[macro_export]
macro_rules! retriever_rustls {
    ($name:ty) => {
        impl<T> ClientCertificateRetriever for $name {
            fn client_certs(&self) -> Option<$crate::x509::ClientCertificateChain> {
                log::debug!("Try extracting client cert: using rustls");
                self.get_ref()
                    .1
                    .get_peer_certificates()
                    .map(|certs| certs.iter().map(|cert| cert.0.clone()).collect())
                    .map($crate::x509::ClientCertificateChain)
            }
        }
    };
}

// Retriever impl for an OpenSSL-backed stream; converts each certificate in
// the peer chain to DER. Conversion failure is logged and yields `None`.
#[macro_export]
macro_rules! retriever_openssl {
    ($name:ty) => {
        impl<T> ClientCertificateRetriever for $name {
            fn client_certs(&self) -> Option<$crate::x509::ClientCertificateChain> {
                log::debug!("Try extracting client cert: using OpenSSL");
                let chain = self.ssl().verified_chain();
                // **NOTE:** This chain (despite the function name) is **NOT** verified.
                // These are the client certificates, which will be passed on to the authentication service.
                let chain = chain
                    .map(|chain| {
                        log::debug!("Peer cert chain len: {}", chain.len());
                        chain
                            .into_iter()
                            .map(|cert| cert.to_der())
                            .collect::<Result<Vec<_>, _>>()
                    })
                    .transpose()
                    .unwrap_or_else(|err| {
                        log::info!("Failed to retrieve client certificate: {}", err);
                        None
                    });
                log::debug!("Client certificates: {:?}", chain);
                chain.map($crate::x509::ClientCertificateChain)
            }
        }
    };
}

// The `retriever_none!` macro definition continues on the next line of this
// file (the name and body follow this `macro_rules!` keyword).
#[macro_export]
macro_rules!
retriever_none {
    ($name:ty) => {
        impl ClientCertificateRetriever for $name {
            fn client_certs(&self) -> Option<$crate::x509::ClientCertificateChain> {
                // we have no certificates
                None
            }
        }
    };
}

impl FromRequest for ClientCertificateChain {
    type Config = ();
    type Error = actix_web::Error;
    type Future = Ready<Result<Self, Self::Error>>;

    /// Extracts a `ClientCertificateChain` previously stored in the request
    /// extensions (by the transport layer); responds 400 Bad Request when
    /// none is present.
    fn from_request(req: &HttpRequest, _payload: &mut Payload<PayloadStream>) -> Self::Future {
        let result = req.extensions().get::<ClientCertificateChain>().cloned();
        ready(result.ok_or(error::ErrorBadRequest("Missing certificate chain")))
    }
}
use std::io;
use std::str::FromStr;
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;

/// Returns true when `second` can be formed from `first` by deleting exactly
/// one character (the "word funnel" puzzle).
///
/// Fixes: walks char boundaries via `char_indices` instead of byte indices,
/// so multi-byte UTF-8 input no longer panics inside `replace_range`.
fn funnel(first: String, second: String) -> bool {
    first.char_indices().any(|(i, c)| {
        let mut word = first.clone();
        word.replace_range(i..i + c.len_utf8(), "");
        word == second
    })
}

/// Bonus: every dictionary word reachable from `word` by deleting one
/// character, deduplicated.
///
/// Fixes vs. the original: `None() => {}` was a syntax error; `dict.get` /
/// `output.insert` were called with `String` where `&str` keys were needed;
/// and collecting `keys()` (which yields references) into `Vec<String>` did
/// not type-check. The results are now accumulated directly in a `Vec`.
fn bonus(word: String, dict: HashMap<&str, bool>) -> Vec<String> {
    let mut found: Vec<String> = Vec::new();
    for (i, c) in word.char_indices() {
        let mut candidate = word.clone();
        candidate.replace_range(i..i + c.len_utf8(), "");
        // Deleting different positions can produce the same word; dedupe.
        if dict.contains_key(candidate.as_str()) && !found.contains(&candidate) {
            found.push(candidate);
        }
    }
    found
}

fn main() {
    // Fix: `File` and the `Read` trait were used without being imported.
    let mut file = File::open("../../assets/enable1.txt").expect("file not found");
    let mut strings = String::new();
    file.read_to_string(&mut strings)
        .expect("something went wrong");
    // Word list -> presence map (the bool value is always `true`).
    let dict: HashMap<&str, bool> = strings
        .split_whitespace()
        .map(|word| (word, true))
        .collect();
    // NOTE(review): `dict` / `bonus` are built but never wired to the I/O
    // below — presumably the bonus round was left unfinished.
    let _ = &dict;

    let mut input: String = String::new();
    io::stdin()
        .read_line(&mut input)
        .expect("failed to read line");
    let mut input = input.trim().split_whitespace();
    let first = String::from(input.next().unwrap_or_default());
    let second = String::from(input.next().unwrap_or_default());
    if funnel(first, second) {
        println!("yay it matched");
    } else {
        println!("heck, it didn't match")
    }
}
#![no_std] // don't link the rust std library #![no_main] // disable all Rust-level entry points mod vga_buffer; use core::panic::PanicInfo; static HELLO: &[u8] = b"Hello World!"; #[no_mangle] // don't mangle the name of the function pub extern "C" fn _start() -> ! { // This is the entry point of the program // The function is named as _start since // the linker looks for the function called // `_start` by default use core::fmt::Write; println!("Hello World{}", "!"); loop {} } // This function is called on panic (Handling unrecoverable errors) #[panic_handler] fn panic(info: &PanicInfo) -> ! { println!("{}", info); loop {} }
use super::entity::Entity;
use super::object::Object;
use ptgui::prelude::*;
use serde::Deserialize;

/// A deserialized game map: its display name, the player spawn location, and
/// the static objects / dynamic entities placed on it.
#[derive(Deserialize, Debug)]
pub struct Map {
    pub map_name: String,
    // `Point` comes from ptgui's prelude.
    pub spawn_point: Point,
    pub objects: Vec<Object>,
    pub entities: Vec<Entity>,
}
// Generated by AutoRust: feature-gated exports of each Azure API-version
// module; exactly one version's `models`/`operations` is re-exported unless
// the `no-default-version` feature is set. Generated code — regenerate
// rather than editing by hand. (Continues across the following lines.)
#![allow(clippy::module_inception)] #![allow(clippy::too_many_arguments)] #![allow(clippy::ptr_arg)] #![allow(clippy::large_enum_variant)] #![doc = "generated by AutoRust 0.1.0"] #[cfg(feature = "package-2021-01-01-preview-only")] pub mod package_2021_01_01_preview_only; #[cfg(all(feature = "package-2021-01-01-preview-only", not(feature = "no-default-version")))] pub use package_2021_01_01_preview_only::{models, operations, operations::Error}; #[cfg(feature = "package-2020-10-01-preview-only")] pub mod package_2020_10_01_preview_only; #[cfg(all(feature = "package-2020-10-01-preview-only", not(feature = "no-default-version")))] pub use package_2020_10_01_preview_only::{models, operations, operations::Error}; #[cfg(feature = "profile-hybrid-2020-09-01")] pub mod profile_hybrid_2020_09_01; #[cfg(all(feature = "profile-hybrid-2020-09-01", not(feature = "no-default-version")))] pub use profile_hybrid_2020_09_01::{models, operations, operations::Error}; #[cfg(feature = "package-2020-08-01-preview")] pub mod package_2020_08_01_preview; #[cfg(all(feature = "package-2020-08-01-preview", not(feature = "no-default-version")))] pub use package_2020_08_01_preview::{models, operations, operations::Error}; #[cfg(feature = "package-2020-04-01-preview")] pub mod package_2020_04_01_preview; #[cfg(all(feature = "package-2020-04-01-preview", not(feature = "no-default-version")))] pub use package_2020_04_01_preview::{models, operations, operations::Error}; #[cfg(feature = "package-2020-04-01-preview-only")] pub mod package_2020_04_01_preview_only; #[cfg(all(feature = "package-2020-04-01-preview-only", not(feature = "no-default-version")))] pub use package_2020_04_01_preview_only::{models, operations, operations::Error}; #[cfg(feature = "package-2020-03-01-preview")] pub mod package_2020_03_01_preview; #[cfg(all(feature = "package-2020-03-01-preview", not(feature = "no-default-version")))] pub use package_2020_03_01_preview::{models, operations, operations::Error}; #[cfg(feature =
// (generated, continued) More feature-gated API-version modules; this line
// begins inside the `#[cfg(feature = ...)]` attribute opened above.
"package-2019-08-01-preview-only")] pub mod package_2019_08_01_preview_only; #[cfg(all(feature = "package-2019-08-01-preview-only", not(feature = "no-default-version")))] pub use package_2019_08_01_preview_only::{models, operations, operations::Error}; #[cfg(feature = "profile-hybrid-2019-03-01")] pub mod profile_hybrid_2019_03_01; #[cfg(all(feature = "profile-hybrid-2019-03-01", not(feature = "no-default-version")))] pub use profile_hybrid_2019_03_01::{models, operations, operations::Error}; #[cfg(feature = "package-2018-09-01-preview")] pub mod package_2018_09_01_preview; #[cfg(all(feature = "package-2018-09-01-preview", not(feature = "no-default-version")))] pub use package_2018_09_01_preview::{models, operations, operations::Error}; #[cfg(feature = "package-2018-09-01-preview-only")] pub mod package_2018_09_01_preview_only; #[cfg(all(feature = "package-2018-09-01-preview-only", not(feature = "no-default-version")))] pub use package_2018_09_01_preview_only::{models, operations, operations::Error}; #[cfg(feature = "package-2018-07-01-preview")] pub mod package_2018_07_01_preview; #[cfg(all(feature = "package-2018-07-01-preview", not(feature = "no-default-version")))] pub use package_2018_07_01_preview::{models, operations, operations::Error}; #[cfg(feature = "package-2018-07-01-preview-only")] pub mod package_2018_07_01_preview_only; #[cfg(all(feature = "package-2018-07-01-preview-only", not(feature = "no-default-version")))] pub use package_2018_07_01_preview_only::{models, operations, operations::Error}; #[cfg(feature = "package-2018-01-01-preview")] pub mod package_2018_01_01_preview; #[cfg(all(feature = "package-2018-01-01-preview", not(feature = "no-default-version")))] pub use package_2018_01_01_preview::{models, operations, operations::Error}; #[cfg(feature = "package-2018-01-01-preview-only")] pub mod package_2018_01_01_preview_only; #[cfg(all(feature = "package-2018-01-01-preview-only", not(feature = "no-default-version")))] pub use
// (generated, continued) Remaining API-version modules plus the start of the
// OperationConfig builder used by the generated operations.
package_2018_01_01_preview_only::{models, operations, operations::Error}; #[cfg(feature = "package-2017-10-01-preview")] pub mod package_2017_10_01_preview; #[cfg(all(feature = "package-2017-10-01-preview", not(feature = "no-default-version")))] pub use package_2017_10_01_preview::{models, operations, operations::Error}; #[cfg(feature = "package-2017-10-01-preview-only")] pub mod package_2017_10_01_preview_only; #[cfg(all(feature = "package-2017-10-01-preview-only", not(feature = "no-default-version")))] pub use package_2017_10_01_preview_only::{models, operations, operations::Error}; #[cfg(feature = "package-2015-07-01")] pub mod package_2015_07_01; #[cfg(all(feature = "package-2015-07-01", not(feature = "no-default-version")))] pub use package_2015_07_01::{models, operations, operations::Error}; #[cfg(feature = "package-2015-06-01-preview")] pub mod package_2015_06_01_preview; use azure_core::setters; #[cfg(all(feature = "package-2015-06-01-preview", not(feature = "no-default-version")))] pub use package_2015_06_01_preview::{models, operations, operations::Error}; pub fn config( http_client: std::sync::Arc<dyn azure_core::HttpClient>, token_credential: Box<dyn azure_core::TokenCredential>, ) -> OperationConfigBuilder { OperationConfigBuilder { http_client, base_path: None, token_credential, token_credential_resource: None, } } pub struct OperationConfigBuilder { http_client: std::sync::Arc<dyn azure_core::HttpClient>, base_path: Option<String>, token_credential: Box<dyn azure_core::TokenCredential>, token_credential_resource: Option<String>, } impl OperationConfigBuilder { setters!
// (generated, continued) `setters!` macro body for the builder, followed by
// `build()` (which fills in Azure management defaults for unset fields) and
// the read-only OperationConfig accessors.
{ base_path : String => Some (base_path) , token_credential_resource : String => Some (token_credential_resource) , } pub fn build(self) -> OperationConfig { OperationConfig { http_client: self.http_client, base_path: self.base_path.unwrap_or_else(|| "https://management.azure.com".to_owned()), token_credential: Some(self.token_credential), token_credential_resource: self .token_credential_resource .unwrap_or_else(|| "https://management.azure.com/".to_owned()), } } } pub struct OperationConfig { http_client: std::sync::Arc<dyn azure_core::HttpClient>, base_path: String, token_credential: Option<Box<dyn azure_core::TokenCredential>>, token_credential_resource: String, } impl OperationConfig { pub fn http_client(&self) -> &dyn azure_core::HttpClient { self.http_client.as_ref() } pub fn base_path(&self) -> &str { self.base_path.as_str() } pub fn token_credential(&self) -> Option<&dyn azure_core::TokenCredential> { self.token_credential.as_deref() } pub fn token_credential_resource(&self) -> &str { self.token_credential_resource.as_str() } }
use crate::{Config, Result};
use axum::handler::post;
use axum::{AddExtensionLayer, Router};
use std::sync::Arc;

use discord_alerts::AlertClient;

/// Shared application state: configuration plus the Discord alert client.
pub struct Server {
    pub config: Config,
    pub alert_client: AlertClient,
}

impl Server {
    /// Bundles the configuration and alert client into a server value.
    pub fn new(config: Config, alert_client: AlertClient) -> Self {
        Self {
            config,
            alert_client,
        }
    }

    /// Consumes the server and serves the HTTP API on the configured address
    /// until the process exits or binding/serving fails.
    pub async fn start(self) -> Result<()> {
        let listen_addr = self.config.server.address.parse()?;
        // Handlers receive the whole server through an extension layer.
        let shared_state = Arc::new(self);
        let routes = Router::new()
            .route("/promote", post(super::promote_handler))
            .layer(AddExtensionLayer::new(shared_state));
        hyper::Server::bind(&listen_addr)
            .serve(routes.into_make_service())
            .await?;
        Ok(())
    }
}
use crate::strategy::Bet;
use crate::common::{BetContext, BetRecord, Environment};
use crate::strategy::Strategy;
use crate::utils;

/// The final betting context after a simulation run.
pub(crate) type SimulationResult = BetContext;

/// Repeatedly places bets chosen by `strategy` until `utils::should_end`
/// signals the session is over, returning the final context.
pub(crate) fn simulate(
    env: Environment,
    strategy: &dyn Strategy,
    context_builder: impl Fn() -> BetContext,
) -> SimulationResult {
    let mut context = context_builder();
    while !utils::should_end(&context) {
        perform_bet_strat(strategy, &mut context, &env)
    }
    context
}

/// Executes a single bet round: asks the strategy for a wager, resolves it
/// via `utils::bet_result`, and updates bankroll, records, loss streak, and
/// the bankroll high-water mark.
fn perform_bet_strat(strat: &dyn Strategy, context: &mut BetContext, env: &Environment) {
    let bet = strat.bet(context);
    match bet {
        Bet::Hit(bet_amount) => {
            // Clamp the wager to the available bankroll. NOTE(review): a
            // zero or negative bet also falls into the all-in branch —
            // confirm that betting *everything* in that case is intended.
            let bet_amount = if bet_amount > 0 && bet_amount <= context.total_money {
                bet_amount
            } else {
                context.total_money
            };
            let before_bet_tot = context.total_money;
            context.total_money -= bet_amount;
            let res = utils::bet_result(env, bet_amount);
            if let Some(win_amount) = res {
                // Win: `win_amount` is the gross payout (it is added back in
                // full), while the record stores the net profit.
                context.total_money += win_amount;
                context
                    .records
                    .push(BetRecord::Win(win_amount - bet_amount, before_bet_tot));
                context.consec_bet_loses.clear();
                if context.total_money > context.max_total_money {
                    context.max_total_money = context.total_money;
                }
            } else {
                // Loss: extend the consecutive-loss streak.
                context.consec_bet_loses.push(bet_amount);
                context
                    .records
                    .push(BetRecord::Lose(bet_amount, before_bet_tot));
            }
        }
    }
}
//! The `accountant_stub` module is a client-side object that interfaces with a server-side Accountant //! object via the network interface exposed by AccountantSkel. Client code should use //! this object instead of writing messages to the network directly. The binary //! encoding of its messages are unstable and may change in future releases. use accountant_skel::{Request, Response, Subscription}; use bincode::{deserialize, serialize}; use futures::future::{ok, FutureResult}; use hash::Hash; use signature::{KeyPair, PublicKey, Signature}; use std::collections::HashMap; use std::io; use std::net::UdpSocket; use transaction::Transaction; pub struct AccountantStub { pub addr: String, pub socket: UdpSocket, last_id: Option<Hash>, num_events: u64, balances: HashMap<PublicKey, Option<i64>>, } impl AccountantStub { /// Create a new AccountantStub that will interface with AccountantSkel /// over `socket`. To receive responses, the caller must bind `socket` /// to a public address before invoking AccountantStub methods. 
pub fn new(addr: &str, socket: UdpSocket) -> Self {
        let stub = AccountantStub {
            addr: addr.to_string(),
            socket,
            last_id: None,
            num_events: 0,
            balances: HashMap::new(),
        };
        stub.init();
        stub
    }

    /// Subscribes to EntryInfo notifications. The send result is discarded —
    /// a dropped packet here silently disables entry updates for this stub.
    pub fn init(&self) {
        let subscriptions = vec![Subscription::EntryInfo];
        let req = Request::Subscribe { subscriptions };
        let data = serialize(&req).expect("serialize Subscribe");
        let _res = self.socket.send_to(&data, &self.addr);
    }

    /// Receives and deserializes one response datagram (blocking).
    // NOTE(review): the buffer is a fixed 1024 bytes — a larger datagram is
    // truncated by `recv_from` and will then panic in `deserialize`.
    pub fn recv_response(&self) -> io::Result<Response> {
        let mut buf = vec![0u8; 1024];
        self.socket.recv_from(&mut buf)?;
        let resp = deserialize(&buf).expect("deserialize balance");
        Ok(resp)
    }

    /// Folds one response into the stub's cached state.
    pub fn process_response(&mut self, resp: Response) {
        match resp {
            Response::Balance { key, val } => {
                self.balances.insert(key, val);
            }
            Response::LastId { id } => {
                self.last_id = Some(id);
            }
            Response::EntryInfo(entry_info) => {
                self.last_id = Some(entry_info.id);
                self.num_events += entry_info.num_events;
            }
        }
    }

    /// Send a signed Transaction to the server for processing. This method
    /// does not wait for a response.
    pub fn transfer_signed(&self, tr: Transaction) -> io::Result<usize> {
        let req = Request::Transaction(tr);
        let data = serialize(&req).unwrap();
        self.socket.send_to(&data, &self.addr)
    }

    /// Creates, signs, and processes a Transaction. Useful for writing unit-tests.
    pub fn transfer(
        &self,
        n: i64,
        keypair: &KeyPair,
        to: PublicKey,
        last_id: &Hash,
    ) -> io::Result<Signature> {
        let tr = Transaction::new(keypair, to, n, *last_id);
        let sig = tr.sig;
        self.transfer_signed(tr).map(|_| sig)
    }

    /// Request the balance of the user holding `pubkey`. This method blocks
    /// until the server sends a response. If the response packet is dropped
    /// by the network, this method will hang indefinitely.
pub fn get_balance(&mut self, pubkey: &PublicKey) -> FutureResult<i64, i64> {
        let req = Request::GetBalance { key: *pubkey };
        let data = serialize(&req).expect("serialize GetBalance");
        self.socket
            .send_to(&data, &self.addr)
            .expect("buffer error");
        let mut done = false;
        while !done {
            let resp = self.recv_response().expect("recv response");
            // Keep processing responses until the balance for *this* key
            // arrives; all responses still update the cache.
            if let &Response::Balance { ref key, .. } = &resp {
                done = key == pubkey;
            }
            self.process_response(resp);
        }
        // NOTE(review): panics when the cached balance is `None`. Also, the
        // returned future is already resolved — all blocking happened above.
        ok(self.balances[pubkey].unwrap())
    }

    /// Request the last Entry ID from the server. This method blocks
    /// until the server sends a response. At the time of this writing,
    /// it also has the side-effect of causing the server to log any
    /// entries that have been published by the Historian.
    pub fn get_last_id(&mut self) -> FutureResult<Hash, ()> {
        let req = Request::GetLastId;
        let data = serialize(&req).expect("serialize GetId");
        self.socket
            .send_to(&data, &self.addr)
            .expect("buffer error");
        let mut done = false;
        while !done {
            let resp = self.recv_response().expect("recv response");
            if let &Response::LastId { .. } = &resp {
                done = true;
            }
            self.process_response(resp);
        }
        ok(self.last_id.unwrap_or(Hash::default()))
    }

    /// Return the number of transactions the server processed since creating
    /// this stub instance.
    pub fn transaction_count(&mut self) -> u64 {
        // Wait for at least one EntryInfo.
        let mut done = false;
        while !done {
            let resp = self.recv_response().expect("recv response");
            if let &Response::EntryInfo(_) = &resp {
                done = true;
            }
            self.process_response(resp);
        }
        // Then take the rest.
self.socket.set_nonblocking(true).expect("set nonblocking");
        // Drain any queued responses without blocking, then restore blocking
        // mode so subsequent calls behave as before.
        loop {
            match self.recv_response() {
                Err(_) => break,
                Ok(resp) => self.process_response(resp),
            }
        }
        self.socket.set_nonblocking(false).expect("set blocking");
        self.num_events
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use accountant::Accountant;
    use accountant_skel::AccountantSkel;
    use futures::Future;
    use historian::Historian;
    use mint::Mint;
    use signature::{KeyPair, KeyPairUtil};
    use std::io::sink;
    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::mpsc::sync_channel;
    use std::sync::{Arc, Mutex};
    use std::thread::sleep;
    use std::time::Duration;

    // TODO: Figure out why this test sometimes hangs on TravisCI.
    #[test]
    fn test_accountant_stub() {
        let addr = "127.0.0.1:9000";
        let send_addr = "127.0.0.1:9001";
        let alice = Mint::new(10_000);
        let acc = Accountant::new(&alice);
        let bob_pubkey = KeyPair::new().pubkey();
        let exit = Arc::new(AtomicBool::new(false));
        let (input, event_receiver) = sync_channel(10);
        let historian = Historian::new(event_receiver, &alice.last_id(), Some(30));
        let acc = Arc::new(Mutex::new(AccountantSkel::new(
            acc,
            alice.last_id(),
            sink(),
            input,
            historian,
        )));
        let _threads = AccountantSkel::serve(&acc, addr, exit.clone()).unwrap();
        // Give the server thread time to come up before sending.
        sleep(Duration::from_millis(300));

        let socket = UdpSocket::bind(send_addr).unwrap();
        socket.set_read_timeout(Some(Duration::new(5, 0))).unwrap();
        let mut acc = AccountantStub::new(addr, socket);
        let last_id = acc.get_last_id().wait().unwrap();
        let _sig = acc.transfer(500, &alice.keypair(), bob_pubkey, &last_id)
            .unwrap();
        assert_eq!(acc.get_balance(&bob_pubkey).wait().unwrap(), 500);
        exit.store(true, Ordering::Relaxed);
    }
}
// JNI bridge for Android: `Java_org_notmandatory_echojni_Lib_echo` receives a
// Java string, logs it via android_logger, sleeps 1ms, and echoes it back.
// NOTE(review): when `env.get_string` fails, the JSON error string becomes the
// value of `incoming_string` and is *echoed* to the caller instead of being
// returned immediately — confirm that is intended. Also
// `unwrap_or(JObject::null().into_inner())` evaluates its argument eagerly;
// `unwrap_or_else` would avoid that, and `CStr` appears imported but unused.
#[macro_use] extern crate serde_json; use serde::ser::{SerializeStruct, Serializer}; use serde::{Deserialize, Serialize}; #[allow(unused_imports)] use log::{debug, error, info, trace}; #[allow(non_snake_case)] pub mod android { use std::{thread, time}; use std::ffi::{CString, CStr}; use jni::JNIEnv; use jni::objects::{JClass, JString, JObject}; use jni::sys::jstring; use crate::*; #[derive(Debug, Serialize)] struct JNIError { error: String, code: i32, } fn string_to_jstring(env: &JNIEnv, input: &str) -> Result<jstring, String> { let cstring = CString::new(input).map_err(|e| format!("{:?}", e))?; let cstr = cstring.to_str().map_err(|e| format!("{:?}", e))?; let output = env.new_string(cstr).map_err(|e| format!("{:?}", e))?; Ok(output.into_inner()) } impl ToString for JNIError { fn to_string(&self) -> String { serde_json::to_string(self) .unwrap_or("{\"error\": \"Can't serialize error\", \"code\": -1000}".to_string()) } } #[no_mangle] pub unsafe extern "C" fn Java_org_notmandatory_echojni_Lib_echo( env: JNIEnv, _: JClass, incoming_jstring: JString, ) -> jstring { android_logger::init_once( android_logger::Config::default().with_min_level(log::Level::Debug), ); let incoming_string: String = match env.get_string(incoming_jstring) { Ok(string) => string.into(), Err(e) => { JNIError { error: format!("Invalid input string: {:?}", e), code: -1001, }.to_string() } }; let echo_string = incoming_string.clone(); debug!("echo \"{}\"", &echo_string); thread::sleep(time::Duration::from_millis(1)); string_to_jstring(&env, &echo_string).unwrap_or(JObject::null().into_inner()) } }
// svd2rust-generated reader/writer accessors for the FDCAN_TTMLM (TT Matrix
// Limits) register: CCM (bits 0:5), CSS (6:7), TXEW (8:11), ENTT (16:27).
// Generated code — regenerate rather than editing by hand. (Continues on the
// next line of this file.)
#[doc = "Register `FDCAN_TTMLM` reader"] pub type R = crate::R<FDCAN_TTMLM_SPEC>; #[doc = "Register `FDCAN_TTMLM` writer"] pub type W = crate::W<FDCAN_TTMLM_SPEC>; #[doc = "Field `CCM` reader - Cycle Count Max"] pub type CCM_R = crate::FieldReader; #[doc = "Field `CCM` writer - Cycle Count Max"] pub type CCM_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 6, O>; #[doc = "Field `CSS` reader - Cycle Start Synchronization"] pub type CSS_R = crate::FieldReader; #[doc = "Field `CSS` writer - Cycle Start Synchronization"] pub type CSS_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>; #[doc = "Field `TXEW` reader - Tx Enable Window"] pub type TXEW_R = crate::FieldReader; #[doc = "Field `TXEW` writer - Tx Enable Window"] pub type TXEW_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>; #[doc = "Field `ENTT` reader - Expected Number of Tx Triggers"] pub type ENTT_R = crate::FieldReader<u16>; #[doc = "Field `ENTT` writer - Expected Number of Tx Triggers"] pub type ENTT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 12, O, u16>; impl R { #[doc = "Bits 0:5 - Cycle Count Max"] #[inline(always)] pub fn ccm(&self) -> CCM_R { CCM_R::new((self.bits & 0x3f) as u8) } #[doc = "Bits 6:7 - Cycle Start Synchronization"] #[inline(always)] pub fn css(&self) -> CSS_R { CSS_R::new(((self.bits >> 6) & 3) as u8) } #[doc = "Bits 8:11 - Tx Enable Window"] #[inline(always)] pub fn txew(&self) -> TXEW_R { TXEW_R::new(((self.bits >> 8) & 0x0f) as u8) } #[doc = "Bits 16:27 - Expected Number of Tx Triggers"] #[inline(always)] pub fn entt(&self) -> ENTT_R { ENTT_R::new(((self.bits >> 16) & 0x0fff) as u16) } } impl W { #[doc = "Bits 0:5 - Cycle Count Max"] #[inline(always)] #[must_use] pub fn ccm(&mut self) -> CCM_W<FDCAN_TTMLM_SPEC, 0> { CCM_W::new(self) } #[doc = "Bits 6:7 - Cycle Start Synchronization"] #[inline(always)] #[must_use] pub fn css(&mut self) -> CSS_W<FDCAN_TTMLM_SPEC, 6> { CSS_W::new(self) } #[doc = "Bits 8:11 - Tx Enable Window"] #[inline(always)]
// (generated, continued) Remaining writer methods plus the register spec
// marker type and its Readable/Writable/Resettable impls. Do not edit by hand.
#[must_use] pub fn txew(&mut self) -> TXEW_W<FDCAN_TTMLM_SPEC, 8> { TXEW_W::new(self) } #[doc = "Bits 16:27 - Expected Number of Tx Triggers"] #[inline(always)] #[must_use] pub fn entt(&mut self) -> ENTT_W<FDCAN_TTMLM_SPEC, 16> { ENTT_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } } #[doc = "FDCAN TT Matrix Limits Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fdcan_ttmlm::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fdcan_ttmlm::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct FDCAN_TTMLM_SPEC; impl crate::RegisterSpec for FDCAN_TTMLM_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`fdcan_ttmlm::R`](R) reader structure"] impl crate::Readable for FDCAN_TTMLM_SPEC {} #[doc = "`write(|w| ..)` method takes [`fdcan_ttmlm::W`](W) writer structure"] impl crate::Writable for FDCAN_TTMLM_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets FDCAN_TTMLM to value 0"] impl crate::Resettable for FDCAN_TTMLM_SPEC { const RESET_VALUE: Self::Ux = 0; }
// Wire-format submodules and their public re-exports.
mod payloads;
mod header;
mod parse;
mod tojson;

pub use self::payloads::*;
pub use self::header::*;
// NOTE(review): `parse` is the only submodule not re-exported — confirm that
// it is intentionally internal.
pub use self::tojson::*;
use rstest::*;
use rstest_reuse::{self, *};

// Here we define the template. This defines:
// * the test-set name, `two_simple_cases`
// * the cases: two cases that feed the `a`, `b` values
#[template]
#[rstest]
#[case(2, 2)]
#[case(4/2, 2)]
fn two_simple_cases(#[case] a: u32, #[case] b: u32) {}

// Here we apply the `two_simple_cases` template: that is expanded to
// #[template]
// #[rstest(a, b,
//     case(2, 2),
//     case(4/2, 2),
//   )
// ]
// fn it_works(a: u32, b: u32) {
//     assert!(a == b);
// }
#[apply(two_simple_cases)]
fn it_works(#[case] a: u32, #[case] b: u32) {
    assert!(a == b);
}

// Here we reuse the `two_simple_cases` template to create two other tests,
// inverted so that every case is expected to panic.
#[apply(two_simple_cases)]
#[should_panic]
fn it_fail(#[case] a: u32, #[case] b: u32) {
    assert!(a != b);
}

// Fixture whose `a` argument default is overridden to 42.
#[fixture(a = 42)]
fn f(a: u32) -> u32 {
    a
}

// Fixture that partially injects the `f` fixture with the value 42.
#[fixture(f(42))]
fn fix(f: u32) -> u32 {
    f
}

// Consumes the `fix` fixture; rstest injects it by name.
#[rstest]
fn aaa(fix: u32) {
    assert_eq!(42, fix);
}

use std::net::SocketAddr;

// `#[case]` string literals are converted to `SocketAddr` via `FromStr`
// (rstest's "magic conversion").
#[rstest]
#[case("1.2.3.4:8080", 8080)]
#[case("127.0.0.1:9000", 9000)]
fn check_port(#[case] addr: SocketAddr, #[case] expected: u16) {
    assert_eq!(expected, addr.port());
}
mod args; pub mod errors; mod paths; use json; use std::{fs, io, path}; pub enum ConfigFileType { ConfigJson, PackageJson, } pub fn esbuild_conf(args: Vec<String>) -> Result<String, errors::EsbuildConfigError> { let (config_path, config_file_type) = paths::config_path(args.get(1)).map_err(|_| errors::EsbuildConfigError::ConfigPathError)?; let config_content = read_json_content(config_path).map_err(|_| errors::EsbuildConfigError::ConfigParseError)?; parse_esbuild_config(config_content, config_file_type) .map_err(|_| errors::EsbuildConfigError::ConfigParseError) } pub fn read_json_content(path: path::PathBuf) -> Result<String, errors::EsbuildConfigError> { match fs::read_to_string(&path) { Ok(content) => Ok(content), Err(_) => Err(errors::EsbuildConfigError::Io(io::Error::new( io::ErrorKind::Other, [ "Couldn’t read ", path.into_os_string() .into_string() .expect("The provided path couldn’t get read.") .as_str(), ] .concat(), ))), } } // Parse the entire esbuild.config.json pub fn parse_esbuild_config( content: String, config_file_type: ConfigFileType, ) -> Result<String, errors::ConfigParseError> { match json::parse(&content) { Ok(value) => match config_file_type { ConfigFileType::ConfigJson => args::args_from_config_json_value(value), ConfigFileType::PackageJson => args::args_from_package_json_value(value), } .map_err(|_| errors::ConfigParseError::InvalidConfigError), Err(_) => return Err(errors::ConfigParseError::InvalidConfigError), } } #[cfg(test)] mod tests { use super::*; #[test] fn test_parse_esbuild_config() { let config_json = r#" { "entry": "index.js", "a": true, "b": "abc", "c": ["def", "ghi"], "d": { "e": "jkl", "f": "mno" } } "#; assert_eq!( parse_esbuild_config(config_json.to_string(), ConfigFileType::ConfigJson).unwrap(), "--a --b=abc --c:def --c:ghi --d:e=jkl --d:f=mno index.js" ); assert!( match parse_esbuild_config("true".to_string(), ConfigFileType::ConfigJson) { Ok(_) => false, Err(_) => true, } ); let package_json = r#" { "esbuild": { "entry": 
"index.js", "a": true, "b": "abc", "c": ["def", "ghi"], "d": { "e": "jkl", "f": "mno" } } } "#; assert_eq!( parse_esbuild_config(package_json.to_string(), ConfigFileType::PackageJson).unwrap(), "--a --b=abc --c:def --c:ghi --d:e=jkl --d:f=mno index.js" ); assert!( match parse_esbuild_config("1".to_string(), ConfigFileType::PackageJson) { Ok(_) => false, Err(_) => true, } ); assert!( match parse_esbuild_config("{}".to_string(), ConfigFileType::PackageJson) { Ok(_) => false, Err(_) => true, } ); } }
use super::{
    CompiledResult, DataTree, FileType, GeneratedResult, MergedResult, ScriptKind, Tag, TreeError,
};
use serde::{Deserialize, Serialize};
use serde_json as js;
use serde_json::Result as JsResult;
use std::collections::HashSet;
use std::fs;
use std::hash::{Hash, Hasher};
use std::io;

/// Script is any files or directories that does not follow `Namespace` rule.
///
/// Script can also be a child of itself.
//
// Identity (Eq/Hash) is `name` + `kind` only — see the PartialEq/Hash
// impls below — so a `HashSet<Script>` deduplicates by those two fields
// regardless of content.
#[derive(Clone, Eq)]
pub struct Script {
    pub name: String,
    // Children when this script is a directory; empty for plain files.
    child: HashSet<Script>,
    kind: ScriptKind,
    file_type: FileType,
}

impl Script {
    /// Plain constructor; no validation is performed.
    pub fn new(
        name: impl Into<String>,
        child: HashSet<Script>,
        kind: ScriptKind,
        file_type: FileType,
    ) -> Script {
        let name = name.into();
        Script {
            name,
            child,
            kind,
            file_type,
        }
    }

    /// Decode JSON data from slices
    fn decode<'a, T: Deserialize<'a>>(data: &'a [u8]) -> io::Result<T> {
        let result: T = js::from_slice(&data)?;
        Ok(result)
    }

    /// Encode JSON data to slices
    // Pretty-printed so merged output stays human-readable on disk.
    fn encode<T: Serialize>(data: &T) -> JsResult<Vec<u8>> {
        js::to_vec_pretty(data)
    }
}

use std::fs::{DirEntry, File};
use std::io::Write;
use std::path::PathBuf;
use zip::write::FileOptions;
use zip::ZipWriter;

impl DataTree for Script {
    /// Build a `Script` tree from a directory entry, recursing into
    /// subdirectories. `event` is invoked with the byte size of every
    /// file read (progress reporting); the returned tuple carries the
    /// accumulated size.
    fn generate(
        entry: DirEntry,
        kind: ScriptKind,
        event: impl Fn(u64) + Copy,
    ) -> GeneratedResult<Script> {
        if entry.metadata()?.is_file() {
            let data = fs::read(entry.path())?;
            let size = entry.metadata()?.len();
            let name = os_str_to_string(entry.file_name());
            let file_type = FileType::File(data);
            let script = Script::new(name, HashSet::default(), kind, file_type);

            event(size);
            Ok((script, size))
        } else {
            let mut child: HashSet<Script> = HashSet::default();
            let mut size = 0;
            for entry in entry.path().read_dir()? {
                let entry: DirEntry = entry?;
                // Children that fail to generate are reported and
                // skipped rather than aborting the whole tree.
                match Script::generate(entry, kind, event) {
                    Ok((script, child_size)) => {
                        child.insert(script);
                        size += child_size;
                    }
                    Err(error) => eprintln!("Unable to decode: {}", error),
                }
            }

            let name = os_str_to_string(&entry.file_name());
            let script = Script::new(name, child, kind, FileType::Directory);
            Ok((script, size))
        }
    }

    /// Merge `other` into `self`.
    ///
    /// * Tag files: decode both sides as JSON `Tag`s and append
    ///   `other`'s values to `self`'s; on a decode failure the decodable
    ///   side wins unchanged.
    /// * Generic files: `other` simply replaces `self`.
    /// * Directories: children with the same identity (name + kind) are
    ///   merged recursively; `other`'s metadata wins at each level.
    ///
    /// `event` is invoked with the size of each newly merged file.
    fn merge(&self, other: Script, event: impl Fn(u64) + Copy) -> MergedResult<Script> {
        match self.file_type.clone() {
            FileType::File(data) => {
                match self.kind {
                    ScriptKind::Tag => {
                        // If `self` isn't valid JSON, `other` wins outright.
                        let original: Tag = match Script::decode(&data) {
                            Ok(original) => original,
                            Err(_) => return Ok(other),
                        };

                        // A file can only merge with a file of the same name.
                        let prototype: io::Result<Tag> = match other.file_type.clone() {
                            FileType::File(data) => Script::decode(&data),
                            FileType::Directory => {
                                return Err(TreeError::MismatchType(
                                    self.name.clone(),
                                    other.name.clone(),
                                ))
                            }
                        };
                        // If `other` isn't valid JSON, keep `self`.
                        let mut prototype = match prototype {
                            Ok(prototype) => prototype,
                            Err(_) => return Ok(self.clone()),
                        };

                        let mut result = original;
                        result.values.append(&mut prototype.values);

                        let data = match Script::encode(&result) {
                            Ok(x) => x,
                            // Return `other` immediately if there are json error
                            // Such as "Invalid syntax"
                            Err(_error) => return Ok(other),
                        };
                        let size = data.len() as u64;

                        // Metadata (name/children/kind) is taken from `other`.
                        let name = other.name;
                        let child = other.child;
                        let kind = other.kind;
                        let file_type = FileType::File(data);

                        event(size);
                        let result = Script::new(name, child, kind, file_type);
                        Ok(result)
                    }
                    ScriptKind::Generic => Ok(other),
                    ScriptKind::None => Err(TreeError::UnknownFormat(self.name.clone())),
                }
            }
            FileType::Directory => {
                let mut child = self.child.clone();
                for value in other.child {
                    // `get` matches by identity (name + kind); merge on a
                    // hit, otherwise adopt `other`'s child as-is.
                    let script = match child.get(&value) {
                        Some(original) => original.merge(value, event),
                        None => Ok(value),
                    };

                    match script {
                        Ok(script) => {
                            // `replace` swaps out the old entry with the
                            // same identity.
                            child.replace(script);
                        }
                        Err(error) => eprintln!("{}", error),
                    };
                }

                let name = other.name;
                let kind = other.kind;
                let file_type = other.file_type;

                let result = Script::new(name, child, kind, file_type);
                Ok(result)
            }
        }
    }

    /// Write this tree into `zip` rooted at `path`, recursing through
    /// directories. `event` is invoked with each file's byte size.
    fn compile(
        &self,
        path: impl Into<PathBuf>,
        zip: &mut ZipWriter<File>,
        options: &FileOptions,
        event: impl Fn(u64) + Copy,
    ) -> CompiledResult<()> {
        let path: PathBuf = path.into();
        match &self.file_type {
            FileType::Directory => {
                zip.add_directory_from_path(&path, *options)?;
                for script in &self.child {
                    let child = path.join(&script.name);
                    script.compile(child, zip, options, event)?;
                }
            }
            FileType::File(data) => {
                zip.start_file_from_path(&path, *options)?;
                zip.write_all(&data)?;
                event(data.len() as u64);
            }
        };
        Ok(())
    }
}

use crate::utils::os_str_to_string;

// Shallow conversion: captures name/kind and whether the entry is a
// file, but does NOT read file contents (empty data) or children.
impl From<(DirEntry, ScriptKind)> for Script {
    fn from((entry, kind): (DirEntry, ScriptKind)) -> Script {
        let name = os_str_to_string(&entry.file_name());
        let child = HashSet::default();
        let file_type = {
            if entry.metadata().unwrap().is_file() {
                FileType::File(Vec::default())
            } else {
                FileType::Directory
            }
        };

        Script {
            name,
            child,
            kind,
            file_type,
        }
    }
}

// Identity is name + kind only; content and children are deliberately
// excluded so HashSet lookups find "the same entry" across versions.
impl PartialEq for Script {
    fn eq(&self, other: &Script) -> bool {
        self.name == other.name && self.kind == other.kind
    }
}

// Must stay consistent with PartialEq: hash exactly the fields compared.
impl Hash for Script {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.name.hash(state);
        self.kind.hash(state);
    }
}

use std::fmt;

impl fmt::Debug for Script {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &self.file_type {
            FileType::Directory => write!(f, "{:}: {:#?}", self.name, self.child),
            FileType::File(_) => write!(f, "{}", self.name),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn create_new_script_directory() {
        assert_eq!(
            Script::new(
                "hello_world",
                HashSet::default(),
                ScriptKind::Generic,
                FileType::Directory
            ),
            Script {
                name: String::from("hello_world"),
                child: HashSet::default(),
                kind: ScriptKind::Generic,
                file_type: FileType::Directory
            }
        );
    }

    #[test]
    fn decode_jojo_name_json() {
        let data = r#"
        {
            "values": [
                "Jonathan Joestar",
                "Joseph Joestar",
                "Jotaro Kujo",
                "Josuke Higashikata",
                "Giorno Giovanna",
                "Jolyne Cujoh",
                "Johnny Joestar",
                "Josuke Higashikata"
            ]
        }
        "#;
        let value: Tag = Script::decode(data.as_bytes()).unwrap();
        let expect = Tag {
            replace: None,
            values: vec![
                String::from("Jonathan Joestar"),
                String::from("Joseph Joestar"),
                String::from("Jotaro Kujo"),
                String::from("Josuke Higashikata"),
                String::from("Giorno Giovanna"),
                String::from("Jolyne Cujoh"),
                String::from("Johnny Joestar"),
                String::from("Josuke Higashikata"),
            ],
        };

        assert_eq!(value, expect);
    }

    // NOTE: equality here only compares name + kind (see PartialEq), so
    // this asserts the merge produced *a* "fate"-named Tag script, not
    // byte-identical content.
    #[test]
    fn merge_jojo_and_fate_characters() {
        let jojo_data = r#"
        {
            "values": [
                "Jonathan Joestar",
                "Joseph Joestar",
                "Jotaro Kujo",
                "Josuke Higashikata",
                "Giorno Giovanna",
                "Jolyne Cujoh",
                "Johnny Joestar",
                "Josuke Higashikata"
            ]
        }
        "#
        .as_bytes();
        let fate_data = r#"
        {
            "values": [
                "Shirou Emiya",
                "Saber",
                "Rin Tohsaka",
                "Archer",
                "Sakura Matou",
                "Sakura Matou",
                "Rider",
                "Illyasviel von Einzbern",
                "Kirei Kotomine",
                "Gilgamesh"
            ]
        }
        "#
        .as_bytes();
        let expect_data = r#"
        {
            "values": [
                "Jonathan Joestar",
                "Joseph Joestar",
                "Jotaro Kujo",
                "Josuke Higashikata",
                "Giorno Giovanna",
                "Jolyne Cujoh",
                "Johnny Joestar",
                "Josuke Higashikata",
                "Shirou Emiya",
                "Saber",
                "Rin Tohsaka",
                "Archer",
                "Sakura Matou",
                "Sakura Matou",
                "Rider",
                "Illyasviel von Einzbern",
                "Kirei Kotomine",
                "Gilgamesh"
            ]
        }
        "#
        .as_bytes();

        let jojo_script = Script::new(
            "jojo",
            HashSet::default(),
            ScriptKind::Tag,
            FileType::File(jojo_data.to_vec()),
        );
        let fate_script = Script::new(
            "fate",
            HashSet::default(),
            ScriptKind::Tag,
            FileType::File(fate_data.to_vec()),
        );

        let value = jojo_script.merge(fate_script, |_| {}).unwrap();
        let expect = Script::new(
            "fate",
            HashSet::default(),
            ScriptKind::Tag,
            FileType::File(expect_data.to_vec()),
        );

        assert_eq!(value, expect);
    }
}
use super::*;

/// A video file.
// Telegram-bot-API-style payload; all fields are deserialized from the
// incoming JSON object.
#[derive(Debug,Deserialize)]
pub struct Video {
    /// A unique identifier for the file.
    pub file_id: String,
    /// The width of the video.
    pub width: i32,
    /// The height of the video.
    pub height: i32,
    /// The duration of the video in seconds.
    pub duration: i32,
    /// A thumbnail for the video.
    // Boxed, presumably to keep `Video` small when absent — `PhotoSize`
    // is defined elsewhere in this module tree.
    pub thumb: Option<Box<PhotoSize>>,
    /// The MIME type of the file.
    pub mime_type: Option<String>,
    /// The size of the file.
    // NOTE(review): presumably in bytes — confirm against the API docs.
    pub file_size: Option<i32>,
}
/// Compute the Internet Checksum (RFC 1071) for a slice of bytes.
///
/// Bytes are interpreted as big-endian (network order) 16-bit words.
/// Per RFC 1071, an odd trailing byte is padded with a zero low byte.
///
/// Fix: the previous implementation read words through
/// `byteorder::ReadBytesExt` and silently DROPPED a trailing odd byte
/// (the final `read_u16` fails on one remaining byte), producing wrong
/// checksums for odd-length input. `slice::chunks(2)` +
/// `u16::from_be_bytes` handles the pad correctly with no external
/// crate.
pub fn compute_checksum(data: &[u8]) -> u16 {
    let words: Vec<u16> = data
        .chunks(2)
        .map(|pair| u16::from_be_bytes([pair[0], *pair.get(1).unwrap_or(&0)]))
        .collect();
    compute_checksum_u16(&words)
}

/// Compute the Internet Checksum for a slice of u16s.
pub fn compute_checksum_u16(data: &[u16]) -> u16 {
    // Sum in 32 bits so carries accumulate in the high half.
    let mut sum = 0u32;
    for d in data {
        sum += *d as u32;
    }
    // Fold 32-bit sum to 16 bits (end-around carry).
    while (sum >> 16) > 0 {
        sum = (sum & 0xffff) + (sum >> 16);
    }
    !sum as u16
}

#[test]
fn test_compute_checksum() {
    let data: [u16; 2] = [0x0000, 0xffff];
    let sum = 0x0000;
    let checksum = compute_checksum_u16(&data);
    assert_eq!(sum, checksum);
}

#[test]
fn test_compute_checksum_1() {
    // Classic IPv4 header example: with the checksum field filled in,
    // the checksum over the whole header is 0.
    let data: [u16; 10] = [
        0x4500, 0x0073, 0x0000, 0x4000, 0x4011, 0xb861, 0xc0a8, 0x0001, 0xc0a8, 0x00c7,
    ];
    let sum = 0x0000;
    let checksum = compute_checksum_u16(&data);
    assert_eq!(sum, checksum);
}

#[test]
fn test_compute_checksum_odd_length() {
    // A lone trailing byte is zero-padded: the word is 0xff00.
    assert_eq!(compute_checksum(&[0xff]), 0x00ff);
}
use super::primitives::{many, one_of}; use super::*; pub fn integer() -> Parser<u64> { Box::new(|input| { let mut cs = input.chars(); let num_str: String = cs.by_ref().take_while(|c| c.is_digit(10)).collect(); if num_str.is_empty() { return Err(ParseError { input: input.to_string(), expected: String::from("integer"), fatal: true, }); } let num = num_str.parse::<u64>().unwrap(); Ok(ParseSuccess { value: num, // TODO: Better way to skip next: String::from(input.get(num_str.len()..).unwrap()), }) }) } pub fn text(text: &'static str) -> Parser<String> { Box::new(move |input| { if let Some(val) = input.get(..text.len()) { if val == text { Ok(ParseSuccess { value: String::from(text), next: input.chars().skip(text.len()).collect(), }) } else { Err(ParseError { input: input.to_string(), expected: format!("string {}", text), fatal: true, }) } } else { Err(ParseError { input: input.to_string(), expected: format!("string {}", text), fatal: true, }) } }) } pub fn eof() -> Parser<()> { Box::new(move |input| { if input.is_empty() { Ok(ParseSuccess { value: (), next: String::from(""), }) } else { Err(ParseError { input: input.to_string(), expected: String::from("end of input"), fatal: true, }) } }) } pub fn digit() -> Parser<u32> { Box::new(|input| { let mut cs = input.chars(); cs.next() .map(|c| { if c.is_digit(10) { Ok(ParseSuccess { next: cs.collect(), value: c.to_digit(10).unwrap(), }) } else { Err(ParseError { fatal: true, expected: String::from("digit"), input: input.to_string(), }) } }) .or_else(|| { Some(Err(ParseError { fatal: true, expected: String::from("digit"), input: input.to_string(), })) }) .unwrap() }) } pub fn char(c: char) -> Parser<char> { Box::new(move |input| { let mut cs = input.chars(); cs.next() .map(|v| { if v == c { Ok(ParseSuccess { value: v, next: cs.collect(), }) } else { Err(ParseError { fatal: true, input: input.to_string(), expected: format!("char {}", c), }) } }) .or_else(|| { Some(Err(ParseError { fatal: true, input: input.to_string(), expected: 
format!("char {}", c), })) }) .unwrap() }) } pub fn ws() -> Parser<char> { one_of(vec![char(' '), char('\t'), char('\n'), char('\r')]) } #[cfg(test)] mod tests { use super::primitives::*; use super::*; #[test] fn test_integer() { assert_eq!(parse(integer(), &String::from("123")), 123); assert_eq!( parse_with_next(integer(), &String::from("123abc")) .unwrap() .next, String::from("abc") ); } #[test] #[should_panic(expected = "Failed to parse input, expected integer")] fn test_integer_failure() { parse(integer(), &String::from("abc")); } #[test] fn test_text() { assert_eq!(parse(text("abc"), &String::from("abc")), "abc"); assert_eq!( parse_with_next(text("abc"), &String::from("abc123")) .unwrap() .next, String::from("123") ); } #[test] #[should_panic(expected = "Failed to parse input, expected string xyz")] fn test_text_failure() { println!("{}", parse(text("xyz"), &String::from("abc"))); } #[test] fn eof_test() { assert_eq!(parse(eof(), &String::from("")), ()); } #[test] #[should_panic(expected = "Failed to parse input, expected end of input")] fn eof_test_failure() { parse(eof(), &String::from("abc")); } #[test] fn digit_test() { assert_eq!(parse(digit(), "123"), 1); let many_digits = parse(many(digit()), "123"); assert_eq!(many_digits.len(), 3); assert_eq!(many_digits[0], 1); assert_eq!(many_digits[1], 2); assert_eq!(many_digits[2], 3); } #[test] fn char_test() { assert_eq!(parse(char('a'), "a"), 'a'); assert_eq!(parse(char('a'), "ab"), 'a'); } #[test] fn char_unicode_test() { assert_eq!(parse(char('💩'), "💩"), '💩'); } }
use std::collections::HashSet;

use crate::parser::Stmt;
use crate::parser::Stmt::{DefCmd, DefFun};

/// Collect the names of all function (`DefFun`) and command (`DefCmd`)
/// definitions into a symbol table.
///
/// Takes `&[Stmt]` instead of the previous `&Vec<Stmt>` — more general,
/// and callers passing `&Vec<Stmt>` keep working via deref coercion.
pub fn gen_sym_table(stmts: &[Stmt]) -> HashSet<&String> {
    let mut sym_table = HashSet::new();
    for stmt in stmts {
        match stmt {
            DefFun(fun_name, _, _) => {
                sym_table.insert(fun_name);
            }
            DefCmd(cmd_name, _, _) => {
                sym_table.insert(cmd_name);
            }
            // Other statement kinds do not introduce symbols.
            _ => (),
        }
    }
    sym_table
}
use serde::{Deserialize, Serialize};

/// A single reported metric.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct Metric {
    /// The metric's kind; serialized as `type` (a Rust keyword, hence
    /// the rename).
    #[serde(rename = "type")]
    pub metric_type: String,
    /// The metric's value, kept as a raw string.
    pub value: String,
    /// Optional vendor that produced the metric.
    pub vendor: Option<String>,
    /// Optional extension object; currently carries no fields.
    pub ext: Option<MetricExt>,
}

/// Extension placeholder for [`Metric`]; empty for now so the wire
/// format can grow without breaking deserialization.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct MetricExt {}
use std::iter; pub mod aes; pub mod b64; pub mod hex; pub mod pad; pub mod rand; pub mod xor; mod ascii { const ASCII: &str = "abcdefghijklmnopqrstuvwxyz "; pub fn ix(c: char) -> usize { assert!(ASCII.contains(c.to_ascii_lowercase())); ASCII.find(c.to_ascii_lowercase()).unwrap() } } /// Count the number of 1-valued bits in a byte slice. fn bit_count(bytes: &[u8]) -> u32 { bytes .iter() .map(|&x| (0..=7).map(|i| (x as u32 & 2_u32.pow(i)) >> i).sum::<u32>()) .sum() } pub fn english_score(text: &str) -> f32 { let english_freq = [ 0.08167, 0.01492, 0.02782, 0.04253, 0.12702, 0.02228, 0.02015, // A-G 0.06094, 0.06966, 0.00153, 0.00772, 0.04025, 0.02406, 0.06749, // H-N 0.07507, 0.01929, 0.00095, 0.05987, 0.06327, 0.09056, 0.02758, // O-U 0.00978, 0.02360, 0.00150, 0.01974, 0.00074, 0.13000, // V-Z, ' ' ]; text.chars() .filter(|&c| c.is_ascii_alphabetic() || c == ' ') .map(|c| english_freq[ascii::ix(c)]) .sum() } // (score, key, text) pub fn break_single_byte_xor(bytes: &[u8]) -> Result<(f32, u8, String), String> { let mut scores = vec![]; for k in 0..=255u8 { let key: Vec<u8> = iter::repeat(k).take(bytes.len()).collect(); match String::from_utf8(xor::xor_bytes(bytes, &key)) { Ok(text) => scores.push((english_score(&text), k, text)), _ => continue, } } scores.sort_by(|a, b| b.0.partial_cmp(&a.0).unwrap()); if !scores.is_empty() { Ok(scores[0].clone()) } else { Err("no valid utf8 strings found".to_string()) } } pub fn find_xor_key_size(bytes: &[u8]) -> usize { let mut mindist = f32::MAX; let mut keysize: usize = 0; for candidate in 2..=40 { let chunks: Vec<_> = bytes.chunks(candidate).take(4).collect(); let mut dist = 0f32; for i in 0..4 { for j in 0..4 { dist += hamming(chunks[i], chunks[j]) as f32; } } dist /= candidate as f32; if dist < mindist { keysize = candidate; mindist = dist; } } keysize } pub fn build_repeated_key(b: &[u8], len: usize) -> Vec<u8> { iter::repeat(b).flatten().cloned().take(len).collect() } pub fn hamming(b0: &[u8], b1: &[u8]) -> u32 { 
bit_count(&xor::xor_bytes(b0, b1)) } #[cfg(test)] mod tests {}
use std::{ hint::spin_loop, sync::{ atomic::{AtomicUsize, Ordering}, Arc, }, thread, }; #[cfg(feature = "bench")] use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; use option_lock::OptionLock; // these tests are used mainly to check that no deadlocks occur with many threads fn lock_contention_yield(threads: usize) { let lock = Arc::new(OptionLock::empty()); let done = Arc::new(AtomicUsize::new(0)); for _ in 0..threads - 1 { let done = done.clone(); let lock = lock.clone(); thread::spawn(move || { let val = loop { if let Ok(val) = lock.try_take() { break val; } loop { thread::yield_now(); if lock.is_some_unlocked() { break; } } }; done.fetch_add(val, Ordering::AcqRel); }); } let mut expected = 0; for val in 0..threads - 1 { expected += val; loop { if let Ok(mut guard) = lock.try_lock_none() { guard.replace(val); break; } loop { thread::yield_now(); if !lock.is_locked() { break; } } } } loop { if done.load(Ordering::Relaxed) == expected { break; } thread::yield_now(); } } // this can take a very long time if threads > # cpu cores fn lock_contention_spin(threads: usize) { let lock = Arc::new(OptionLock::empty()); let done = Arc::new(AtomicUsize::new(0)); for _ in 0..threads - 1 { let done = done.clone(); let lock = lock.clone(); thread::spawn(move || { let val = lock.spin_take(); done.fetch_add(val, Ordering::AcqRel); }); } let mut expected = 0; for val in 0..threads - 1 { expected += val; let mut guard = lock.spin_lock_none(); guard.replace(val); } while done.load(Ordering::Relaxed) != expected { spin_loop(); } } fn bench_contention(c: &mut Criterion) { let yield_thread_count = 500; c.bench_with_input( BenchmarkId::new("lock_contention_yield", yield_thread_count), &yield_thread_count, |b, &s| { b.iter(|| lock_contention_yield(s)); }, ); let spin_thread_count = 8; c.bench_with_input( BenchmarkId::new("lock_contention_spin", spin_thread_count), &spin_thread_count, |b, &s| { b.iter(|| lock_contention_spin(s)); }, ); } #[cfg(feature = "bench")] 
criterion_group!(benches, bench_contention); #[cfg(feature = "bench")] criterion_main!(benches);
use crate::client::Client;
use crate::utils::EntryExt;
use chrono::Utc;
use rustenger_shared::{
    account::{Account, Username},
    message::UserMessage,
    RoomName,
};
use std::{collections::HashMap, future::Future, sync::Arc};
use thiserror::Error;
use tokio::sync::{mpsc, Mutex, RwLock};

// Channel through which new clients are handed to a running room.
pub type RoomMsgTx = mpsc::Sender<Client>;
pub type RoomMsgRx = mpsc::Receiver<Client>;
pub type Result<T> = std::result::Result<T, Error>;

/// Errors produced by server/room management.
#[derive(Error, Debug)]
pub enum Error {
    #[error("room '{0}' already exist")]
    RoomAlreadyExist(RoomName),
    #[error("room '{0}' does not exist")]
    RoomDoesNotExits(RoomName),
    #[error("send error: {0}")]
    Send(#[from] mpsc::error::SendError<Client>),
    #[error("bincode error: {0}")]
    Bincode(#[from] bincode::Error),
}

// for rooms it is used RwLock, because it is often used for reading
// - access to ServerRoomMessageTx and rarely for writing - adding a new Room;
// used Mutex for ServerRoomMessage because it is always used for writing

/// A mediator between Rooms, contains links to each room and is accessible from each room
#[derive(Clone)]
pub struct Server {
    // room name -> sender half of that room's client channel
    links: Arc<RwLock<HashMap<RoomName, Mutex<RoomMsgTx>>>>,
}

impl Server {
    /// Create a server with no rooms.
    pub fn new() -> Self {
        let raw_links = HashMap::<RoomName, Mutex<RoomMsgTx>>::new();
        let links = Arc::new(RwLock::new(raw_links));

        Self { links }
    }

    /// create link to room with name 'name'
    // Returns `impl Future` instead of being `async fn` so the future is
    // nameable/`Send`-bounded at the signature; fails with
    // `RoomAlreadyExist` if the name is taken, otherwise spawns the room.
    // pub async fn create_room(self, name: RoomName) -> Result<()> {
    pub fn create_room(self, name: RoomName) -> impl Future<Output = Result<()>> + Send {
        async move {
            log::info!("attempt to create new room '{}'", name);

            let (msg_tx, msg_rx) = mpsc::channel(64);
            let mut lock = self.links.write().await;
            lock.entry(name)
                .vacant()
                .ok_or(Error::RoomAlreadyExist(name))?
                .insert(Mutex::new(msg_tx));

            let room = Room::new(name, msg_rx, self.clone());
            tokio::spawn(room.run());

            Ok(())
        }
    }

    /// remove link to room with name 'name'
    /// 'self' instead of '&self' because this method is used in Drop
    // NOTE(review): the name is a typo for `remove_room`; renaming would
    // break callers, so it is only flagged here.
    pub async fn revome_room(self, name: RoomName) -> Result<()> {
        log::info!("attempt to remove link to room '{}'", name);

        let mut lock = self.links.write().await;
        lock.entry(name)
            .occupied()
            .ok_or(Error::RoomDoesNotExits(name))?
            .remove();

        Ok(())
    }

    /// insert user 'client' into the room named 'room_name'
    pub async fn insert_user(&self, client: Client, room_name: RoomName) -> Result<()> {
        log::info!(
            "attempt to insert user '{}' to room '{}'",
            client.username(),
            room_name
        );

        let lock = self.links.read().await;
        let msg_tx = lock
            .get(&room_name)
            .ok_or(Error::RoomDoesNotExits(room_name))?;
        let mut msg_tx_lock = msg_tx.lock().await;

        msg_tx_lock.send(client).await.map_err(Error::Send)
    }

    /// build 'Vec' of names of all rooms in the server
    pub async fn rooms(&self) -> Vec<RoomName> {
        self.links.read().await.keys().cloned().collect()
    }
}

// `None` marks a client slot whose Client has been temporarily moved
// out (e.g. while a command is being handled).
pub type Clients = HashMap<Username, Option<Client>>;

/// A chat room: owns its clients and the receiving half of the channel
/// through which the server delivers new clients.
pub struct Room {
    name: RoomName,
    clients: Clients,
    msg_rx: RoomMsgRx,
    server: Server,
}

impl Room {
    /// creates new room without links with other rooms
    fn new(name: RoomName, msg_rx: RoomMsgRx, server: Server) -> Self {
        let clients = HashMap::new();
        Self {
            name,
            clients,
            msg_rx,
            server,
        }
    }

    /// runs the room
    // Main loop: try to accept a pending client, then (if any clients
    // exist) process one message round. `update` awaits, so the loop
    // does not spin while clients are connected.
    pub async fn run(mut self) {
        log::info!("run room: {}", self.name());

        loop {
            self.accept_client();
            if self.clients.is_empty() {
                // yield the current task to allow to accept
                // another clients if work in single thread
                tokio::task::yield_now().await;
                continue;
            }

            self.update().await;
        }
    }

    /// check if new client is available and accept it
    // NOTE(review): `maybe_done` is created and `take_output` is called
    // without the future ever being polled/awaited, so it may never
    // complete and no client would be accepted here — confirm against
    // the `futures::future::maybe_done` contract; `msg_rx.try_recv()`
    // may be what was intended.
    fn accept_client(&mut self) {
        use futures::future;

        let recv = future::maybe_done(self.msg_rx.recv());
        futures::pin_mut!(recv);

        if let Some(client) = recv.as_mut().take_output() {
            // The channel yields `None` only when all senders are
            // dropped; unwrap assumes the server link is still alive.
            let client = client.unwrap();
            let username = client.username();
            self.clients.insert(username, Some(client));

            log::info!(
                "accepted client with name '{}' to room '{}'",
                username,
                self.name
            );
        }
    }

    /// selects clients messages and handles them
    // Precondition: `clients` is non-empty (guarded in `run`), since
    // `select_all` panics on an empty iterator. All slots are `Some`
    // at this point; `take` only happens below within this call.
    async fn update(&mut self) {
        use futures::future::{self, FutureExt};
        use rustenger_shared::message::ClientMessage;

        let iter = self.clients.values_mut().map(|client| {
            let client = client.as_mut().unwrap();
            let adresser = client.account();
            client.read().map(move |m| (adresser, m)).boxed()
        });

        let (adresser, res) = future::select_all(iter).await.0;
        match res {
            Err(e) => log::error!("failed to recieve client message: {}", e),
            Ok(ClientMessage::UserMessage(msg)) => {
                if let Err(e) = self.broadcast(adresser, msg).await {
                    log::error!("failed to broadcast user message: {}", e);
                }
            }
            Ok(ClientMessage::Command(cmd)) => {
                // Move the client out of its slot while the command is
                // handled; `handle` returning None means the client left.
                let mut entry = self.clients.entry(adresser.username()).occupied().unwrap();
                let client = entry.get_mut().take().unwrap();

                match client.handle(cmd).await {
                    Err(e) => log::error!("failed to handle command: {}", e),
                    Ok(None) => {
                        entry.remove();
                    }
                    Ok(Some(client)) => {
                        *entry.get_mut() = Some(client);
                    }
                }
            }
        }
    }

    /// sends messages to all clients except 'account'
    async fn broadcast(&mut self, adresser: Account, text: UserMessage) -> Result<()> {
        use rustenger_shared::message::{AccountMessage, ServerMessage};

        let msg = AccountMessage {
            text,
            adresser,
            utc: Utc::now(),
        };
        for client in self
            .clients
            .values_mut()
            .map(|c| c.as_mut().unwrap())
            .filter(|c| c.username() != adresser.username())
        {
            client.write(ServerMessage::AccountMessage(msg)).await?;
        }

        Ok(())
    }

    /// The room's name (RoomName is returned by value).
    pub fn name(&self) -> RoomName {
        self.name
    }
}

impl Drop for Room {
    // Unregister this room from the server; spawned because Drop
    // cannot await.
    fn drop(&mut self) {
        log::debug!("drop the room: {}", self.name());

        let fut = self.server.clone().revome_room(self.name());
        tokio::spawn(fut);
    }
}
use crate::pool::TxPool;
use ckb_types::core::{Cycle, TransactionView};
use ckb_types::packed::ProposalShortId;
use futures::future::Future;
use tokio::prelude::{Async, Poll};
use tokio::sync::lock::Lock;

/// Futures-0.1 style future that resolves to the transactions (with
/// their verification cycle counts) matching a set of proposal short
/// ids, once the shared transaction pool lock is acquired.
pub struct FetchTxsWithCyclesProcess {
    pub tx_pool: Lock<TxPool>,
    // `Option` so the ids can be moved out exactly once on completion;
    // `poll` may legitimately be called many times before the lock is
    // ready, but must not complete twice.
    pub short_ids: Option<Vec<ProposalShortId>>,
}

impl FetchTxsWithCyclesProcess {
    /// Build the future over `tx_pool` for the given short ids.
    pub fn new(
        tx_pool: Lock<TxPool>,
        short_ids: Vec<ProposalShortId>,
    ) -> FetchTxsWithCyclesProcess {
        FetchTxsWithCyclesProcess {
            tx_pool,
            short_ids: Some(short_ids),
        }
    }
}

impl Future for FetchTxsWithCyclesProcess {
    type Item = Vec<(ProposalShortId, (TransactionView, Cycle))>;
    type Error = ();

    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        // Non-blocking lock attempt: NotReady re-registers this task to
        // be woken when the lock frees up (tokio 0.1 Lock semantics).
        match self.tx_pool.poll_lock() {
            Async::Ready(guard) => {
                // Completing twice is a contract violation for futures
                // 0.1, hence the expect.
                let short_ids = self.short_ids.take().expect("cannot poll twice");
                // Ids that are unknown to the pool, or whose cycle count
                // is not yet recorded, are silently dropped from the result.
                let ret = short_ids
                    .into_iter()
                    .filter_map(|short_id| {
                        guard
                            .get_tx_with_cycles(&short_id)
                            .and_then(|(tx, cycles)| cycles.map(|cycles| (short_id, (tx, cycles))))
                    })
                    .collect();
                Ok(Async::Ready(ret))
            }
            Async::NotReady => Ok(Async::NotReady),
        }
    }
}
use {FloatNum, Point, SignedNum};
use octant::Octant;
use steps::Steps;

/// An implementation of the [mid-point line drawing algorithm].
///
/// The biggest difference between this algorithm and [`Bresenham`] is that it uses floating-point points.
///
/// Example:
///
/// ```
/// extern crate line_drawing;
/// use line_drawing::Midpoint;
///
/// fn main() {
///     for (x, y) in Midpoint::<f32, i8>::new((0.2, 0.02), (2.8, 7.7)) {
///         print!("({}, {}), ", x, y);
///     }
/// }
/// ```
///
/// ```text
/// (0, 0), (1, 1), (1, 2), (1, 3), (2, 4), (2, 5), (2, 6), (3, 7), (3, 8),
/// ```
///
/// [mid-point line drawing algorithm]: http://www.mat.univie.ac.at/~kriegl/Skripten/CG/node25.html
/// [`Bresenham`]: struct.bresenham.html
pub struct Midpoint<I, O> {
    // Maps the line into octant 0 so only the E/NE step cases remain.
    octant: Octant,
    point: Point<O>,
    // a, b are the implicit line-equation coefficients of
    // f(x, y) = a*x + b*y + c (a = -dy, b = dx).
    a: I,
    b: I,
    // Decision value: f evaluated at the midpoint right-and-half-up of
    // the current pixel; its sign picks N vs E steps.
    k: I,
    end_x: O,
}

impl<I: FloatNum, O: SignedNum> Midpoint<I, O> {
    #[inline]
    pub fn new(start: Point<I>, end: Point<I>) -> Self {
        // Get the octant to use
        let octant = Octant::new(start, end);

        // Convert the points into the octant versions
        let start = octant.to(start);
        let end = octant.to(end);

        // Initialise the variables
        // Line through start/end as a*x + b*y + c = 0.
        let a = -(end.1 - start.1);
        let b = end.0 - start.0;
        let c = start.0 * end.1 - end.0 * start.1;

        Self {
            octant,
            a,
            b,
            point: (O::cast(start.0.round()), O::cast(start.1.round())),
            // First midpoint: one pixel right, half a pixel up from the
            // rounded start.
            k: a * (start.0.round() + I::one()) + b * (start.1.round() + I::cast(0.5)) + c,
            end_x: O::cast(end.0.round()),
        }
    }

    #[inline]
    pub fn steps(self) -> Steps<Point<O>, Self> {
        Steps::new(self)
    }
}

impl<I: FloatNum, O: SignedNum> Iterator for Midpoint<I, O> {
    type Item = Point<O>;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.point.0 <= self.end_x {
            // Convert back out of the canonical octant before yielding.
            let point = self.octant.from(self.point);

            // Take an N step
            if self.k <= I::zero() {
                self.k += self.b;
                self.point.1 += O::one();
            }

            // Take an E step
            self.k += self.a;
            self.point.0 += O::one();

            Some(point)
        } else {
            None
        }
    }
}

#[test]
fn tests() {
    let midpoint = |a, b| Midpoint::new(a, b).collect::<Vec<_>>();

    assert_eq!(
        midpoint((0.0, 0.0), (-5.0, -5.0)),
        [(0, 0), (-1, -1), (-2, -2), (-3, -3), (-4, -4), (-5, -5)]
    );

    assert_eq!(
        midpoint((0.0, 0.0), (6.0, 3.0)),
        [(0, 0), (1, 1), (2, 1), (3, 2), (4, 2), (5, 3), (6, 3)]
    );
}
use super::point::Point;

/// A triangle described by its three corner points.
#[derive(Debug)]
pub struct Triangle {
    /// The three vertices, in the order they were supplied.
    pub vertices: [Point; 3],
}

impl Triangle {
    /// Build a triangle from three vertices; no validation (e.g. for
    /// degenerate/collinear points) is performed.
    pub fn new(a: Point, b: Point, c: Point) -> Triangle {
        let vertices = [a, b, c];
        Triangle { vertices }
    }
}
#[allow(unused_imports)] use handlegraph::{ handle::{Direction, Handle, NodeId}, handlegraph::*, mutablehandlegraph::*, packed::*, pathhandlegraph::*, }; #[allow(unused_imports)] use handlegraph::packedgraph::PackedGraph; #[allow(unused_imports)] use log::{debug, error, info, trace, warn}; use anyhow::Result; use crate::vulkan::{draw_system::Vertex, GfaestusVk}; use crate::{geometry::*, vulkan::draw_system::nodes::NodeVertices}; pub mod config; pub mod graph_layout; pub mod grid; pub mod physics; pub mod selection; pub use config::*; pub use graph_layout::*; pub use selection::*; // Trait abstracting over Grid and FlatLayout -- this definition only // supports FlatLayout, though, and should be changed to use iterators // to support more solutions // // note: `node_ids` and `nodes` must be in the same order pub trait GraphLayout { fn node_ids(&self) -> &[NodeId]; fn nodes(&self) -> &[Node]; fn bounding_box(&self) -> (Point, Point); // `vertices` must contain the vertices for nodes in the same // order as returned by the `node_ids` and `nodes` methods #[inline] fn node_line_vertices(&self, vertices: &mut Vec<Vertex>) { vertices.clear(); for node in self.nodes().iter() { let v0 = Vertex { position: [node.p0.x, node.p0.y], }; let v1 = Vertex { position: [node.p1.x, node.p1.y], }; vertices.push(v0); vertices.push(v1); } } } #[derive(Debug, Clone)] pub struct Universe<G: GraphLayout> { // TODO bp_per_world_unit isn't used yet; and it should probably // be a 2D vector allowing nonuniform scaling bp_per_world_unit: f32, // grid: grid::Grid<NodeId>, graph_layout: G, // node_ids: Vec<NodeId>, pub offset: Point, pub angle: f32, // physics_config: PhysicsConfig, // layout_config: LayoutConfig, // view_config: ViewConfig, } impl<G: GraphLayout> Universe<G> { pub fn layout(&self) -> &G { &self.graph_layout } pub fn layout_mut(&mut self) -> &mut G { &mut self.graph_layout } } impl Universe<FlatLayout> { pub fn from_laid_out_graph( graph: &PackedGraph, layout_path: &str, ) -> 
Result<Self> { let bp_per_world_unit = 1.0; let offset = Point::new(0.0, 0.0); let angle = 0.0; let graph_layout = FlatLayout::from_laid_out_graph(graph, layout_path)?; Ok(Self { bp_per_world_unit, graph_layout, offset, angle, }) } pub fn update_positions_from_gpu( &mut self, app: &GfaestusVk, vertices: &NodeVertices, ) -> Result<()> { let node_count = self.graph_layout.nodes.len(); vertices.download_vertices( app, node_count, &mut self.graph_layout.nodes, ) } /* pub fn update_positions_from_gpu(&mut self, device: &Device, vertices: &NodeVertices) -> Result<()> { let node_count = self.graph_layout.nodes.len(); unsafe { let data_ptr = device.map_memory( vertices.memory, 0, self.size, vk::MemoryMapFlags::empty(), )?; let val_ptr = data_ptr as *const u32; let sel_slice = std::slice::from_raw_parts(val_ptr, node_count); self.latest_selection.extend( sel_slice.iter().enumerate().filter_map(|(ix, &val)| { let node_id = NodeId::from((ix + 1) as u64); if val == 1 { Some(node_id) } else { None } }), ); device.unmap_memory(self.memory); } } */ pub fn node_vertices(&self) -> Vec<Vertex> { let mut vertices = Vec::new(); for node in self.graph_layout.nodes().iter() { let v0 = Vertex { position: [node.p0.x, node.p0.y], }; let v1 = Vertex { position: [node.p1.x, node.p1.y], }; vertices.push(v0); vertices.push(v1); } vertices } } #[derive(Debug, Clone, Copy, PartialEq, PartialOrd)] pub struct Node { pub p0: Point, pub p1: Point, } impl Node { pub fn center(&self) -> Point { let diff = self.p1 - self.p0; self.p0 + (diff / 2.0) } } #[derive(Debug, Clone, PartialEq, PartialOrd)] pub struct FlatLayout { node_ids: Vec<NodeId>, nodes: Vec<Node>, // pub components: Vec<(usize, usize)>, pub component_offsets: Vec<usize>, top_left: Point, bottom_right: Point, } impl GraphLayout for FlatLayout { fn node_ids(&self) -> &[NodeId] { &self.node_ids } fn nodes(&self) -> &[Node] { &self.nodes } fn bounding_box(&self) -> (Point, Point) { (self.top_left, self.bottom_right) } } impl FlatLayout { pub 
fn node_component(&self, node_id: NodeId) -> usize {
    // Returns the index of the first entry in `component_offsets` with
    // `node_id >= offset`, or `component_offsets.len()` if none matches.
    //
    // NOTE(review): with ascending offsets, the *first* offset satisfying
    // `id >= offset` is almost always index 0; the intent was probably the
    // *last* such offset. Left unchanged pending confirmation — this
    // method is not called from the visible code.
    let offset = self.component_offsets.iter().enumerate().find(|(_, o)| {
        let id = (node_id.0) as usize;
        id >= **o
    });

    if let Some((ix, _)) = offset {
        ix
    } else {
        self.component_offsets.len()
    }
}

// Builds a `FlatLayout` for `graph` from a whitespace-separated layout
// file: one header line, then one line per node endpoint with fields
// `index x y [component]`; two consecutive data lines form one node.
// Components are vertically offset by 10,000 world units each so they do
// not overlap. Returns an error if the file cannot be opened or a field
// fails to parse.
fn from_laid_out_graph(
    graph: &PackedGraph,
    layout_path: &str,
) -> Result<Self> {
    use std::fs::File;
    use std::io::prelude::*;
    use std::io::BufReader;

    use rustc_hash::FxHashMap;

    info!("loading layout");
    let layout_file = File::open(layout_path)?;
    let reader = BufReader::new(layout_file);

    let mut lines = reader.lines();
    // throw away header
    lines.next().unwrap()?;

    let mut layout_map: FxHashMap<NodeId, (Point, Point)> =
        FxHashMap::default();

    let mut component_map: FxHashMap<NodeId, usize> = FxHashMap::default();

    let mut components: Vec<usize> = Vec::new();
    let mut cur_comp = 0;

    let mut prev_point = None;

    let mut line_count = 0;

    for line in lines {
        let line: String = line?;
        let trimmed = line.trim();
        if trimmed.is_empty() {
            continue;
        }

        let mut fields = trimmed.split_whitespace();

        let ix = fields.next().unwrap().parse::<usize>()?;
        let x = fields.next().unwrap().parse::<f32>()?;
        let y = fields.next().unwrap().parse::<f32>()?;

        // Optional fourth column: component index. Record the node id at
        // which a new component value is first seen.
        let component = if let Some(c) = fields.next() {
            let val = c.parse::<usize>()?;
            if val != cur_comp {
                let id = (line_count / 2) + 1;
                components.push(id);
                cur_comp = val;
            }
            // Fix: reuse the already-parsed value instead of parsing the
            // same field a second time.
            Some(val)
        } else {
            None
        };

        let this_p = Point { x, y };

        let node_ix = (ix / 2) + 1;
        let node_id = NodeId::from(node_ix);

        line_count += 1;

        // Two consecutive data lines describe one node's two endpoints.
        if let Some(prev_p) = prev_point {
            layout_map.insert(node_id, (prev_p, this_p));
            if let Some(comp) = component {
                component_map.insert(node_id, comp);
            }
            prev_point = None;
        } else {
            prev_point = Some(this_p);
        }
    }

    let mut node_ids = Vec::with_capacity(graph.node_count());
    let mut nodes = Vec::with_capacity(graph.node_count());

    // make sure the nodes are stored in ascending NodeId order so
    // that the vertex index in the NodeDrawSystem render pipeline
    // is correctly mapped to node ID
    let mut handles = graph.handles().collect::<Vec<_>>();
    handles.sort();

    let mut min_x = std::f32::MAX;
    let mut max_x = std::f32::MIN;

    let mut min_y = std::f32::MAX;
    let mut max_y = std::f32::MIN;

    for handle in handles {
        let id = handle.id();

        let (p0, p1) = *layout_map.get(&id).unwrap();

        let comp = component_map.get(&id).copied().unwrap_or(0);

        // Vertically separate components so they don't overlap on screen.
        let delta = Point::new(0.0, (comp as f32) * 10_000.0);
        // let delta = Point::new(0.0, 0.0);

        let p0 = p0 + delta;
        let p1 = p1 + delta;

        min_x = min_x.min(p0.x).min(p1.x);
        max_x = max_x.max(p0.x).max(p1.x);

        min_y = min_y.min(p0.y).min(p1.y);
        max_y = max_y.max(p0.y).max(p1.y);

        node_ids.push(id);
        nodes.push(Node { p0, p1 });
    }

    let top_left = Point::new(min_x, min_y);
    let bottom_right = Point::new(max_x, max_y);

    Ok(FlatLayout {
        node_ids,
        nodes,
        component_offsets: components,
        top_left,
        bottom_right,
    })
}
}
// SDL demo: cycles through several test modes (animated color field,
// key/control visualizer, checkerboard, sine tone, random noise), driven
// by keyboard keys mapped to gamepad-style controls.
extern crate sdl;

use std::collections::HashMap;
use std::f32::consts::PI;

use ao_rs::{Ao, Device, Driver, Format};
use rand::rngs::SmallRng;
use rand::{RngCore, SeedableRng};
use sdl::event::{Event, Key};
use sdl::video::{Color, Surface, SurfaceFlag, VideoFlag};

// Window dimensions in pixels.
const SIZE_X: i16 = 320;
const SIZE_Y: i16 = 240;

// Keyboard-to-gamepad mapping shared by the event loop and draw_controls.
const CONTROL_UP: Key = Key::Up;
const CONTROL_RIGHT: Key = Key::Right;
const CONTROL_DOWN: Key = Key::Down;
const CONTROL_LEFT: Key = Key::Left;
const CONTROL_A: Key = Key::LCtrl;
const CONTROL_B: Key = Key::LAlt;
const CONTROL_X: Key = Key::Space;
const CONTROL_Y: Key = Key::LShift;
const CONTROL_TRIGGER_LEFT: Key = Key::Tab;
const CONTROL_TRIGGER_RIGHT: Key = Key::Backspace;
const CONTROL_SELECT: Key = Key::Escape;
const CONTROL_START: Key = Key::Return;

/// Entry point: sets up libao audio output and an SDL video surface,
/// precomputes a 440 Hz sine-wave buffer, then runs the event/render
/// loop. Start (Return) advances the mode; Select (Escape) quits.
fn main() {
    let _ao = Ao::new();
    let driver = Driver::new().unwrap();
    let format = Format::new();
    let device = Device::new(&driver, &format, None).unwrap();
    let freq = 440.0;

    // Create PCM data formatted as 2 channels
    // of 16 bits each (Time1, Channel1; Time2, Channel2...).
    let buff_size = format.bits / 8 * format.channels * format.rate;
    let mut buffer: Vec<i8> = vec![0; buff_size as usize];
    for (i, chunk) in buffer.chunks_mut(4).enumerate() {
        // One 4-byte chunk = one frame: the same 16-bit little-endian
        // sample duplicated into both channels.
        let sin = (2.0 * PI * freq * (i as f32) / (format.rate as f32)).sin();
        let sample = (0.75 * 32768.0 * sin) as i16;
        chunk[0] = (sample & 0xff) as i8;
        chunk[2] = chunk[0];
        chunk[1] = ((sample >> 8) & 0xff) as i8;
        chunk[3] = chunk[1];
    }

    sdl::init(&[sdl::InitFlag::Video]);
    sdl::wm::set_caption("rust-sdl demo - video", "rust-sdl");

    let screen = match sdl::video::set_video_mode(
        SIZE_X as isize,
        SIZE_Y as isize,
        16,
        &[SurfaceFlag::SWSurface],
        &[VideoFlag::DoubleBuf],
    ) {
        Ok(screen) => screen,
        Err(err) => panic!("failed to set video mode: {}", err),
    };

    let mut mode = 0;
    let mut pressed_keys = HashMap::new();
    let mut frame: i16 = 0;
    'main: loop {
        // Drain all pending SDL events before rendering this frame.
        'event: loop {
            match sdl::event::poll_event() {
                Event::Quit => break 'main,
                Event::None => break 'event,
                // Key(_, false, _) means key up, true key down
                Event::Key(CONTROL_SELECT, false, _, _) => break 'main,
                Event::Key(CONTROL_START, false, _, _) => mode += 1,
                // Every other key press/release is recorded by key code.
                Event::Key(k, pressed, _, _) => pressed_keys
                    .insert(k as isize, pressed)
                    .map_or_else(|| (), |_| ()),
                _ => {}
            }
        }
        // Dispatch to the active demo mode; Start cycles through them.
        match mode % 5 {
            0 => draw_colors(&screen, frame),
            1 => draw_controls(&screen, &pressed_keys),
            2 => draw_alternating(&screen, frame),
            3 => make_sine(&device, &buffer, frame),
            4 => draw_raw(&screen),
            _ => panic!("bad mode"),
        }
        screen.flip();
        frame += 1;
    }
    sdl::quit();
}

/// Mode 3: replays the precomputed sine-wave buffer every 100th frame.
fn make_sine(device: &Device, buffer: &[i8], frame: i16) {
    if frame % 100 == 0 {
        device.play(&buffer);
    }
}

/// Mode 4: fills the raw framebuffer with random bytes (static noise).
fn draw_raw(screen: &Surface) {
    let _draw = |pixels: &mut [u8]| -> bool {
        SmallRng::from_entropy().fill_bytes(pixels);
        true
    };
    screen.with_lock(_draw);
}

/// Mode 0: per-pixel color field derived from position and `frame`; the
/// `as u8` casts wrap the arithmetic, animating the pattern over time.
fn draw_colors(screen: &Surface, frame: i16) {
    for i in 0..SIZE_X {
        for j in 0..SIZE_Y {
            screen.fill_rect(
                Some(sdl::Rect {
                    x: i,
                    y: j,
                    w: 1,
                    h: 1,
                }),
                Color::RGB(
                    (3 * i + 2 * j - frame) as u8,
                    (4 * i - 3 * j + 2 * frame) as u8,
                    (-5 * i + j + frame) as u8,
                ),
            );
        }
    }
}
fn draw_alternating(screen: &Surface, frame: i16) { for i in 0..SIZE_X { for j in 0..SIZE_Y { let v = if (i + j + frame) % 2 == 0 { 255 } else { 0 }; screen.fill_rect( Some(sdl::Rect { x: i, y: j, w: 1, h: 1, }), Color::RGB(v, v, v), ); } } } fn draw_controls(screen: &Surface, pressed_keys: &HashMap<isize, bool>) { screen.fill_rect( Some(sdl::Rect { x: 0, y: 0, w: SIZE_X as u16, h: SIZE_Y as u16, }), sdl::video::Color::RGB(0, 0, 0), ); for (k, pressed) in pressed_keys { if *pressed { let (x, y) = if *k == CONTROL_UP as isize { (1, 2) } else if *k == CONTROL_LEFT as isize { (0, 3) } else if *k == CONTROL_DOWN as isize { (1, 4) } else if *k == CONTROL_RIGHT as isize { (2, 3) } else if *k == CONTROL_TRIGGER_LEFT as isize { (1, 0) } else if *k == CONTROL_TRIGGER_RIGHT as isize { (5, 0) } else if *k == CONTROL_A as isize { (6, 3) } else if *k == CONTROL_B as isize { (5, 4) } else if *k == CONTROL_X as isize { (5, 2) } else if *k == CONTROL_Y as isize { (4, 3) } else if *k == CONTROL_SELECT as isize { (2, 6) } else if *k == CONTROL_START as isize { (4, 6) } else { panic!("unknown key"); }; screen.fill_rect( Some(sdl::Rect { x: x * (SIZE_X / 7), y: y * (SIZE_Y / 7), w: (SIZE_X / 7) as u16, h: (SIZE_Y / 7) as u16, }), sdl::video::Color::RGB(255, 255, 255), ); } } }
extern crate image;
extern crate imageproc;
extern crate rusttype;
extern crate conv;

use decoder::*;
use decoder_class::*;
use decoder_usecase::*;
use image::{GenericImage, ImageBuffer, Pixel};
use imageproc::definitions::{Clamp, Image};
use conv::ValueInto;
use std::f32;
use std::i32;
use std;
use imageproc::pixelops::weighted_sum;
use imageproc::drawing::*;
use imageproc::rect::Rect;
use imageproc::rect::Region;
use rusttype::{Font, Scale, point, PositionedGlyph};
use std::path::Path;
use std::env;
use std::fs::File;
use std::io::*;
use std::collections::HashMap;
use std::ops::Mul;
use image::{Rgb,RgbImage};
use rusttype::{FontCollection};
use image::GenericImageView;

/// Renders `text` onto `image` in place at (x, y) with the given font and
/// scale, alpha-blending each glyph pixel against the existing pixel via
/// `weighted_sum`. Glyph pixels outside the image bounds are skipped.
pub fn draw_text_mut<'a, I>(
    image: &'a mut I,
    color: I::Pixel,
    x: u32,
    y: u32,
    scale: Scale,
    font: &'a Font<'a>,
    text: &'a str,
) where
    I: GenericImage,
    <I::Pixel as Pixel>::Subpixel: ValueInto<f32> + Clamp<f32>,
{
    let v_metrics = font.v_metrics(scale);
    let offset = point(0.0, v_metrics.ascent);
    let glyphs: Vec<PositionedGlyph> =
        font.layout(text, scale, offset).collect();
    for g in glyphs {
        if let Some(bb) = g.pixel_bounding_box() {
            g.draw(|gx, gy, gv| {
                let gx = gx as i32 + bb.min.x;
                let gy = gy as i32 + bb.min.y;
                let image_x = gx + x as i32;
                let image_y = gy + y as i32;
                let image_width = image.width() as i32;
                let image_height = image.height() as i32;
                // Clip glyph pixels to the image before blending.
                if image_x >= 0 && image_x < image_width && image_y >= 0 && image_y < image_height {
                    let pixel = image.get_pixel(image_x as u32, image_y as u32);
                    let weighted_color = weighted_sum(pixel, color, 1.0 - gv, gv);
                    image.put_pixel(image_x as u32, image_y as u32, weighted_color);
                }
            })
        }
    }
}

/// Non-destructive variant of `draw_text_mut`: copies `image`, draws the
/// text onto the copy, and returns the copy.
pub fn draw_text<'a, I>(
    image: &'a mut I,
    color: I::Pixel,
    x: u32,
    y: u32,
    scale: Scale,
    font: &'a Font<'a>,
    text: &'a str,
) -> Image<I::Pixel>
where
    I: GenericImage,
    <I::Pixel as Pixel>::Subpixel: ValueInto<f32> + Clamp<f32>,
    I::Pixel: 'static,
{
    let mut out = ImageBuffer::new(image.width(), image.height());
    out.copy_from(image, 0, 0);
    draw_text_mut(&mut out, color, x, y, scale, font, text);
    out
}

/// Creates a blank 1000x1000 all-white RGB canvas for the diagram.
pub fn erstelle_image()->(image::ImageBuffer<Rgb<u8>, Vec<u8> >){
    let mut image = ImageBuffer::<Rgb<u8>, Vec<u8> >::new(1000, 1000);
    for a in 0..1000 {
        for b in 0..1000 {
            image.get_pixel_mut(a,b).data=[255,255,255];
        }
    }
    return(image)
}

pub fn main() {
}

/// Draws the use-case system border and every actor in `vec_akteure` on
/// the left side ("l"): stick figure, name, relationship arrow for
/// `extends_from`, and association lines to each of the actor's use
/// cases. Saves the result to `path` and returns the image.
pub fn create_system_and_akteur(path: &std::path::Path, image: image::ImageBuffer<Rgb<u8>, Vec<u8>>,systemname:&str,vec_akteure: &Vec<Actor>) -> (image::ImageBuffer<Rgb<u8>, Vec<u8>>) {
    let mut image=image;
    let mut systemname=systemname;
    // NOTE(review): done_create, vec_stelle and anzahl are never read.
    let mut done_create=false;
    let mut vec_stelle=0;
    let mut anzahl=0;
    image=draw_systemborder(image,systemname);
    for vec_stelle in 0..vec_akteure.len(){
        let mut position = vec_akteure[vec_stelle].id;
        image = draw_akteur(image, 0, position,"l");// the 0 must stay here
        image = name_akteur(image, position, &vec_akteure[vec_stelle].name,"l");
        let mut relation = vec_akteure[vec_stelle].extends_from;
        match relation {
            Some(relation) => image = draw_relationship_akteur(image, position,relation, "l"),
            None => (),
        }
        for id in &vec_akteure[vec_stelle].has_use_case{
            image=draw_case_with_assoziation(image,*id,position,"","","l");
        }
    }
    let _ = image.save(path).unwrap();
    return(image);
}

/// Draws every use case in `vec_cases` at its grid position, giving
/// extension points an extra divider line, then saves the image to
/// `path`.
/// NOTE(review): vec_cases[0] is indexed before the length is checked,
/// so an empty `vec_cases` panics — confirm callers guarantee non-empty
/// input.
pub fn create_cases(path: &std::path::Path, image: image::ImageBuffer<Rgb<u8>, Vec<u8>>,vec_cases: &Vec<UseCase>)-> (image::ImageBuffer<Rgb<u8>, Vec<u8>>) {
    let mut image=image;
    let mut done_create=false;
    let mut vec_stelle=0;
    let mut name="";
    while !done_create {
        let mut place = vec_cases[vec_stelle].id;
        let mut extend = vec_cases[vec_stelle].is_extension_point;
        name= &vec_cases[vec_stelle].name;
        if extend==true{
            image=draw_case_extend(image,place)
        }
        image = draw_case(image, place);
        image=name_case(image,place,name);
        vec_stelle=vec_stelle+1;
        if vec_stelle==vec_cases.iter().len(){
            done_create=true;
        }
    }
    let _ = image.save(path).unwrap();
    return(image);
}

/// Draws an include/extend arrow for every use-case relation in `vec`
/// and saves the result to `path`.
pub fn create_relations(path: &std::path::Path, image: image::ImageBuffer<Rgb<u8>, Vec<u8>>,vec:
&Vec<UseCaseRelation>)->(image::ImageBuffer<Rgb<u8>, Vec<u8>>){
    let mut image=image;
    // NOTE(review): done_create and vec_stelle are never read here.
    let mut done_create=false;
    let mut vec_stelle=0;
    for rel in vec{
        // Include relations get an "<<include>>" label; everything else
        // is drawn as "<<extend>>".
        if let UseCaseRelationType::Include = rel.relation_type {
            image=draw_arrow(image,rel.from,rel.to,"<<include>>");
        }
        else{
            image=draw_arrow(image,rel.from,rel.to,"<<extend>>");
        }
        // `from` is the case the arrow is drawn from (i32)
        // `to` is the case the arrow is drawn to (i32)
        // the label says whether it is include or extend (&str)
    }
    let _ = image.save(path).unwrap();
    return(image);
}

/// Draws the rectangular system border and writes the system name at the
/// top of the diagram.
fn draw_systemborder(image: image::ImageBuffer<Rgb<u8>, Vec<u8>>, name: &str) -> (image::ImageBuffer<Rgb<u8>, Vec<u8>>) {
    let font = Vec::from(include_bytes!("../res/fonts/DejaVuSans-Bold.ttf") as &[u8]);
    let font = FontCollection::from_bytes(font).unwrap().into_font().unwrap();
    let schrift = Scale { x: 20.0, y: 20.0 };
    let draw_color = Rgb([0u8, 0u8, 0u8]);
    let rect = Rect::at(200, 10).of_size(600, 990);
    let mut image = image;
    let mut name = name;
    draw_hollow_rect_mut(&mut image, rect, draw_color);
    draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), 400, 20, schrift, &font, name);
    return (image);
}

/// Draws the stick figure for actor slot `soll_anzahl_guys` (1-based) in
/// the left ("l") or right ("r") margin; slots are spaced 130 px apart,
/// at most 10 per side.
fn draw_akteur(image: image::ImageBuffer<Rgb<u8>, Vec<u8>>, ist_anzahl_guys: i32, soll_anzahl_guys: i32,side: &str) -> (image::ImageBuffer<Rgb<u8>, Vec<u8>>) {
    let draw_color = Rgb([0u8, 0u8, 0u8]);
    let mut image = image;
    let mut ist_anzahl_guys = ist_anzahl_guys;
    let mut soll_anzahl_guys = soll_anzahl_guys;
    let mut side=side;
    let mut fertig = false;
    // Baseline coordinates for head, torso, arms and legs of slot 1;
    // every further slot shifts the y values down by 130 px.
    let mut x_anfang = 80;
    let mut head_anfang = 50;
    let mut body_anfang = 60;
    let mut arm_anfang = 70;
    let mut bein_anfang = 90;
    let mut bein_ende = 110;
    if side=="l" {
        while !fertig {
            // Only draw when the loop reaches the requested slot.
            if soll_anzahl_guys-1==ist_anzahl_guys {
                draw_hollow_circle_mut(&mut image, (x_anfang as i32, head_anfang as i32), 10 as i32, draw_color);
                draw_line_segment_mut(&mut image, (x_anfang as f32, body_anfang as f32), (x_anfang as f32, bein_anfang as f32), draw_color);
                draw_line_segment_mut(&mut image, (x_anfang as f32, arm_anfang as f32), ((90 as f32), (body_anfang as f32)), draw_color);
                draw_line_segment_mut(&mut image, (x_anfang as f32, arm_anfang as f32), ((70 as f32), (body_anfang as f32)), draw_color);
                draw_line_segment_mut(&mut image, (x_anfang as f32, bein_anfang as f32), ((90 as f32), (bein_ende as f32)), draw_color);
                draw_line_segment_mut(&mut image, (x_anfang as f32, bein_anfang as f32), ((70 as f32), (bein_ende as f32)), draw_color);
            }
            head_anfang = head_anfang + 130;
            body_anfang = body_anfang + 130;
            arm_anfang = arm_anfang + 130;
            bein_anfang = bein_anfang + 130;
            bein_ende = bein_ende + 130;
            ist_anzahl_guys = ist_anzahl_guys + 1;
            if ist_anzahl_guys == 10 {
                fertig = true;
            }
        }
    }else if side=="r"{
        // Right side mirrors the left with x anchored at 920.
        x_anfang = 920;
        while !fertig {
            if soll_anzahl_guys-1==ist_anzahl_guys {
                draw_hollow_circle_mut(&mut image, (x_anfang as i32, head_anfang as i32), 10 as i32, draw_color);
                draw_line_segment_mut(&mut image, (x_anfang as f32, body_anfang as f32), (x_anfang as f32, bein_anfang as f32), draw_color);
                draw_line_segment_mut(&mut image, (x_anfang as f32, arm_anfang as f32), ((910 as f32), (body_anfang as f32)), draw_color);
                draw_line_segment_mut(&mut image, (x_anfang as f32, arm_anfang as f32), ((930 as f32), (body_anfang as f32)), draw_color);
                draw_line_segment_mut(&mut image, (x_anfang as f32, bein_anfang as f32), ((910 as f32), (bein_ende as f32)), draw_color);
                draw_line_segment_mut(&mut image, (x_anfang as f32, bein_anfang as f32), ((930 as f32), (bein_ende as f32)), draw_color);
            }
            head_anfang = head_anfang + 130;
            body_anfang = body_anfang + 130;
            arm_anfang = arm_anfang + 130;
            bein_anfang = bein_anfang + 130;
            bein_ende = bein_ende + 130;
            ist_anzahl_guys = ist_anzahl_guys + 1;
            if ist_anzahl_guys == 10 {
                fertig = true;
            }
        }
    }
    return (image);
}

/// Writes an actor's name below its stick figure on the given side.
fn name_akteur(image: image::ImageBuffer<Rgb<u8>, Vec<u8>>, person: i32, name: &str,side: &str) -> (image::ImageBuffer<Rgb<u8>, Vec<u8>>) {
    let font = Vec::from(include_bytes!("../res/fonts/DejaVuSans.ttf") as &[u8]);
    let font =
FontCollection::from_bytes(font).unwrap().into_font().unwrap();
    let schrift = Scale { x: 10.0, y: 10.0 };
    let mut image = image;
    let mut bein_ende = 110;
    let mut person = person - 1;
    let mut name = name;
    let mut side=side;
    // 130 px per actor slot; the label sits just below the feet.
    bein_ende = bein_ende + (130 * person);
    if side=="l" {
        draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), 70, (bein_ende + 10) as u32, schrift, &font, name);
    }if side=="r"{
        draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), 910, (bein_ende + 10) as u32, schrift, &font, name);
    }
    return (image);
}

/// Draws the hollow ellipse for use case number `stelle` at its grid
/// position.
fn draw_case(image: image::ImageBuffer<Rgb<u8>, Vec<u8>>,stelle: i32) -> (image::ImageBuffer<Rgb<u8>, Vec<u8>>) {
    let draw_color = Rgb([0u8, 0u8, 0u8]);
    let mut image = image;
    let mut stelle=stelle;
    let mut tuple = get_case_koordinaten(stelle);
    let mut y_ellipse = tuple.1;
    let mut x_ellipse = tuple.0;
    draw_hollow_ellipse_mut(&mut image, (x_ellipse as i32, y_ellipse as i32), 50 as i32, 25 as i32, draw_color);
    return (image);
}

/// Draws use case `stelle` together with an association line from actor
/// slot `person` on the given side, writing `von`/`nach` labels at the
/// two ends of the line.
fn draw_case_with_assoziation(image: image::ImageBuffer<Rgb<u8>, Vec<u8>>,stelle: i32, person: i32, von: &str, nach: &str,side: &str) -> (image::ImageBuffer<Rgb<u8>, Vec<u8>>) {
    let draw_color = Rgb([0u8, 0u8, 0u8]);
    let mut image = image;
    let font = Vec::from(include_bytes!("../res/fonts/DejaVuSans.ttf") as &[u8]);
    let font = FontCollection::from_bytes(font).unwrap().into_font().unwrap();
    let schrift = Scale { x: 10.0, y: 10.0 };
    let mut stelle = stelle;
    let mut tuple = get_case_koordinaten(stelle);
    let mut x_anfang = 80;
    let mut y_ellipse = tuple.1;
    let mut x_ellipse = tuple.0;
    let mut person = person - 1;
    let mut anfang = 75;
    anfang = anfang + (130 * person);
    if side == "l" {
        draw_line_segment_mut(&mut image, (x_anfang as f32, anfang as f32), ((x_ellipse - 50) as f32, y_ellipse as f32), draw_color);
        draw_hollow_ellipse_mut(&mut image, (x_ellipse as i32, y_ellipse as i32), 50 as i32, 25 as i32, draw_color);
        draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), (x_anfang + 10) as u32, (anfang - 5) as u32, schrift, &font, von);
        draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), (x_ellipse - 60) as u32, (y_ellipse) as u32, schrift, &font, nach);
    } else if side == "r" {
        x_anfang = 920;
        draw_line_segment_mut(&mut image, (x_anfang as f32, anfang as f32), ((x_ellipse + 50) as f32, y_ellipse as f32), draw_color);
        draw_hollow_ellipse_mut(&mut image, (x_ellipse as i32, y_ellipse as i32), 50 as i32, 25 as i32, draw_color);
        draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), (x_anfang - 10) as u32, (anfang - 5) as u32, schrift, &font, von);
        draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), (x_ellipse + 60) as u32, (y_ellipse) as u32, schrift, &font, nach);
    }
    return (image);
}

/// Draws an arrow above actor slot `person_von`'s head.
/// NOTE(review): `person_nach` is decremented but never used afterwards —
/// confirm whether the arrow was meant to reach the target actor.
fn draw_relationship_akteur(image: image::ImageBuffer<Rgb<u8>, Vec<u8>>, person_von: i32, person_nach: i32,side:&str) -> (image::ImageBuffer<Rgb<u8>, Vec<u8>>) {
    let draw_color = Rgb([0u8, 0u8, 0u8]);
    let mut image = image;
    let mut person_von = person_von - 1;
    let mut person_nach = person_nach - 1;
    let mut kopf_oben_x = 80;
    let mut kopf_oben_y = 50 - 10;
    let mut side=side;
    if side=="r" {kopf_oben_x = 920;}
    kopf_oben_y = kopf_oben_y + (130 * person_von);
    draw_line_segment_mut(&mut image, ((kopf_oben_x) as f32, (kopf_oben_y) as f32), ((kopf_oben_x) as f32, (kopf_oben_y - 30) as f32), draw_color);
    draw_line_segment_mut(&mut image, (kopf_oben_x as f32, (kopf_oben_y - 50) as f32), ((kopf_oben_x - 10) as f32, (kopf_oben_y - 30) as f32), draw_color);
    draw_line_segment_mut(&mut image, (kopf_oben_x as f32, (kopf_oben_y - 50) as f32), ((kopf_oben_x + 10) as f32, (kopf_oben_y - 30) as f32), draw_color);
    draw_line_segment_mut(&mut image, ((kopf_oben_x - 10) as f32, (kopf_oben_y - 30) as f32), ((kopf_oben_x + 10) as f32, (kopf_oben_y - 30) as f32), draw_color);
    return (image);
}

/// Draws use case `stelle` with an extra horizontal divider line inside
/// the ellipse, marking it as an extension point.
fn draw_case_extend(image: image::ImageBuffer<Rgb<u8>, Vec<u8>>, stelle: i32) -> (image::ImageBuffer<Rgb<u8>, Vec<u8>>) {
    let draw_color = Rgb([0u8, 0u8, 0u8]);
    let mut image = image;
    let mut stelle=stelle;
    let mut tuple = get_case_koordinaten(stelle);
    let mut y_ellipse =
tuple.1;
    let mut x_ellipse = tuple.0;
    draw_hollow_ellipse_mut(&mut image, (x_ellipse as i32, y_ellipse as i32), 50 as i32, 25 as i32, draw_color);
    draw_line_segment_mut(&mut image, ((x_ellipse-30) as f32, (y_ellipse-20) as f32), ((x_ellipse+30) as f32, (y_ellipse-20) as f32), draw_color);
    return (image);
}

/// Maps use-case number `ende` (1-based) to its grid placement, returning
/// (x, y, column, row). Cases fill three columns per row, starting at
/// (250, 70), spaced 200 px horizontally and 70 px vertically.
fn get_case_koordinaten(ende: i32) -> (i32,i32,i32,i32) {
    // NOTE(review): draw_color is unused in this function.
    let draw_color = Rgb([0u8, 0u8, 0u8]);
    let mut fertig=false;
    let mut y_ellipse = 70;
    let mut x_ellipse = 250;
    let mut reihe=1;
    let mut anzahl=1;
    let mut ende=ende;
    let mut spalte=1;
    if ende==1{
        y_ellipse = 70;
        x_ellipse = 250;
    } else {
        while !fertig {
            x_ellipse = x_ellipse + 200;
            if spalte == 3 {
                y_ellipse = y_ellipse + 70;
                x_ellipse = 250;
                spalte = 0;
                reihe=reihe+1;
            }
            anzahl = anzahl + 1;
            spalte = spalte + 1;
            if anzahl == ende {
                fertig = true;
            }
        }
    }
    return (x_ellipse,y_ellipse,spalte,reihe);
}

/// Draws a stepped arrow from use case `von` to use case `nach` with the
/// given label ("<<include>>"/"<<extend>>"); the step direction is chosen
/// from the relative grid rows/columns of the two cases.
fn draw_arrow(image: image::ImageBuffer<Rgb<u8>, Vec<u8>>, von: i32,nach: i32,beschriftung: &str)->(image::ImageBuffer<Rgb<u8>, Vec<u8>>) {
    let draw_color = Rgb([0u8, 0u8, 0u8]);
    // NOTE(review): draw_white is unused in this function.
    let draw_white = Rgb([255u8, 255u8, 255u8]);
    let font = Vec::from(include_bytes!("../res/fonts/DejaVuSans.ttf") as &[u8]);
    let font = FontCollection::from_bytes(font).unwrap().into_font().unwrap();
    let schrift = Scale { x: 13.0, y: 13.0 };
    let mut beschriftung=beschriftung;
    let mut image=image;
    let mut von=get_case_koordinaten(von);
    let mut nach=get_case_koordinaten(nach);
    let mut start_x=von.0;
    let mut ende_x=nach.0;
    let mut start_y=von.1;
    let mut ende_y=nach.1;
    let mut spalte_von=von.2;
    let mut spalte_nach=nach.2;
    let mut reihe_von=von.3;
    let mut reihe_nach=nach.3;
    let mut dazu=10;
    let mut dazu_y=10;
    let mut anderes=10;
    let mut richtung_h="";
    let mut richtung_w="";
    let mut richtung_pfeil="";
    // Same row: draw horizontally (right or left).
    if reihe_von==reihe_nach && spalte_von<=spalte_nach{
        start_x=start_x+50;
        ende_x=ende_x-50;
        dazu=10;
        richtung_pfeil="rechts";
    }
    else if reihe_von==reihe_nach && spalte_von>=spalte_nach{
        start_x=start_x-50;
        ende_x=ende_x+50;
        dazu=-10;
        richtung_pfeil="links";
    }
    // Same column: draw vertically (down or up).
    else if spalte_von==spalte_nach && reihe_von<=reihe_nach{
        start_y=start_y+25;
        ende_y=ende_y-25;
        dazu=10;
        richtung_pfeil="unten";
    }
    else if spalte_von==spalte_nach && reihe_von>=reihe_nach{
        start_y=start_y-25;
        ende_y=ende_y+25;
        dazu=-10;
        richtung_pfeil="oben";
    }
    // Diagonal cases: step both horizontally and vertically.
    else if spalte_von < spalte_nach && reihe_von <reihe_nach{
        start_x=start_x+50;
        ende_x=ende_x-50;
        dazu=10;
        dazu_y=10;
        anderes=-10;
        richtung_h="unten";
        richtung_w="rechts";
        richtung_pfeil="rechts";
    }
    else if spalte_von < spalte_nach && reihe_von >reihe_nach{
        start_x=start_x+50;
        ende_x=ende_x-50;
        dazu=10;
        dazu_y=-10;
        anderes=10;
        richtung_h="oben";
        richtung_w="rechts";
        richtung_pfeil="rechts";
    }
    else if spalte_von > spalte_nach && reihe_von < reihe_nach{
        start_x=start_x-50;
        ende_x=ende_x+50;
        dazu=10;
        dazu_y=10;
        anderes=-10;
        richtung_h="unten";
        richtung_w="links";
        richtung_pfeil="links";
    }
    else if spalte_von > spalte_nach && reihe_von > reihe_nach{
        start_x=start_x-50;
        ende_x=ende_x+50;
        dazu=10;
        dazu_y=-10;
        anderes=10;
        richtung_h="oben";
        richtung_w="links";
        richtung_pfeil="links";
    }
    let mut zwischen_x=start_x;
    let mut zwischen_y=start_y;
    let mut fertig=false;
    // Extend the line segment step by step until the target is reached.
    while fertig==false {
        draw_line_segment_mut(&mut image, ((start_x) as f32, (start_y) as f32), ((zwischen_x) as f32, (zwischen_y) as f32), draw_color);
        if start_y==ende_y{
            start_x=zwischen_x;
            zwischen_x=zwischen_x+dazu;
            if spalte_von>spalte_nach{
                if zwischen_x <= ende_x {
                    fertig = true;
                }
            }else {
                if zwischen_x >= ende_x {
                    fertig = true;
                }
            }
            start_x=zwischen_x;
            zwischen_x=zwischen_x+dazu;
        }
        else if start_x==ende_x{
            start_y=zwischen_y;
            zwischen_y=zwischen_y+dazu;
            if reihe_von>reihe_nach{
                if zwischen_y <= ende_y {
                    fertig = true;
                }
            }else {
                if zwischen_y >= ende_y {
                    fertig = true;
                }
            }
            start_y=zwischen_y;
            zwischen_y=zwischen_y+dazu;
        }
        else{
            // Diagonal case: delegate stepping to the two helpers.
            let mut tuple=zeiche_pfeil_richtung_eins(start_x,start_y,ende_x,ende_y,zwischen_x,zwischen_y,dazu,dazu_y,richtung_h,richtung_w);
            start_x=tuple.0;
            start_y=tuple.1;
            ende_x=tuple.2;
            ende_y=tuple.3;
            zwischen_x=tuple.4;
            zwischen_y=tuple.5;
            if zwischen_y >= ende_y && richtung_pfeil=="rechts"{
                if zwischen_x>=ende_x{
                    fertig = true;
                }
            }
            else if zwischen_y >= ende_y && richtung_pfeil=="links"{
                if zwischen_x<=ende_x{
                    fertig = true;
                }
            }
            let mut tuple=zeiche_pfeil_richtung_zwei(start_x,start_y,ende_x,ende_y,zwischen_x,zwischen_y,dazu,dazu_y,anderes,richtung_h,richtung_w);
            start_x=tuple.0;
            start_y=tuple.1;
            ende_x=tuple.2;
            ende_y=tuple.3;
            zwischen_x=tuple.4;
            zwischen_y=tuple.5;
        }
    }
    // Draw the arrowhead and place the label next to the endpoint.
    if richtung_pfeil=="links" {
        draw_line_segment_mut(&mut image, ((ende_x) as f32, (zwischen_y) as f32), ((ende_x + 20) as f32, (zwischen_y-10) as f32), draw_color);
        draw_line_segment_mut(&mut image, ((ende_x) as f32, (zwischen_y) as f32), ((ende_x + 20) as f32, (zwischen_y+10 ) as f32), draw_color);
        draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]),(ende_x+30)as u32 , (ende_y+5) as u32, schrift, &font, beschriftung);
    }else if richtung_pfeil=="rechts"{
        draw_line_segment_mut(&mut image, ((ende_x) as f32, (zwischen_y) as f32), ((ende_x - 20) as f32, (zwischen_y-10) as f32), draw_color);
        draw_line_segment_mut(&mut image, ((ende_x) as f32, (zwischen_y) as f32), ((ende_x - 20) as f32, (zwischen_y+10 ) as f32), draw_color);
        draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]),(ende_x-80)as u32 , (ende_y+5) as u32, schrift, &font, beschriftung);
    }else if richtung_pfeil=="oben"{
        draw_line_segment_mut(&mut image, ((ende_x) as f32, (ende_y) as f32), ((ende_x + 10) as f32, (ende_y+10) as f32), draw_color);
        draw_line_segment_mut(&mut image, ((ende_x) as f32, (ende_y) as f32), ((ende_x - 10) as f32, (ende_y+10 ) as f32), draw_color);
        draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]),(ende_x+8)as u32 , (ende_y+10) as u32, schrift, &font, beschriftung);
    }else if richtung_pfeil=="unten"{
        draw_line_segment_mut(&mut image, ((ende_x) as f32, (ende_y) as f32), ((ende_x + 10) as f32, (ende_y-10) as f32), draw_color);
        draw_line_segment_mut(&mut image, ((ende_x) as f32, (ende_y) as f32), ((ende_x - 10) as f32, (ende_y-10 ) as
f32), draw_color); draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]),(ende_x+8)as u32 , (ende_y-10) as u32, schrift, &font, beschriftung); } return(image); } fn name_case(image: image::ImageBuffer<Rgb<u8>, Vec<u8>>,stelle: i32,text: &str)->(image::ImageBuffer<Rgb<u8>, Vec<u8>>){ let mut image=image; let font = Vec::from(include_bytes!("../res/fonts/DejaVuSans.ttf") as &[u8]); let font = FontCollection::from_bytes(font).unwrap().into_font().unwrap(); let schrift = Scale { x: 13.0, y: 13.0 }; let mut stelle=stelle; let mut text=text; let mut tuple = get_case_koordinaten(stelle); draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]),(tuple.0-45)as u32 , (tuple.1-5) as u32, schrift, &font, text); return(image); } fn zeiche_pfeil_richtung_eins(start_x: i32,start_y: i32,ende_x: i32,ende_y: i32,zwischen_x: i32,zwischen_y: i32,dazu: i32,dazu_y: i32,richtung_h: &str,richtung_w: &str) ->(i32,i32,i32,i32,i32,i32){ let mut start_x=start_x; let mut start_y=start_y; let mut ende_x=ende_x; let mut ende_y=ende_y; let mut zwischen_x=zwischen_x; let mut zwischen_y=zwischen_y; let mut dazu=dazu; let mut dazu_y=dazu_y; let mut richtung_h=richtung_h; let mut richtung_w=richtung_w; //rechts oben if richtung_h=="oben" && richtung_w=="rechts"{ if zwischen_y <= ende_y { zwischen_y = zwischen_y + dazu; start_x = zwischen_x; zwischen_x = zwischen_x + dazu; } else if zwischen_x >= ende_x { start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } else { start_x = zwischen_x; zwischen_x = zwischen_x + dazu; start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } } //rechts unten else if richtung_h=="unten" && richtung_w=="rechts"{ if zwischen_y >= ende_y { zwischen_y = zwischen_y + dazu; start_x = zwischen_x; zwischen_x = zwischen_x + dazu; } else if zwischen_x >= ende_x { start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } else { start_x = zwischen_x; zwischen_x = zwischen_x + dazu; start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } } //links unten else if richtung_h=="unten" && 
richtung_w=="links"{ if zwischen_y >= ende_y { zwischen_y = zwischen_y + dazu; start_x = zwischen_x; zwischen_x = zwischen_x - dazu; } else if zwischen_x <= ende_x { start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } else { start_x = zwischen_x; zwischen_x = zwischen_x - dazu; start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } } //links oben else if richtung_h=="oben" && richtung_w=="links"{ if zwischen_y <= ende_y { zwischen_y = zwischen_y + dazu; start_x = zwischen_x; zwischen_x = zwischen_x - dazu; } else if zwischen_x <= ende_x { start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } else { start_x = zwischen_x; zwischen_x = zwischen_x - dazu; start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } } return(start_x,start_y,ende_x,ende_y,zwischen_x,zwischen_y) } fn zeiche_pfeil_richtung_zwei(start_x: i32,start_y: i32,ende_x: i32,ende_y: i32,zwischen_x: i32,zwischen_y: i32,dazu: i32,dazu_y: i32,anderes: i32,richtung_h: &str,richtung_w: &str) ->(i32,i32,i32,i32,i32,i32){ let mut start_x=start_x; let mut start_y=start_y; let mut ende_x=ende_x; let mut ende_y=ende_y; let mut zwischen_x=zwischen_x; let mut zwischen_y=zwischen_y; let mut dazu=dazu; let mut dazu_y=dazu_y; let mut anderes=anderes; let mut richtung_h=richtung_h; let mut richtung_w=richtung_w; //rechts oben if richtung_h=="oben" && richtung_w=="rechts"{ if zwischen_y <= ende_y { zwischen_y = zwischen_y + anderes; start_x = zwischen_x; zwischen_x = zwischen_x + dazu; } else if zwischen_x >= ende_x { start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } else { start_x=zwischen_x; zwischen_x=zwischen_x+dazu; start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } } //rechts unten else if richtung_h=="unten" && richtung_w=="rechts"{ if zwischen_y >= ende_y { zwischen_y = zwischen_y + anderes; start_x = zwischen_x; zwischen_x = zwischen_x + dazu; } else if zwischen_x >= ende_x { start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } else { start_x=zwischen_x; 
zwischen_x=zwischen_x+dazu; start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } } //links unten else if richtung_h=="unten" && richtung_w=="links"{ if zwischen_y >= ende_y { zwischen_y = zwischen_y + anderes; start_x = zwischen_x; zwischen_x = zwischen_x - dazu; } else if zwischen_x <= ende_x { start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } else { start_x=zwischen_x; zwischen_x=zwischen_x-dazu; start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } } //oben links else if richtung_h=="oben" && richtung_w=="links"{ if zwischen_y <= ende_y { zwischen_y = zwischen_y + anderes; start_x = zwischen_x; zwischen_x = zwischen_x - dazu; } else if zwischen_x <= ende_x { start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } else { start_x=zwischen_x; zwischen_x=zwischen_x-dazu; start_y = zwischen_y; zwischen_y = zwischen_y + dazu_y; } } return(start_x,start_y,ende_x,ende_y,zwischen_x,zwischen_y) } pub fn klasse(ueberschrift: &str,klassentyp: &str,image: image::ImageBuffer<Rgb<u8>, Vec<u8> >,file: &std::path::Path,anzahl: i32,vec_attribute: &Vec<Attribute>,vec_methode: &Vec<Method>) ->(image::ImageBuffer<Rgb<u8>, Vec<u8> >){//,i32,i32){//,HashMap<u32, i32>) { let mut eingabe_ueberschift=ueberschrift; let mut klassentyp=klassentyp; //let mut eingabe_pfeil=pfeil; let mut file=file; let mut image=image; let mut anzahl=anzahl; //erster wert sagt die höhe aus in y //erster wert bei get pixel sagt die höhe in x let mut erster_wert=30; let mut zweiter_wert=180; let mut erster_wert_x=30; let mut zweiter_wert_x=180; let mut ab = erster_wert; //let mut anzahl_alt=anzahl_alt; let mut fertig=false; let mut done = false; let mut zeile=1; let mut pfeil_hoehe=zweiter_wert-erster_wert; //let mut eingabe = eingabe_pfeil; let mut pfeil_schr=pfeil_hoehe; //let mut pfeil_richtung=richtung; let mut vec_attribute=vec_attribute; let mut vec_methode=vec_methode; let mut tuple= zeichne_klasse(anzahl,"",image,erster_wert,zweiter_wert,erster_wert_x,zweiter_wert_x); //let mut 
alte_werte=(tuple.1,tuple.2,tuple.3,tuple.4,anzahl); image=zeichne_schrift(tuple.0,eingabe_ueberschift,klassentyp,vec_attribute,vec_methode,tuple.1,tuple.2,tuple.3,anzahl); //image=zeichne_pfeil(image,"asso",von,nach); let mut anzahl_alt=koordinaten(anzahl); let _ = image.save(file).unwrap(); anzahl=anzahl+1; //let _ = image.save("res/UML_visual_result.png").unwrap(); return(image);//,anzahl,anzahl_alt.4); } fn zeichne_klasse(nummer: i32,eingabe: &str,image: image::ImageBuffer<Rgb<u8>, Vec<u8> >,eins:u32,zwei:u32,drei:u32,vier:u32)->(image::ImageBuffer<Rgb<u8>, Vec<u8> >,u32,u32,u32,u32){ let mut erster_wert=eins; let mut zweiter_wert=zwei; let mut erster_wert_x=drei; let mut zweiter_wert_x=vier; let mut done = false; let mut ab = erster_wert; let mut zeile=1; let mut fertig=false; let mut eingabe=eingabe; let mut anzahl=0; anzahl=nummer; let mut image=image; while !fertig { let mut tuple=koordinaten(anzahl); erster_wert=tuple.0; zweiter_wert=tuple.1; erster_wert_x=tuple.2; zweiter_wert_x=tuple.3; for d in erster_wert..zweiter_wert { image.get_pixel_mut(erster_wert_x,d).data=[0,0,0]; image.get_pixel_mut(zweiter_wert_x,d).data=[0,0,0]; } ab=erster_wert; while !done { for c in erster_wert_x..zweiter_wert_x{ image.get_pixel_mut(c,ab).data=[0,0,0]; } if zeile==2 || zeile == 3{ ab=ab+65; } else { ab=ab+20; } zeile=zeile+1; if ab > zweiter_wert { zeile=1; done = true; fertig=true; } } } return (image,erster_wert,zweiter_wert,erster_wert_x,zweiter_wert_x); } fn zeichne_schrift(image: image::ImageBuffer<Rgb<u8>, Vec<u8> >,name: &str,klassentyp: &str,vec_attribute: &Vec<Attribute>,vec_methode: &Vec<Method>,erster_wert: u32,zweiter_wert: u32,erster_wert_x: u32,anzahl: i32)-> (image::ImageBuffer<Rgb<u8>, Vec<u8> >){ let font = Vec::from(include_bytes!("../res/fonts/DejaVuSans.ttf") as &[u8]); let font = FontCollection::from_bytes(font).unwrap().into_font().unwrap(); let mut anzahl=anzahl; let ueberschrift = Scale { x: 13.0 , y: 13.0 }; let mut erster_wert=erster_wert; let mut 
ab = erster_wert; let mut erster_wert_x = erster_wert_x; let mut zweiter_wert=zweiter_wert; let mut eingabe_ueberschift=name; let mut done=false; let mut done_schrift = false; let mut zahl = 1; let mut image=image; let mut vektor_inhalt="".to_string(); let mut vec_attribute=vec_attribute; let mut vec_methode=vec_methode; let mut vec_stelle=0; let mut sichtbarkeit_ueberschrift=klassentyp; let mut schreiben=100; if anzahl >= 5{ schreiben=370 } if anzahl >= 9 { schreiben=640; } if anzahl >= 13 { schreiben=910; } if sichtbarkeit_ueberschrift == "Paket"{ draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), erster_wert_x+5, ab+5, ueberschrift, &font, "Paket::"); draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), erster_wert_x+42, ab+5, ueberschrift, &font, eingabe_ueberschift); }else if sichtbarkeit_ueberschrift=="Interface"{ let mut ueberschrift = Scale { x: 12.0 , y: 12.0 }; let font = Vec::from(include_bytes!("../res/fonts/DejaVuSans-Oblique.ttf") as &[u8]); let font = FontCollection::from_bytes(font).unwrap().into_font().unwrap(); draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), erster_wert_x+5, ab, ueberschrift, &font, "<<<Interface>>>"); draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), erster_wert_x+5, ab+10, ueberschrift, &font, eingabe_ueberschift); } else if sichtbarkeit_ueberschrift=="Abstrakt"{ let mut ueberschrift = Scale { x: 12.0 , y: 12.0 }; let font = Vec::from(include_bytes!("../res/fonts/DejaVuSans-Oblique.ttf") as &[u8]); let font = FontCollection::from_bytes(font).unwrap().into_font().unwrap(); draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), erster_wert_x+5, ab, ueberschrift, &font, "<<<Abstract>>>"); draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), erster_wert_x+5, ab+10, ueberschrift, &font, eingabe_ueberschift); } else if sichtbarkeit_ueberschrift=="Class"{ draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), erster_wert_x+5, ab+5, ueberschrift, &font, eingabe_ueberschift); } let mut attribute = Scale { x: 8.0, y: 8.0 }; ab=ab+20; while !done_schrift { if 
ab<=schreiben{ if vec_stelle < vec_attribute.iter().len(){ vektor_inhalt=vec_attribute[vec_stelle].to_string(); if vektor_inhalt.contains("static") { let v: Vec<&str> = vektor_inhalt.split('/').collect(); draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), erster_wert_x+5, ab, attribute, &font, v[0]); for d in erster_wert_x+10..erster_wert_x+130{ image.get_pixel_mut(d,ab+8).data=[0,0,0]; } } else{ draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), erster_wert_x+5, ab, attribute, &font, vec_attribute[vec_stelle].to_string().as_ref()); } if vec_stelle <= vec_attribute.iter().len()-1{ vec_stelle=vec_stelle+1; } } } else if ab>schreiben{ if vec_stelle < vec_methode.iter().len(){ vektor_inhalt=vec_methode[vec_stelle].to_string(); if vektor_inhalt.contains("static") { let v: Vec<&str> = vektor_inhalt.split('/').collect(); draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), erster_wert_x+5, ab, attribute, &font, v[0]); for d in erster_wert_x+10..erster_wert_x+130{ image.get_pixel_mut(d,ab+8).data=[0,0,0]; } } else{ draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), erster_wert_x+5, ab, attribute, &font, vec_methode[vec_stelle].to_string().as_ref()); } if vec_stelle <= vec_attribute.iter().len(){ vec_stelle=vec_stelle+1; } } } if ab==schreiben{ ab=schreiben+10; vec_stelle=0; } ab=ab+10; zahl=zahl+1; if ab == zweiter_wert { ab=erster_wert; zahl=1; done_schrift = true; schreiben=100; } } return(image); } pub fn zeichne_pfeil(image: image::ImageBuffer<Rgb<u8>, Vec<u8> >,file: &std::path::Path,pfeilart: &str,von: i32,nach: i32, multi_von: &str,multi_nach:&str)->(image::ImageBuffer<Rgb<u8>, Vec<u8> >){ let draw_color = Rgb([0u8, 0u8, 0u8]); let mut image=image; let mut von=von; let mut nach=nach; let mut file=file; let mut multi_von=multi_von; let mut multi_nach=multi_nach; let mut anzahl_alt=5; if von == 0 {anzahl_alt=0;} if nach ==0 {anzahl_alt=0;} let mut von=koordinaten(von); let mut zweiter_wert=von.1; let mut erster_wert=von.0; let mut zweiter_wert_x=von.3; let mut erster_wert_x=von.2; 
let mut pfeil_hoehe=erster_wert+70; let mut eingabe = pfeilart; let mut pfeil_schr=pfeil_hoehe; let mut c=pfeil_hoehe; let mut richtung=""; let mut mitte_oberseite=0; let mut tuple=koordinaten(nach); mitte_oberseite=erster_wert_x+50; let mut mitte_unterseite=tuple.2+von.5; if eingabe == "asso" { if anzahl_alt>0{ draw_line_segment_mut(&mut image,(mitte_oberseite as f32, (zweiter_wert-150) as f32),((mitte_unterseite) as f32,tuple.1 as f32), draw_color); } } if eingabe == "ge_asso" { if anzahl_alt>0{ draw_line_segment_mut(&mut image,(mitte_oberseite as f32, (zweiter_wert-150) as f32),((mitte_unterseite) as f32,(tuple.1+20) as f32), draw_color); draw_line_segment_mut(&mut image,((mitte_unterseite) as f32, (tuple.1+20) as f32),((mitte_unterseite) as f32,(tuple.1) as f32), draw_color); draw_line_segment_mut(&mut image,(mitte_unterseite as f32, tuple.1 as f32),((mitte_unterseite-10) as f32,(tuple.1+25) as f32), draw_color); draw_line_segment_mut(&mut image,(mitte_unterseite as f32, tuple.1 as f32),((mitte_unterseite+10) as f32,(tuple.1+25) as f32), draw_color); } } if eingabe == "ver" { if anzahl_alt>0{ draw_line_segment_mut(&mut image,(mitte_oberseite as f32, (zweiter_wert-150) as f32),((mitte_unterseite) as f32,(tuple.1+35) as f32), draw_color); draw_line_segment_mut(&mut image,((mitte_unterseite) as f32, (tuple.1+25) as f32),((mitte_unterseite) as f32,(tuple.1+35) as f32), draw_color); draw_line_segment_mut(&mut image,(mitte_unterseite as f32, tuple.1 as f32),((mitte_unterseite-10) as f32,(tuple.1+25) as f32), draw_color); draw_line_segment_mut(&mut image,(mitte_unterseite as f32, tuple.1 as f32),((mitte_unterseite+10) as f32,(tuple.1+25) as f32), draw_color); draw_line_segment_mut(&mut image,((mitte_unterseite+10) as f32,(tuple.1+25) as f32),((mitte_unterseite-10) as f32,(tuple.1+25) as f32), draw_color); } } if eingabe == "agg" { if anzahl_alt>0{ draw_line_segment_mut(&mut image,(mitte_oberseite as f32, (zweiter_wert-150) as f32),(mitte_unterseite as f32,(tuple.1+20) 
as f32), draw_color); draw_line_segment_mut(&mut image,(mitte_unterseite as f32, tuple.1 as f32),((mitte_unterseite+10) as f32,(tuple.1+10) as f32), draw_color); draw_line_segment_mut(&mut image,((mitte_unterseite-10) as f32,(tuple.1+10) as f32),((mitte_unterseite) as f32,(tuple.1+20) as f32), draw_color); draw_line_segment_mut(&mut image,((mitte_unterseite+10) as f32,(tuple.1+10) as f32),((mitte_unterseite) as f32,(tuple.1+20) as f32), draw_color); draw_line_segment_mut(&mut image,((mitte_unterseite) as f32,(tuple.1) as f32),((mitte_unterseite-10) as f32,(tuple.1+10) as f32), draw_color); } } if eingabe == "kompo" { if anzahl_alt>0{ draw_line_segment_mut(&mut image,(mitte_oberseite as f32, (zweiter_wert-150) as f32),(mitte_unterseite as f32,(tuple.1+20) as f32), draw_color); draw_line_segment_mut(&mut image,(mitte_unterseite as f32, tuple.1 as f32),((mitte_unterseite+10) as f32,(tuple.1+10) as f32), draw_color); draw_line_segment_mut(&mut image,((mitte_unterseite-10) as f32,(tuple.1+10) as f32),((mitte_unterseite) as f32,(tuple.1+20) as f32), draw_color); draw_line_segment_mut(&mut image,((mitte_unterseite+10) as f32,(tuple.1+10) as f32),((mitte_unterseite) as f32,(tuple.1+20) as f32), draw_color); draw_line_segment_mut(&mut image,((mitte_unterseite) as f32,(tuple.1) as f32),((mitte_unterseite-10) as f32,(tuple.1+10) as f32), draw_color); mitte_unterseite=tuple.2+von.5; let mut gemalt=false; let mut anfang=mitte_unterseite-10; let mut ende=mitte_unterseite+10; let mut c=tuple.1+10; let mut d=tuple.1+10; while !gemalt { for x in anfang..ende { image.get_pixel_mut(x,c).data=[0,0,0]; } for x in anfang..ende { image.get_pixel_mut(x,d).data=[0,0,0]; } anfang=anfang+1; ende=ende-1; c=c+1; d=d-1; if c==tuple.1+105{ gemalt = true; } } } } if eingabe == "abh" { let draw_color_white = Rgb([255u8, 255u8, 255u8]); draw_line_segment_mut(&mut image,(mitte_unterseite as f32, tuple.1 as f32),((mitte_unterseite-10) as f32,(tuple.1+25) as f32), draw_color); 
draw_line_segment_mut(&mut image,(mitte_unterseite as f32, tuple.1 as f32),((mitte_unterseite+10) as f32,(tuple.1+25) as f32), draw_color); if anzahl_alt>0{ let mut s=0; let mut w=0; let mut anfang=tuple.1+20; let mut ende=mitte_unterseite; let mut ka=mitte_oberseite;//mitte_unterseite; let mut ak=von.0; for d in 1..2000{ if s<=8 { if ak != anfang{ ak=ak-1; } if ka > mitte_unterseite { if ka != ende{ ka=ka-1; } } else if ka < mitte_unterseite { if ka != ende{ ka=ka+1; } } } else if s>8{ image.get_pixel_mut(ka,ak).data=[0,0,0]; w=w+1; if w==10 { s=0; w=0; } } s=s+1; } } } if eingabe == "imple" { let draw_color_white = Rgb([255u8, 255u8, 255u8]); draw_line_segment_mut(&mut image,(mitte_unterseite as f32, tuple.1 as f32),((mitte_unterseite-10) as f32,(tuple.1+25) as f32), draw_color); draw_line_segment_mut(&mut image,(mitte_unterseite as f32, tuple.1 as f32),((mitte_unterseite+10) as f32,(tuple.1+25) as f32), draw_color); draw_line_segment_mut(&mut image,((mitte_unterseite+10) as f32,(tuple.1+25) as f32),((mitte_unterseite-10) as f32,(tuple.1+25) as f32), draw_color); if anzahl_alt>0{ let mut s=0; let mut w=0; let mut anfang=tuple.1+20; let mut ende=mitte_unterseite; let mut ka=mitte_oberseite;//mitte_unterseite; let mut ak=von.0; for d in 1..2000{ if s<=8 { if ak != anfang{ ak=ak-1; } if ka > mitte_unterseite { if ka != ende{ ka=ka-1; } } else if ka < mitte_unterseite { if ka != ende{ ka=ka+1; } } } else if s>8{ image.get_pixel_mut(ka,ak).data=[0,0,0]; w=w+1; if w==10 { s=0; w=0; } } s=s+1; } } } eingabe=""; let font = Vec::from(include_bytes!("../res/fonts/DejaVuSans.ttf") as &[u8]); let font = FontCollection::from_bytes(font).unwrap().into_font().unwrap(); let mut multi = Scale { x: 10.0, y: 10.0 }; draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), mitte_oberseite+5, (zweiter_wert-160), multi, &font, multi_von); draw_text_mut(&mut image, Rgb([0u8, 0u8, 0u8]), mitte_unterseite+5, (tuple.1), multi, &font, multi_nach); let _ = image.save(file).unwrap(); return(image); } 
/// Looks up the fixed canvas coordinates for class-box slot `anzahl`.
///
/// Returns `(top_y, bottom_y, left_x, right_x, anzahl, mitte_unterseite)`:
/// the vertical band of the slot's row, its horizontal extent, the slot
/// number passed through unchanged, and a per-slot offset used by the
/// arrow-drawing code (10 px per slot, 0-based).
///
/// Slots 1..=15 are laid out in four rows; rows snake, so row 2 (slots
/// 5..=8) runs right-to-left. Any `anzahl` outside `1..=15` yields all-zero
/// coordinates (with `anzahl` still passed through), matching the previous
/// if-chain's fallthrough behavior.
fn koordinaten(anzahl: i32) -> (u32, u32, u32, u32, i32, u32) {
    // Vertical band (top/bottom y) of the row this slot lives in.
    let (erster_wert, zweiter_wert) = match anzahl {
        1..=4 => (30, 180),
        5..=8 => (300, 450),
        9..=12 => (570, 720),
        13..=15 => (840, 990),
        _ => (0, 0),
    };
    // Horizontal extent (left/right x); note the snaking row order and the
    // wider boxes on the last row (slots 13..=15).
    let (erster_wert_x, zweiter_wert_x) = match anzahl {
        1 | 8 | 9 => (30, 180),
        2 | 7 | 10 => (280, 430),
        3 | 6 | 11 => (530, 680),
        4 | 5 | 12 => (780, 930),
        13 => (630, 930),
        14 => (230, 530),
        15 => (1, 130),
        _ => (0, 0),
    };
    // Arrow-anchor offset: 0, 10, 20, ... 140 for slots 1..=15, else 0.
    let mitte_unterseite = if (1..=15).contains(&anzahl) {
        (anzahl as u32 - 1) * 10
    } else {
        0
    };
    (erster_wert, zweiter_wert, erster_wert_x, zweiter_wert_x, anzahl, mitte_unterseite)
}
// Reachable modules mod hasher; mod packet; mod primary_header; mod secondary_header; mod user_data_field; // Re-exporting pub use packet::Packet; pub use primary_header::PktType; pub use primary_header::PrimaryHeader; pub use secondary_header::SecondaryHeader; pub use user_data_field::UserDataField;
use rand::Rng;

/// A circle on the playing field, identified by `id`, with a randomly
/// chosen size and position.
#[derive(Debug)]
pub struct Blob {
    pub radius: f32,
    pub position: cgmath::Point2<f32>,
    pub id: isize,
}

impl Blob {
    /// Creates a blob with the given id, a radius drawn from [10, 100)
    /// and a position with x in [0, 600) and y in [0, 300).
    pub fn new(id: isize) -> Blob {
        let mut generator = rand::thread_rng();
        let radius = generator.gen_range(10.0, 100.0);
        let x = generator.gen_range(0.0, 600.0);
        let y = generator.gen_range(0.0, 300.0);
        Blob {
            radius,
            position: cgmath::Point2::new(x, y),
            id,
        }
    }
}
// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. extern crate vimscript_core; use pretty_assertions::assert_eq; use std::path::PathBuf; use vimscript_core::format::format; use vimscript_core::lexer::Lexer; use vimscript_core::parser::Parser; #[derive(Debug)] struct TestCase { before: PathBuf, after: PathBuf, } #[derive(PartialEq, Eq)] #[doc(hidden)] pub struct PrettyString<'a>(pub &'a str); /// Make diff to display string as multi-line string impl<'a> std::fmt::Debug for PrettyString<'a> { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_str(self.0) } } fn read_test_cases() -> Vec<TestCase> { let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR")); d.push("tests/format"); let mut entries = std::fs::read_dir(d) .unwrap() .map(|res| res.map(|e| e.path())) .collect::<Result<Vec<_>, std::io::Error>>() .unwrap(); entries.sort(); let before = entries .iter() .filter(|path| path.to_str().unwrap().ends_with(".before.vim")); let after = entries .iter() .filter(|path| path.to_str().unwrap().ends_with(".after.vim")); before .zip(after) .map(|pair| TestCase { before: pair.0.clone(), after: pair.1.clone(), }) .collect() } #[test] fn test_format() { println!("Running"); for case in read_test_cases() { println!("Testing {:?}", case); let content = std::fs::read_to_string(&case.before).unwrap(); let mut parser = Parser::new(Lexer::new(&content)); let program = parser.parse(); assert_eq!(parser.errors, vec![]); let formatted = 
format(&program); let expected = std::fs::read_to_string(&case.after).unwrap(); assert_eq!( PrettyString(&formatted), PrettyString(&expected), "invalid formatting of {:?}", case.before.file_name() ); } }
use sqlx::postgres::PgConnection;
use uuid::Uuid;

// Internal discriminator: which column(s) the class lookup filters on.
// Each variant maps to exactly one SQL query in `execute`.
enum AuthzClassQueryState {
    Event(Uuid),
    Id(Uuid),
    Conference(Uuid),
    RecordingRtcId(Uuid),
    Scope { audience: String, scope: String },
}

/// Minimal projection of a `class` row used for authorization checks:
/// just the row id, rendered as text.
#[derive(Clone, Debug, sqlx::FromRow)]
pub struct AuthzClass {
    pub id: String,
}

/// Read-only query that resolves a class id from one of several
/// alternative keys (event room, conference room, rtc id, scope, or the
/// primary key itself).
pub struct AuthzReadQuery {
    state: AuthzClassQueryState,
}

impl AuthzReadQuery {
    /// Look up by event room id (matches current, original, or modified room).
    pub fn by_event(id: Uuid) -> Self {
        Self {
            state: AuthzClassQueryState::Event(id),
        }
    }

    /// Look up by conference room id.
    pub fn by_conference(id: Uuid) -> Self {
        Self {
            state: AuthzClassQueryState::Conference(id),
        }
    }

    /// Look up by the class primary key.
    pub fn by_id(id: Uuid) -> Self {
        Self {
            state: AuthzClassQueryState::Id(id),
        }
    }

    /// Look up via a recording's rtc id (joins through `recording`).
    pub fn by_rtc_id(id: Uuid) -> Self {
        Self {
            state: AuthzClassQueryState::RecordingRtcId(id),
        }
    }

    /// Look up by the (audience, scope) pair.
    pub fn by_scope(audience: String, scope: String) -> Self {
        Self {
            state: AuthzClassQueryState::Scope { audience, scope },
        }
    }

    /// Runs the query selected by the constructor.
    /// Returns `Ok(None)` when no class matches; propagates sqlx errors.
    pub async fn execute(self, conn: &mut PgConnection) -> sqlx::Result<Option<AuthzClass>> {
        match self.state {
            // Any of the three event-room columns may hold the id.
            AuthzClassQueryState::Event(id) => {
                sqlx::query_as!(
                    AuthzClass,
                    r#" SELECT id::text AS "id!: String" FROM class WHERE event_room_id = $1 OR original_event_room_id = $1 OR modified_event_room_id = $1 "#,
                    id,
                )
                .fetch_optional(conn)
                .await
            }
            AuthzClassQueryState::Conference(id) => {
                sqlx::query_as!(
                    AuthzClass,
                    r#" SELECT id::text AS "id!: String" FROM class WHERE conference_room_id = $1 "#,
                    id,
                )
                .fetch_optional(conn)
                .await
            }
            // Resolve the class that owns the recording with this rtc id.
            AuthzClassQueryState::RecordingRtcId(id) => {
                sqlx::query_as!(
                    AuthzClass,
                    r#" SELECT class.id::text AS "id!: String" FROM class INNER JOIN recording r ON r.class_id = class.id WHERE rtc_id = $1 "#,
                    id,
                )
                .fetch_optional(conn)
                .await
            }
            AuthzClassQueryState::Scope { scope, audience } => {
                sqlx::query_as!(
                    AuthzClass,
                    r#" SELECT id::text AS "id!: String" FROM class WHERE audience = $1 AND scope = $2 "#,
                    audience,
                    scope
                )
                .fetch_optional(conn)
                .await
            }
            AuthzClassQueryState::Id(id) => {
                sqlx::query_as!(
                    AuthzClass,
                    r#" SELECT id::text AS "id!: String" FROM class WHERE id = $1 "#,
                    id,
                )
                .fetch_optional(conn)
                .await
            }
        }
    }
}
use std::u8; use std::str; use std::mem::drop; use std::net::SocketAddr; use std::{fmt, num::ParseIntError}; use std::sync::{Mutex, Arc}; use std::collections::HashMap; use crate::hash::*; use crate::remote::*; use crate::record::{Record, Deleted}; use ascii::AsciiString; use serde::{Deserialize, Serialize}; use tiny_http::{Server, Method, Response, Header}; #[derive(Clone)] pub struct RebalanceRequest { key: String, volumes: Vec<String>, kvolumes: Vec<String>, } #[derive(Clone)] pub struct RebuildRequest { vol: String, url: String, } #[derive(Clone, Deserialize, Serialize)] struct File { name: String, file_type: String, time: String, } struct FileWrapper(Vec<File>); impl FileWrapper { fn new() -> Self { Self { 0: vec![], } } } #[derive(Clone, Deserialize, Serialize, Default)] struct ListResponse { next: String, keys: Vec<String>, } #[derive(Debug, Clone, PartialEq, Eq)] pub enum DecodeHexError { OddLength, ParseInt(ParseIntError), } impl From<ParseIntError> for DecodeHexError { fn from(e: ParseIntError) -> Self { DecodeHexError::ParseInt(e) } } impl fmt::Display for DecodeHexError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { DecodeHexError::OddLength => "input string has an odd number of bytes".fmt(f), DecodeHexError::ParseInt(e) => e.fmt(f), } } } impl std::error::Error for DecodeHexError {} #[derive(Clone)] pub struct Minikeyvalue { db: HashMap<String, String>, lock: Arc<Mutex<HashMap<String, u8>>>, volumes: Vec<String>, fallback: String, replicas: i32, subvolumes: i32, port: u16, protect: bool, } impl Minikeyvalue { pub fn new(volumes: Vec<String>, fallback: String, replicas: i32, subvolumes: i32, protect: bool, port: u16) -> Self { Self { db: HashMap::new(), lock: Arc::new(Mutex::new(HashMap::new())), volumes, fallback, replicas, subvolumes, port, protect, } } pub fn unlock_key(&self, key: &str) { let mut map = self.lock.lock().unwrap(); map.remove(key); drop(map); } pub fn lock_key(&self, key: &str) -> bool { let mut map = 
self.lock.lock().unwrap(); if map.contains_key(key) { return false; } map.insert(key.to_string(), 1); drop(map); true } pub fn get_record(&self, key: &str) -> Record { match self.db.get(key) { Some(val) => val.to_string().into(), None => Record::new(), } } // If `None` is returned, it is the first insertion else if `String` is returned, // then it updates the previous record and returns the old value. Will never fail // probably. pub fn put_record(&mut self, key: &str, rec: Record) -> Option<String> { self.db.insert(key.to_string(), rec.into()) } pub fn rebuild(&mut self) { self.db.clear(); let mut reqs = Vec::<RebuildRequest>::with_capacity(20000); for vol in self.volumes.iter() { let mut has_subvolumes = false; for f in get_files(&format!("http://{}/", vol)).0 { if f.name.len() == 4 && f.name.starts_with("sv") && f.file_type == "directory" { if let Some(v) = parse_volume(format!("{}/{}", vol, f.name)) { reqs.push(v); } has_subvolumes = true; } } if !has_subvolumes { if let Some(v) = parse_volume(vol.to_string()) { reqs.push(v); } } } for _i in 0..128 { crossbeam::scope(|scope| { scope.spawn({ let reqs_c = reqs.clone(); let mut that = self.clone(); move |_| { for req in reqs_c.iter() { for f in get_files(&req.url).0 { rebuild(&mut that, &req.vol, &f.name); } } } }); }).expect("rebalance: crossbeam failed"); } } pub fn rebalance(&mut self) { let mut reqs = Vec::<RebalanceRequest>::with_capacity(20000); for (key, value) in self.db.iter() { let rec = Record::from(value.to_string()); let kvolumes = key_to_volume(key, &self.volumes, self.replicas, self.subvolumes); reqs.push(RebalanceRequest { key: key.to_string(), kvolumes, volumes: rec.rvolumes, }); } for _i in 0..16 { crossbeam::scope(|scope| { let reqs_c = reqs.clone(); let mut that = self.clone(); scope.spawn(move |_| { for req in reqs_c.iter() { rebalance(&mut that, req); } }); }).expect("rebalance: crossbeam failed"); } } pub fn server(&mut self) { println!("[OK] Listening on 127.0.0.1:{}", self.port); let addr 
= SocketAddr::from(([127, 0, 0, 1], self.port)); let server = Server::http(addr).unwrap(); let method_unlink = &Method::NonStandard(AsciiString::from_ascii("UNLINK").unwrap()); let method_rebalance = &Method::NonStandard(AsciiString::from_ascii("REBALANCE").unwrap()); for mut req in server.incoming_requests().by_ref() { let split = req.url().split('?').map(|x| x.to_string()).collect::<Vec<String>>(); let key = split[0].clone(); let q = split[1].clone(); let method = req.method(); let mut query = HashMap::new(); let qs = q.split('&').collect::<Vec<&str>>(); qs.iter() .for_each(|x| { for (_i, v) in x.split('=') .collect::<Vec<&str>>() .chunks(2) .enumerate() { query.insert(v[0], v[1]); } }); if query.len() > 0 { if method == &Method::Get { req.respond(Response::empty(403)).expect("error while responding"); return; } let operation = &qs[0]; match *operation { "link" | "unlinked" => { let q_limit = query.get("limit"); let mut limit = 0; let qlimit = if q_limit.is_some() { q_limit.unwrap() } else { &"" }; if qlimit != &"" { match qlimit.parse::<i32>() { Ok(nlimit) => limit = nlimit, Err(_e) => { req.respond(Response::empty(400)).expect("error while responding"); return; }, } } let mut counter = 0; let mut keys = Vec::<String>::new(); let mut next = String::new(); for (k, v) in self.db.iter() { let rec = Record::from(v.clone()); if (rec.deleted != Deleted::No && operation == &"list") || (rec.deleted != Deleted::Soft && operation == &"unlinked") { continue; } if counter > 1000000 { req.respond(Response::empty(403)).expect("error while responding"); return; } if limit > 0 && keys.len() as i32 == limit { next = k.to_string(); } counter += 1; keys.push(k.to_string()); } let lsw = match serde_json::to_string(&ListResponse {next, keys}) { Ok(v) => v, Err(_e) => { req.respond(Response::empty(500)).expect("error while responding"); return; } }; let mut headers = Vec::new(); headers.push(Header::from_bytes(&b"Content-Type"[..], &b"application/json"[..]).unwrap()); let bytes = 
lsw.as_bytes(); let resp = Response::new(200.into(), headers, bytes, Some(bytes.len()), None); req.respond(resp).expect("error while responding"); continue; } _ => { req.respond(Response::empty(403)).expect("error while responding"); continue; } } } // end query handler if method == &Method::Put || method == &Method::Delete || method == method_unlink || method == method_rebalance { if !self.lock_key(&key) { req.respond(Response::empty(409)).expect("error while responding"); continue; } self.unlock_key(&key); } if method == &Method::Get || method == &Method::Head { let rec = self.get_record(&key); let mut remote = String::new(); let mut resp = Response::empty(404); if rec.hash.len() != 0 { let header = Header::from_bytes(&b"Content-Md5"[..], rec.hash.as_bytes()).unwrap(); resp.add_header(header); } if rec.deleted == Deleted::Soft || rec.deleted == Deleted::Hard { if self.fallback == "" { let header = Header::from_bytes(&b"Content-Length"[..], &b"0"[..]).unwrap(); resp.add_header(header); resp = Response::with_status_code(resp, 404); req.respond(resp).expect("error while responding"); continue; } } else { let kvolumes = key_to_volume(&key, &self.volumes, self.replicas, self.subvolumes); if needs_rebalance(&rec.rvolumes, &kvolumes) { eprintln!("On wrong volumes, needs rebalance"); } let mut good = false; for rvol in rec.rvolumes.iter() { remote = format!("http://{}{}", rvol, key_to_path(&key)); if remote_head(&remote) { good = true; break; } } if !good { let header = Header::from_bytes(&b"Content-Length"[..], &b"0"[..]).unwrap(); resp.add_header(header); resp = Response::with_status_code(resp, 404); req.respond(resp).expect("error while responding"); continue; } } resp = Response::with_header(resp, Header::from_bytes(&b"Location"[..], remote).unwrap()); resp = Response::with_header(resp, Header::from_bytes(&b"Content-Length"[..], &b"0"[..]).unwrap()); resp = Response::with_status_code(resp, 302); req.respond(resp).expect("error while responding"); continue; } else if 
method == &Method::Put { let mut flag = false; for head in &req.headers().to_vec() { if head.field.as_str() == "Content-Length" && head.value == "0" { flag = true; } } if flag == true { req.respond(Response::empty(411)).expect("error while responding"); continue; } let rec = self.get_record(&key); if rec.deleted == Deleted::No { req.respond(Response::empty(403)).expect("error while responding"); continue; } let kvolumes = key_to_volume(&key, &self.volumes, self.replicas, self.subvolumes); self.put_record(&key, Record {rvolumes: kvolumes.clone(), deleted: Deleted::Soft, hash: "".to_string()}); // TODO: not handling errors here let mut buffer = Vec::<u8>::new(); req.as_reader().read_to_end(&mut buffer).unwrap(); // TODO: handle error for i in 0..kvolumes.len() { let remote = format!("http://{}{}", kvolumes[i], key_to_path(&key)); match remote_put(&remote, buffer.len(), &str::from_utf8(&buffer).to_owned().unwrap().to_string()) { Err(_e) => flag = true, Ok(_v) => flag = false, } if flag == true { break; } } if flag == true { eprintln!("replica write failed"); req.respond(Response::empty(500)).expect("error while responding"); continue; } let hash = format!("{}", String::from_utf8(md5::compute(buffer).0.to_vec()).unwrap()); self.put_record(&key, Record {rvolumes: kvolumes, deleted: Deleted::No, hash }); // TODO: not handling errors here req.respond(Response::empty(201)).expect("error while responding"); } else if method == &Method::Delete || method == method_unlink { let unlink = method == method_unlink; let rec = self.get_record(&key); if rec.deleted == Deleted::Hard || (unlink && rec.deleted == Deleted::Soft) { req.respond(Response::empty(404)).expect("error while responding"); return; } if !unlink && self.protect && rec.deleted == Deleted::No { req.respond(Response::empty(403)).expect("error while responding"); return; } self.put_record(&key, Record { rvolumes: rec.rvolumes.clone(), deleted: Deleted::Soft, hash: rec.hash }); if !unlink { let mut delete_error = false; 
for volume in rec.rvolumes { let remote = format!("http://{}{}", volume, key_to_path(&key)); match remote_delete(remote) { Err(_e) => delete_error = true, Ok(_v) => {}, } } if delete_error { req.respond(Response::empty(500)).expect("error while responding"); return; } self.db.remove(&key); } req.respond(Response::empty(204)).expect("error while responding"); } else if method == method_rebalance { let rec = self.get_record(&key); if rec.deleted != Deleted::No { req.respond(Response::empty(404)).expect("error while responding"); return; } let kvolumes = key_to_volume(&key, &self.volumes, self.replicas, self.subvolumes); let rbreq = RebalanceRequest { key, volumes: rec.rvolumes, kvolumes }; if !rebalance(self, &rbreq) { req.respond(Response::empty(400)).expect("error while responding"); return; } req.respond(Response::empty(204)).expect("error while responding"); } } // loop } // fn } pub fn rebuild(that: &mut Minikeyvalue, vol: &str, name: &str) -> bool { let mut buf = Vec::new(); buf.resize((name.len() + 3) / 12, 0); let bytes_decoded = match base64::decode_config_slice(name, base64::STANDARD, &mut buf) { Ok(v) => v, Err(e) => { eprintln!("rebuild: base64 decode error: {}", e); return false; } }; buf.resize(bytes_decoded, 0); let key = std::str::from_utf8(&buf).expect("rebuild: cannot unwrap buffer"); let kvolumes = key_to_volume(key, &that.volumes, that.replicas, that.subvolumes); if !that.lock_key(key) { eprintln!("rebuild: lock key issue"); return false; } that.unlock_key(key); let rec = match that.db.get(key) { Some(v) => { Record::from(v.clone()) } None => { Record { rvolumes: vec![vol.to_string()], deleted: Deleted::No, hash: String::new(), } } }; let mut pvalues = Vec::<String>::new(); for v in &kvolumes { for v2 in &rec.rvolumes { if *v == *v2 { pvalues.push(v.to_string()); } } } for v2 in &rec.rvolumes { let mut insert = true; for v in &kvolumes { if *v == *v2 { insert = false; break; } } if insert { pvalues.push(v2.to_string()); } } that.put_record(key, 
Record { rvolumes: pvalues, deleted: Deleted::No, hash: String::new(), });
    true
}

/// Moves the replicas of `req.key` from wherever they currently live
/// (`req.volumes`) onto the volumes the hash ring assigns to the key
/// (`req.kvolumes`). Returns `false` on any remote error or if no live
/// replica remains.
pub fn rebalance(that: &mut Minikeyvalue, req: &RebalanceRequest) -> bool {
    let kp = key_to_path(&req.key);
    // Keep only the volumes that actually still serve the blob (HEAD succeeds).
    let mut rvolumes = Vec::<String>::new();
    for rv in &req.volumes {
        if remote_head(&format!("http://{}{}", rv, kp)) {
            rvolumes.push(rv.to_string());
        }
    }
    if rvolumes.is_empty() {
        eprintln!("rebalance: {} is missing", req.key);
        return false;
    }
    // Already on the right volumes — nothing to do.
    if !needs_rebalance(&rvolumes, &req.kvolumes) {
        return true;
    }
    // Fetch the blob once, from the first live replica.
    let s = match remote_get(&format!("http://{}{}", &rvolumes[0], kp)) {
        Ok(ss) => ss,
        Err(_e) => return false,
    };
    // Write the blob to every target volume that does not already hold a copy.
    for v in req.kvolumes.iter() {
        let mut needs_write = true;
        for v2 in rvolumes.iter() {
            if v == v2 {
                needs_write = false;
                break;
            }
        }
        if needs_write {
            if let Err(e) = remote_put(&format!("http://{}{}", v, kp), s.len(), &s) {
                eprintln!("put error: {}", e);
                return false;
            }
        }
    }
    // Point the index at the new replica set before deleting stale copies.
    that.put_record(&req.key, Record { rvolumes: req.kvolumes.clone(), deleted: Deleted::No, hash: String::new(), });
    // Delete copies on volumes no longer in the target set.
    // NOTE(review): `needs_delete` starts at `true` and is set to `true`
    // again when `v2` IS a target volume — it is never set `false`, so every
    // surviving replica (including the ones just kept/written) gets deleted.
    // The inner loop almost certainly should assign `needs_delete = false`
    // (mirroring the `needs_write` loop above) — confirm against the
    // original minikeyvalue design before relying on this path.
    for v2 in rvolumes.iter() {
        let mut needs_delete = true;
        for v in req.kvolumes.iter() {
            if v == v2 {
                needs_delete = true;
                break;
            }
        }
        if needs_delete {
            if let Err(e) = remote_delete(format!("http://{}{}", v2, kp)) {
                eprintln!("delete error: {}", e);
                return false;
            }
        }
    }
    true
}

// Finds the first two-level hex directory pair ("/xx/yy/") on `vol` and
// returns it as a rebuild work item; `None` if no valid pair exists.
// NOTE(review): returns after the FIRST valid pair only — if a volume holds
// several key subdirectories, the rest are never scanned. Verify this is
// what the rebuild driver expects.
fn parse_volume(vol: String) -> Option<RebuildRequest> {
    for i in get_files(&format!("http://{}/", vol)).0 {
        if valid(&i) {
            for j in get_files(&format!("http://{}/{}", vol, i.name)).0 {
                if valid(&j) {
                    let url = format!("http://{}/{}/{}/", vol, i.name, j.name);
                    return Some(RebuildRequest { vol, url });
                }
            }
        }
    }
    None
}

// Decodes a hex string into raw bytes. Errors on odd-length input or on any
// non-hex digit (the ParseIntError is wrapped into DecodeHexError).
fn decode_hex(s: &str) -> Result<Vec<u8>, DecodeHexError> {
    if s.len() % 2 != 0 {
        Err(DecodeHexError::OddLength)
    } else {
        (0..s.len()).step_by(2).map(|i| u8::from_str_radix(&s[i..i + 2], 16).map_err(|e| e.into())).collect()
    }
}

// GETs `url` and parses the JSON body as a file listing. Returns an empty
// listing on network error, but panics (expect) on malformed JSON.
fn get_files(url: &str) -> FileWrapper {
    let mut res = FileWrapper::new();
    match remote_get(&url.to_string()) {
        Ok(ss) => res.0 = serde_json::from_str(&ss).expect("get_files: Cannot parse"),
        Err(e) => {
            eprintln!("get_files: remote_get error {}", e);
            return res;
        },
    };
    res
}

// A listing entry names a key directory iff it is a directory whose name is
// exactly one hex-encoded byte (two hex characters).
fn valid(f: &File) -> bool {
    if f.name.len() != 2 || f.file_type != "directory" {
        return false;
    }
    let decoded = match decode_hex(&f.name) {
        Ok(dec) => dec,
        Err(_e) => return false,
    };
    if decoded.len() != 1 {
        return false;
    }
    true
}
use std;

// NOTE(review): this file is written in a pre-1.0 Rust dialect (`uint`,
// `ret`, statement-form `assert`, `#fmt`, record destructuring with
// `let {ch, next}`) and will not compile with any modern rustc; it is kept
// here as-is.

// Translates a character index into `ss` to the byte offset where that
// character starts, by summing UTF-8 sequence widths from the front of
// the string.
fn char_to_byte_offset (&&ss: str, char_position: uint) -> uint {
    let cursor_char = 0u;
    let cursor_byte = 0u;
    while cursor_char < char_position {
        // Width in bytes (1-4) of the UTF-8 sequence starting at this byte;
        // 0 would mean we landed inside a sequence, which must not happen.
        let sz = str::utf8_char_width(ss[cursor_byte]);
        assert sz > 0u;
        cursor_byte += sz;
        cursor_char += 1u;
    }
    // Both cursors must still be inside the string's bounds.
    assert cursor_char <= str::char_len(ss);
    assert cursor_byte <= str::byte_len(ss);
    ret cursor_byte;
}

#[test]
fn test () {
    // Mixed-script sample exercising 1-, 2-, 3- and 4-byte-adjacent chars.
    let s = "ประเทศไทย中华Việt Nam";
    let i = 0u;
    // Walk the string one character at a time, printing byte index and char.
    while i < str::byte_len(s) {
        let {ch, next} = str::char_range_at(s, i);
        std::io::println(#fmt("%u: %c",i,ch));
        i = next;
    }
}
use std::ffi::CStr;
use std::str::Utf8Error;

/// Information about a receive channel
///
/// Owned, UTF-8-validated copies of the identification strings reported by
/// the UHD C API for one RX channel (motherboard, daughterboard, subdevice,
/// antenna). Built from the C struct via [`ReceiveInfo::from_c`].
#[derive(Debug, Clone)]
pub struct ReceiveInfo {
    motherboard_id: String,
    motherboard_name: String,
    motherboard_serial: String,
    daughterboard_id: String,
    daughterboard_serial: String,
    subdev_name: String,
    subdev_spec: String,
    antenna: String,
}

impl ReceiveInfo {
    /// Motherboard identifier string.
    pub fn motherboard_id(&self) -> &str {
        &self.motherboard_id
    }
    /// Motherboard product name.
    pub fn motherboard_name(&self) -> &str {
        &self.motherboard_name
    }
    /// Motherboard serial number string.
    pub fn motherboard_serial(&self) -> &str {
        &self.motherboard_serial
    }
    /// RX daughterboard identifier string.
    pub fn daughterboard_id(&self) -> &str {
        &self.daughterboard_id
    }
    /// RX daughterboard serial number string.
    pub fn daughterboard_serial(&self) -> &str {
        &self.daughterboard_serial
    }
    /// RX subdevice name.
    pub fn subdev_name(&self) -> &str {
        &self.subdev_name
    }
    /// RX subdevice specification string.
    pub fn subdev_spec(&self) -> &str {
        &self.subdev_spec
    }
    /// RX antenna name.
    pub fn antenna(&self) -> &str {
        &self.antenna
    }

    /// Copies every string field out of the C info struct, validating each
    /// as UTF-8. Returns `Utf8Error` if any field is not valid UTF-8.
    ///
    /// # Safety
    /// Caller must guarantee that every `*mut c_char` pointer inside
    /// `info_c` is non-null and points to a valid NUL-terminated string that
    /// outlives this call (presumably the contract of the UHD call that
    /// filled the struct — TODO(review): confirm against the uhd_sys docs).
    pub(crate) unsafe fn from_c(info_c: &uhd_sys::uhd_usrp_rx_info_t) -> Result<Self, Utf8Error> {
        Ok(ReceiveInfo {
            motherboard_id: CStr::from_ptr(info_c.mboard_id).to_str()?.into(),
            motherboard_name: CStr::from_ptr(info_c.mboard_name).to_str()?.into(),
            motherboard_serial: CStr::from_ptr(info_c.mboard_serial).to_str()?.into(),
            daughterboard_id: CStr::from_ptr(info_c.rx_id).to_str()?.into(),
            daughterboard_serial: CStr::from_ptr(info_c.rx_serial).to_str()?.into(),
            subdev_name: CStr::from_ptr(info_c.rx_subdev_name).to_str()?.into(),
            subdev_spec: CStr::from_ptr(info_c.rx_subdev_spec).to_str()?.into(),
            antenna: CStr::from_ptr(info_c.rx_antenna).to_str()?.into(),
        })
    }
}
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
// NOTE(review): machine-generated (AutoRust) serde models; the comments below
// were added by hand and will be lost on regeneration. The type names
// (StreamingJob, Transformation, UdfType, ...) suggest the Azure Stream
// Analytics management API — confirm against the REST API spec.
use serde::{Deserialize, Serialize};

/// Paged list of REST API operations, with a link to the next page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// One REST API operation (name plus display metadata).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
}
pub mod operation {
    use super::*;
    /// Human-readable description of an operation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
}
/// A streaming job resource: tracked ARM resource plus job properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StreamingJob {
    #[serde(flatten)]
    pub tracked_resource: TrackedResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<StreamingJobProperties>,
}
/// Full property bag of a streaming job (state, policies, inputs/outputs).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StreamingJobProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
    #[serde(rename = "jobId", default, skip_serializing_if = "Option::is_none")]
    pub job_id: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "jobState", default, skip_serializing_if = "Option::is_none")]
    pub job_state: Option<String>,
    #[serde(rename = "outputStartMode", default, skip_serializing_if = "Option::is_none")]
    pub output_start_mode: Option<OutputStartMode>,
    #[serde(rename = "outputStartTime", default, skip_serializing_if = "Option::is_none")]
    pub output_start_time: Option<String>,
    #[serde(rename = "lastOutputEventTime", default, skip_serializing_if = "Option::is_none")]
    pub last_output_event_time: Option<String>,
    #[serde(rename = "eventsOutOfOrderPolicy", default, skip_serializing_if = "Option::is_none")]
    pub events_out_of_order_policy: Option<EventsOutOfOrderPolicy>,
    #[serde(rename = "outputErrorPolicy", default, skip_serializing_if = "Option::is_none")]
    pub output_error_policy: Option<OutputErrorPolicy>,
    #[serde(rename = "eventsOutOfOrderMaxDelayInSeconds", default, skip_serializing_if = "Option::is_none")]
    pub events_out_of_order_max_delay_in_seconds: Option<i32>,
    #[serde(rename = "eventsLateArrivalMaxDelayInSeconds", default, skip_serializing_if = "Option::is_none")]
    pub events_late_arrival_max_delay_in_seconds: Option<i32>,
    #[serde(rename = "dataLocale", default, skip_serializing_if = "Option::is_none")]
    pub data_locale: Option<String>,
    #[serde(rename = "compatibilityLevel", default, skip_serializing_if = "Option::is_none")]
    pub compatibility_level: Option<CompatibilityLevel>,
    #[serde(rename = "createdDate", default, skip_serializing_if = "Option::is_none")]
    pub created_date: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub inputs: Vec<Input>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub transformation: Option<Transformation>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub outputs: Vec<Output>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub functions: Vec<Function>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub etag: Option<String>,
}
/// SKU of a streaming job.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Sku {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<sku::Name>,
}
pub mod sku {
    use super::*;
    /// Only `Standard` is defined by this API version.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Name {
        Standard,
    }
}
/// Paged list of streaming jobs.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StreamingJobListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<StreamingJob>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Parameters for a start-job request.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StartStreamingJobParameters {
    #[serde(rename = "outputStartMode", default, skip_serializing_if = "Option::is_none")]
    pub output_start_mode: Option<OutputStartMode>,
    #[serde(rename = "outputStartTime", default, skip_serializing_if = "Option::is_none")]
    pub output_start_time: Option<String>,
}
/// When output production should begin after a job starts.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OutputStartMode {
    JobStartTime,
    CustomTime,
    LastOutputEventTime,
}
/// How out-of-order events are handled.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum EventsOutOfOrderPolicy {
    Adjust,
    Drop,
}
/// How events that fail to be written to output are handled.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OutputErrorPolicy {
    Stop,
    Drop,
}
/// Job compatibility level; `N1_0` serializes as the wire value "1.0".
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum CompatibilityLevel {
    #[serde(rename = "1.0")]
    N1_0,
}
/// Base ARM resource (id, name, type).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// ARM resource that additionally carries tags and a location.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrackedResource {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
}
/// ARM proxy resource (no extra fields beyond the base resource).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyResource {
    #[serde(flatten)]
    pub resource: Resource,
}
// --- Input models ---
/// A job input: sub-resource identity plus input properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Input {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<InputProperties>,
}
/// Properties common to all inputs; `type_` discriminates the variant.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InputProperties {
    #[serde(rename = "type")]
    pub type_: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub serialization: Option<Serialization>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub diagnostics: Option<Diagnostics>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub etag: Option<String>,
}
/// Input properties for a stream input (live event data source).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StreamInputProperties {
    #[serde(flatten)]
    pub input_properties: InputProperties,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub datasource: Option<StreamInputDataSource>,
}
/// Input properties for a reference input (lookup data source).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReferenceInputProperties {
    #[serde(flatten)]
    pub input_properties: InputProperties,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub datasource: Option<ReferenceInputDataSource>,
}
/// Base stream-input data source; `type_` discriminates the concrete kind.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StreamInputDataSource {
    #[serde(rename = "type")]
    pub type_: String,
}
/// Blob-storage stream input.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlobStreamInputDataSource {
    #[serde(flatten)]
    pub stream_input_data_source: StreamInputDataSource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<BlobStreamInputDataSourceProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlobStreamInputDataSourceProperties {
    #[serde(flatten)]
    pub blob_data_source_properties: BlobDataSourceProperties,
    #[serde(rename = "sourcePartitionCount", default, skip_serializing_if = "Option::is_none")]
    pub source_partition_count: Option<i32>,
}
/// Event Hub stream input.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EventHubStreamInputDataSource {
    #[serde(flatten)]
    pub stream_input_data_source: StreamInputDataSource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<EventHubStreamInputDataSourceProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EventHubStreamInputDataSourceProperties {
    #[serde(flatten)]
    pub event_hub_data_source_properties: EventHubDataSourceProperties,
    #[serde(rename = "consumerGroupName", default, skip_serializing_if = "Option::is_none")]
    pub consumer_group_name: Option<String>,
}
/// IoT Hub stream input.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IoTHubStreamInputDataSource {
    #[serde(flatten)]
    pub stream_input_data_source: StreamInputDataSource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<IoTHubStreamInputDataSourceProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IoTHubStreamInputDataSourceProperties {
    #[serde(rename = "iotHubNamespace", default, skip_serializing_if = "Option::is_none")]
    pub iot_hub_namespace: Option<String>,
    #[serde(rename = "sharedAccessPolicyName", default, skip_serializing_if = "Option::is_none")]
    pub shared_access_policy_name: Option<String>,
    #[serde(rename = "sharedAccessPolicyKey", default, skip_serializing_if = "Option::is_none")]
    pub shared_access_policy_key: Option<String>,
    #[serde(rename = "consumerGroupName", default, skip_serializing_if = "Option::is_none")]
    pub consumer_group_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub endpoint: Option<String>,
}
/// Base reference-input data source; `type_` discriminates the concrete kind.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReferenceInputDataSource {
    #[serde(rename = "type")]
    pub type_: String,
}
/// Blob-storage reference input.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlobReferenceInputDataSource {
    #[serde(flatten)]
    pub reference_input_data_source: ReferenceInputDataSource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<BlobReferenceInputDataSourceProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlobReferenceInputDataSourceProperties {
    #[serde(flatten)]
    pub blob_data_source_properties: BlobDataSourceProperties,
}
/// Shared blob settings: accounts, container, path/date/time patterns.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlobDataSourceProperties {
    #[serde(rename = "storageAccounts", default, skip_serializing_if = "Vec::is_empty")]
    pub storage_accounts: Vec<StorageAccount>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub container: Option<String>,
    #[serde(rename = "pathPattern", default, skip_serializing_if = "Option::is_none")]
    pub path_pattern: Option<String>,
    #[serde(rename = "dateFormat", default, skip_serializing_if = "Option::is_none")]
    pub date_format: Option<String>,
    #[serde(rename = "timeFormat", default, skip_serializing_if = "Option::is_none")]
    pub time_format: Option<String>,
}
/// A storage account name/key pair.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StorageAccount {
    #[serde(rename = "accountName", default, skip_serializing_if = "Option::is_none")]
    pub account_name: Option<String>,
    #[serde(rename = "accountKey", default, skip_serializing_if = "Option::is_none")]
    pub account_key: Option<String>,
}
/// Shared Service Bus connection settings (namespace + SAS policy).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceBusDataSourceProperties {
    #[serde(rename = "serviceBusNamespace", default, skip_serializing_if = "Option::is_none")]
    pub service_bus_namespace: Option<String>,
    #[serde(rename = "sharedAccessPolicyName", default, skip_serializing_if = "Option::is_none")]
    pub shared_access_policy_name: Option<String>,
    #[serde(rename = "sharedAccessPolicyKey", default, skip_serializing_if = "Option::is_none")]
    pub shared_access_policy_key: Option<String>,
}
/// Service Bus settings plus the Event Hub name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EventHubDataSourceProperties {
    #[serde(flatten)]
    pub service_bus_data_source_properties: ServiceBusDataSourceProperties,
    #[serde(rename = "eventHubName", default, skip_serializing_if = "Option::is_none")]
    pub event_hub_name: Option<String>,
}
/// Diagnostic conditions applicable to an input or output.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Diagnostics {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub conditions: Vec<DiagnosticCondition>,
}
/// One diagnostic condition (when it started, code, message).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiagnosticCondition {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub since: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/// Paged list of inputs.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InputListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Input>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
// --- Serialization formats ---
/// Base serialization descriptor; `type_` discriminates the format.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Serialization {
    #[serde(rename = "type")]
    pub type_: EventSerializationType,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum EventSerializationType {
    Csv,
    Avro,
    Json,
}
/// CSV serialization with its properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CsvSerialization {
    #[serde(flatten)]
    pub serialization: Serialization,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<CsvSerializationProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CsvSerializationProperties {
    #[serde(rename = "fieldDelimiter", default, skip_serializing_if = "Option::is_none")]
    pub field_delimiter: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encoding: Option<Encoding>,
}
/// JSON serialization with its properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct JsonSerialization {
    #[serde(flatten)]
    pub serialization: Serialization,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<JsonSerializationProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct JsonSerializationProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encoding: Option<Encoding>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub format: Option<JsonOutputSerializationFormat>,
}
/// Avro serialization; its property bag is currently empty.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AvroSerialization {
    #[serde(flatten)]
    pub serialization: Serialization,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AvroSerializationProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AvroSerializationProperties {}
/// Text encoding; `Utf8` serializes as the wire value "UTF8".
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Encoding {
    #[serde(rename = "UTF8")]
    Utf8,
}
/// Layout of serialized JSON output: one object per line, or a JSON array.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum JsonOutputSerializationFormat {
    LineSeparated,
    Array,
}
/// Result of testing a data-source connection (status plus optional error).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceTestStatus {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
}
/// Error payload with a code and message.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/// Nested sub-resource identity (id, name, type).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SubResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// A job transformation (the query) as a sub-resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Transformation {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<TransformationProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransformationProperties {
    #[serde(rename = "streamingUnits", default, skip_serializing_if = "Option::is_none")]
    pub streaming_units: Option<i32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub query: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub etag: Option<String>,
}
// --- Output models ---
/// A job output: sub-resource identity plus output properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Output {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<OutputProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OutputProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub datasource: Option<OutputDataSource>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub serialization: Option<Serialization>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub diagnostics: Option<Diagnostics>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub etag: Option<String>,
}
/// Base output data source; `type_` discriminates the concrete kind.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OutputDataSource {
    #[serde(rename = "type")]
    pub type_: String,
}
/// Blob-storage output.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlobOutputDataSource {
    #[serde(flatten)]
    pub output_data_source: OutputDataSource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<BlobOutputDataSourceProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlobOutputDataSourceProperties {
    #[serde(flatten)]
    pub blob_data_source_properties: BlobDataSourceProperties,
}
/// Azure Table storage output.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureTableOutputDataSource {
    #[serde(flatten)]
    pub output_data_source: OutputDataSource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AzureTableOutputDataSourceProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureTableOutputDataSourceProperties {
    #[serde(rename = "accountName", default, skip_serializing_if = "Option::is_none")]
    pub account_name: Option<String>,
    #[serde(rename = "accountKey", default, skip_serializing_if = "Option::is_none")]
    pub account_key: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub table: Option<String>,
    #[serde(rename = "partitionKey", default, skip_serializing_if = "Option::is_none")]
    pub partition_key: Option<String>,
    #[serde(rename = "rowKey", default, skip_serializing_if = "Option::is_none")]
    pub row_key: Option<String>,
    #[serde(rename = "columnsToRemove", default, skip_serializing_if = "Vec::is_empty")]
    pub columns_to_remove: Vec<String>,
    #[serde(rename = "batchSize", default, skip_serializing_if = "Option::is_none")]
    pub batch_size: Option<i32>,
}
/// Event Hub output.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EventHubOutputDataSource {
    #[serde(flatten)]
    pub output_data_source: OutputDataSource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<EventHubOutputDataSourceProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EventHubOutputDataSourceProperties {
    #[serde(flatten)]
    pub event_hub_data_source_properties: EventHubDataSourceProperties,
    #[serde(rename = "partitionKey", default, skip_serializing_if = "Option::is_none")]
    pub partition_key: Option<String>,
}
/// Azure SQL Database output.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureSqlDatabaseOutputDataSource {
    #[serde(flatten)]
    pub output_data_source: OutputDataSource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AzureSqlDatabaseOutputDataSourceProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureSqlDatabaseOutputDataSourceProperties {
    #[serde(flatten)]
    pub azure_sql_database_data_source_properties: AzureSqlDatabaseDataSourceProperties,
}
/// Cosmos DB (DocumentDB) output.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DocumentDbOutputDataSource {
    #[serde(flatten)]
    pub output_data_source: OutputDataSource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DocumentDbOutputDataSourceProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DocumentDbOutputDataSourceProperties {
    #[serde(rename = "accountId", default, skip_serializing_if = "Option::is_none")]
    pub account_id: Option<String>,
    #[serde(rename = "accountKey", default, skip_serializing_if = "Option::is_none")]
    pub account_key: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub database: Option<String>,
    #[serde(rename = "collectionNamePattern", default, skip_serializing_if = "Option::is_none")]
    pub collection_name_pattern: Option<String>,
    #[serde(rename = "partitionKey", default, skip_serializing_if = "Option::is_none")]
    pub partition_key: Option<String>,
    #[serde(rename = "documentId", default, skip_serializing_if = "Option::is_none")]
    pub document_id: Option<String>,
}
/// Service Bus queue output.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceBusQueueOutputDataSource {
    #[serde(flatten)]
    pub output_data_source: OutputDataSource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ServiceBusQueueOutputDataSourceProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceBusQueueOutputDataSourceProperties {
    #[serde(flatten)]
    pub service_bus_data_source_properties: ServiceBusDataSourceProperties,
    #[serde(rename = "queueName", default, skip_serializing_if = "Option::is_none")]
    pub queue_name: Option<String>,
    #[serde(rename = "propertyColumns", default, skip_serializing_if = "Vec::is_empty")]
    pub property_columns: Vec<String>,
}
/// Service Bus topic output.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceBusTopicOutputDataSource {
    #[serde(flatten)]
    pub output_data_source: OutputDataSource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ServiceBusTopicOutputDataSourceProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceBusTopicOutputDataSourceProperties {
    #[serde(flatten)]
    pub service_bus_data_source_properties: ServiceBusDataSourceProperties,
    #[serde(rename = "topicName", default, skip_serializing_if = "Option::is_none")]
    pub topic_name: Option<String>,
    #[serde(rename = "propertyColumns", default, skip_serializing_if = "Vec::is_empty")]
    pub property_columns: Vec<String>,
}
/// Power BI output (OAuth-based).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PowerBiOutputDataSource {
    #[serde(flatten)]
    pub output_data_source: OutputDataSource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PowerBiOutputDataSourceProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PowerBiOutputDataSourceProperties {
    #[serde(flatten)]
    pub o_auth_based_data_source_properties: OAuthBasedDataSourceProperties,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub dataset: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub table: Option<String>,
    #[serde(rename = "groupId", default, skip_serializing_if = "Option::is_none")]
    pub group_id: Option<String>,
    #[serde(rename = "groupName", default, skip_serializing_if = "Option::is_none")]
    pub group_name: Option<String>,
}
/// Azure Data Lake Store output (OAuth-based).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureDataLakeStoreOutputDataSource {
    #[serde(flatten)]
    pub output_data_source: OutputDataSource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AzureDataLakeStoreOutputDataSourceProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureDataLakeStoreOutputDataSourceProperties {
    #[serde(flatten)]
    pub o_auth_based_data_source_properties: OAuthBasedDataSourceProperties,
    #[serde(rename = "accountName", default, skip_serializing_if = "Option::is_none")]
    pub account_name: Option<String>,
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
    pub tenant_id: Option<String>,
    #[serde(rename = "filePathPrefix", default, skip_serializing_if = "Option::is_none")]
    pub file_path_prefix: Option<String>,
    #[serde(rename = "dateFormat", default, skip_serializing_if = "Option::is_none")]
    pub date_format: Option<String>,
    #[serde(rename = "timeFormat", default, skip_serializing_if = "Option::is_none")]
    pub time_format: Option<String>,
}
/// Paged list of outputs.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OutputListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Output>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Azure SQL Database connection settings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureSqlDatabaseDataSourceProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub server: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub database: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub user: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub password: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub table: Option<String>,
}
/// Shared OAuth token settings for OAuth-based data sources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OAuthBasedDataSourceProperties {
    #[serde(rename = "refreshToken", default, skip_serializing_if = "Option::is_none")]
    pub refresh_token: Option<String>,
    #[serde(rename = "tokenUserPrincipalName", default, skip_serializing_if = "Option::is_none")]
    pub token_user_principal_name: Option<String>,
    #[serde(rename = "tokenUserDisplayName", default, skip_serializing_if = "Option::is_none")]
    pub token_user_display_name: Option<String>,
}
// --- Function models ---
/// A job function: sub-resource identity plus function properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Function {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<FunctionProperties>,
}
/// Properties common to all functions; `type_` discriminates the variant.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FunctionProperties {
    #[serde(rename = "type")]
    pub type_: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub etag: Option<String>,
}
/// Properties of a scalar function.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScalarFunctionProperties {
    #[serde(flatten)]
    pub function_properties: FunctionProperties,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ScalarFunctionConfiguration>,
}
/// Inputs, output and binding of a scalar function.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScalarFunctionConfiguration {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub inputs: Vec<FunctionInput>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub output: Option<FunctionOutput>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub binding: Option<FunctionBinding>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FunctionInput {
    #[serde(rename = "dataType", default, skip_serializing_if = "Option::is_none")]
    pub data_type: Option<String>,
    #[serde(rename = "isConfigurationParameter", default, skip_serializing_if = "Option::is_none")]
    pub is_configuration_parameter: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FunctionOutput {
    #[serde(rename = "dataType", default, skip_serializing_if = "Option::is_none")]
    pub data_type: Option<String>,
}
/// Base function binding; `type_` discriminates the concrete kind.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FunctionBinding {
    #[serde(rename = "type")]
    pub type_: String,
}
/// Binding to an Azure Machine Learning web service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureMachineLearningWebServiceFunctionBinding {
    #[serde(flatten)]
    pub function_binding: FunctionBinding,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AzureMachineLearningWebServiceFunctionBindingProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureMachineLearningWebServiceFunctionBindingProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub endpoint: Option<String>,
    #[serde(rename = "apiKey", default, skip_serializing_if = "Option::is_none")]
    pub api_key: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub inputs: Option<AzureMachineLearningWebServiceInputs>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub outputs: Vec<AzureMachineLearningWebServiceOutputColumn>,
    #[serde(rename = "batchSize", default, skip_serializing_if = "Option::is_none")]
    pub batch_size: Option<i32>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureMachineLearningWebServiceInputs {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "columnNames", default, skip_serializing_if = "Vec::is_empty")]
    pub column_names: Vec<AzureMachineLearningWebServiceInputColumn>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureMachineLearningWebServiceInputColumn {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "dataType", default, skip_serializing_if = "Option::is_none")]
    pub data_type: Option<String>,
    #[serde(rename = "mapTo", default, skip_serializing_if = "Option::is_none")]
    pub map_to: Option<i32>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureMachineLearningWebServiceOutputColumn {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "dataType", default, skip_serializing_if = "Option::is_none")]
    pub data_type: Option<String>,
}
/// Binding to a JavaScript UDF.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct JavaScriptFunctionBinding {
    #[serde(flatten)]
    pub function_binding: FunctionBinding,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<JavaScriptFunctionBindingProperties>,
}
/// Properties of a JavaScript function binding: the UDF source text.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct JavaScriptFunctionBindingProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub script: Option<String>,
}
/// Common parameters for "retrieve default definition" requests; the concrete
/// binding kinds below flatten this in.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FunctionRetrieveDefaultDefinitionParameters {
    #[serde(rename = "bindingType")]
    pub binding_type: String,
}
/// Azure ML web service variant of the retrieve-default-definition parameters.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters {
    #[serde(flatten)]
    pub function_retrieve_default_definition_parameters: FunctionRetrieveDefaultDefinitionParameters,
    #[serde(rename = "bindingRetrievalProperties", default, skip_serializing_if = "Option::is_none")]
    pub binding_retrieval_properties: Option<AzureMachineLearningWebServiceFunctionBindingRetrievalProperties>,
}
/// Retrieval properties for the Azure ML web service binding.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureMachineLearningWebServiceFunctionBindingRetrievalProperties {
    #[serde(rename = "executeEndpoint", default, skip_serializing_if = "Option::is_none")]
    pub execute_endpoint: Option<String>,
    #[serde(rename = "udfType", default, skip_serializing_if = "Option::is_none")]
    pub udf_type: Option<UdfType>,
}
/// JavaScript variant of the retrieve-default-definition parameters.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct JavaScriptFunctionRetrieveDefaultDefinitionParameters {
    #[serde(flatten)]
    pub function_retrieve_default_definition_parameters: FunctionRetrieveDefaultDefinitionParameters,
    #[serde(rename = "bindingRetrievalProperties", default, skip_serializing_if = "Option::is_none")]
    pub binding_retrieval_properties: Option<JavaScriptFunctionBindingRetrievalProperties>,
}
/// Retrieval properties for the JavaScript binding.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct JavaScriptFunctionBindingRetrievalProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub script: Option<String>,
    #[serde(rename = "udfType", default, skip_serializing_if = "Option::is_none")]
    pub udf_type: Option<UdfType>,
}
/// Paged list of [`Function`] resources; `next_link` points at the next page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FunctionListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Function>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Kind of user-defined function; only `Scalar` is defined here.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum UdfType {
    Scalar,
}
/// List of [`SubscriptionQuota`] entries.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SubscriptionQuotasListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<SubscriptionQuota>,
}
/// Quota resource; flattens the common [`SubResource`] fields.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SubscriptionQuota {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<subscription_quota::Properties>,
}
pub mod subscription_quota {
    use super::*;
    /// Maximum and current counts for a subscription quota.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "maxCount", default, skip_serializing_if = "Option::is_none")]
        pub max_count: Option<i32>,
        #[serde(rename = "currentCount", default, skip_serializing_if = "Option::is_none")]
        pub current_count: Option<i32>,
    }
}
/// Top-level error envelope wrapping the inner [`error::Error`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Error {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<error::Error>,
}
pub mod error {
    use super::*;
    /// Error payload: code/message/target plus optional nested details.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Error {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub code: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub message: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub target: Option<String>,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub details: Vec<ErrorDetails>,
    }
}
/// One entry of the `details` list of [`error::Error`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDetails {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/*
 Author : Niharika Gauraha, KTH
 Email  : niharika@kth.se

 rust_normalize: compute normalized weights using the log-sum trick
 to avoid numerical underflow.
*/
use pyo3::prelude::*;

/// Normalize a vector of positive weights so they sum to one.
///
/// Works in log space: subtracting the maximum log-weight before
/// exponentiating avoids numerical underflow for very small weights.
///
/// Fix: the original indexed `logweights[0]` unconditionally and therefore
/// panicked on an empty input; an empty vector now yields an empty result.
#[pyfunction]
fn rust_normalize(x: Vec<f64>) -> PyResult<Vec<f64>> {
    if x.is_empty() {
        return Ok(Vec::new());
    }
    let log_weights: Vec<f64> = x.iter().map(|w| w.ln()).collect();
    // Fold replaces the manual index loop; NEG_INFINITY is the identity for
    // `max`, so the result equals the largest log-weight.
    let max_log = log_weights.iter().copied().fold(f64::NEG_INFINITY, f64::max);
    let weights: Vec<f64> = log_weights.iter().map(|lw| (lw - max_log).exp()).collect();
    let total: f64 = weights.iter().sum();
    Ok(weights.iter().map(|w| w / total).collect())
}

/// Python module definition exposing `rust_normalize` as `rust_library.rust_normalize`.
#[pymodule]
fn rust_library(_py: Python, m: &PyModule) -> PyResult<()> {
    #[pyfn(m, "rust_normalize")]
    fn normalize_py(_py: Python, x: Vec<f64>) -> PyResult<Vec<f64>> {
        rust_normalize(x)
    }
    Ok(())
}
use std::sync::mpsc::Receiver; use event::Event; use mongodb::{ Client, ThreadedClient }; use mongodb::db::ThreadedDatabase; use bson::{ to_bson, Bson }; use notifier::{ Notifier }; pub fn logging_messages(rx: Receiver<Event>, db: Client, db_name: String, table_name: String, notifier: Notifier) { loop { match rx.recv() { Ok(Event::Logging(m)) => { if let Ok(Bson::Document(document)) = to_bson(&m) { notifier.publish(&document); if let Err(e) = db.db(db_name.as_str()).collection(table_name.as_str()).insert_one(document, None) { error!("Cannot insert into mongo: {}", e); } } }, _ => () } } }
//! Representation of an ebuild in a Gentoo repository use crate::version::{self, Version}; use once_cell::sync::OnceCell; use std::path::PathBuf; /// Represent a discrete Gentoo ebuild pub struct Ebuild { root: PathBuf, category: String, package: String, ebuild: String, version: OnceCell<Version>, } impl Ebuild { /// Construct a new ebuild explicitly pub fn new( root: PathBuf, category: String, package: String, ebuild: String, ) -> Ebuild { Ebuild { root, category, package, ebuild, version: OnceCell::INIT } } /// Returns a path to the ebuild file pub fn path(&self) -> PathBuf { self.root.join(&self.category).join(&self.package).join(&self.ebuild) } /// Return a [`Version`] object for this ebuild pub fn version(&self) -> &Version { self.version.get_or_init(|| { version::parse( self.ebuild.trim_end_matches(".ebuild").trim_start_matches( (self.package.to_owned() + "-").as_str(), ), ) }) } /// Returns the full name of this ebuild pub fn name(&self) -> String { self.category.to_owned() + "/" + &self.package + "/" + &self.ebuild } /// Returns the ebuilds category similar to `PMS` variable `CATEGORY` pub fn category(&self) -> String { self.category.to_owned() } /// Returns the ebuilds package name similar to `PMS` variable `PN` pub fn pn(&self) -> String { self.package.to_owned() } /// Returns the ebuilds full package version similar to `PMS` variable /// `PF` pub fn pf(&self) -> String { self.pn() + "-" + self.pvr() } /// Returns the ebuilds version with revision similar to `PMS` variable /// `PVR` pub fn pvr(&self) -> &str { self.version().pvr() } /// Returns the ebuilds version without revision similar to `PMS` variable /// `PV` pub fn pv(&self) -> &str { self.version().pv() } /// Returns the ebuilds revision, or r0, similar to `PMS` variable `PR` pub fn pr(&self) -> &str { self.version().pr() } /// Returns the ebuilds package name without revision, similar to `PMS` /// variable `P` pub fn p(&self) -> String { self.pn() + self.pv() } /// Returns if this is a legal 
ebuild pub fn is_legal(&self) -> bool { let p = self.path(); p.exists() && !p.is_dir() } } impl std::fmt::Debug for Ebuild { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( f, "cat: {}, pf: {}, pn: {}, pvr: {} pv: {} pr: {}", self.category(), self.pf(), self.pn(), self.pvr(), self.pv(), self.pr(), ) } } impl Clone for Ebuild { fn clone(&self) -> Self { Self { root: self.root.clone(), category: self.category.clone(), package: self.package.clone(), ebuild: self.package.clone(), version: match self.version.get() { Some(v) => { let cell = OnceCell::INIT; cell.set(v.to_owned()).unwrap(); cell }, None => OnceCell::INIT, }, } } } impl crate::util::repoobject::RepoObject for Ebuild { fn name(&self) -> String { self.ebuild.to_owned() } fn path(&self) -> PathBuf { self.root.join(&self.category).join(&self.package).join(&self.ebuild) } fn ident(&self) -> String { self.category.to_owned() + "/" + &self.pf() } fn components(&self) -> String { format!( "cat={} package={} version={}", &self.category, &self.package, self.pvr() ) } }
use super::*; use counter::Counter; use indicatif::ProgressIterator; use roaring::RoaringBitmap; use std::collections::HashSet; /// # remove. impl Graph { /// Returns a **NEW** Graph that does not have the required attributes. /// /// ## Implementation details /// /// ### How the collapse of multigraphs is handled /// We keep only the first edge when a multigraph is collapsed while removing /// the edge types, in the order provided when first reading from the CSV file. /// /// ### Generation of new singleton nodes when removeping edges /// Some of the remove operations allowed in this method might lead to the /// generation of new singleton nodes that will not be handled within this /// function call even if you provide the flag singletons to true, but you /// will need to call the method again if you want to get reed of also those /// newly created singleton nodes. /// /// # Arguments /// * `allow_nodes_set`: Option<HashSet<String>> - Optional set of nodes names to keep. /// * `deny_nodes_set`: Option<HashSet<String>> - Optional set of nodes names to remove. /// * `allow_node_types_set`: Option<HashSet<String>> - Optional set of node type names to keep. /// * `deny_node_types_set`: Option<HashSet<String>> - Optional set of node type names to remove. /// * `allow_edge_set`: Option<HashSet<EdgeT>>- Optional set of numeric edge IDs to keep. /// * `deny_edge_set`: Option<HashSet<EdgeT>>- Optional set of numeric edge IDs to remove. /// * `allow_edge_types_set`: Option<HashSet<String>> - Optional set of edge type names to keep. /// * `deny_edge_types_set`: Option<HashSet<String>> - Optional set of edge type names to remove. /// * `weights`: bool - Wether to remove the weights. /// * `node_types`: bool - Wether to remove the node types. /// * `edge_types`: bool - Wether to remove the edge types. /// * `singletons`: bool - Wether to remove the singleton nodes. /// * `verbose`: bool - Wether to show a loading bar while building the graph. 
/// pub fn remove( &self, allow_nodes_set: Option<HashSet<String>>, deny_nodes_set: Option<HashSet<String>>, allow_node_types_set: Option<HashSet<String>>, deny_node_types_set: Option<HashSet<String>>, allow_edge_set: Option<HashSet<EdgeT>>, deny_edge_set: Option<HashSet<EdgeT>>, allow_edge_types_set: Option<HashSet<String>>, deny_edge_types_set: Option<HashSet<String>>, weights: bool, node_types: bool, edge_types: bool, singletons: bool, verbose: bool, ) -> Result<Graph, String> { let pb_edges = get_loading_bar( verbose, format!( "Building edges of graph {} without required attributes", self.name ) .as_ref(), self.get_edges_number() as usize, ); let pb_nodes = get_loading_bar( verbose, format!( "Building nodes of graph {} without required attributes", self.name ) .as_ref(), self.get_nodes_number() as usize, ); Graph::from_string_sorted( self.get_edges_string_quadruples(true) .progress_with(pb_edges) .filter_map(|(edge_id, src_name, dst_name, edge_type, weight)| { // If an allow edge set was provided if let Some(aes) = &allow_edge_set { // We check that the current edge ID is within the edge set. if !aes.contains(&edge_id) { return None; } } // If a deny edge set was provided if let Some(des) = &deny_edge_set { // We check that the current edge ID is NOT within the edge set. if des.contains(&edge_id) { return None; } } // If an allow nodes set was provided if let Some(ans) = &allow_nodes_set { // We check that the current source or destination node name is within the edge set. if !ans.contains(&src_name) || !ans.contains(&dst_name) { return None; } } // If a deny nodes set was provided if let Some(dns) = &deny_nodes_set { // We check that the current source or destination node name is NOT within the edge set. if dns.contains(&src_name) || dns.contains(&dst_name) { return None; } } // If the allow edge types set was provided if let (Some(aets), Some(et)) = (&allow_edge_types_set, &edge_type) { // We check that the current edge type name is within the edge type set. 
if !aets.contains(et) { return None; } } // If the deny edge types set was provided if let (Some(dets), Some(et)) = (&deny_edge_types_set, &edge_type) { // We check that the current edge type name is NOT within the edge type set. if dets.contains(et) { return None; } } let src_node_type = self.get_unchecked_node_type(self.get_unchecked_node_id(&src_name)); let dst_node_type = self.get_unchecked_node_type(self.get_unchecked_node_id(&dst_name)); // If the graph has node types if let (Some(src_nt), Some(dst_nt)) = (src_node_type, dst_node_type) { let src_node_type_name = self.get_node_type_name(src_nt).unwrap(); let dst_node_type_name = self.get_node_type_name(dst_nt).unwrap(); // If the allow node types set was provided if let Some(ants) = &allow_node_types_set { // We check that the current node type name is NOT within the node type set. if !ants.contains(&src_node_type_name) || !ants.contains(&dst_node_type_name) { return None; } } // If the deny node types set was provided if let Some(dnts) = &deny_node_types_set { // We check that the current node type name is NOT within the node type set. 
if dnts.contains(&src_node_type_name) && dnts.contains(&dst_node_type_name) { return None; } } } Some(Ok(( src_name, dst_name, match edge_types { false => edge_type, true => None, }, match weights { false => weight, true => None, }, ))) }), Some( self.get_nodes_names_iter() .progress_with(pb_nodes) .filter_map(|(_, node_name, node_type)| { if singletons && self.is_singleton_string(&node_name).unwrap() { return None; } if let Some(ans) = &allow_nodes_set { if !ans.contains(&node_name) { return None; } } if let Some(dns) = &deny_nodes_set { if dns.contains(&node_name) { return None; } } if let (Some(ants), Some(nt)) = (&allow_node_types_set, &node_type) { if !ants.contains(nt) { return None; } } if let (Some(dnts), Some(nt)) = (&deny_node_types_set, &node_type) { if dnts.contains(nt) { return None; } } Some(Ok(( node_name, match node_types { false => node_type, true => None, }, ))) }), ), self.directed, true, false, self.is_multigraph() && edge_types, self.get_edges_number(), // Approximation of expected edges number. self.get_nodes_number(), // Approximation of expected nodes number. match &self.edge_types { Some(ets) => ets.has_numeric_ids(), None => false, }, false, false, match &self.node_types { Some(nts) => nts.has_numeric_ids(), None => false, }, self.has_edge_types() && !edge_types, self.has_weights() && !weights, self.get_name(), ) } /// remove all the components that are not connected to interesting /// nodes and edges. /// /// # Arguments /// * `node_names` : Option<Vec<String>> - The name of the nodes of which components to keep. /// * `node_types` : Option<Vec<String>> - The types of the nodes of which components to keep. /// * `edge_types` : Option<Vec<String>> - The types of the edges of which components to keep. /// * `minimum_component_size`: Option<NodeT> - Optional, Minimum size of the components to keep. /// * `top_k_components`: Option<NodeT> - Optional, number of components to keep sorted by number of nodes. 
/// * `verbose`: bool - Wether to show the loading bar. pub fn remove_components( &self, node_names: Option<Vec<String>>, node_types: Option<Vec<String>>, edge_types: Option<Vec<String>>, minimum_component_size: Option<NodeT>, top_k_components: Option<NodeT>, verbose: bool, ) -> Result<Graph, String> { let mut keep_components = RoaringBitmap::new(); let components_vector = self.get_node_components_vector(verbose); // Extend the components so the include the given node Ids and node types. if let Some(node_ids) = self.get_filter_bitmap(node_names, node_types)? { keep_components.extend( node_ids .iter() .map(|node_id| components_vector[node_id as usize]), ); } // Extend the components to keep those that include the given edge types. if let Some(ets) = edge_types { let mut edge_types_ids = RoaringBitmap::new(); edge_types_ids.extend(self.translate_edge_types(ets)?.iter().map(|x| *x as u32)); let pb = get_loading_bar( verbose, &format!( "Computing which components are to keep for the graph {}", &self.name ), self.get_edges_number() as usize, ); self.get_edges_triples(self.directed) .progress_with(pb) .for_each(|(_, src, dst, edge_type)| { if let Some(et) = edge_type { if edge_types_ids.contains(et as u32) { keep_components.insert(components_vector[src as usize]); keep_components.insert(components_vector[dst as usize]); } } }); } // Retrieve minimal size of the smallest top k components let components_counts = Counter::init(components_vector.clone()).most_common(); let updated_min_component_size = match top_k_components { Some(tkc) => Some(match components_counts.len() < tkc as usize { true => components_counts.last().unwrap().1, false => components_counts.get(tkc as usize).unwrap().1, }), None => minimum_component_size, }; // Remove components that are smaller than given amount if let Some(mcs) = updated_min_component_size { components_counts .iter() .for_each(|(component, component_size)| { if *component_size < mcs { keep_components.remove(*component); } }); } let pb = 
get_loading_bar( verbose, &format!("removing components for the graph {}", &self.name), self.get_edges_number() as usize, ); Graph::build_graph( self.get_edges_quadruples(true) .progress_with(pb) .filter_map(|(_, src, dst, edge_type, weight)| { match keep_components.contains(components_vector[src as usize]) { true => Some(Ok((src, dst, edge_type, weight))), false => None, } }), self.get_edges_number(), self.nodes.clone(), self.node_types.clone(), match &self.edge_types { Some(ets) => Some(ets.vocabulary.clone()), None => None, }, self.directed, self.name.clone(), true, self.has_edge_types(), self.has_weights(), ) } }
// NOTE(review): machine-generated models (AutoRust); edit the generator
// input rather than hand-patching these definitions.
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};

/// Data-export resource; flattens the common [`ProxyResource`] fields.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DataExport {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DataExportProperties>,
}
/// List of [`DataExport`] resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DataExportListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<DataExport>,
}
/// Properties of a data export; `table_names` is the only required field.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DataExportProperties {
    #[serde(rename = "dataExportId", default, skip_serializing_if = "Option::is_none")]
    pub data_export_id: Option<String>,
    #[serde(rename = "tableNames")]
    pub table_names: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub destination: Option<Destination>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enable: Option<bool>,
    #[serde(rename = "createdDate", default, skip_serializing_if = "Option::is_none")]
    pub created_date: Option<String>,
    #[serde(rename = "lastModifiedDate", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_date: Option<String>,
}
/// Export destination; `resource_id` is required, the kind lives in `type_`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Destination {
    #[serde(rename = "resourceId")]
    pub resource_id: String,
    // `type` is a Rust keyword, hence the trailing underscore plus rename.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<destination::Type>,
    #[serde(rename = "metaData", default, skip_serializing_if = "Option::is_none")]
    pub meta_data: Option<DestinationMetaData>,
}
pub mod destination {
    use super::*;
    /// Kind of export destination.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        StorageAccount,
        EventHub,
    }
}
/// Extra metadata for an event-hub destination.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DestinationMetaData {
    #[serde(rename = "eventHubName", default, skip_serializing_if = "Option::is_none")]
    pub event_hub_name: Option<String>,
}
/// Kind discriminator for [`DataSource`]; variants serialize under their
/// Rust names except where renamed.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DataSourceKind {
    WindowsEvent,
    WindowsPerformanceCounter,
    #[serde(rename = "IISLogs")]
    IisLogs,
    LinuxSyslog,
    LinuxSyslogCollection,
    LinuxPerformanceObject,
    LinuxPerformanceCollection,
    CustomLog,
    CustomLogCollection,
    AzureAuditLog,
    AzureActivityLog,
    GenericDataSource,
    ChangeTrackingCustomPath,
    ChangeTrackingPath,
    ChangeTrackingServices,
    ChangeTrackingDataTypeConfiguration,
    ChangeTrackingDefaultRegistry,
    ChangeTrackingRegistry,
    ChangeTrackingLinuxPath,
    LinuxChangeTrackingPath,
    ChangeTrackingContentLocation,
    WindowsTelemetry,
    Office365,
    SecurityWindowsBaselineConfiguration,
    SecurityCenterSecurityWindowsBaselineConfiguration,
    SecurityEventCollectionConfiguration,
    SecurityInsightsSecurityEventCollectionConfiguration,
    ImportComputerGroup,
    NetworkMonitoring,
    Itsm,
    DnsAnalytics,
    ApplicationInsights,
    SqlDataClassification,
}
/// Data-source resource; `properties` is an untyped [`Object`] whose shape
/// depends on `kind`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DataSource {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    pub properties: Object,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub etag: Option<String>,
    pub kind: DataSourceKind,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Filter used when listing data sources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DataSourceFilter {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub kind: Option<DataSourceKind>,
}
/// Paged list of [`DataSource`] resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DataSourceListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<DataSource>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Empty placeholder for schemaless JSON objects.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Object {}
/// Intelligence-pack enablement state.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntelligencePack {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enabled: Option<bool>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
}
/// Properties of a linked service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinkedServiceProperties {
    #[serde(rename = "resourceId", default, skip_serializing_if = "Option::is_none")]
    pub resource_id: Option<String>,
    #[serde(rename = "writeAccessResourceId", default, skip_serializing_if = "Option::is_none")]
    pub write_access_resource_id: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<linked_service_properties::ProvisioningState>,
}
pub mod linked_service_properties {
    use super::*;
    /// Lifecycle state of a linked service.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Succeeded,
        Deleting,
        ProvisioningAccount,
        Updating,
    }
}
/// Linked-service resource; unlike most resources here, `properties` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinkedService {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    pub properties: LinkedServiceProperties,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// List of [`LinkedService`] resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinkedServiceListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<LinkedService>,
}
/// Properties of linked storage accounts.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinkedStorageAccountsProperties {
    #[serde(rename = "dataSourceType", default, skip_serializing_if = "Option::is_none")]
    pub data_source_type: Option<linked_storage_accounts_properties::DataSourceType>,
    #[serde(rename = "storageAccountIds", default, skip_serializing_if = "Vec::is_empty")]
    pub storage_account_ids: Vec<String>,
}
pub mod linked_storage_accounts_properties {
    use super::*;
    /// Data-source kind a linked storage account serves.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum DataSourceType {
        CustomLogs,
        AzureWatson,
        Query,
        Alerts,
    }
}
/// Linked-storage-accounts resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinkedStorageAccountsResource {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    pub properties: LinkedStorageAccountsProperties,
}
/// List of [`LinkedStorageAccountsResource`] items.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinkedStorageAccountsListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<LinkedStorageAccountsResource>,
}
/// Properties of a management group attached to a workspace.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagementGroupProperties {
    #[serde(rename = "serverCount", default, skip_serializing_if = "Option::is_none")]
    pub server_count: Option<i32>,
    #[serde(rename = "isGateway", default, skip_serializing_if = "Option::is_none")]
    pub is_gateway: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub created: Option<String>,
    #[serde(rename = "dataReceived", default, skip_serializing_if = "Option::is_none")]
    pub data_received: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<String>,
}
/// Management-group wrapper.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagementGroup {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ManagementGroupProperties>,
}
/// List of [`ManagementGroup`]s for a workspace.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkspaceListManagementGroupsResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ManagementGroup>,
}
/// Status of a long-running operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationStatus {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")]
    pub end_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
}
/// Workspace shared access keys.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SharedKeys {
    #[serde(rename = "primarySharedKey", default, skip_serializing_if = "Option::is_none")]
    pub primary_shared_key: Option<String>,
    #[serde(rename = "secondarySharedKey", default, skip_serializing_if = "Option::is_none")]
    pub secondary_shared_key: Option<String>,
}
/// Metric name with an optional localized display form.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricName {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
    #[serde(rename = "localizedValue", default, skip_serializing_if = "Option::is_none")]
    pub localized_value: Option<String>,
}
/// One usage metric sample with its quota limit and reset cadence.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UsageMetric {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<MetricName>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub unit: Option<String>,
    #[serde(rename = "currentValue", default, skip_serializing_if = "Option::is_none")]
    pub current_value: Option<f64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub limit: Option<f64>,
    #[serde(rename = "nextResetTime", default, skip_serializing_if = "Option::is_none")]
    pub next_reset_time: Option<String>,
    #[serde(rename = "quotaPeriod", default, skip_serializing_if = "Option::is_none")]
    pub quota_period: Option<String>,
}
/// List of [`UsageMetric`]s for a workspace.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkspaceListUsagesResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<UsageMetric>,
}
/// Storage-account reference with its access key; both fields required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StorageAccount {
    pub id: String,
    pub key: String,
}
/// Health of a storage insight; `state` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StorageInsightStatus {
    pub state: storage_insight_status::State,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
pub mod storage_insight_status {
    use super::*;
    /// Wire values are upper-case "OK"/"ERROR".
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum State {
        #[serde(rename = "OK")]
        Ok,
        #[serde(rename = "ERROR")]
        Error,
    }
}
/// Properties of a storage insight; the storage account is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StorageInsightProperties {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub containers: Vec<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tables: Vec<String>,
    #[serde(rename = "storageAccount")]
    pub storage_account: StorageAccount,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<StorageInsightStatus>,
}
/// Storage-insight resource; flattens [`ProxyResource`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StorageInsight {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<StorageInsightProperties>,
    #[serde(rename = "eTag", default, skip_serializing_if = "Option::is_none")]
    pub e_tag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Paged list of [`StorageInsight`]s using the OData continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StorageInsightListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<StorageInsight>,
    #[serde(rename = "@odata.nextLink", default, skip_serializing_if = "Option::is_none")]
    pub odata_next_link: Option<String>,
}
/// Simple name/value pair; both fields required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Tag {
    pub name: String,
    pub value: String,
}
/// Properties of a saved search; category, display name and query are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SavedSearchProperties {
    pub category: String,
    #[serde(rename = "displayName")]
    pub display_name: String,
    pub query: String,
    #[serde(rename = "functionAlias", default, skip_serializing_if = "Option::is_none")]
    pub function_alias: Option<String>,
    #[serde(rename = "functionParameters", default, skip_serializing_if = "Option::is_none")]
    pub function_parameters: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<i64>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tags: Vec<Tag>,
}
/// Saved-search resource; `properties` is required here.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SavedSearch {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub etag: Option<String>,
    pub properties: SavedSearchProperties,
}
/// List of [`SavedSearch`] resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SavedSearchesListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<SavedSearch>,
}
/// Description of a pricing/service tier and its retention bounds.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AvailableServiceTier {
    #[serde(rename = "serviceTier", default, skip_serializing_if = "Option::is_none")]
    pub service_tier: Option<available_service_tier::ServiceTier>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enabled: Option<bool>,
    #[serde(rename = "minimumRetention", default, skip_serializing_if = "Option::is_none")]
    pub minimum_retention: Option<i64>,
    #[serde(rename = "maximumRetention", default, skip_serializing_if = "Option::is_none")]
    pub maximum_retention: Option<i64>,
    #[serde(rename = "defaultRetention", default, skip_serializing_if = "Option::is_none")]
    pub default_retention: Option<i64>,
    #[serde(rename = "capacityReservationLevel", default, skip_serializing_if = "Option::is_none")]
    pub capacity_reservation_level: Option<i64>,
    #[serde(rename = "lastSkuUpdate", default, skip_serializing_if = "Option::is_none")]
    pub last_sku_update: Option<String>,
}
pub mod available_service_tier {
    use super::*;
    /// Known service tiers.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ServiceTier {
        Free,
        Standard,
        Premium,
        PerNode,
        #[serde(rename = "PerGB2018")]
        PerGb2018,
        Standalone,
        CapacityReservation,
    }
}
/// Core summary of a search result set; the document count is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CoreSummary {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(rename = "numberOfDocuments")]
    pub number_of_documents: i64,
}
/// One sort clause of a search request.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SearchSort {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub order: Option<search_sort::Order>,
}
pub mod search_sort {
    use super::*;
    /// Sort direction; wire values are lower-case.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Order {
        #[serde(rename = "asc")]
        Asc,
        #[serde(rename = "desc")]
        Desc,
    }
}
/// Schema name/version stamp attached to search metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SearchMetadataSchema {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<i32>,
}
/// Metadata describing a search execution and its aggregates.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SearchMetadata {
    #[serde(rename = "requestId", default, skip_serializing_if = "Option::is_none")]
    pub request_id: Option<String>,
    #[serde(rename = "resultType", default, skip_serializing_if = "Option::is_none")]
    pub result_type: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub total: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "coreSummaries", default, skip_serializing_if = "Vec::is_empty")]
    pub core_summaries: Vec<CoreSummary>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "lastUpdated", default, skip_serializing_if = "Option::is_none")]
    pub last_updated: Option<String>,
    #[serde(rename = "eTag", default, skip_serializing_if = "Option::is_none")]
    pub e_tag: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub sort: Vec<SearchSort>,
    #[serde(rename = "requestTime", default, skip_serializing_if = "Option::is_none")]
    pub request_time: Option<i64>,
    #[serde(rename = "aggregatedValueField", default, skip_serializing_if = "Option::is_none")]
    pub aggregated_value_field: Option<String>,
    #[serde(rename = "aggregatedGroupingFields", default, skip_serializing_if = "Option::is_none")]
    pub aggregated_grouping_fields: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sum: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub schema: Option<SearchMetadataSchema>,
}
/// One column of a search schema; the three booleans are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SearchSchemaValue {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    pub indexed: bool,
    pub stored: bool,
    pub facet: bool,
    #[serde(rename = "ownerType", default, skip_serializing_if = "Vec::is_empty")]
    pub owner_type: Vec<String>,
}
/// Response to a get-schema request: metadata plus the column list.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SearchGetSchemaResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub metadata: Option<SearchMetadata>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<SearchSchemaValue>,
}
/// Body of a workspace purge request; both fields required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkspacePurgeBody {
    pub table: String,
    pub filters: Vec<WorkspacePurgeBodyFilters>,
}
/// One purge filter clause (definition continues past this view).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkspacePurgeBodyFilters {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub column: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")] pub operator: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub value: Option<serde_json::Value>, #[serde(default, skip_serializing_if = "Option::is_none")] pub key: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkspacePurgeResponse { #[serde(rename = "operationId")] pub operation_id: String, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkspacePurgeStatusResponse { pub status: workspace_purge_status_response::Status, } pub mod workspace_purge_status_response { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum Status { #[serde(rename = "pending")] Pending, #[serde(rename = "completed")] Completed, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct TableProperties { #[serde(rename = "retentionInDays", default, skip_serializing_if = "Option::is_none")] pub retention_in_days: Option<i32>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Table { #[serde(flatten)] pub proxy_resource: ProxyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<TableProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct TablesListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<Table>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ClusterProperties { #[serde(rename = "clusterId", default, skip_serializing_if = "Option::is_none")] pub cluster_id: Option<String>, #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<cluster_properties::ProvisioningState>, #[serde(rename = "isDoubleEncryptionEnabled", default, skip_serializing_if = "Option::is_none")] pub is_double_encryption_enabled: Option<bool>, #[serde(rename = "isAvailabilityZonesEnabled", default, 
skip_serializing_if = "Option::is_none")] pub is_availability_zones_enabled: Option<bool>, #[serde(rename = "billingType", default, skip_serializing_if = "Option::is_none")] pub billing_type: Option<BillingType>, #[serde(rename = "keyVaultProperties", default, skip_serializing_if = "Option::is_none")] pub key_vault_properties: Option<KeyVaultProperties>, #[serde(rename = "lastModifiedDate", default, skip_serializing_if = "Option::is_none")] pub last_modified_date: Option<String>, #[serde(rename = "createdDate", default, skip_serializing_if = "Option::is_none")] pub created_date: Option<String>, #[serde(rename = "associatedWorkspaces", default, skip_serializing_if = "Vec::is_empty")] pub associated_workspaces: Vec<AssociatedWorkspace>, #[serde(rename = "capacityReservationProperties", default, skip_serializing_if = "Option::is_none")] pub capacity_reservation_properties: Option<CapacityReservationProperties>, } pub mod cluster_properties { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum ProvisioningState { Creating, Succeeded, Failed, Canceled, Deleting, ProvisioningAccount, Updating, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ClusterPatchProperties { #[serde(rename = "keyVaultProperties", default, skip_serializing_if = "Option::is_none")] pub key_vault_properties: Option<KeyVaultProperties>, #[serde(rename = "billingType", default, skip_serializing_if = "Option::is_none")] pub billing_type: Option<BillingType>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ClusterPatch { #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<ClusterPatchProperties>, #[serde(default, skip_serializing_if = "Option::is_none")] pub identity: Option<Identity>, #[serde(default, skip_serializing_if = "Option::is_none")] pub sku: Option<ClusterSku>, #[serde(default, skip_serializing_if = "Option::is_none")] pub tags: Option<serde_json::Value>, } #[derive(Clone, Debug, 
PartialEq, Serialize, Deserialize)] pub struct Cluster { #[serde(flatten)] pub tracked_resource: TrackedResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub identity: Option<Identity>, #[serde(default, skip_serializing_if = "Option::is_none")] pub sku: Option<ClusterSku>, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<ClusterProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ClusterListResult { #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<Cluster>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct KeyVaultProperties { #[serde(rename = "keyVaultUri", default, skip_serializing_if = "Option::is_none")] pub key_vault_uri: Option<String>, #[serde(rename = "keyName", default, skip_serializing_if = "Option::is_none")] pub key_name: Option<String>, #[serde(rename = "keyVersion", default, skip_serializing_if = "Option::is_none")] pub key_version: Option<String>, #[serde(rename = "keyRsaSize", default, skip_serializing_if = "Option::is_none")] pub key_rsa_size: Option<i32>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum BillingType { Cluster, Workspaces, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ClusterSku { #[serde(default, skip_serializing_if = "Option::is_none")] pub capacity: Option<i64>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<cluster_sku::Name>, } pub mod cluster_sku { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum Name { CapacityReservation, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Identity { #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")] pub principal_id: Option<String>, #[serde(rename = "tenantId", default, skip_serializing_if = 
"Option::is_none")] pub tenant_id: Option<String>, #[serde(rename = "type")] pub type_: identity::Type, #[serde(rename = "userAssignedIdentities", default, skip_serializing_if = "Option::is_none")] pub user_assigned_identities: Option<serde_json::Value>, } pub mod identity { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum Type { SystemAssigned, UserAssigned, None, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct UserIdentityProperties { #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")] pub principal_id: Option<String>, #[serde(rename = "clientId", default, skip_serializing_if = "Option::is_none")] pub client_id: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct AssociatedWorkspace { #[serde(rename = "workspaceId", default, skip_serializing_if = "Option::is_none")] pub workspace_id: Option<String>, #[serde(rename = "workspaceName", default, skip_serializing_if = "Option::is_none")] pub workspace_name: Option<String>, #[serde(rename = "resourceId", default, skip_serializing_if = "Option::is_none")] pub resource_id: Option<String>, #[serde(rename = "associateDate", default, skip_serializing_if = "Option::is_none")] pub associate_date: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CapacityReservationProperties { #[serde(rename = "lastSkuUpdate", default, skip_serializing_if = "Option::is_none")] pub last_sku_update: Option<String>, #[serde(rename = "minCapacity", default, skip_serializing_if = "Option::is_none")] pub min_capacity: Option<i64>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct OperationListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<Operation>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct 
Operation { #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub display: Option<operation::Display>, } pub mod operation { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Display { #[serde(default, skip_serializing_if = "Option::is_none")] pub provider: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub operation: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub description: Option<String>, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkspaceSku { pub name: workspace_sku::Name, #[serde(rename = "capacityReservationLevel", default, skip_serializing_if = "Option::is_none")] pub capacity_reservation_level: Option<i32>, #[serde(rename = "lastSkuUpdate", default, skip_serializing_if = "Option::is_none")] pub last_sku_update: Option<String>, } pub mod workspace_sku { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum Name { Free, Standard, Premium, PerNode, #[serde(rename = "PerGB2018")] PerGb2018, Standalone, CapacityReservation, #[serde(rename = "LACluster")] LaCluster, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkspaceCapping { #[serde(rename = "dailyQuotaGb", default, skip_serializing_if = "Option::is_none")] pub daily_quota_gb: Option<f64>, #[serde(rename = "quotaNextResetTime", default, skip_serializing_if = "Option::is_none")] pub quota_next_reset_time: Option<String>, #[serde(rename = "dataIngestionStatus", default, skip_serializing_if = "Option::is_none")] pub data_ingestion_status: Option<workspace_capping::DataIngestionStatus>, } pub mod workspace_capping { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum DataIngestionStatus { RespectQuota, ForceOn, ForceOff, 
OverQuota, SubscriptionSuspended, ApproachingQuota, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkspaceProperties { #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<workspace_properties::ProvisioningState>, #[serde(rename = "customerId", default, skip_serializing_if = "Option::is_none")] pub customer_id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub sku: Option<WorkspaceSku>, #[serde(rename = "retentionInDays", default, skip_serializing_if = "Option::is_none")] pub retention_in_days: Option<i32>, #[serde(rename = "workspaceCapping", default, skip_serializing_if = "Option::is_none")] pub workspace_capping: Option<WorkspaceCapping>, #[serde(rename = "createdDate", default, skip_serializing_if = "Option::is_none")] pub created_date: Option<String>, #[serde(rename = "modifiedDate", default, skip_serializing_if = "Option::is_none")] pub modified_date: Option<String>, #[serde(rename = "publicNetworkAccessForIngestion", default, skip_serializing_if = "Option::is_none")] pub public_network_access_for_ingestion: Option<PublicNetworkAccessType>, #[serde(rename = "publicNetworkAccessForQuery", default, skip_serializing_if = "Option::is_none")] pub public_network_access_for_query: Option<PublicNetworkAccessType>, #[serde(rename = "forceCmkForQuery", default, skip_serializing_if = "Option::is_none")] pub force_cmk_for_query: Option<bool>, #[serde(rename = "privateLinkScopedResources", default, skip_serializing_if = "Vec::is_empty")] pub private_link_scoped_resources: Vec<PrivateLinkScopedResource>, #[serde(default, skip_serializing_if = "Option::is_none")] pub features: Option<WorkspaceFeatures>, } pub mod workspace_properties { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum ProvisioningState { Creating, Succeeded, Failed, Canceled, Deleting, ProvisioningAccount, Updating, } } #[derive(Clone, Debug, PartialEq, 
Serialize, Deserialize)] pub struct WorkspaceFeatures { #[serde(rename = "enableDataExport", default, skip_serializing_if = "Option::is_none")] pub enable_data_export: Option<bool>, #[serde(rename = "immediatePurgeDataOn30Days", default, skip_serializing_if = "Option::is_none")] pub immediate_purge_data_on30_days: Option<bool>, #[serde( rename = "enableLogAccessUsingOnlyResourcePermissions", default, skip_serializing_if = "Option::is_none" )] pub enable_log_access_using_only_resource_permissions: Option<bool>, #[serde(rename = "clusterResourceId", default, skip_serializing_if = "Option::is_none")] pub cluster_resource_id: Option<String>, #[serde(rename = "disableLocalAuth", default, skip_serializing_if = "Option::is_none")] pub disable_local_auth: Option<bool>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PrivateLinkScopedResource { #[serde(rename = "resourceId", default, skip_serializing_if = "Option::is_none")] pub resource_id: Option<String>, #[serde(rename = "scopeId", default, skip_serializing_if = "Option::is_none")] pub scope_id: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Workspace { #[serde(flatten)] pub tracked_resource: TrackedResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<WorkspaceProperties>, #[serde(rename = "eTag", default, skip_serializing_if = "Option::is_none")] pub e_tag: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkspacePatch { #[serde(flatten)] pub azure_entity_resource: AzureEntityResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<WorkspaceProperties>, #[serde(default, skip_serializing_if = "Option::is_none")] pub tags: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkspaceListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<Workspace>, } #[derive(Clone, 
Debug, PartialEq, Serialize, Deserialize)] pub enum PublicNetworkAccessType { Enabled, Disabled, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ErrorResponse { #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<ErrorDetail>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ErrorDetail { #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub message: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub target: Option<String>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub details: Vec<ErrorDetail>, #[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")] pub additional_info: Vec<ErrorAdditionalInfo>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ErrorAdditionalInfo { #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub info: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ProxyResource { #[serde(flatten)] pub resource: Resource, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Resource { #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct TrackedResource { #[serde(flatten)] pub resource: Resource, #[serde(default, skip_serializing_if = "Option::is_none")] pub tags: Option<serde_json::Value>, pub location: String, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct AzureEntityResource { #[serde(flatten)] pub resource: 
Resource, #[serde(default, skip_serializing_if = "Option::is_none")] pub etag: Option<String>, }
use crate::prelude::*;
use crate::ExprMarker;
use std::marker::PhantomData;

pub mod prelude {
    pub use super::BinBoolExpr;
}

/// Generic binary formula expression.
///
/// Used by concrete binary formula expressions as base template.
/// The marker type `M` selects the concrete expression kind at the type
/// level, so all boolean binary expressions share this one implementation.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct BinBoolExpr<M> {
    /// The two child expressions.
    pub children: P<BinExprChildren>,
    /// Marker to differentiate bool expressions from each
    /// other using the type system.
    marker: PhantomData<M>,
}

impl<M> BinBoolExpr<M>
where
    M: ExprMarker,
{
    /// Returns a new binary formula expression with the given child expressions.
    ///
    /// # Errors
    ///
    /// - If `lhs` or `rhs` are not of bool type.
    pub fn new<E1, E2>(lhs: E1, rhs: E2) -> ExprResult<Self>
    where
        E1: Into<AnyExpr>,
        E2: Into<AnyExpr>,
    {
        let lhs = lhs.into();
        let rhs = rhs.into();
        // Check both children before construction; the error message names the
        // concrete expression kind via the marker type.
        expect_type(Type::Bool, &lhs)
            .map_err(ExprError::from)
            .map_err(|e| {
                e.context_msg(format!(
                    "Expected boolean type for the left hand-side expression of the {} expression.",
                    M::EXPR_KIND.camel_name()
                ))
            })?;
        expect_type(Type::Bool, &rhs)
            .map_err(ExprError::from)
            .map_err(|e| {
                e.context_msg(format!(
                    "Expected boolean type for the right hand-side expression of the {} expression.",
                    M::EXPR_KIND.camel_name()
                ))
            })?;
        // SAFETY: both children were just verified to be of bool type above.
        Ok(unsafe{ Self::new_unchecked(lhs, rhs) })
    }

    /// Returns a new binary formula expression with the given child expressions.
    ///
    /// # Safety
    ///
    /// This does not check the type integrity of the given child expressions
    /// and thus should be used with care.
    pub unsafe fn new_unchecked<E1, E2>(lhs: E1, rhs: E2) -> Self
    where
        E1: Into<AnyExpr>,
        E2: Into<AnyExpr>
    {
        let lhs = lhs.into();
        let rhs = rhs.into();
        // Debug-only re-validation of the caller's type contract.
        debug_assert!(expect_type(Type::Bool, &lhs).is_ok());
        debug_assert!(expect_type(Type::Bool, &rhs).is_ok());
        Self {
            children: BinExprChildren::new_boxed(lhs, rhs),
            marker: PhantomData,
        }
    }
}

// Marker trait: every binary bool expression is a boolean expression.
impl<M> BoolExpr for BinBoolExpr<M> where Self: Into<AnyExpr>, { }

// Child traversal is delegated to the shared `BinExprChildren` pair.
impl<M> Children for BinBoolExpr<M> {
    fn children(&self) -> ChildrenIter {
        self.children.children()
    }
}

impl<M> ChildrenMut for BinBoolExpr<M> {
    fn children_mut(&mut self) -> ChildrenIterMut {
        self.children.children_mut()
    }
}

impl<M> IntoChildren for BinBoolExpr<M> {
    fn into_children(self) -> IntoChildrenIter {
        self.children.into_children()
    }
}

// The result type is always bool, independent of the marker.
impl<M> HasType for BinBoolExpr<M> {
    fn ty(&self) -> Type {
        Type::Bool
    }
}

// The kind is supplied entirely by the marker type.
impl<M> HasKind for BinBoolExpr<M>
where
    M: ExprMarker,
{
    fn kind(&self) -> ExprKind {
        M::EXPR_KIND
    }
}

// Binary expressions always have exactly two children.
impl<M> HasArity for BinBoolExpr<M> {
    fn arity(&self) -> usize {
        2
    }
}

impl<M> BinaryExpr for BinBoolExpr<M> {
    fn lhs_child(&self) -> &AnyExpr {
        self.children.lhs_child()
    }
    fn rhs_child(&self) -> &AnyExpr {
        self.children.rhs_child()
    }
}
//! Provides all errors for the cdl-runner crate. use glob; use hyper; use hyper::error::UriError; use native_tls; use std::io; error_chain! { errors { InvalidTypeToConvert(the_type: String) { description("Cannot convert type to a Database Type!") display("Invalid Type: [ {} ] to convert to DB", the_type) } PostgresErr { description("Underlying postgres error!") display("Underlying postgres error!") } MysqlErr { description("Underlying Mysql error!") display("Underlying Mysql error!") } ImportErr { description("Underlying import errror!") display("Underlying import error!") } } foreign_links { Globerror(glob::PatternError); Hypererror(hyper::Error); Ioerror(io::Error); Tlserror(native_tls::Error); Urierror(UriError); } }
use color_eyre::{Help, SectionExt};
use eyre::eyre;
use rsonpath_lib::{
    engine::error::EngineError,
    error::UnsupportedFeatureError,
    query::{
        error::{CompilerError, ParseErrorReport, ParserError},
        JsonPathQuery,
    },
};

// Link presented to users who hit an unsupported, unplanned feature.
const FEATURE_REQUEST_URL: &str = "https://github.com/V0ldek/rsonpath/issues/new?template=feature_request.md";

/// Turn a [`ParserError`] into a user-friendly eyre Report.
pub fn report_parser_error(query_string: &str, error: ParserError) -> eyre::Report {
    match error {
        // Syntax errors get the rich, underlined rendering below; the other
        // variants are passed through unchanged.
        ParserError::SyntaxError { report } => report_query_syntax_error(query_string, report),
        ParserError::InternalNomError { .. } => eyre::Report::new(error),
        ParserError::ArrayIndexError(_) => eyre::Report::new(error),
    }
}

/// Turn a [`CompilerError`] into a user-friendly eyre Report.
pub fn report_compiler_error(query: &JsonPathQuery, error: CompilerError) -> eyre::Report {
    match error {
        CompilerError::NotSupported(unsupported) => report_unsupported_error(unsupported),
        CompilerError::QueryTooComplex(_) => {
            let mut report = eyre::Report::new(error);
            // Wildcards blow up automaton size, so suggest removing them when
            // the query contains any `AnyChild` node.
            if query
                .root()
                .iter()
                .any(|node| matches!(node, rsonpath_lib::query::JsonPathQueryNode::AnyChild(_)))
            {
                report = report.suggestion(
                    "Wildcard selectors are a common source of query complexity.\n \
Consider reformulating the query using descendant selectors to replace sequences of wildcards.",
                );
            }
            add_unsupported_context(report, UnsupportedFeatureError::large_automaton_queries())
        }
    }
}

/// Turn a [`EngineError`] into a user-friendly eyre Report.
pub fn report_engine_error(error: EngineError) -> eyre::Report {
    match error {
        EngineError::DepthBelowZero(_, _) => eyre::Report::new(error),
        // Depth overflow is a known limitation; attach the tracking note.
        EngineError::DepthAboveLimit(_, _) => {
            add_unsupported_context(eyre::Report::new(error), UnsupportedFeatureError::large_json_depths())
        }
        EngineError::MissingClosingCharacter() => eyre::Report::new(error),
        EngineError::MissingOpeningCharacter() => eyre::Report::new(error),
        EngineError::MissingItem() => eyre::Report::new(error),
        EngineError::MalformedStringQuotes(_) => eyre::Report::new(error),
        EngineError::NotSupported(unsupported) => report_unsupported_error(unsupported),
        EngineError::InternalError(_) => eyre::Report::new(error),
        EngineError::InputError(_) => eyre::Report::new(error),
        EngineError::SinkError(_) => eyre::Report::new(error),
    }
}

// Render every syntax error with a window of the query, a `^^^` underline and
// targeted suggestions.
fn report_query_syntax_error(query_string: &str, report: ParseErrorReport) -> eyre::Report {
    let mut eyre = eyre!("One or more syntax errors occurred.");

    for error in report.errors() {
        use color_eyre::owo_colors::OwoColorize;
        use std::{cmp, iter};

        // Show at most this many characters of context on either side.
        const MAX_DISPLAY_LENGTH: usize = 80;

        let display_start_idx = if error.start_idx > MAX_DISPLAY_LENGTH {
            error.start_idx - MAX_DISPLAY_LENGTH
        } else {
            0
        };
        let display_length = cmp::min(error.len + MAX_DISPLAY_LENGTH, query_string.len() - display_start_idx);
        // NOTE(review): byte-offset slicing; assumes error indices fall on
        // char boundaries -- confirm for non-ASCII queries.
        let error_slice = &query_string[error.start_idx..error.start_idx + error.len];
        let slice = &query_string[display_start_idx..display_start_idx + display_length];
        let error_idx = error.start_idx - display_start_idx;

        // Spaces up to the error, then one caret per offending character.
        let underline: String = iter::repeat(' ')
            .take(error_idx)
            .chain(iter::repeat('^').take(error.len))
            .collect();
        let display_string = format!("{}\n{}", slice, (underline + " invalid tokens").bright_red());

        eyre = eyre.section(display_string.header("Parse error:"));

        if error.start_idx == 0 {
            eyre = eyre.suggestion(format!(
                "Queries should start with the root selector '{}'.",
                "$".dimmed()
            ));
        }
        if error_slice.contains('$') {
            eyre = eyre.suggestion(format!(
                "The '{}' character is reserved for the root selector and may appear only at the start.",
                "$".dimmed()
            ));
        }
    }

    eyre
}

// Headline for an unsupported feature, worded differently depending on
// whether support is planned.
fn report_unsupported_error(unsupported: UnsupportedFeatureError) -> eyre::Report {
    use color_eyre::owo_colors::OwoColorize;
    let feature = unsupported.feature();
    let base_report = if unsupported.is_planned() {
        let feature = feature.blue();
        eyre!("The feature {feature} {}", "is not supported yet.".bright_red())
    } else {
        let feature = feature.red();
        eyre!("The feature {feature} {}", "is not supported.".bright_red())
    };
    add_unsupported_context(base_report, unsupported)
}

// Attach either the tracking-issue link (planned feature) or a
// feature-request invitation (unplanned) to the report.
fn add_unsupported_context(report: eyre::Report, unsupported: UnsupportedFeatureError) -> eyre::Report {
    use color_eyre::owo_colors::OwoColorize;
    let feature = unsupported.feature();
    if let Some(issue) = unsupported.issue() {
        let feature = feature.blue();
        report.note(format!(
            "The feature {feature} is planned for a future release of rsonpath.\n \
You can join the ongoing discussion at {}.",
            format!("https://github.com/V0ldek/rsonpath/issues/{issue}").bright_blue()
        ))
    } else {
        let feature = feature.red();
        report.note(format!(
            "The feature {feature} is not supported and is {} planned.\n \
If you would like to see it introduced to rsonpath, please raise a feature request at\n \
{} and describe your use case.",
            "not".italic(),
            FEATURE_REQUEST_URL.bright_blue()
        ))
    }
}
use std::collections::{HashMap, HashSet};
use std::hash::Hash;
use std::str::FromStr;

/// Map-like containers that can append a value to the collection stored
/// under `key`, creating the entry on first use.
trait InsertingIntoExistingValue<K, V> {
    fn insert_into_existing_value(&mut self, key: K, value: V);
}

type AdjacencyList<S> = HashMap<S, Vec<S>>;

impl<S> InsertingIntoExistingValue<S, S> for AdjacencyList<S>
where
    S: Hash + Eq,
{
    fn insert_into_existing_value(&mut self, key: S, value: S) {
        // Entry API: a single hash lookup instead of the previous
        // remove-then-reinsert, with identical semantics (append, or create
        // a fresh one-element Vec).
        self.entry(key).or_default().push(value);
    }
}

/// A body in the orbit map; `YOU` and `SAN` get dedicated variants so they
/// can be looked up without string comparisons.
#[derive(Hash, Eq, PartialEq, Debug, Clone)]
pub enum OrbitalObject {
    You,
    Santa,
    Object(String),
}

impl From<&str> for OrbitalObject {
    fn from(string: &str) -> Self {
        match string {
            "YOU" => OrbitalObject::You,
            "SAN" => OrbitalObject::Santa,
            obj => OrbitalObject::Object(obj.to_string()),
        }
    }
}

/// Parsed orbit map: every object plus a child -> parent lookup.
#[derive(Debug)]
pub struct OrbitMap {
    all_objects: HashSet<OrbitalObject>,
    reverse_lookup: HashMap<OrbitalObject, OrbitalObject>,
}

impl OrbitMap {
    /// Total number of direct and indirect orbits: the sum of every object's
    /// path length to the root.
    pub fn number_of_orbits(&self) -> usize {
        self.construct_orbital_path_map()
            .values()
            .map(Vec::len)
            .sum()
    }

    /// Number of transfers needed to move YOU next to SAN: walk both paths
    /// from the root until they diverge, then count the remaining hops.
    ///
    /// # Panics
    ///
    /// Panics if the map contains no `YOU` or no `SAN` entry.
    pub fn number_of_orbital_transfers_from_you_to_santa(&self) -> usize {
        let path_map = self.construct_orbital_path_map();
        assert!(path_map.contains_key(&OrbitalObject::You));
        assert!(path_map.contains_key(&OrbitalObject::Santa));

        let you_orbits = path_map.get(&OrbitalObject::You).unwrap();
        let santa_orbits = path_map.get(&OrbitalObject::Santa).unwrap();

        // Paths are stored leaf-to-root, so the shared suffix is the common
        // ancestry; scan from the root end until the paths diverge.
        let mut last_matching_you_index = you_orbits.len() - 1;
        let mut last_matching_santa_index = santa_orbits.len() - 1;
        for (reverse_index, (you, santa)) in you_orbits
            .iter()
            .rev()
            .zip(santa_orbits.iter().rev())
            .enumerate()
        {
            if you != santa {
                break;
            }
            last_matching_you_index = you_orbits.len() - reverse_index - 1;
            last_matching_santa_index = santa_orbits.len() - reverse_index - 1;
        }

        // Stitch YOU's unique prefix to SAN's (reversed) path up to and
        // including the common ancestor; hops = nodes - 1.
        let path: Vec<&OrbitalObject> = you_orbits[..last_matching_you_index]
            .iter()
            .chain(santa_orbits[..=last_matching_santa_index].iter().rev())
            .collect();
        path.len() - 1
    }

    // For every object, the ordered list of ancestors from its direct parent
    // up to the root.
    fn construct_orbital_path_map(&self) -> AdjacencyList<OrbitalObject> {
        let mut path_map: AdjacencyList<OrbitalObject> = Default::default();
        for obj in &self.all_objects {
            let mut current = obj;
            path_map.insert(obj.clone(), Default::default());
            while self.reverse_lookup.contains_key(current) {
                current = self.reverse_lookup.get(current).unwrap();
                path_map.insert_into_existing_value(obj.clone(), current.clone());
            }
        }
        path_map
    }
}

impl FromStr for OrbitMap {
    type Err = ();

    /// Parse `PARENT)CHILD` lines; malformed lines are skipped. Never fails.
    fn from_str(input: &str) -> Result<Self, Self::Err> {
        let mut all_objects = HashSet::new();
        let mut reverse_lookup = HashMap::new();
        // `lines()` also copes with `\r\n` input; each side is trimmed anyway.
        for line in input.lines() {
            let adjacency: Vec<&str> = line.split(')').take(2).map(str::trim).collect();
            if adjacency.len() != 2 {
                continue;
            }
            // Stored child -> parent so ancestry can be walked upward.
            let (key, value) = (
                OrbitalObject::from(adjacency[1]),
                OrbitalObject::from(adjacency[0]),
            );
            all_objects.insert(key.clone());
            reverse_lookup.insert(key, value);
        }
        Ok(OrbitMap {
            all_objects,
            reverse_lookup,
        })
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::util;

    #[test]
    fn smoke_simple_program_1() {
        let input = "COM)B
B)C
C)D
D)E
E)F
B)G
G)H
D)I
E)J
J)K
K)L";
        let orbit_map = OrbitMap::from_str(input).unwrap();
        assert_eq!(orbit_map.number_of_orbits(), 42);
    }

    #[test]
    fn smoke_simple_program_2() {
        let input = "COM)B
B)C
C)D
D)E
E)F
B)G
G)H
D)I
E)J
J)K
K)L
K)YOU
I)SAN";
        let orbit_map = OrbitMap::from_str(input).unwrap();
        assert_eq!(orbit_map.number_of_orbital_transfers_from_you_to_santa(), 4);
    }

    #[test]
    fn test_advent_puzzle() {
        let orbit_map: OrbitMap = util::load_input_file("day06.txt").unwrap();
        assert_eq!(
            orbit_map.number_of_orbital_transfers_from_you_to_santa(),
            496
        );
    }
}
use num_derive::FromPrimitive; use num_enum::IntoPrimitive; use num_traits::FromPrimitive; use bytes::{ Bytes, Buf, BytesMut, BufMut }; #[derive(FromPrimitive, IntoPrimitive, Debug, PartialEq, Copy, Clone)] #[repr(u8)] pub enum Type { Any = 0x01, Controller = 0x02, Slave = 0x03, Existing = 0x04, Stop = 0x05, StopFailed = 0x06, } #[derive(Debug, PartialEq, Clone)] pub struct AddNodeToNetworkRequestHost { pub type_ : Type, pub high_power : bool, pub network_wide : bool, } impl AddNodeToNetworkRequestHost { pub fn encode(&self, dst: &mut BytesMut) { let mut byte: u8 = self.type_.into(); if self.high_power { byte |= 0x80; } if self.network_wide { byte |= 0x40; } dst.put_u8(byte); } pub fn decode(src: &mut Bytes) -> AddNodeToNetworkRequestHost { src.advance(1); // skip let byte = src.get_u8(); let type_ : Type = FromPrimitive::from_u8(byte & 0xf).unwrap(); let high_power = byte & 0x80 != 0; let network_wide = byte & 0x40 != 0; AddNodeToNetworkRequestHost { type_, high_power, network_wide } } }
/// Assert one value is greater than another value.
///
/// * When true, return `()`.
///
/// * Otherwise, call [`panic!`] with a message and the values of the
///   expressions with their debug representations.
///
/// # Examples
///
/// ```rust
/// # #[macro_use] extern crate assertable; fn main() {
/// assert_gt!(2, 1);
/// //-> ()
/// # }
/// ```
///
/// ```rust
/// # #[macro_use] extern crate assertable; fn main() {
/// // assert_gt!(1, 2);
/// //-> panic!("assertion failed: `assert_gt(left, right)`\n  left: `1`\n right: `2`")
/// # }
/// ```
///
/// This macro has a second form where a custom message can be provided.
#[macro_export]
macro_rules! assert_gt {
    ($left:expr, $right:expr $(,)?) => ({
        match (&$left, &$right) {
            (left_val, right_val) => {
                if left_val > right_val {
                    ()
                } else {
                    // Use the already-evaluated references so the operand
                    // expressions are not evaluated a second time for the
                    // panic message.
                    panic!("assertion failed: `assert_gt(left, right)`\n  left: `{:?}`\n right: `{:?}`", left_val, right_val);
                }
            }
        }
    });
    ($left:expr, $right:expr, $($arg:tt)+) => ({
        match (&($left), &($right)) {
            (left_val, right_val) => {
                if left_val > right_val {
                    ()
                } else {
                    // Forward the caller's format string and arguments
                    // verbatim (like `std::assert!`) instead of debug-printing
                    // them with `{:?}`, which also broke multi-argument
                    // messages.
                    panic!($($arg)+)
                }
            }
        }
    });
}

#[cfg(test)]
mod tests {

    #[test]
    fn test_assert_gt_x_arity_2_success() {
        let a = 2;
        let b = 1;
        let x = assert_gt!(a, b);
        assert_eq!(
            x,
            ()
        );
    }

    #[test]
    #[should_panic (expected = "assertion failed: `assert_gt(left, right)`")]
    fn test_assert_gt_x_arity_2_failure() {
        let a = 1;
        let b = 2;
        let _ = assert_gt!(a, b);
    }

    #[test]
    fn test_assert_gt_x_arity_3_success() {
        let a = 2;
        let b = 1;
        let x = assert_gt!(a, b, "message");
        assert_eq!(
            x,
            ()
        );
    }

    #[test]
    #[should_panic (expected = "message")]
    fn test_assert_gt_x_arity_3_failure() {
        let a = 1;
        let b = 2;
        let _ = assert_gt!(a, b, "message");
    }
}
use alloc::string::String;
use core::fmt;

/// Errors produced while resolving assets and layers.
#[derive(Clone, Debug)]
pub enum ResolverError {
    /// The resolver could not open the named asset.
    OpenAssetError(String),
    /// The resolver could not read the asset's last-modified time.
    AssetMtimeError,
    /// The resolver could not write a layer to the given path (path, reason).
    CannotWriteLayerToPath(String, String),
    /// The resolver could not create a layer with the given identifier
    /// (identifier, reason).
    CannotCreateNewLayerWithIdentifier(String, String),
}

impl fmt::Display for ResolverError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use ResolverError::*;
        match self {
            OpenAssetError(asset) => write!(f, "failed to open asset: {}", asset),
            AssetMtimeError => write!(f, "failed to get asset's modified time"),
            CannotWriteLayerToPath(path, why) => {
                write!(f, "cannot write layer to path `{}`: {}", path, why)
            }
            CannotCreateNewLayerWithIdentifier(ident, why) => write!(
                f,
                "cannot create new layer with identifier `{}`: {}",
                ident, why
            ),
        }
    }
}
use std::fmt;
use std::ops::Deref;

pub type Float = f64;

/// Dense row-major matrix.
pub struct Matrix {
    pub data: Vec<Vec<Float>>,
}

/// Dense vector.
pub struct Vector {
    pub data: Vec<Float>,
}

impl Deref for Matrix {
    type Target = Vec<Vec<Float>>;

    fn deref(&self) -> &Self::Target {
        &self.data
    }
}

impl Deref for Vector {
    type Target = Vec<Float>;

    fn deref(&self) -> &Self::Target {
        &self.data
    }
}

impl fmt::Display for Matrix {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for row in &self.data {
            for el in row {
                // `?` replaces the long-deprecated `try!` macro.
                write!(f, "{:9.3} ", el)?;
            }
            write!(f, "\n")?;
        }
        Ok(())
    }
}

impl fmt::Display for Vector {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for el in &self.data {
            write!(f, "{:9.3} ", el)?;
        }
        write!(f, "\n")?;
        Ok(())
    }
}

/// Solve `Ax = b` by Gaussian elimination with partial pivoting.
///
/// # Panics
///
/// Panics with "Unconstrained variable" when some column has no row with
/// its pivot in that column, and on division by a zero pivot producing a
/// non-finite result downstream (singular systems are not detected).
pub fn gaussian_elimination(a: &Matrix, b: &Vector) -> Vector {
    type AugRow = (Vec<Float>, Float);

    // Number of leading zero coefficients in an augmented row.
    fn leading_zeros(row: &AugRow) -> usize {
        row.0.iter().cloned().take_while(|x| *x == 0.0).count()
    }

    // Sort rows by descending magnitude in column `col` (partial pivoting).
    fn sort_aug(aug: &mut Vec<AugRow>, col: usize) {
        use std::cmp::Ordering;
        aug.sort_by(|l, r| {
            r.0[col]
                .abs()
                .partial_cmp(&l.0[col].abs())
                .unwrap_or(Ordering::Equal)
        });
    }

    let m = a.data[0].len();
    let mut x = vec![0.0; m];

    // Augmented matrix [A | b], one (coefficients, rhs) pair per row.
    let mut aug: Vec<AugRow> = a
        .data
        .iter()
        .cloned()
        .zip(b.data.iter().cloned())
        .collect();

    // Reduce to row-echelon form, eliminating column j below its pivot.
    for j in 0..m {
        sort_aug(&mut aug, j);
        let mut rows = aug.iter_mut().filter(|x| leading_zeros(x) == j);
        if let Some(pivot) = rows.next() {
            for row in rows {
                let scale = pivot.0[j] / row.0[j];
                for (pp, rr) in pivot.0.iter().cloned().zip(&mut row.0) {
                    (*rr) *= scale;
                    (*rr) -= pp;
                }
                row.1 *= scale;
                row.1 -= pivot.1;
            }
        } else {
            panic!("Unconstrained variable");
        }
    }

    // Order rows by leading-zero count so row j has its pivot in column j.
    aug.sort_by(|lhs, rhs| leading_zeros(lhs).cmp(&leading_zeros(rhs)));

    // Back substitution: solve from the last unknown upwards.
    for j in (0..m).rev() {
        let mut sum = aug[j].1;
        for k in (j + 1)..m {
            sum -= aug[j].0[k] * x[k];
        }
        x[j] = sum / aug[j].0[j];
    }

    Vector { data: x }
}

/// Euclidean distance between two equal-length slices.
fn euclidean_dist(a: &[Float], b: &[Float]) -> Float {
    a.iter()
        .zip(b)
        .map(|(xa, xb)| (xa - xb) * (xa - xb))
        .sum::<Float>()
        .sqrt()
}

/// Dot product of two equal-length slices.
fn inner_product(a: &[Float], b: &[Float]) -> Float {
    a.iter().zip(b).map(|(xa, xb)| xa * xb).sum()
}

/// Iterative Gauss-Seidel solver for `Ax = b`.
///
/// Runs at most `max` sweeps, stopping early when successive iterates are
/// within `eps` (Euclidean distance). Requires non-zero diagonal entries;
/// convergence is only guaranteed for e.g. diagonally dominant matrices.
pub fn gauss_seidel(a: &Matrix, b: &Vector, eps: Float, max: usize) -> Vector {
    let a = &a.data;
    let b = &b.data;
    let n = b.len();
    let mut x = vec![0.0; n];
    let mut x_prev = vec![0.0; n];
    for _ in 0..max {
        x_prev.clone_from(&x);
        for j in 0..n {
            // Residual form: correct x[j] by the scaled row-j residual.
            x[j] += (b[j] - inner_product(&a[j], &x)) / a[j][j];
        }
        if euclidean_dist(&x_prev, &x) <= eps {
            break;
        }
    }
    Vector { data: x }
}

/// Matrix-vector product `A x`.
fn mat_mul_vec(a: &Matrix, x: &Vector) -> Vector {
    let a = &a.data;
    let x = &x.data;
    let n = a.len();
    let mut b = vec![0.0; n];
    for (el, row) in b.iter_mut().zip(a) {
        *el = row.iter().zip(x).map(|(aa, xx)| aa * xx).sum();
    }
    Vector { data: b }
}

impl Matrix {
    /// `n x m` matrix of zeros.
    pub fn new(n: usize, m: usize) -> Matrix {
        Matrix { data: vec![vec![0.0; m]; n] }
    }
}

impl Vector {
    /// Zero vector of length `n`.
    pub fn new(n: usize) -> Vector {
        Vector { data: vec![0.0; n] }
    }
}
// Module wiring: `mathematics` is defined in mathematics.rs / mathematics/mod.rs.
mod mathematics;
// Re-export the `arithmetic` submodule as part of this crate's public API.
pub use crate::mathematics::arithmetic;
use std::collections::HashMap;

/// Count occurrences of words.
///
/// A word is a maximal run of ASCII alphanumerics, optionally followed by an
/// apostrophe plus exactly one lowercase ASCII letter (so "don't" stays one
/// word). Keys are lowercased, so counting is case-insensitive.
///
/// Implemented as a single scan; the previous version compiled the regex
/// `[a-zA-Z0-9]+('[a-z])?` on every call, which this matches exactly.
pub fn word_count(words: &str) -> HashMap<String, u32> {
    let mut counts: HashMap<String, u32> = HashMap::new();
    let chars: Vec<char> = words.chars().collect();
    let mut i = 0;
    while i < chars.len() {
        if !chars[i].is_ascii_alphanumeric() {
            i += 1;
            continue;
        }
        // Maximal alphanumeric run.
        let start = i;
        while i < chars.len() && chars[i].is_ascii_alphanumeric() {
            i += 1;
        }
        // Optional contraction suffix: apostrophe + one lowercase letter.
        if i + 1 < chars.len() && chars[i] == '\'' && chars[i + 1].is_ascii_lowercase() {
            i += 2;
        }
        // All matched characters are ASCII, so ASCII lowercasing is exact.
        let word: String = chars[start..i].iter().map(|c| c.to_ascii_lowercase()).collect();
        *counts.entry(word).or_insert(0) += 1;
    }
    counts
}
extern crate std; use super::super::prelude::{ HWND , ToRustBoolConvertion , WindowShowStyleCommand , wapi , UINT , WPARAM , LPARAM , LRESULT }; pub type Window = HWND; pub trait WindowFunction { /** [C]isNone **/ fn isNone(&self) -> bool; /** [A]ShowWindow **/ fn show(&self , style : WindowShowStyleCommand) -> bool; /** [A]UpdateWindow **/ fn update(&self) -> bool; /** [A]DefWindowProc **/ fn pass(&self , message : UINT , wParam : WPARAM , lParam : LPARAM ) -> LRESULT; } impl WindowFunction for Window { fn isNone(&self) -> bool { *self == std::ptr::mut_null() } fn show(&self , style : WindowShowStyleCommand) -> bool { unsafe { wapi::Window::ShowWindow(*self , style).bool() } } fn update(&self) -> bool { unsafe { wapi::Window::UpdateWindow(*self).bool() } } fn pass(&self , message : UINT , wParam : WPARAM , lParam : LPARAM ) -> LRESULT { unsafe { wapi::WindowProcedure::DefWindowProcW(*self , message , wParam , lParam) } } }
// Implementing ToString for Statement enum so that making full latex text easily. use crate::error; use crate::parser::ast::*; use crate::parser::Parser; pub fn make_latex_format<const IS_TEST: bool>(parser: &mut Parser) -> error::Result<String> { let latex = parser.parse_latex()?; let mut output = String::new(); if !IS_TEST { output += &format!( "%\n% This file was generated by vesti {}\n%\n", env!("CARGO_PKG_VERSION") ); } for stmt in latex { output += &stmt.to_string(); } Ok(output) } impl ToString for Statement { fn to_string(&self) -> String { match self { Statement::NonStopMode => String::from("\\nonstopmode\n"), Statement::DocumentClass { name, options } => docclass_to_string(name, options), Statement::Usepackage { name, options } => usepackage_to_string(name, options), Statement::MultiUsepackages { pkgs } => multiusepacakge_to_string(pkgs), Statement::DocumentStart => String::from("\\begin{document}\n"), Statement::DocumentEnd => String::from("\n\\end{document}\n"), Statement::MainText(s) => s.clone(), Statement::BracedStmt(latex) => format!("{{{}}}", latex_to_string(latex)), Statement::Fraction { numerator, denominator, } => fraction_to_string(numerator, denominator), Statement::PlainTextInMath { trim, text } => plaintext_in_math_to_string(trim, text), Statement::Integer(i) => i.to_string(), Statement::Float(f) => f.to_string(), Statement::RawLatex(s) => s.clone(), Statement::MathText { state, text } => math_text_to_string(*state, text), Statement::LatexFunction { name, args } => latex_function_to_string(name, args), Statement::Environment { name, args, text } => environment_to_string(name, args, text), Statement::BeginPhantomEnvironment { name, args } => { begin_phantom_environment_to_string(name, args) } Statement::EndPhantomEnvironment { name } => format!("\\end{{{name}}}"), Statement::FunctionDefine { style, name, args, trim, body, } => function_def_to_string(style, name, args, trim, body), Statement::EnvironmentDefine { is_redefine, name, args_num, 
optional_arg, trim, begin_part, end_part, } => environment_def_to_string( *is_redefine, name, *args_num, optional_arg.as_ref(), trim, begin_part, end_part, ), } } } fn docclass_to_string(name: &str, options: &Option<Vec<Latex>>) -> String { if let Some(opts) = options { let mut options_str = String::new(); for o in opts { options_str = options_str + &latex_to_string(o) + ","; } options_str.pop(); format!("\\documentclass[{options_str}]{{{name}}}\n") } else { format!("\\documentclass{{{name}}}\n") } } fn usepackage_to_string(name: &str, options: &Option<Vec<Latex>>) -> String { if let Some(opts) = options { let mut options_str = String::new(); for o in opts { options_str = options_str + &latex_to_string(o) + ","; } options_str.pop(); format!("\\usepackage[{options_str}]{{{name}}}\n") } else { format!("\\usepackage{{{name}}}\n") } } fn multiusepacakge_to_string(pkgs: &[Statement]) -> String { let mut output = String::new(); for pkg in pkgs { if let Statement::Usepackage { name, options } = pkg { output += &usepackage_to_string(name, options); } } output } fn math_text_to_string(state: MathState, text: &[Statement]) -> String { let mut output = String::new(); match state { MathState::Text => { output += "$"; for t in text { output += &t.to_string(); } output += "$"; } MathState::Inline => { output += "\\["; for t in text { output += &t.to_string(); } output += "\\]"; } } output } fn fraction_to_string(numerator: &Latex, denominator: &Latex) -> String { format!( "\\frac{{{}}}{{{}}}", latex_to_string(numerator), latex_to_string(denominator) ) } fn plaintext_in_math_to_string(trim: &TrimWhitespace, text: &Latex) -> String { let mut output = latex_to_string(text); if output.as_bytes()[output.len() - 1] == b' ' { output.pop(); } match (trim.start, trim.end) { (true, true) => format!("\\text{{ {} }}", output), (true, false) => format!("\\text{{ {}}}", output), (false, true) => format!("\\text{{{} }}", output), (false, false) => format!("\\text{{{}}}", output), } } fn 
latex_function_to_string(name: &str, args: &Vec<(ArgNeed, Vec<Statement>)>) -> String { let mut output = format!("{}", name); for arg in args { let mut tmp = String::new(); for t in &arg.1 { tmp += &t.to_string(); } match arg.0 { ArgNeed::MainArg => output += &format!("{{{tmp}}}"), ArgNeed::Optional => output += &format!("[{tmp}]"), ArgNeed::StarArg => output.push('*'), } } output } fn begin_phantom_environment_to_string( name: &str, args: &Vec<(ArgNeed, Vec<Statement>)>, ) -> String { let mut output = format!("\\begin{{{name}}}"); for arg in args { let mut tmp = String::new(); for t in &arg.1 { tmp += &t.to_string(); } match arg.0 { ArgNeed::MainArg => output += &format!("{{{tmp}}}"), ArgNeed::Optional => output += &format!("[{tmp}]"), ArgNeed::StarArg => output.push('*'), } } output } fn environment_to_string( name: &str, args: &Vec<(ArgNeed, Vec<Statement>)>, text: &Latex, ) -> String { let mut output = format!("\\begin{{{name}}}"); for arg in args { let mut tmp = String::new(); for t in &arg.1 { tmp += &t.to_string(); } match arg.0 { ArgNeed::MainArg => output += &format!("{{{tmp}}}"), ArgNeed::Optional => output += &format!("[{tmp}]"), ArgNeed::StarArg => output.push('*'), } } for t in text { output += &t.to_string(); } output += &format!("\\end{{{name}}}\n"); output } fn latex_to_string(latex: &Latex) -> String { let mut output = String::new(); for l in latex { output += &l.to_string(); } output } fn function_def_to_string( style: &FunctionStyle, name: &str, args: &str, trim: &TrimWhitespace, body: &Latex, ) -> String { let mut output = match style { FunctionStyle::Plain => format!("\\def\\{name}{args}{{"), FunctionStyle::LongPlain => format!("\\long\\def\\{name}{args}{{"), FunctionStyle::OuterPlain => format!("\\outer\\def\\{name}{args}{{"), FunctionStyle::LongOuterPlain => format!("\\long\\outer\\def\\{name}{args}{{"), FunctionStyle::Expand => format!("\\edef\\{name}{args}{{"), FunctionStyle::LongExpand => format!("\\long\\edef\\{name}{args}{{"), 
FunctionStyle::OuterExpand => format!("\\outer\\edef\\{name}{args}{{"), FunctionStyle::LongOuterExpand => format!("\\long\\outer\\edef\\{name}{args}{{"), FunctionStyle::Global => format!("\\gdef\\{name}{args}{{"), FunctionStyle::LongGlobal => format!("\\long\\gdef\\{name}{args}{{"), FunctionStyle::OuterGlobal => format!("\\outer\\gdef\\{name}{args}{{"), FunctionStyle::LongOuterGlobal => format!("\\long\\outer\\gdef\\{name}{args}{{"), FunctionStyle::ExpandGlobal => format!("\\xdef\\{name}{args}{{"), FunctionStyle::LongExpandGlobal => format!("\\long\\xdef\\{name}{args}{{"), FunctionStyle::OuterExpandGlobal => format!("\\outer\\xdef\\{name}{args}{{"), FunctionStyle::LongOuterExpandGlobal => format!("\\long\\outer\\xdef\\{name}{args}{{"), }; let mut tmp = String::new(); for b in body { tmp += &b.to_string(); } output += match (trim.start, trim.end) { (false, false) => tmp.as_str(), (true, false) => tmp.trim_start(), (false, true) => tmp.trim_end(), (true, true) => tmp.trim(), }; output.push_str("}\n"); output } fn environment_def_to_string( is_redefine: bool, name: &str, args_num: u8, optional_arg: Option<&Latex>, trim: &TrimWhitespace, begin_part: &Latex, end_part: &Latex, ) -> String { let mut output = if is_redefine { format!("\\renewenvironment{{{name}}}") } else { format!("\\newenvironment{{{name}}}") }; if args_num > 0 { output += &format!("[{args_num}]"); if let Some(inner) = optional_arg { output.push('['); for stmt in inner { output += &stmt.to_string(); } output.push_str("]{"); } else { output.push('{'); } } else { output.push('{'); } let mut tmp = String::new(); for b in begin_part { tmp += &b.to_string(); } output += match (trim.start, trim.mid) { (false, Some(false)) => tmp.as_str(), (true, Some(false)) => tmp.trim_start(), (false, Some(true)) => tmp.trim_end(), (true, Some(true)) => tmp.trim(), _ => unreachable!("VESTI BUG!!!!: codegen::environment_def_to_string"), }; output.push_str("}{"); tmp.clear(); for b in end_part { tmp += &b.to_string(); } output 
+= match (trim.mid, trim.end) { (Some(false), false) => tmp.as_str(), (Some(true), false) => tmp.trim_start(), (Some(false), true) => tmp.trim_end(), (Some(true), true) => tmp.trim(), _ => unreachable!("VESTI BUG!!!!: codegen::environment_def_to_string"), }; output.push_str("}\n"); output }
// This file contains all of the endpoints for the market table use crate::api::components::market::model::Market; use crate::api::components::market::controller; use rocket_contrib::json::Json; use rocket_contrib::uuid::Uuid; use crate::database::DbConn; //Returns a list of all markets #[get("/markets")] pub fn get_markets(connection : DbConn) -> Json<Vec<Market>> { Json(controller::get_all_markets(connection)) } /* #[get("/markets/<radius>")] fn get_prods_in_radius(radius : i32) -> String { return format!("Get poducers within {} miles of you", radius); } )*/ #[get("/markets/<id>")] pub fn get_market_by_id(id : Uuid, connection : DbConn) -> Json<Market> { Json(controller::get_market_by_id(id, connection)) } #[post("/markets", format = "application/json", data = "<market>")] pub fn post_market(market: Json<Market>, connection : DbConn) -> String { let prod = market.into_inner(); controller::construct_market(prod, connection); return "hello".to_string(); } #[put("/markets", format = "application/json", data = "<body>")] pub fn update_market(body : Json<Market>, connection : DbConn) -> Json<Market> { controller::update_market(body, connection) } #[delete("/markets/<id>")] pub fn delete_market(id : Uuid, connection : DbConn) -> Json<String> { controller::delete_market(id, connection) }
extern crate serde;
extern crate serde_json;
extern crate router;
extern crate bodyparser;
extern crate iron;

use exonum::blockchain::{self, Service, Transaction, ApiContext};
use exonum::node::{TransactionSend, ApiSender, NodeChannel};
use exonum::messages::{RawTransaction, FromRaw, Message};
use exonum::storage::{Fork, MapIndex};
use exonum::crypto::{PublicKey, Hash};
use exonum::encoding::{self, Field};
use exonum::api::{Api, ApiError};
use exonum::helpers::fabric::{ServiceFactory, Context};
use iron::prelude::*;
use iron::Handler;
use router::Router;

// Service identifier
const SERVICE_ID: u16 = 900;
// Identifier for wallet creating transaction
const TX_CREATE_WALLET_ID: u16 = 1;
// Identifier for coins transferring transaction
const TX_TRANSFER_ID: u16 = 2;
// Starting balance of a newly created wallet
const INIT_BALANCE: u64 = 100;

// Declare Persistent Data
// Fixed-layout wallet record; the `[lo => hi]` annotations are byte offsets
// into the raw buffer: pub_key at 0..32, name reference at 32..40,
// balance at 40..48.
encoding_struct! {
    struct Wallet {
        const SIZE = 48;

        field pub_key: &PublicKey [00 => 32]
        field name: &str [32 => 40]
        field balance: u64 [40 => 48]
    }
}

impl Wallet {
    // Add `amount` to the balance by rewriting bytes 40..48 of the raw buffer.
    pub fn increase(&mut self, amount: u64) {
        let balance = self.balance() + amount;
        Field::write(&balance, &mut self.raw, 40, 48);
    }

    // Subtract `amount` from the balance. Callers must verify funds first:
    // plain `u64` subtraction panics on underflow in debug builds.
    pub fn decrease(&mut self, amount: u64) {
        let balance = self.balance() - amount;
        Field::write(&balance, &mut self.raw, 40, 48);
    }
}

// Create Schema
pub struct CurrencySchema<'a> {
    view: &'a mut Fork,
}

impl<'a> CurrencySchema<'a> {
    // Map of all wallets keyed by owner public key, stored under this
    // service's table prefix (table index 0).
    pub fn wallets(&mut self) -> MapIndex<&mut Fork, PublicKey, Wallet> {
        let prefix = blockchain::gen_prefix(SERVICE_ID, 0, &());
        MapIndex::new(prefix, self.view)
    }

    // Utility method to quickly get a separate wallet from the storage
    pub fn wallet(&mut self, pub_key: &PublicKey) -> Option<Wallet> {
        self.wallets().get(pub_key)
    }
}

// Define Transactions
// Creating New Wallet
message! {
    struct TxCreateWallet {
        const TYPE = SERVICE_ID;
        const ID = TX_CREATE_WALLET_ID;
        const SIZE = 40;

        field pub_key: &PublicKey [00 => 32]
        field name: &str [32 => 40]
    }
}

// Transferring Coins
message! {
    struct TxTransfer {
        const TYPE = SERVICE_ID;
        const ID = TX_TRANSFER_ID;
        const SIZE = 80;

        field from: &PublicKey [00 => 32]
        field to: &PublicKey [32 => 64]
        field amount: u64 [64 => 72]
        field seed: u64 [72 => 80]
    }
}

// Transaction Execution
impl Transaction for TxCreateWallet {
    // A wallet-creation message must be signed by the key it registers.
    fn verify(&self) -> bool {
        self.verify_signature(self.pub_key())
    }

    // Creates the wallet with INIT_BALANCE; silently ignores the message
    // when a wallet for this key already exists.
    fn execute(&self, view: &mut Fork) {
        let mut schema = CurrencySchema { view };
        if schema.wallet(self.pub_key()).is_none() {
            let wallet = Wallet::new(self.pub_key(), self.name(), INIT_BALANCE);
            println!("Create the wallet: {:?}", wallet);
            schema.wallets().put(self.pub_key(), wallet)
        }
    }
}

impl Transaction for TxTransfer {
    // Reject self-transfers and badly-signed messages. `seed` only makes
    // otherwise-identical transfers distinct; it is not validated here.
    fn verify(&self) -> bool {
        (*self.from() != *self.to()) && self.verify_signature(self.from())
    }

    // Moves `amount` between wallets; does nothing (no error reported) when
    // either wallet is missing or the sender lacks sufficient funds.
    fn execute(&self, view: &mut Fork) {
        let mut schema = CurrencySchema { view };
        let sender = schema.wallet(self.from());
        let receiver = schema.wallet(self.to());
        if let (Some(mut sender), Some(mut receiver)) = (sender, receiver) {
            let amount = self.amount();
            if sender.balance() >= amount {
                sender.decrease(amount);
                receiver.increase(amount);
                println!("Transfer between wallets: {:?} => {:?}", sender, receiver);
                let mut wallets = schema.wallets();
                wallets.put(self.from(), sender);
                wallets.put(self.to(), receiver);
            }
        }
    }
}

// Implement API
#[derive(Clone)]
struct CryptocurrencyApi {
    channel: ApiSender<NodeChannel>,
}

// Accept either transaction type in a single POST body; `untagged` makes
// serde pick the variant by field shape rather than an explicit tag.
#[serde(untagged)]
#[derive(Clone, Serialize, Deserialize)]
enum TransactionRequest {
    CreateWallet(TxCreateWallet),
    Transfer(TxTransfer),
}

impl Into<Box<Transaction>> for TransactionRequest {
    fn into(self) -> Box<Transaction> {
        match self {
            TransactionRequest::CreateWallet(trans) => Box::new(trans),
            TransactionRequest::Transfer(trans) => Box::new(trans),
        }
    }
}

#[derive(Serialize, Deserialize)]
struct TransactionResponse {
    tx_hash: Hash,
}

impl Api for CryptocurrencyApi {
    // POST handler: deserialize the transaction, queue it on the node
    // channel, and respond with its hash.
    fn wire(&self, router: &mut Router) {
        let self_ = self.clone();
        let tx_handler = move |req: &mut Request| -> IronResult<Response> {
            match req.get::<bodyparser::Struct<TransactionRequest>>() {
                Ok(Some(tx)) => {
                    let tx: Box<Transaction> = tx.into();
                    let tx_hash = tx.hash();
                    self_.channel.send(tx).map_err(|e| ApiError::Events(e))?;
                    let json = TransactionResponse { tx_hash };
                    self_.ok_response(&serde_json::to_value(&json).unwrap())
                }
                Ok(None) => Err(ApiError::IncorrectRequest("Empty request body".into()))?,
                Err(e) => Err(ApiError::IncorrectRequest(Box::new(e)))?,
            }
        };
        // Bind the transaction handler to a specific route.
        let route_post = "/v1/wallets/transaction";
        router.post(&route_post, tx_handler, "transaction");
    }
}

// Define Service
pub struct CurrencyService;

impl CurrencyService {
    pub fn new() -> CurrencyService {
        CurrencyService {}
    }
}

impl Service for CurrencyService {
    fn service_name(&self) -> &'static str {
        "cryptocurrency"
    }

    fn service_id(&self) -> u16 {
        SERVICE_ID
    }

    // Dispatch raw consensus messages to concrete transaction types by id.
    fn tx_from_raw(&self, raw: RawTransaction) -> Result<Box<Transaction>, encoding::Error> {
        println!("Currency service has incoming tx");
        let trans: Box<Transaction> = match raw.message_type() {
            TX_TRANSFER_ID => Box::new(TxTransfer::from_raw(raw)?),
            TX_CREATE_WALLET_ID => Box::new(TxCreateWallet::from_raw(raw)?),
            _ => {
                return Err(encoding::Error::IncorrectMessageType {
                    message_type: raw.message_type(),
                });
            }
        };
        Ok(trans)
    }

    // Expose the REST API over Iron, using the node's transaction channel.
    fn public_api_handler(&self, ctx: &ApiContext) -> Option<Box<Handler>> {
        let mut router = Router::new();
        let api = CryptocurrencyApi { channel: ctx.node_channel().clone() };
        api.wire(&mut router);
        Some(Box::new(router))
    }
}

impl ServiceFactory for CurrencyService {
    fn make_service(_: &Context) -> Box<Service> {
        Box::new(CurrencyService::new())
    }
}
// NOTE(review): this file appears to be svd2rust-generated register access
// code for the TAMP FLTCR register; prefer regenerating from the SVD over
// hand-editing.
#[doc = "Register `FLTCR` reader"]
pub type R = crate::R<FLTCR_SPEC>;
#[doc = "Register `FLTCR` writer"]
pub type W = crate::W<FLTCR_SPEC>;
#[doc = "Field `TAMPFREQ` reader - TAMPFREQ"]
pub type TAMPFREQ_R = crate::FieldReader;
#[doc = "Field `TAMPFREQ` writer - TAMPFREQ"]
pub type TAMPFREQ_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `TAMPFLT` reader - TAMPFLT"]
pub type TAMPFLT_R = crate::FieldReader;
#[doc = "Field `TAMPFLT` writer - TAMPFLT"]
pub type TAMPFLT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `TAMPPRCH` reader - TAMPPRCH"]
pub type TAMPPRCH_R = crate::FieldReader;
#[doc = "Field `TAMPPRCH` writer - TAMPPRCH"]
pub type TAMPPRCH_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `TAMPPUDIS` reader - TAMPPUDIS"]
pub type TAMPPUDIS_R = crate::BitReader;
#[doc = "Field `TAMPPUDIS` writer - TAMPPUDIS"]
pub type TAMPPUDIS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Field accessors mask and shift the raw 32-bit register value.
impl R {
    #[doc = "Bits 0:2 - TAMPFREQ"]
    #[inline(always)]
    pub fn tampfreq(&self) -> TAMPFREQ_R {
        TAMPFREQ_R::new((self.bits & 7) as u8)
    }
    #[doc = "Bits 3:4 - TAMPFLT"]
    #[inline(always)]
    pub fn tampflt(&self) -> TAMPFLT_R {
        TAMPFLT_R::new(((self.bits >> 3) & 3) as u8)
    }
    #[doc = "Bits 5:6 - TAMPPRCH"]
    #[inline(always)]
    pub fn tampprch(&self) -> TAMPPRCH_R {
        TAMPPRCH_R::new(((self.bits >> 5) & 3) as u8)
    }
    #[doc = "Bit 7 - TAMPPUDIS"]
    #[inline(always)]
    pub fn tamppudis(&self) -> TAMPPUDIS_R {
        TAMPPUDIS_R::new(((self.bits >> 7) & 1) != 0)
    }
}
// Field writers carry their bit offset in the const generic parameter.
impl W {
    #[doc = "Bits 0:2 - TAMPFREQ"]
    #[inline(always)]
    #[must_use]
    pub fn tampfreq(&mut self) -> TAMPFREQ_W<FLTCR_SPEC, 0> {
        TAMPFREQ_W::new(self)
    }
    #[doc = "Bits 3:4 - TAMPFLT"]
    #[inline(always)]
    #[must_use]
    pub fn tampflt(&mut self) -> TAMPFLT_W<FLTCR_SPEC, 3> {
        TAMPFLT_W::new(self)
    }
    #[doc = "Bits 5:6 - TAMPPRCH"]
    #[inline(always)]
    #[must_use]
    pub fn tampprch(&mut self) -> TAMPPRCH_W<FLTCR_SPEC, 5> {
        TAMPPRCH_W::new(self)
    }
    #[doc = "Bit 7 - TAMPPUDIS"]
    #[inline(always)]
    #[must_use]
    pub fn tamppudis(&mut self) -> TAMPPUDIS_W<FLTCR_SPEC, 7> {
        TAMPPUDIS_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "TAMP filter control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fltcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fltcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct FLTCR_SPEC;
impl crate::RegisterSpec for FLTCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`fltcr::R`](R) reader structure"]
impl crate::Readable for FLTCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`fltcr::W`](W) writer structure"]
impl crate::Writable for FLTCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets FLTCR to value 0"]
impl crate::Resettable for FLTCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
/// Destructure a borrowed tuple directly in the parameter pattern.
fn print_coordinates(&(x, y): &(i32, i32)) {
    println!("{}, {}", x, y);
}

struct Point {
    x: i32,
    y: i32,
}

/// Tour of Rust pattern-matching features. All deprecated `...` range
/// patterns have been replaced with the modern inclusive `..=` form
/// (required since edition 2021).
fn main() {
    let favorite_color: Option<&str> = None;
    let is_tuesday = false;
    let age: Result<u8, _> = "34".parse();

    // `if let` / `else if let` chains mix pattern tests with plain booleans.
    if let Some(color) = favorite_color {
        println!("favorite color is {}", color);
    } else if is_tuesday {
        println!("is tuesday");
    } else if let Ok(age) = age {
        println!("{}", age);
    } else {
        println!("no matches");
    }

    let mut stack = Vec::new();
    (1..4)
        .for_each(|i| {
            stack.push(i);
        });
    // `while let` loops until the pattern stops matching (pop returns None).
    while let Some(element) = stack.pop() {
        println!("{}", element);
    }

    let point = (3, 5);
    print_coordinates(&point);

    // if let x = 5 {
    //     println!("{}", x);
    // }

    let x = 10;
    match x {
        1 => println!("one"),
        2 => println!("two"),
        // Match guard: an extra boolean condition on top of the pattern.
        n if n % 2 == 0 => println!("even"),
        _ => println!("anything"),
    }

    let s = Some(5);
    match s {
        Some(5) => println!("five"),
        Some(v) => println!("{}", v),
        None => println!("none"),
    }

    let z = 1;
    match z {
        1 | 2 => println!("one or two"),
        _ => println!("anything"),
    }

    let r = 5;
    match r {
        1..=10 => println!("one through ten"),
        _ => println!("something else"),
    }

    let c = 'c';
    match c {
        'a'..='j' => println!("early ascii letter"),
        'k'..='z' => println!("late ascii letter"),
        _ => println!("something else"),
    }

    let p = Point { x: 0, y: 7 };
    // Destructure with renamed bindings...
    let Point { x: lng, y: lat } = p;
    assert_eq!(0, lng);
    assert_eq!(7, lat);

    let other_p = Point { x: 0, y: 7 };
    // ...or with field-name shorthand.
    let Point { x, y } = other_p;
    assert_eq!(0, x);
    assert_eq!(7, y);

    match other_p {
        Point { x: _, y: 0 } => println!("y is 0"),
        Point { x: 0, y: _ } => println!("x is 0"),
        Point { x, y } => println!("x, y -> {}, {}", x, y),
    }

    let points = vec![
        Point { x: 0, y: 0 },
        Point { x: 1, y: 5 },
        Point { x: 10, y: -3 },
    ];
    // Closure parameters can destructure too.
    let sum_of_squares: i32 = points
        .iter()
        .map(|&Point { x, y }| x * x + y * y)
        .sum();

    let ((feet, inches), Point { x, y }) = ((3, 10), Point { x: 3, y: -10 });

    // `_` as a parameter name ignores the whole argument.
    fn foo(_: i32, y: i32) {
        println!("{}", y);
    }
    foo(3, 4);

    let numbers = (2, 4, 6, 8, 10);
    match numbers {
        (first, _, third, _, _) => {
            println!("{}, {}", first, third);
        }
    }

    if let Some(_) = Some(String::from("Hello")) {
        println!("Some");
    }

    let origin = Point { x: 0, y: 2 };
    // `..` skips any remaining fields.
    match origin {
        Point { x, .. } => println!("{}", x),
    }

    let second = (1, 2, 3, 4, 5);
    match second {
        (first, ..) => {
            println!("{}", first);
        }
    }

    let robot_name = Some(String::from("Bors"));
    // `ref` borrows inside the pattern, so `robot_name` stays usable after.
    match robot_name {
        Some(ref name) => println!("{}", name),
        _ => (),
    }
    println!("{:?}", robot_name);

    let mut another_name = Some(String::from("Bors"));
    match another_name {
        Some(ref mut name) => *name = String::from("Another name"),
        _ => (),
    }
    println!("{:?}", another_name);

    let num = Some(4);
    match num {
        Some(x) if x < 5 => println!("less than 5"),
        Some(x) => println!("{}", x),
        _ => (),
    }

    enum Message {
        Hello { id: i32 },
    }
    let msg = Message::Hello { id: 5 };
    match msg {
        // `@` binds the matched value while also range-testing it.
        Message::Hello {
            id: id_variable @ 3..=7,
        } => {
            println!("in range {}", id_variable);
        },
        Message::Hello {
            id: id_variable @ 10..=12,
        } => {
            println!("in another range {}", id_variable);
        },
        Message::Hello { id } => println!("{}", id),
    }
}
use typesense::Document;
use serde::{Serialize, Deserialize};

// Example schema type for the typesense `Document` derive macro.
#[derive(Document, Serialize, Deserialize)]
// Collection results default to sorting by `company_name` — presumably this
// must name a field of the struct; verify against the typesense derive docs.
#[typesense(default_sorting_field = company_name)]
struct Company {
    company_name: String,
    num_employees: i32,
    // `facet` marks the field for faceted (grouped-count) queries.
    #[typesense(facet)]
    country: String,
}

fn main() {}
//! Defines the VMF file parser use synom::IResult; use synom::space::*; use ast::*; pub fn name<'a, K>(i: &'a str) -> IResult<&str, K> where K: From<&'a str> { let i = skip_whitespace(i); let input_length = i.len(); if input_length == 0 { return IResult::Error } for (idx, item) in i.chars().enumerate() { match item { 'a'...'z' | 'A'...'Z' | '0'...'9' | '_' | '-' | '$' => {}, _ => { if idx == 0 { return IResult::Error } else { return IResult::Done(&i[idx..], K::from(&i[..idx])) } }, } } IResult::Done(&i[input_length..], K::from(i)) } named!( pub string -> &str, delimited!( punct!("\""), take_until!("\""), punct!("\"") ) ); pub fn property<'a, K>(i: &'a str) -> IResult<&str, Property<K>> where K: From<&'a str> { do_parse!(i, key: string >> value: string >> (Property { key: key.into(), value: value.into() }) ) } pub fn block<'a, K>(i: &'a str) -> IResult<&str, Block<K>> where K: From<&'a str> { do_parse!(i, name: name >> punct!("{") >> body: map!( many0!(alt!( map!(property, |v| (Some(v), None)) | map!(block, |v| (None, Some(v))) )), |items: Vec<_>| { items.into_iter() .fold( (Vec::new(), Vec::new()), |(mut props, mut blocks): (Vec<_>, Vec<_>), (prop, block)| { if let Some(prop) = prop { props.push(prop); } if let Some(block) = block { blocks.push(block); } (props, blocks) } ) } ) >> punct!("}") >> (Block { name, props: body.0, blocks: body.1 }) ) } pub fn file<'a, K>(i: &'a str) -> IResult<&str, Vec<Block<K>>> where K: From<&'a str> { many0!(i, block) }
// Demonstrates `String` concatenation with `+` and the move of the left operand.
fn main() {
    let s1 = String::from("halo");
    let s2 = String::from(" bob");
    let s3 = s1 + &s2; // s1 moved: `+` calls `add(self, &str)`, taking ownership of s1
    // println!("{:?}", s1); // no longer usable: s1 was moved above
    println!("{:?}", s2);
    println!("{:?}", s3);
    // &String can be coerced into &str.
    // Rust uses a technique called deref coercion; you can think of it as
    // turning &s2 into &s2[..].
}
// The arguments of a macro are prefixed by a dollar sign `$` and type
// annotated with a `designator`:
macro_rules! create_function {
    // This macro takes an argument of designator `ident` and creates
    // a function named `$func_name`.
    // The `ident` designator is used for variable/function names.
    ($func_name:ident) => {
        fn $func_name() {
            println!("You called {:?}()", stringify!($func_name));
        }
    };
}

create_function!(foo);
create_function!(bar);

macro_rules! print_result {
    ($expression:expr) => {
        // `stringify!` will convert the expression *as it is* into a string
        println!("{:?} = {:?}", stringify!($expression), $expression );
    }
}

// These are some of the available designators:
//
// - `block`
// - `expr` is used for expressions
// - `ident` is used for variable/function names
// - `item`
// - `literal` is used for literal constants
// - `pat` (pattern)
// - `path`
// - `stmt` (statement)
// - `tt` (token tree)
// - `ty` (type)
// - `vis` (visibility qualifier)

// Macros can be overloaded to accept different combinations of arguments.
// In that regard, `macro_rules!` can work similarly to a match block
macro_rules! test {
    // The literal tokens `and` / `or` select which arm expands.
    ($left:expr; and $right:expr) => {
        println!("{:?} and {:?} is {:?}",
            stringify!($left),
            stringify!($right),
            $left && $right
        )
    };
    ($left:expr; or $right:expr) => {
        println!("{:?} or {:?} is {:?}",
            stringify!($left),
            stringify!($right),
            $left || $right
        )
    };
}

// Macros can use `+` in the argument list to indicate that an argument may
// repeat at least once, or `*`, to indicate that the argument may repeat
// zero or more times
//
// In the following example, surrounding the matcher with `$(...),+` will
// match one or more expression, separated by commas. Also note that the
// semicolon is optional on the last case.
macro_rules! find_min {
    // Base case: a single expression is its own minimum.
    ($x:expr) => ($x);
    // Recursive case: compare the head against the minimum of the tail.
    ($x:expr, $($y:expr),+) => (
        std::cmp::min($x, find_min!($($y),+))
    )
}

fn main() {
    foo();
    bar();

    print_result!(1u32 + 1);
    print_result!({ let x = 1u32; x * x + 2 * x - 1 });

    //
    test!(1i32 + 1 == 2i32; and 2i32 * 2 == 4i32);
    test!(true; or false);

    //
    println!("{}", find_min!(1u32));
    println!("{}", find_min!(1u32 + 2, 2u32));
    println!("{}", find_min!(5u32, 2u32 * 3, 4u32));
}
use tonic::{transport::Server, Request, Response, Status};

use hello_api::hello_api_server::{HelloApi, HelloApiServer};
use hello_api::{HelloRequest, HelloResponse};

/// Generated protobuf/gRPC types for the `hello` package.
pub mod hello_api {
    tonic::include_proto!("hello");
}

/// Server-side implementation of the `HelloApi` service.
#[derive(Debug, Default)]
pub struct HApi {}

#[tonic::async_trait]
impl HelloApi for HApi {
    /// Unary RPC: greet the caller by the name carried in the request.
    async fn hello(
        &self,
        request: Request<HelloRequest>,
    ) -> Result<Response<HelloResponse>, Status> {
        println!("Got a request: {:?}", request);

        Ok(Response::new(hello_api::HelloResponse {
            // `format!` already yields a `String`; the previous trailing
            // `.into()` was a no-op conversion.
            message: format!("Hello {}!", request.into_inner().name),
        }))
    }
}

/// Start the gRPC server on [::1]:50051 and serve until shutdown.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let addr = "[::1]:50051".parse()?;
    let greeter = HApi::default();

    Server::builder()
        .add_service(HelloApiServer::new(greeter))
        .serve(addr)
        .await?;

    Ok(())
}
//
// Part of Roadkill Project.
//
// Copyright 2010, 2017, Stanislav Karchebnyy <berkus@madfire.net>
//
// Distributed under the Boost Software License, Version 1.0.
// (See file LICENSE_1_0.txt or a copy at http://www.boost.org/LICENSE_1_0.txt)
//
use cgmath::Vector3;
use crate::support::{
    actor::{Actor, ActorNode},
    material::Material,
    mesh::Mesh,
    path_subst,
    texture::PixelMap,
    Error,
};
use log::*;
use std::{
    collections::{HashMap, HashSet},
    fs::File,
    io::{BufRead, BufReader},
    iter::Iterator,
    path::{Path, PathBuf},
};

// Car assembles the gameplay object (a car in this case) from various model and texture files.
pub struct Car {
    pub name: String,
    pub actors: Actor,
    pub meshes: HashMap<String, Mesh>,
    pub materials: HashMap<String, Material>,
    pub textures: HashMap<String, PixelMap>,
    pub base_translation: Vector3<f32>,
}

/// Expect next line to match provided text exactly.
///
/// # Panics
///
/// Panics when the next line differs from `text` or the input is exhausted —
/// these loaders treat malformed files as fatal.
fn expect_match<Iter: Iterator<Item = String>>(input: &mut Iter, text: &str) {
    if let Some(line) = input.next() {
        if line == text {
            return;
        }
        panic!("Expected {:?} but got {:?}", text, line);
    }
    panic!("Expected {:?} but got empty line", text);
}

/// Parse a three-component vector from a comma-separated string.
///
/// # Panics
///
/// Panics when a component is missing or fails to parse as a float.
fn parse_vector(line: &String) -> Vector3<f32> {
    let line: Vec<f32> = line.split(',').map(|i| i.trim().parse().unwrap()).collect();
    Vector3::from((line[0], line[1], line[2]))
}

/// Read systems in a single damage spec clause.
/// Currently the values are consumed but discarded.
fn read_systems<Iter: Iterator<Item = String>>(input: &mut Iter) {
    // read condition flag for this clause
    /*let condition =*/
    input.next().unwrap();
    // read systems count, read this many systems
    let systems_count = input.next().unwrap().parse().unwrap();
    for _ in 0..systems_count {
        input.next();
    }
}

/// Read all damage spec clauses.
/// Currently the values are consumed but discarded.
fn read_clauses<Iter: Iterator<Item = String>>(input: &mut Iter) {
    // read clause count, read this many systems
    let clause_count = input.next().unwrap().parse().unwrap();
    for _ in 0..clause_count {
        read_systems(input);
    }
}

/// Read a vector of strings.
/// The first line holds the count, followed by that many string lines.
fn read_vector<Iter: Iterator<Item = String>>(input: &mut Iter) -> Vec<String> {
    // read vector size, read this many strings
    let size = input.next().unwrap().parse().unwrap();
    let mut vec = Vec::<String>::with_capacity(size);
    for _ in 0..size {
        vec.push(input.next().unwrap());
    }
    vec
}

/// Consume a FUNK section without interpreting it.
/// Panics (via `unwrap`) when the input ends before "END OF FUNK".
fn read_funk<Iter: Iterator<Item = String>>(input: &mut Iter) {
    expect_match(input, "START OF FUNK");
    // for now just ignore everything here, read until END OF FUNK
    loop {
        // @todo read funk loop with NEXT FUNK as trigger
        // read_funk();
        // NEXT FUNK
        let line = input.next().unwrap();
        if line == "END OF FUNK" {
            return;
        }
    }
}

// Placeholder for parsed groove data; not populated yet.
struct Groove {}

// Read a single groove
// fn read_groove<Iter: Iterator<Item=String>>(input: &mut Iter) -> Groove {
// }

/// Consume a GROOVE section without interpreting it.
/// Panics (via `unwrap`) when the input ends before "END OF GROOVE".
fn read_grooves<Iter: Iterator<Item = String>>(input: &mut Iter) {
    expect_match(input, "START OF GROOVE");
    // for now just ignore everything here, read until END OF GROOVE
    loop {
        // @todo read groove loop with NEXT GROOVE as trigger
        // read_groove();
        // NEXT GROOVE
        let line = input.next().unwrap();
        if line == "END OF GROOVE" {
            return;
        }
    }
}

/// A bunch of some matrices and mappings or vertex-pairs, ignore for now.
fn read_some_metadata<Iter: Iterator<Item = String>>(input: &mut Iter) { input.next(); // 0.700000 input.next(); // 0.050000,0.300000 input.next(); // 0.050000 input.next(); // 0.050000 input.next(); // 0.000000 input.next(); // 0.000000 let size = input.next().unwrap().parse().unwrap(); for _ in 0..size { input.next(); // 11 input.next(); // -0.107444, -0.080211, 0.106640 input.next(); // -0.057444, 0.054463, 0.206640 input.next(); // 0.038245, 0.352418, 0.220975 input.next(); // 0.111755, 0.051602, 0.079025 let pair_count = input.next().unwrap().parse().unwrap(); for _ in 0..pair_count { input.next(); input.next(); } } } // @fixme used to patch actors now // @todo should support extra wheels pub struct Mechanics { pub lrwheel_pos: Vector3<f32>, pub rrwheel_pos: Vector3<f32>, pub lfwheel_pos: Vector3<f32>, pub rfwheel_pos: Vector3<f32>, } fn read_mechanics_block_v1_1<Iter: Iterator<Item = String>>( input: &mut Iter, ) -> Result<Mechanics, Error> { let lrwheel_pos = parse_vector(&input.next().unwrap()); trace!("Left rear wheel position: {:?}", lrwheel_pos); let rrwheel_pos = parse_vector(&input.next().unwrap()); trace!("Right rear wheel position: {:?}", rrwheel_pos); let lfwheel_pos = parse_vector(&input.next().unwrap()); trace!("Left front wheel position: {:?}", lfwheel_pos); let rfwheel_pos = parse_vector(&input.next().unwrap()); trace!("Right front wheel position: {:?}", rfwheel_pos); let centre_of_mass_pos = parse_vector(&input.next().unwrap()); trace!("Centre of mass position: {:?}", centre_of_mass_pos); Ok(Mechanics { lrwheel_pos, rrwheel_pos, lfwheel_pos, rfwheel_pos, }) } fn read_mechanics_block_v1_1_v3<Iter: Iterator<Item = String>>(input: &mut Iter) { let min_bb = parse_vector(&input.next().unwrap()); let max_bb = parse_vector(&input.next().unwrap()); trace!("Bounding box: ({:?} - {:?})", min_bb, max_bb); } // Version 2 contains count for bounding boxes (which is always 1, that's why it's removed in ver 3) fn read_mechanics_block_v1_1_v2<Iter: 
Iterator<Item = String>>(input: &mut Iter) { expect_match(input, "1"); read_mechanics_block_v1_1_v3(input); } fn read_mechanics_block_v1_2<Iter: Iterator<Item = String>>(input: &mut Iter) { // 0.5 // min turning circle radius input.next(); // 0.025, 0.025 // suspension give (forward, back) input.next(); // 0.090 // ride height (must be more than miny in bounding box ) input.next(); // 0.5 // damping factor input.next(); // 1.5 // mass in tonnes input.next(); // 1 // fractional reduction in friction when slipping input.next(); // 79, 80 // friction angle ( front and rear ) input.next(); // 0.4, 0.2, 0.816 // width, height, length(0.816, 1.216) for angular momentum calculation input.next(); } fn read_mechanics_block_v1_3<Iter: Iterator<Item = String>>(input: &mut Iter) { // 0.05, 0.05 // rolling resistance front and back input.next(); // 6 // number of gears input.next(); // 200 // speed at red line in highest gear input.next(); // 4 // acceleration in highest gear m/s^2 (i.e. engine strength) input.next(); } fn read_mechanics_block_v2<Iter: Iterator<Item = String>>(input: &mut Iter) { // 2.0 // traction fractional multiplier v. 2 input.next(); // 50 // speed at which down force = weight v. 2 input.next(); // 1.0 // brake multiplier, 1 = nomral brakes v. 2 input.next(); // 1.0 // increase in brakes per second 1 = normal v. 2 input.next(); } fn read_mechanics_block_v3<Iter: Iterator<Item = String>>(input: &mut Iter) { // 3 // 0,-0.18,-0.52 // extra point 1 v. 3 // -0.07,0.07,0.18 // extra point 2 v. 3 // 0.07,0.07,0.18 // extra point 3 v. 
3 read_vector(input); } fn read_mechanics_v2<Iter: Iterator<Item = String>>(input: &mut Iter) -> Result<Mechanics, Error> { let mech = read_mechanics_block_v1_1(input)?; read_mechanics_block_v1_1_v2(input); read_mechanics_block_v1_2(input); read_mechanics_block_v2(input); read_mechanics_block_v1_3(input); Ok(mech) } fn read_mechanics_v3<Iter: Iterator<Item = String>>(input: &mut Iter) -> Result<Mechanics, Error> { let mech = read_mechanics_block_v1_1(input)?; read_mechanics_block_v1_1_v3(input); read_mechanics_block_v3(input); read_mechanics_block_v1_2(input); read_mechanics_block_v2(input); read_mechanics_block_v1_3(input); Ok(mech) } fn read_mechanics_v4<Iter: Iterator<Item = String>>(input: &mut Iter) -> Result<Mechanics, Error> { read_mechanics_v3(input) } fn read_meshes( fname: &String, load_models: &Vec<String>, car_meshes: &mut HashMap<String, Mesh>, ) -> Result<(), Error> { let mut load_models = load_models.clone(); load_models.sort(); load_models.dedup(); debug!("Models to load: {:?}", load_models); // Now iterate all meshes and load them. 
for mesh in load_models { let mut mesh_file_name = PathBuf::from(&fname); mesh_file_name.set_file_name(mesh); let mesh_file_name = path_subst( &mesh_file_name, &Path::new("MODELS"), Some(String::from("DAT")), ); info!("### Opening mesh file {:?}", mesh_file_name); let meshes = Mesh::load_from( mesh_file_name .clone() .into_os_string() .into_string() .unwrap(), )?; for mesh in meshes { car_meshes.insert(mesh.name.clone(), mesh); } } Ok(()) } fn read_materials( fname: &String, load_materials: &HashSet<String>, car_materials: &mut HashMap<String, Material>, ) -> Result<(), Error> { for material in load_materials { let mut mat_file_name = PathBuf::from(&fname); mat_file_name.set_file_name(material); let mat_file_name = path_subst(&mat_file_name, &Path::new("MATERIAL"), None); info!("### Opening material {:?}", mat_file_name); let materials = Material::load_from( mat_file_name .clone() .into_os_string() .into_string() .unwrap(), )?; for mat in materials { car_materials.insert(mat.name.clone(), mat); } } Ok(()) } impl Car { pub fn dump(&self) { self.actors.dump(); self.actors.dump_actor_points(); for tex in &self.textures { println!("Texture {}: {}", tex.0, tex.1); } self.debug_meshes(); for mat in &self.materials { println!("Material {}:", mat.0); } } pub fn debug_meshes(&self) { for mesh in &self.meshes { debug!("Mesh {}:", mesh.0); for mat in &mesh.1.material_names { debug!("... Material {}", mat); } } } pub fn load_from(fname: String) -> Result<Car, Error> { // Load description file. 
let description_file_name = path_subst( &Path::new(fname.as_str()), &Path::new("CARS"), Some(String::from("ENC")), ); info!("### Opening car {:?}", description_file_name); let description_file = File::open(description_file_name)?; let description_file = BufReader::new(description_file); let mut input_lines = description_file.lines() .map(|line| line.unwrap()) .filter(|line| !line.starts_with("//")) // Skip whole-line comments .filter(|line| !line.is_empty()) // Skip empty lines // Separate in-line comments from data .map(|line| line.split("//").next().unwrap().trim().to_owned()); let car_name = input_lines.next().unwrap(); debug!("Car name {}", car_name); expect_match(&mut input_lines, "START OF DRIVABLE STUFF"); let driver_head_3d_offset = parse_vector(&input_lines.next().unwrap()); trace!( "Offset of driver's head in 3D space {:?}", driver_head_3d_offset ); let head_turn_angles = input_lines.next().unwrap(); trace!( "Angles to turn to make head go left and right {}", head_turn_angles ); let mirror_3d_offset_and_fov = input_lines.next().unwrap(); trace!( "Offset of 'mirror camera' in 3D space, viewing angle of mirror {}", mirror_3d_offset_and_fov ); let pratcam_borders = input_lines.next().unwrap(); trace!( "Pratcam border names (left, top, right, bottom) {}", pratcam_borders ); expect_match(&mut input_lines, "END OF DRIVABLE STUFF"); let engine_noise = input_lines.next().unwrap(); trace!( "Engine noise (normal, enclosed space, underwater) {}", engine_noise ); let stealworthy = input_lines.next().unwrap(); trace!("Cannot be stolen (without cheat): {}", stealworthy); read_clauses(&mut input_lines); read_clauses(&mut input_lines); read_clauses(&mut input_lines); read_clauses(&mut input_lines); read_clauses(&mut input_lines); read_clauses(&mut input_lines); let grid_image = input_lines.next().unwrap(); trace!("Grid image (opponent, frank, annie): {}", grid_image); let mut load_pixmaps = read_vector(&mut input_lines); load_pixmaps.append(&mut read_vector(&mut 
input_lines)); load_pixmaps.append(&mut read_vector(&mut input_lines)); let load_shadetable = read_vector(&mut input_lines); debug!("Shadetable to load: {:?}", load_shadetable); let mut load_materials = read_vector(&mut input_lines); load_materials.append(&mut read_vector(&mut input_lines)); load_materials.append(&mut read_vector(&mut input_lines)); let mut load_models = read_vector(&mut input_lines); let load_actors = read_vector(&mut input_lines); let load_actors: HashMap<isize, String> = load_actors .iter() .map(|act| act.split(",")) .map(|mut split| { ( split.next().unwrap().parse().unwrap(), String::from(split.next().unwrap()), ) }).collect(); debug!("Actors to load: {:?}", load_actors); let reflective_material = input_lines.next().unwrap(); trace!( "Name of reflective screen material (or none if non-reflective): {}", reflective_material ); // Number of steerable wheels // GroovyFunkRef of 1st steerable wheel -- this is index in the GROOVE array below // GroovyFunkRef of 2nd steerable wheel let steerable_wheels = read_vector(&mut input_lines); trace!("Steerable wheels GroovyFunkRefs: {:?}", steerable_wheels); let lfsus_gfref = input_lines.next().unwrap(); trace!("Left-front suspension parts GroovyFunkRef: {}", lfsus_gfref); let rfsus_gfref = input_lines.next().unwrap(); trace!( "Right-front suspension parts GroovyFunkRef: {}", rfsus_gfref ); let lrsus_gfref = input_lines.next().unwrap(); trace!("Left-rear suspension parts GroovyFunkRef: {}", lrsus_gfref); let rrsus_gfref = input_lines.next().unwrap(); trace!("Right-rear suspension parts GroovyFunkRef: {}", rrsus_gfref); let driven_wheels_gfref = input_lines.next().unwrap(); trace!( "Driven wheels GroovyFunkRefs (for spinning) - MUST BE 4 ITEMS: {}", driven_wheels_gfref ); let nondriven_wheels_gfref = input_lines.next().unwrap(); trace!( "Non-driven wheels GroovyFunkRefs (for spinning) - MUST BE 4 ITEMS: {}", nondriven_wheels_gfref ); let driven_wheels_diameter = input_lines.next().unwrap(); trace!("Driven 
wheels diameter: {}", driven_wheels_diameter); let nondriven_wheels_diameter = input_lines.next().unwrap(); trace!("Non-driven wheels diameter: {}", nondriven_wheels_diameter); read_funk(&mut input_lines); read_grooves(&mut input_lines); read_some_metadata(&mut input_lines); read_some_metadata(&mut input_lines); read_some_metadata(&mut input_lines); let mechanics = input_lines.next().unwrap(); if !mechanics.starts_with("START OF MECHANICS STUFF") { panic!( "Expected START OF MECHANICS STUFF, got {:?} instead", mechanics ); } let version = mechanics .split(" version ") .skip(1) .next() .unwrap() .parse() .unwrap(); let _mech = match version { 2 => read_mechanics_v2(&mut input_lines), 3 => read_mechanics_v3(&mut input_lines), 4 => read_mechanics_v4(&mut input_lines), x => panic!("Unsupported mechanics version {}", x), }?; expect_match(&mut input_lines, "END OF MECHANICS STUFF"); let some_materials = read_vector(&mut input_lines); debug!("Some other materials to use: {:?}", some_materials); // @todo More post-mechanics stuff // // Meshes // let mut car_meshes = HashMap::<String, Mesh>::new(); debug!("Meshes to load: {:?}", load_models); // Read meshes referenced from text description read_meshes(&fname, &load_models, &mut car_meshes)?; // Load actor file. 
let mut actor_file_name = PathBuf::from(&fname); let idx: isize = 0; actor_file_name.set_file_name(&load_actors[&idx]); // Read mipmap 0 actor let actor_file_name = path_subst( &actor_file_name, &Path::new("ACTORS"), Some(String::from("ACT")), ); info!("### Opening actor {:?}", actor_file_name); let car_actors = Actor::load_from(actor_file_name.into_os_string().into_string().unwrap())?; // Read meshes referenced from actor file load_models.clear(); for actor in car_actors.traverse() { match actor.data() { &ActorNode::MeshfileRef(ref name) => { if !car_meshes.contains_key(name) { load_models.push(name.clone()) } } _ => (), } } debug!("Extra meshes to load: {:?}", load_models); read_meshes(&fname, &load_models, &mut car_meshes)?; // // Materials // let mut load_materials: HashSet<String> = load_materials.iter().map(|s| s.clone()).collect(); debug!("Materials to load: {:?}", load_materials); let mut car_materials = HashMap::<String, Material>::new(); read_materials(&fname, &load_materials, &mut car_materials)?; load_materials.clear(); for mat in some_materials { if !car_materials.contains_key(&mat) { load_materials.insert(mat.clone()); } } debug!("Extra materials to load: {:?}", load_materials); read_materials(&fname, &load_materials, &mut car_materials)?; // Load palette from PIX file. 
let mut pal_file_name = PathBuf::from(&fname); pal_file_name.set_file_name("DRRENDER.PAL"); let pal_file_name = path_subst(&pal_file_name, &Path::new("REG/PALETTES"), None); info!("### Opening palette {:?}", pal_file_name); let palette = &PixelMap::load_from(pal_file_name.into_os_string().into_string().unwrap())?[0]; for x in 0..palette.units { trace!( "Palette alpha {}", palette.data[(x * palette.unit_bytes + 0) as usize] ); } let load_pixmaps: HashSet<_> = load_pixmaps.iter().collect(); debug!("Pixmaps to load: {:?}", load_pixmaps); let mut car_textures = HashMap::<String, PixelMap>::new(); for pixmap in load_pixmaps { let mut pix_file_name = PathBuf::from(&fname); pix_file_name.set_file_name(pixmap); let pix_file_name = path_subst(&pix_file_name, &Path::new("PIXELMAP"), None); info!("### Opening pixelmap {:?}", pix_file_name); let pix = PixelMap::load_from( pix_file_name .clone() .into_os_string() .into_string() .unwrap(), )?; for pmap in pix { let pmap = pmap.remap_via(&palette)?; car_textures.insert(pmap.name.clone(), pmap); } } Ok(Car { name: car_name, actors: car_actors, meshes: car_meshes, materials: car_materials, textures: car_textures, base_translation: Vector3::from([0f32, 0f32, 0f32]), }) } }
// Contest-style solution: reads n, admission capacities a and b, and a result
// string s, then prints one "Yes"/"No" line per character of s.
// NOTE(review): looks like an AtCoder qualifier-admission task ('a' = one
// category, 'b' = rank-limited category, 'c' = never admitted) — confirm
// against the original problem statement.
fn main() {
    proconio::input! {
        n: usize,
        a: i32,
        b: i32,
        s: String,
    }
    let cs: Vec<char> = s.chars().collect();
    // Running tallies of admitted 'a' and 'b' entrants.
    let mut count_a = 0;
    let mut count_b = 0;
    for i in 0..cs.len() {
        let c = cs[i];
        // 'c' entrants are never admitted.
        if c == 'c' {
            println!("No");
            continue;
        }
        if c == 'a' {
            // 'a' is admitted while total admissions are below a + b.
            if count_a + count_b < a + b {
                println!("Yes");
                count_a += 1;
                continue;
            } else {
                println!("No");
            }
        } else {
            // 'b' additionally requires fewer than b 'b'-admissions so far.
            if count_a + count_b < a + b && count_b < b {
                println!("Yes");
                count_b += 1;
                continue;
            } else {
                println!("No");
            }
        }
    }
}
// GitHub OAuth authorization client, written against pre-serde `rustc_serialize`
// and an old `hyper` (synchronous Client, `header!` macro, `try!`).
use std::io::Read;
use std::collections::HashMap;
use std::convert::From;
use std::str::FromStr;
use rustc_serialize::base64::{ToBase64, MIME};
use hyper::Client;
use hyper::header::{Accept, ContentType, ContentLength, UserAgent, qitem};
use hyper::mime::Mime;
use rustc_serialize::json;
use rustc_serialize::{Encoder, Encodable, Decoder, Decodable, EncoderHelpers};

// Declares a plain `Authorization: <string>` header type for hyper.
header! { (Authorization, "Authorization") => [String] }

// Request body for POST /authorizations (create a new OAuth token).
pub struct AuthBody {
    pub scopes: Vec<String>, // A list of scopes that this authorization is in.
    pub note: String, // A note to remind you what the OAuth token is for.
    pub note_url: String, // A URL to remind you what app the OAuth token is for.
    pub client_id: String, // The 20 character OAuth app client key for which to create the token.
    pub client_secret: String // The 40 character OAuth app client secret for which to create the token.
}

// Request body for PUT /authorizations/clients/:client_id
// (get-or-create a token for a specific OAuth app; client_id travels in the URL).
pub struct AuthBody2 {
    /// The 40 character OAuth app client secret associated with the client ID specified in the URL.
    pub client_secret: String,
    /// A list of scopes that this authorization is in.
    pub scopes: Vec<String>,
    /// A note to remind you what the OAuth token is for.
    pub note: String,
    /// A URL to remind you what app the OAuth token is for.
    pub note_url: String
}

// Deserialized GitHub authorization object as returned by the API.
#[derive(Debug)]
pub struct TokenResponse {
    pub id: String,
    pub url: String,
    pub app: HashMap<String, String>,
    pub token: String,
    pub hashed_token: String,
    pub token_last_eight: String,
    pub note: String,
    pub note_url: String,
    pub created_at: String,
    pub updated_at: String,
    pub scopes: Vec<String>,
    pub fingerprint: Option<String>
}

// Hand-written JSON encoding (rustc_serialize style): emits fields in
// declaration order with explicit indices.
impl Encodable for AuthBody {
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        s.emit_struct("AuthBody", 5, |s| {
            try!(s.emit_struct_field("scopes", 0, |s| {
                s.emit_from_vec(&self.scopes, |s,e| { s.emit_str(e) })
            }));
            try!(s.emit_struct_field("note", 1, |s| { s.emit_str(&self.note) }));
            try!(s.emit_struct_field("note_url", 2, |s| { s.emit_str(&self.note_url) }));
            try!(s.emit_struct_field("client_id", 3, |s| { s.emit_str(&self.client_id) }));
            try!(s.emit_struct_field("client_secret", 4, |s| { s.emit_str(&self.client_secret) }));
            Ok(())
        })
    }
}

impl Encodable for AuthBody2 {
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        s.emit_struct("AuthBody2", 4, |s| {
            try!(s.emit_struct_field("client_secret", 0, |s| { s.emit_str(&self.client_secret) }));
            try!(s.emit_struct_field("scopes", 1, |s| {
                s.emit_from_vec(&self.scopes, |s,e| { s.emit_str(e) })
            }));
            try!(s.emit_struct_field("note", 2, |s| { s.emit_str(&self.note) }));
            try!(s.emit_struct_field("note_url", 3, |s| { s.emit_str(&self.note_url) }));
            Ok(())
        })
    }
}

// Hand-written JSON decoding. Field indices mirror the struct order.
impl Decodable for TokenResponse {
    fn decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error> {
        d.read_struct("root", 0, |d| {
            // GitHub returns `id` as a number; it is stored stringified.
            let id: usize = try!(d.read_struct_field("id", 0, |d| d.read_usize()));
            Ok(TokenResponse {
                id: id.to_string(),
                url: try!(d.read_struct_field("url", 1, |d| Decodable::decode(d))),
                app: try!(d.read_struct_field("app", 2, |d| Decodable::decode(d))),
                token: try!(d.read_struct_field("token", 3, |d| Decodable::decode(d))),
                hashed_token: try!(d.read_struct_field("hashed_token", 4, |d| Decodable::decode(d))),
                token_last_eight: try!(d.read_struct_field("token_last_eight", 5, |d| Decodable::decode(d))),
                note: try!(d.read_struct_field("note", 6, |d| Decodable::decode(d))),
                note_url: try!(d.read_struct_field("note_url", 7, |d| Decodable::decode(d))),
                created_at: try!(d.read_struct_field("created_at", 8, |d| Decodable::decode(d))),
                updated_at: try!(d.read_struct_field("updated_at", 9, |d| Decodable::decode(d))),
                scopes: try!(d.read_struct_field("scopes", 10, |d| Decodable::decode(d))),
                fingerprint: try!(d.read_struct_field("fingerprint", 11, |d| Decodable::decode(d)))
            })
        })
    }
}

// Creates a new authorization via POST using HTTP Basic auth.
// Returns the raw JSON response body on success, an error string otherwise.
pub fn create_authorization(username: String, password: String, body: AuthBody) -> Result<String, String> {
    // Basic-auth credential: base64("user:password").
    let auth_str = String::from("Basic ") + &(username + ":" + &password).as_bytes().to_base64(MIME);
    // NOTE(review): MIME types are conventionally lowercase ("application/json");
    // confirm hyper's parser and GitHub both accept this capitalized form before changing.
    let content_type: Mime = try!(Mime::from_str("Application/Json").map_err(|_| "Mime type not found"));
    let msg = try!(json::encode(&body).map_err(|err| err.to_string()));
    let client = Client::new();
    let request = client.post("https://api.github.com/authorizations").body(&msg)
        .header(Authorization(auth_str))
        .header(ContentType(content_type.clone()))
        .header(ContentLength(msg.as_bytes().len() as u64))
        .header(Accept(vec![qitem(content_type)]))
        .header(UserAgent("rusthub".to_string()));
    let mut response = try!(request.send().map_err(|err| err.to_string()));
    let mut json_str = String::new();
    match response.read_to_string(&mut json_str) {
        Ok(_) => Ok(json_str),
        Err(err) => Err(err.to_string())
    }
}

/// Sends a request to either get a valid token with the supplied client_id, or create a new one
/// Returns a result where Ok == valid response, and Err == Error description
pub fn get_authorization(username: String, password: String, body: AuthBody2, client_id: String) -> Result<TokenResponse, String> {
    let auth_str = String::from("Basic ") + &(username + ":" + &password).as_bytes().to_base64(MIME);
    let content_type: Mime = try!(Mime::from_str("Application/Json").map_err(|_| "Mime type not found"));
    let msg = try!(json::encode(&body).map_err(|err| err.to_string()));
    let client = Client::new();
    let url = "https://api.github.com/authorizations/clients/".to_string() + &client_id;
    let request = client.put(&url).body(&msg)
        .header(Authorization(auth_str))
        .header(ContentType(content_type.clone()))
        .header(ContentLength(msg.as_bytes().len() as u64))
        .header(Accept(vec![qitem(content_type)]))
        .header(UserAgent("rusthub".to_string()));
    let mut response = try!(request.send().map_err(|err| err.to_string()));
    let mut json_str = String::new();
    match response.read_to_string(&mut json_str) {
        Ok(_) => {
            // Decode the JSON body into a TokenResponse.
            let result = json::decode(&json_str);
            match result {
                Ok(token_response) => Ok(token_response),
                Err(err) => Err(err.to_string())
            }
        },
        Err(err) => Err(err.to_string())
    }
}
use serde::{Deserialize, Serialize};

use common::event::EventPublisher;
use common::result::Result;

use crate::domain::role::{Role, RoleId};
use crate::domain::user::{
    Email, Identity, Password, Provider, User, UserRepository, UserService, Username,
};

// Input payload for the user-registration use case.
#[derive(Deserialize)]
pub struct RegisterCommand {
    pub username: String,
    pub email: String,
    pub password: String,
}

// Output of a successful registration.
#[derive(Serialize)]
pub struct RegisterResponse {
    pub id: String,
    pub validation_code: String, // TODO: remove, only for testing
}

// Use case: register a new local-provider user with the default "user" role.
pub struct Register<'a> {
    event_pub: &'a dyn EventPublisher,
    user_repo: &'a dyn UserRepository,
    user_serv: &'a UserService,
}

impl<'a> Register<'a> {
    pub fn new(
        event_pub: &'a dyn EventPublisher,
        user_repo: &'a dyn UserRepository,
        user_serv: &'a UserService,
    ) -> Self {
        Register {
            event_pub,
            user_repo,
            user_serv,
        }
    }

    // Checks availability, hashes the password, builds and persists the user,
    // then publishes the resulting domain events.
    pub async fn exec(&self, cmd: RegisterCommand) -> Result<RegisterResponse> {
        // Fails when the username or email is already taken.
        self.user_serv.available(&cmd.username, &cmd.email).await?;

        let hashed_password = self.user_serv.generate_password(&cmd.password)?;

        let mut user = User::new(
            self.user_repo.next_id().await?,
            Identity::new(
                Provider::Local,
                Username::new(cmd.username)?,
                Email::new(cmd.email)?,
                Some(Password::new(hashed_password)?),
            )?,
            Role::new(RoleId::new("user")?, "User")?,
        )?;

        self.user_repo.save(&mut user).await?;

        self.event_pub.publish_all(user.base().events()?).await?;

        Ok(RegisterResponse {
            id: user.base().id().to_string(),
            validation_code: user.validation().unwrap().code().to_string(),
        })
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    use crate::domain::user::UserId;
    use crate::mocks;

    #[tokio::test]
    async fn new_user() {
        let c = mocks::container();
        let uc = Register::new(c.event_pub(), c.user_repo(), c.user_serv());

        let cmd = RegisterCommand {
            username: "new-user".to_owned(),
            email: "new@user.com".to_owned(),
            password: "P@asswd!".to_owned(),
        };

        let res = uc.exec(cmd).await.unwrap();

        let saved_user = c
            .user_repo()
            .find_by_id(&UserId::new(&res.id).unwrap())
            .await
            .unwrap();
        assert_eq!(res.id, saved_user.base().id().value());
        assert_eq!(saved_user.identity().username().value(), "new-user");
        assert_eq!(saved_user.identity().email().value(), "new@user.com");
        // The stored password must be the hash, never the plaintext.
        assert_ne!(
            saved_user.identity().password().unwrap().value(),
            "P@asswd!"
        );

        assert_eq!(c.event_pub().events().await.len(), 1);
    }

    #[tokio::test]
    async fn invalid_data() {
        let c = mocks::container();
        let uc = Register::new(c.event_pub(), c.user_repo(), c.user_serv());

        let mut user = mocks::user1();
        c.user_repo().save(&mut user).await.unwrap();

        // Username too short.
        assert!(uc
            .exec(RegisterCommand {
                username: "us".to_owned(),
                email: "new@user.com".to_owned(),
                password: "P@asswd!".to_owned(),
            })
            .await
            .is_err());
        // Malformed email.
        assert!(uc
            .exec(RegisterCommand {
                username: "new-user".to_owned(),
                email: "invalid-email".to_owned(),
                password: "P@asswd!".to_owned(),
            })
            .await
            .is_err());
        // Weak password.
        assert!(uc
            .exec(RegisterCommand {
                username: "new-user".to_owned(),
                email: "new@user.com".to_owned(),
                password: "1234".to_owned(),
            })
            .await
            .is_err());
    }

    #[tokio::test]
    async fn existing_user() {
        let c = mocks::container();
        let uc = Register::new(c.event_pub(), c.user_repo(), c.user_serv());

        let mut user = mocks::user1();
        c.user_repo().save(&mut user).await.unwrap();

        // Same username and email already registered.
        assert!(uc
            .exec(RegisterCommand {
                username: user.identity().username().to_string(),
                email: user.identity().email().to_string(),
                password: "P@asswd!".to_owned(),
            })
            .await
            .is_err());
        // Email already taken.
        assert!(uc
            .exec(RegisterCommand {
                username: "other".to_owned(),
                email: user.identity().email().to_string(),
                password: "P@asswd!".to_owned(),
            })
            .await
            .is_err());
        // Username already taken.
        assert!(uc
            .exec(RegisterCommand {
                username: user.identity().username().to_string(),
                email: "other@other.com".to_owned(),
                password: "P@asswd!".to_owned(),
            })
            .await
            .is_err());
    }
}
// Demonstrates a deferred borrow: `r` is declared first and bound to `&x` later.
fn main() {
    let r;
    let x = 5;
    r = &x;
    // `x` is dropped only at the end of `main`, *after* the last use of `r`,
    // so the borrow is valid and this prints 5. (The previous comment claimed
    // `r` would dangle — it would only if `r` were used after `x` went out of
    // scope, which the borrow checker would reject at compile time.)
    println!("{}", r);
}
// Requires a nightly compiler: `simd` was an unstable feature gate.
#![feature(simd)]

// NOTE(review): `#[simd]` was the vector-type attribute on old nightlies;
// modern Rust spells it `#[repr(simd)]`. Left as-is — this appears to be a
// historical feature-gate exercise.
#[simd]
struct T(f64, f64, f64);

// A static of the SIMD type, exercising constant handling of SIMD values.
static X: T = T(0.0, 0.0, 0.0);

fn main() {
    // Touch the static so it is not optimized out of the check.
    let _ = X;
}
use plotters::coord::Shift;
use plotters::prelude::*;
use regex::Regex;
use std::io::BufRead;

// Pixel height of one bar row and pixel width of one time unit.
const BAR_HEIGHT: i32 = 18;
const BAR_WIDTH: i32 = 5;

// Reads fetch-timing log lines from stdin and renders them as a
// Gantt-style SVG chart written to `plot.svg`.
fn main() {
    let bars = parse(std::io::stdin().lock());
    // Canvas sized to fit the right-most bar end, plus room for text labels.
    let width = bars
        .iter()
        .map(|bar| bar.begin + bar.length)
        .max()
        .unwrap_or(0) as u32
        * BAR_WIDTH as u32
        + 200;
    let height = bars.len() as u32 * BAR_HEIGHT as u32 + 5;
    println!("Drawing area: {}, {}", width, height);
    let drawing_area = SVGBackend::new("plot.svg", (width, height)).into_drawing_area();
    draw(drawing_area, &bars);
}

// Draws each bar as a filled rectangle with a "label(id)" caption at its start.
fn draw<DB: DrawingBackend>(drawing_area: DrawingArea<DB, Shift>, bars: &[Bar]) {
    let text_style = TextStyle::from(("monospace", BAR_HEIGHT).into_font()).color(&BLACK);
    for (i, bar) in bars.iter().enumerate() {
        let i = i as i32;
        // Rectangle corners in pixel space; +2 keeps zero-length bars visible.
        let rect = [
            (BAR_WIDTH * bar.begin, BAR_HEIGHT * i),
            (
                BAR_WIDTH * (bar.begin + bar.length) + 2,
                BAR_HEIGHT * (i + 1),
            ),
        ];
        drawing_area
            .draw(&Rectangle::new(
                rect,
                ShapeStyle {
                    color: bar.color.to_rgba(),
                    filled: true,
                    stroke_width: 0,
                },
            ))
            .unwrap();
        drawing_area
            .draw_text(
                &format!("{}({})", bar.label, bar.id),
                &text_style,
                (BAR_WIDTH * bar.begin, BAR_HEIGHT * i),
            )
            .unwrap();
    }
}

// Parses log lines of two shapes:
//   "[<start>] # <label>(<id>) will complete in <len> ms" -> one bar
//   "## data = d:<id> (<status>)" -> recolors matching get_data bars
// and normalizes all bar start times to the earliest one.
fn parse(input: impl BufRead) -> Vec<Bar> {
    let re_fetch =
        Regex::new(r"^\[(\d+)\] \#{1,2} ([a-z_]+)\((\d+)\) will complete in (\d+) ms$").unwrap();
    let re_data = Regex::new(r"^\#\# data = d:(\d+) \(([a-z]+)\)$").unwrap();
    let mut bars = Vec::new();
    for line in input.lines() {
        let line = line.unwrap();
        if let Some(caps) = re_fetch.captures(&line) {
            println!("Line matches fetching: {}", line);
            let label = caps[2].to_owned();
            // Base color chosen by operation kind.
            let color = if label == "get_page" || label == "get_data" {
                RGBColor(0xA0, 0xC0, 0xFF)
            } else if label == "fetch_resource" {
                RGBColor(0xA0, 0xFF, 0xC0)
            } else {
                RGBColor(0xC0, 0xC0, 0xC0)
            };
            bars.push(Bar {
                begin: caps[1].parse().unwrap(),
                length: caps[4].parse().unwrap(),
                label,
                id: caps[3].parse().unwrap(),
                color,
            });
        }
        if let Some(caps) = re_data.captures(&line) {
            println!("Line matches data: {}", line);
            let id: usize = caps[1].parse().unwrap();
            let status = &caps[2];
            // Recolor the corresponding get_data bar by validation status.
            for bar in bars.iter_mut() {
                if bar.label == "get_data" && bar.id == id {
                    if status == "valid" {
                        bar.color = GREEN;
                    } else {
                        bar.color = RED;
                    }
                }
            }
        }
    }
    // Normalize for the starting time.
    let start = bars.iter().map(|bar| bar.begin).min().unwrap_or(0);
    for bar in bars.iter_mut() {
        bar.begin -= start;
    }
    bars
}

// One rendered bar: occupies [begin, begin + length) in time units.
struct Bar {
    begin: i32,
    length: i32,
    label: String,
    id: usize,
    color: RGBColor,
}
/// Demo entry point: sorts a fixed sample array in place and prints it
/// before and after.
fn main() {
    let mut s = [15, 10, 12, 20, 25, 13, 27, 22];
    println!("before: {:?}", &s);
    quick_sort(&mut s);
    println!("after: {:?}", &s);
}

/// Sorts the slice in place with quicksort (Lomuto partition, first element
/// as pivot). Empty and single-element slices are no-ops.
fn quick_sort(s: &mut [i32]) {
    let s_len = s.len();
    if s_len < 2 {
        return;
    }
    quick_sort_iter(s, 0, s_len - 1);
}

/// Recursively sorts the inclusive range `s[low..=high]`.
fn quick_sort_iter(s: &mut [i32], low: usize, high: usize) {
    if low >= high {
        return;
    }
    let pivot_index = partition(s, low, high);
    // BUG FIX: when the pivot lands at `low` (it was already the smallest
    // element, e.g. on sorted input), `pivot_index - 1` underflowed the
    // unsigned index — panic in debug, out-of-bounds range in release.
    if pivot_index > low {
        quick_sort_iter(s, low, pivot_index - 1);
    }
    quick_sort_iter(s, pivot_index + 1, high);
}

/// Lomuto partition over `s[low..=high]` using `s[low]` as the pivot.
/// Returns the pivot's final index: elements to its left are strictly
/// smaller, elements to its right are greater or equal.
fn partition(s: &mut [i32], low: usize, high: usize) -> usize {
    let pivot_item = s[low];
    let mut i = low + 1; // scan cursor
    let mut j = low; // right edge of the "< pivot" region
    while i <= high {
        if s[i] < pivot_item {
            j = j + 1;
            s.swap(i, j);
        }
        i = i + 1;
    }
    // Move the pivot between the two regions.
    s.swap(low, j);
    j
}
// NOTE(review): this is pre-1.0 Rust (`uint`, `Show`, bare `Reader`, implicit
// `Path` in the prelude, `val0()`, `as_slice()`); it will not compile on a
// modern toolchain and is documented here as-is.
use std::io;
use std::fmt::Show;

use conduit;
use conduit::{Handler, Request, Response};
use conduit_middleware;
use conduit_static::Static;
use semver;

// Around-middleware that serves static files from `dist/`, falls back to
// `index.html` for HTML requests, and proxies everything else to the
// wrapped handler.
pub struct Middleware {
    handler: Option<Box<Handler + Send + Sync>>,
    dist: Static,
}

impl Middleware {
    pub fn new() -> Middleware {
        Middleware {
            handler: None,
            dist: Static::new(Path::new("dist")),
        }
    }
}

impl conduit_middleware::AroundMiddleware for Middleware {
    // Stores the downstream handler this middleware wraps.
    fn with_handler(&mut self, handler: Box<Handler + Send + Sync>) {
        self.handler = Some(handler);
    }
}

impl Handler for Middleware {
    fn call(&self, req: &mut Request) -> Result<Response, Box<Show + 'static>> {
        // First, attempt to serve a static file. If we're missing a static
        // file, then keep going.
        match self.dist.call(req) {
            Ok(ref resp) if resp.status.val0() == 404 => {}
            ret => return ret,
        }

        // Second, if we're requesting html, then we've only got one page so
        // serve up that page. Otherwise proxy on to the rest of the app.
        let wants_html = {
            let content = req.headers().find("Accept").unwrap_or(Vec::new());
            content.iter().any(|s| s.contains("html"))
        };
        return if wants_html {
            self.dist.call(&mut RequestProxy {
                other: req,
                path_override: "/index.html",
            })
        } else {
            self.handler.as_ref().unwrap().call(req)
        };

        // Delegating proxy: forwards every Request method to the wrapped
        // request except `path`, which reports the forced override.
        struct RequestProxy<'a> {
            other: &'a mut Request + 'a,
            path_override: &'a str,
        }
        impl<'a> Request for RequestProxy<'a> {
            fn http_version(&self) -> semver::Version {
                self.other.http_version()
            }
            fn conduit_version(&self) -> semver::Version {
                self.other.conduit_version()
            }
            fn method(&self) -> conduit::Method {
                self.other.method()
            }
            fn scheme(&self) -> conduit::Scheme {
                self.other.scheme()
            }
            fn host<'a>(&'a self) -> conduit::Host<'a> {
                self.other.host()
            }
            fn virtual_root<'a>(&'a self) -> Option<&'a str> {
                self.other.virtual_root()
            }
            // The single override point.
            fn path<'a>(&'a self) -> &'a str {
                self.path_override.as_slice()
            }
            fn query_string<'a>(&'a self) -> Option<&'a str> {
                self.other.query_string()
            }
            fn remote_ip(&self) -> io::net::ip::IpAddr {
                self.other.remote_ip()
            }
            fn content_length(&self) -> Option<uint> {
                self.other.content_length()
            }
            fn headers<'a>(&'a self) -> &'a conduit::Headers {
                self.other.headers()
            }
            fn body<'a>(&'a mut self) -> &'a mut Reader {
                self.other.body()
            }
            fn extensions<'a>(&'a self) -> &'a conduit::Extensions {
                self.other.extensions()
            }
            fn mut_extensions<'a>(&'a mut self) -> &'a mut conduit::Extensions {
                self.other.mut_extensions()
            }
        }
    }
}
use crate::schema::users;
use diesel::pg::PgConnection;
use diesel::prelude::*;
use diesel::{Connection, Identifiable, QueryResult, Queryable};
use validator::Validate;

/// Typed wrapper for the `users.id` primary key so user ids can't be mixed
/// up with arbitrary `i32`s.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Identifiable)]
#[table_name = "users"]
pub struct UserId(#[column_name = "id"] pub i32);

/// One row of the `users` table. Field order must match the table's column
/// order for the `Queryable` derive to map correctly.
#[derive(Clone, Debug, PartialEq, Eq, Queryable, Identifiable)]
pub struct User {
    pub id: i32,
    pub username: String,
    pub display_name: String,
    pub password_hash: String,
    pub email_address: String,
    // NOTE(review): presumably chooses between an email-hash avatar and
    // `gravatar_alternative` — confirm against the rendering code.
    pub use_email_address_for_gravatar: bool,
    pub gravatar_alternative: String,
}

impl User {
    /// Looks a user up by primary key; `Ok(None)` when no such row exists.
    pub fn by_id(conn: &PgConnection, id: UserId) -> QueryResult<Option<User>> {
        users::table.find(id.0).first(conn).optional()
    }

    /// Case-insensitive lookup by username (SQL `ILIKE`).
    ///
    /// NOTE(review): `ILIKE` interprets `%`/`_` in `username` as wildcards;
    /// confirm callers sanitize input or that this is intended.
    pub fn by_username(conn: &PgConnection, username: &str) -> QueryResult<Option<User>> {
        users::table
            .filter(users::username.ilike(username))
            .first(conn)
            .optional()
    }

    /// Case-insensitive lookup by email address (`ILIKE`; same wildcard
    /// caveat as `by_username`).
    pub fn by_email_address(conn: &PgConnection, email_address: &str) -> QueryResult<Option<User>> {
        users::table
            .filter(users::email_address.ilike(email_address))
            .first(conn)
            .optional()
    }

    /// This row's id as the typed `UserId` wrapper.
    pub fn get_id(&self) -> UserId {
        UserId(self.id)
    }
}

/// Insertable form of a user, carrying `validator` constraints.
///
/// NOTE(review): `create` does not run the validation itself — callers are
/// expected to invoke `validate()` first.
#[derive(Insertable, Debug, Default, Validate)]
#[table_name = "users"]
pub struct NewUser {
    #[validate(length(min = 1, max = 40))]
    pub username: String,
    #[validate(length(min = 1, max = 40))]
    pub display_name: String,
    pub password_hash: String,
    #[validate(length(max = 255))]
    #[validate(email)]
    pub email_address: String,
    // Private: fixed by `new`, not settable by callers.
    use_email_address_for_gravatar: bool,
    gravatar_alternative: String,
}

impl NewUser {
    /// Builds a new user. The login name is stored lowercased while the
    /// original casing is kept as the display name; the email address is
    /// lowercased wholesale.
    pub fn new(username: String, password_hash: String, email_address: String) -> Self {
        NewUser {
            username: username.to_lowercase(),
            display_name: username,
            password_hash,
            // TODO: in principle, only the host-part of the email address should be lowercased.
            email_address: email_address.to_lowercase(),
            use_email_address_for_gravatar: true,
            gravatar_alternative: random_string::unambiguous_string(32),
        }
    }

    /// Inserts the user row; returns `Ok(None)` when a conflicting row
    /// already exists (`ON CONFLICT DO NOTHING`).
    pub fn create(&self, conn: &PgConnection) -> QueryResult<Option<User>> {
        use crate::schema::users::dsl::*;
        // NOTE(review): the transaction wraps a single statement; it is
        // redundant for atomicity but kept as-is to preserve behavior.
        conn.transaction(|| {
            let maybe_inserted = diesel::insert_into(users)
                .values(self)
                .on_conflict_do_nothing()
                .get_result::<User>(conn)
                .optional()?;
            Ok(maybe_inserted)
        })
    }
}
use std::collections::HashMap;
use glium::{Display, Frame, Surface, DrawParameters, Program, VertexBuffer, IndexBuffer, texture::{Texture2d, SrgbTexture2d}, uniforms::{Uniforms, UniformValue}};
use crate::math::{mult_matrix, mult_matrix3};
use crate::textures;
use serde::Deserialize;

// Hard upper bound on scene lights; must match the uniform array size in
// the shaders (every slot is uploaded each draw — see `visit_values`).
pub const MAX_LIGHTS: usize = 24;

/// CPU-side vertex layout, mirrored into glium via `implement_vertex!`.
#[derive(Debug, Copy, Clone)]
pub struct Vertex {
    pub position: [f32; 3],
    pub normal: [f32; 3],
    pub texcoords: [f32; 2]
}
implement_vertex!(Vertex, position, normal, texcoords);

/// Material info: optional diffuse-texture key (into
/// `EnvDrawInfo::textures`) plus a base color.
#[derive(Clone)]
pub struct MtlInfo {
    pub diffuse_texture: Option<String>,
    pub color: [f32; 3]
}

impl Default for MtlInfo {
    fn default() -> Self {
        // White and untextured.
        Self { color: [1., 1., 1.], diffuse_texture: None }
    }
}

/// Per-frame, scene-wide draw state shared by all objects.
pub struct EnvDrawInfo<'a> {
    pub view_mat: [[f32; 4]; 4],
    pub perspective_mat: [[f32; 4]; 4],
    pub params: &'a DrawParameters<'a>,
    // Fixed-size array; only the first `light_count` entries are meaningful.
    pub lights: [Light; MAX_LIGHTS],
    pub light_count: usize,
    // Texture cache keyed by name (see `MtlInfo::diffuse_texture`).
    pub textures: &'a HashMap<String, Texture2d>
}

/// Per-object transform and tint; `model_mat` is a cache rebuilt by
/// `generate_matrix`.
pub struct ObjDrawInfo {
    pub position: [f32; 3],
    // Euler angles in radians (x, y, z — per `generate_matrix`).
    pub rotation: [f32; 3],
    pub scale: [f32; 3],
    pub model_mat: Option<[[f32; 4]; 4]>,
    pub color: [f32; 3]
}

impl Default for ObjDrawInfo {
    fn default() -> Self {
        let mut result = Self {
            position: [0., 0., 0.],
            rotation: [0., 0., 0.],
            scale: [1., 1., 1.],
            color: [1., 1., 1.],
            model_mat: None
        };
        // The default value is immediately drawable: matrix pre-baked.
        result.generate_matrix();
        result
    }
}

/// Transform, clip and tint state for UI quads, cached per screen size.
pub struct UIDrawInfo {
    pub position: (f32, f32),
    pub scale: (f32, f32),
    // Left clip edge in UI units; converted to screen units on rebuild.
    pub left_clip: f32,
    pub screen_left_clip: f32,
    // Framebuffer size the matrix was last built for (cache key).
    pub screen_dim: (u32, u32),
    pub color: [f32; 4],
    pub model_mat: Option<[[f32; 3]; 3]>,
    // When true, translation is applied after aspect-corrected scaling.
    pub translate_after_scale: bool
}

/// GPU-resident geometry plus its (optional) material.
pub struct ObjDef {
    // Reference position — the first vertex's (see `load_data_to_gpu`).
    pub position: [f32; 3],
    pub vertices: VertexBuffer<Vertex>,
    pub indices: IndexBuffer<u32>,
    pub material: Option<MtlInfo>
}

/// Point light with attenuation coefficients; deserializable from scene
/// files, with missing fields filled from `Default`.
#[derive(Copy, Clone, Deserialize)]
#[serde(default)]
pub struct Light {
    pub position: [f32; 3],
    pub att_linear: f32,
    pub att_quad: f32,
    pub att_constant: f32
}

impl Default for Light {
    fn default() -> Self {
        Self {
            position: [0., 0., 0.],
            att_linear: 0.,
            att_constant: 0.,
            // Purely quadratic falloff by default.
            att_quad: 1.
        }
    }
}

// Uniform bundle for `basic_render`; the names emitted in `visit_values`
// must match the shader's uniform names exactly.
struct BasicDrawUniforms<'a> {
    lights: [Light; MAX_LIGHTS],
    light_count: i32,
    model: [[f32; 4]; 4],
    view: [[f32; 4]; 4],
    perspective: [[f32; 4]; 4],
    mtl_color: [f32; 3],
    obj_color: [f32; 3],
    texcoord_displacement: [f32; 2],
    // [1,1,1,1] for untextured draws, [0,0,0,0] for textured ones —
    // NOTE(review): consumed shader-side; confirm exact use there.
    min_text_val: [f32; 4],
    tex: &'a Texture2d
}

impl Uniforms for BasicDrawUniforms<'_> {
    fn visit_values<'a, F: FnMut(&str, UniformValue<'a>)>(&'a self, mut func: F) {
        // All MAX_LIGHTS slots are uploaded every call; the shader is
        // expected to read only the first `light_count` of them.
        for i in 0..MAX_LIGHTS {
            func(&format!("lights[{}].position", i).to_string(), UniformValue::Vec3(self.lights[i].position));
            func(&format!("lights[{}].att_constant", i).to_string(), UniformValue::Float(self.lights[i].att_constant));
            func(&format!("lights[{}].att_linear", i).to_string(), UniformValue::Float(self.lights[i].att_linear));
            func(&format!("lights[{}].att_quad", i).to_string(), UniformValue::Float(self.lights[i].att_quad));
        }
        func("light_count", UniformValue::SignedInt(self.light_count));
        func("model", UniformValue::Mat4(self.model));
        func("view", UniformValue::Mat4(self.view));
        func("perspective", UniformValue::Mat4(self.perspective));
        func("mtl_color", UniformValue::Vec3(self.mtl_color));
        func("obj_color", UniformValue::Vec3(self.obj_color));
        func("texcoord_displacement", UniformValue::Vec2(self.texcoord_displacement));
        func("tex", UniformValue::Texture2d(self.tex, None));
        func("min_text_val", UniformValue::Vec4(self.min_text_val));
    }
}

impl ObjDrawInfo {
    /// Rebuilds the cached model matrix from rotation, scale and position.
    pub fn generate_matrix(&mut self) {
        // Combined X-Y-Z Euler rotation written out element by element
        // (rotation[0] = x, [1] = y, [2] = z, radians).
        let rotation_matrix = [
            [
                self.rotation[2].cos() * self.rotation[1].cos(),
                (self.rotation[2].cos() * self.rotation[1].sin() * self.rotation[0].sin()) - (self.rotation[2].sin() * self.rotation[0].cos()),
                (self.rotation[2].cos() * self.rotation[1].sin() * self.rotation[0].cos()) + (self.rotation[2].sin() * self.rotation[0].sin()),
                0.0
            ],
            [
                self.rotation[2].sin() * self.rotation[1].cos(),
                (self.rotation[2].sin() * self.rotation[1].sin() * self.rotation[0].sin()) + (self.rotation[2].cos() * self.rotation[0].cos()),
                (self.rotation[2].sin() * self.rotation[1].sin() * self.rotation[0].cos()) - (self.rotation[2].cos() * self.rotation[0].sin()),
                0.0
            ],
            [
                -self.rotation[1].sin(),
                self.rotation[1].cos() * self.rotation[0].sin(),
                self.rotation[1].cos() * self.rotation[0].cos(),
                0.0
            ],
            [0.0, 0.0, 0.0, 1.0f32]
        ];
        let scale_matrix = [
            [self.scale[0], 0.0, 0.0, 0.0],
            [0.0, self.scale[1], 0.0, 0.0],
            [0.0, 0.0, self.scale[2], 0.0],
            [0.0, 0.0, 0.0, 1.0]
        ];
        // Translation sits in the last row (OpenGL column-major layout).
        let translate_matrix = [
            [1.0, 0.0, 0.0, 0.0],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.0],
            [self.position[0], self.position[1], self.position[2], 1.0f32]
        ];
        // Composed as (rotation * scale) * translation — the effective
        // application order depends on `crate::math::mult_matrix`'s
        // convention (NOTE(review): confirm there).
        self.model_mat = Some(mult_matrix(&mult_matrix(&rotation_matrix, &scale_matrix), &translate_matrix));
    }
}

impl UIDrawInfo {
    /// New UI transform; the matrix is built lazily by `generate_matrix`
    /// (screen_dim (0, 0) guarantees the first call rebuilds it).
    pub fn new(position: (f32, f32), scale: (f32, f32)) -> UIDrawInfo {
        UIDrawInfo {
            model_mat: None,
            position: position,
            scale: scale,
            color: [1., 1., 1., 1.],
            // -4.0 lies far off-screen, i.e. effectively "no clipping".
            left_clip: -4.,
            screen_left_clip: -4.,
            screen_dim: (0, 0),
            translate_after_scale: true
        }
    }

    /// Rebuilds the 3x3 UI matrix, but only when the framebuffer size has
    /// changed since the last call — cheap to invoke every frame.
    pub fn generate_matrix(&mut self, target: &mut Frame) {
        if self.screen_dim == target.get_dimensions() {
            return;
        }
        self.screen_dim = target.get_dimensions();
        // Aspect correction: x is scaled by height/width so UI elements
        // keep their proportions on non-square framebuffers.
        let x_scale = self.screen_dim.1 as f32 / self.screen_dim.0 as f32;
        let mut translate_mat = [
            [1., 0., 0.],
            [0., 1., 0.],
            [self.position.0, self.position.1, 1.0f32]
        ];
        let scale_mat = [
            [self.scale.0 * x_scale, 0., 0.],
            [0., self.scale.1, 0.],
            [0., 0., 1.]
        ];
        self.screen_left_clip = self.left_clip * x_scale;
        self.model_mat = Some(if !self.translate_after_scale {
            mult_matrix3(&translate_mat, &scale_mat)
        } else {
            // When translating post-scale, the x offset must itself be
            // aspect-corrected before composing.
            translate_mat[2][0] *= x_scale;
            mult_matrix3(&scale_mat, &translate_mat)
        });
    }
}

/// Uploads vertex/index data into GPU buffers. The returned `ObjDef` has no
/// material and uses the first vertex's position as its reference position.
/// Panics on buffer-creation failure or an empty vertex slice.
pub fn load_data_to_gpu(display: &Display, vertices: &[Vertex], indices: &[u32]) -> ObjDef {
    ObjDef {
        vertices: glium::VertexBuffer::new(display, &vertices).unwrap(),
        indices: glium::IndexBuffer::new(display, glium::index::PrimitiveType::TrianglesList, &indices).unwrap(),
        // NOTE(review): panics when `vertices` is empty.
        position: vertices[0].position,
        material: None
    }
}

/// Draws one lit object with `BasicDrawUniforms`.
///
/// Panics when `obj_info.model_mat` was never generated, when
/// `obj_def.material` is `None` (e.g. fresh from `load_data_to_gpu`), or
/// when the material's texture name is missing from the cache.
pub fn basic_render(target: &mut Frame, env_info: &EnvDrawInfo, obj_info: &ObjDrawInfo, obj_def: &ObjDef, program: &Program, texcoord_displacement: Option<[f32; 2]>) {
    let uniforms = BasicDrawUniforms {
        model: *obj_info.model_mat.as_ref().unwrap(),
        view: env_info.view_mat,
        perspective: env_info.perspective_mat,
        lights: env_info.lights,
        light_count: env_info.light_count as i32,
        mtl_color: obj_def.material.as_ref().unwrap().color,
        obj_color: obj_info.color,
        texcoord_displacement: texcoord_displacement.unwrap_or([0., 0.]),
        // Untextured draws flag the shader via min_text_val (see struct).
        min_text_val: if obj_def.material.as_ref().unwrap().diffuse_texture.is_none() {
            [1., 1., 1., 1.0f32]
        } else {
            [0., 0., 0., 0.0f32]
        },
        // Fall back to the shared white texture when no diffuse map is set.
        tex: env_info.textures.get(
            obj_def.material.as_ref().unwrap().diffuse_texture.as_ref().unwrap_or(&textures::WHITE.to_string())
        ).unwrap()
    };
    target.draw(&obj_def.vertices, &obj_def.indices, program, &uniforms, env_info.params).unwrap();
}

/// Draws one UI quad with alpha blending and user clip plane 0 enabled
/// (the shader's left clip). Panics when `generate_matrix` was never run
/// on `ui_draw_info`.
pub fn ui_draw(target: &mut Frame, obj_def: &ObjDef, ui_draw_info: &UIDrawInfo, program: &Program, texture: &SrgbTexture2d) {
    let uniforms = uniform! {
        tex: texture.sampled()
            .magnify_filter(glium::uniforms::MagnifySamplerFilter::Linear)
            .minify_filter(glium::uniforms::MinifySamplerFilter::Linear),
        ui_color: ui_draw_info.color,
        model: ui_draw_info.model_mat.unwrap(),
        left_clip: ui_draw_info.screen_left_clip
    };
    let params = DrawParameters {
        blend: glium::draw_parameters::Blend::alpha_blending(),
        // Bit 0 set => clip plane 0 active.
        clip_planes_bitmask: 1,
        ..Default::default()
    };
    target.draw(&obj_def.vertices, &obj_def.indices, program, &uniforms, &params).unwrap();
}
use std::{fmt::Display, ops::{Add, AddAssign, Sub}};

/// A line/column location in a document.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub struct Position {
    pub line: usize,
    pub column: usize,
}

/// A span: a start [`Position`] plus the [`Delta`] to its end.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub struct Range {
    pub from: Position,
    pub until: Delta,
}

/// The distance between two positions: whole lines crossed, plus the
/// column offset on the final line.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
pub struct Delta {
    pub lines: usize,
    pub columns: usize,
}

impl From<&str> for Delta {
    /// Distance spanned by a piece of text: one line per `'\n'`, and the
    /// length of whatever follows the last newline as the columns.
    fn from(s: &str) -> Self {
        match s.rfind('\n') {
            Some(last_nl) => Self {
                lines: s[..last_nl].matches('\n').count() + 1,
                columns: s[last_nl + 1..].len(),
            },
            None => Self {
                lines: 0,
                columns: s.len(),
            },
        }
    }
}

impl Add<Delta> for Position {
    type Output = Position;

    /// Advances a position by a delta. Crossing at least one line boundary
    /// resets the column to the delta's trailing columns.
    fn add(self, delta: Delta) -> Self::Output {
        match delta.lines {
            0 => Position {
                line: self.line,
                column: self.column + delta.columns,
            },
            n => Position {
                line: self.line + n,
                column: delta.columns,
            },
        }
    }
}

impl Sub for Position {
    type Output = Delta;

    /// Distance from `rhs` up to `self`. Only defined when `self` is not
    /// before `rhs` (otherwise unreachable by construction).
    fn sub(self, rhs: Self) -> Self::Output {
        use std::cmp::Ordering::*;
        match self.line.cmp(&rhs.line) {
            Greater => Delta {
                lines: self.line - rhs.line,
                columns: self.column,
            },
            Equal => Delta {
                lines: 0,
                columns: self.column - rhs.column,
            },
            Less => unreachable!(),
        }
    }
}

impl Display for Position {
    /// Renders as `line:column`.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "{}:{}", self.line, self.column)
    }
}

impl Display for Range {
    /// Renders as `start-end`, with the end computed from the delta.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}-{}", self.from, self.to())
    }
}

impl Add for Range {
    type Output = Self;

    /// Extends `self` so that it ends where `rhs` ends.
    fn add(self, rhs: Self) -> Self::Output {
        let until = rhs.to() - self.from;
        Self { from: self.from, until }
    }
}

impl AddAssign for Range {
    fn add_assign(&mut self, rhs: Self) {
        *self = *self + rhs
    }
}

impl From<Position> for Range {
    /// An empty range anchored at `from`.
    fn from(from: Position) -> Self {
        Self {
            from,
            until: Delta::default(),
        }
    }
}

impl Delta {
    /// True when the delta covers any distance at all.
    pub fn nonzero(self) -> bool {
        Delta::default() < self
    }
}

impl Range {
    /// The end position of the range.
    pub fn to(self) -> Position {
        self.from + self.until
    }
}
use openxr as xr;

// OpenGL's GL_RGBA8 internal-format enum value — the only swapchain
// format this code accepts.
const GL_RGBA8: u32 = 0x8058;

/// Per-eye OpenXR swapchains plus their render resolutions.
pub struct SwapChains {
    pub swapchain_left: Option<xr::Swapchain<xr::OpenGL>>,
    pub resolution_left: (u32, u32),
    pub swapchain_right: Option<xr::Swapchain<xr::OpenGL>>,
    pub resolution_right: (u32, u32),
}

impl SwapChains {
    /// Placeholder state used until the session reaches RUNNING and real
    /// swapchains can be created (see `OpenXR::update`).
    fn empty() -> Self {
        Self {
            swapchain_left: None,
            swapchain_right: None,
            resolution_left: (800, 600),
            resolution_right: (800, 600),
        }
    }

    /// Creates both eye swapchains at the runtime-recommended resolutions
    /// and sample counts. Panics when the runtime does not offer GL_RGBA8
    /// or when any OpenXR call fails.
    fn new_from_session(
        session: &xr::Session<xr::OpenGL>,
        instance: &xr::Instance,
        system: openxr::SystemId,
    ) -> Self {
        // For PRIMARY_STEREO, index 0 is the left eye and 1 the right.
        let view_configuration_views = instance
            .enumerate_view_configuration_views(system, xr::ViewConfigurationType::PRIMARY_STEREO)
            .unwrap();
        let resolution_left = (
            view_configuration_views[0].recommended_image_rect_width,
            view_configuration_views[0].recommended_image_rect_height,
        );
        let resolution_right = (
            view_configuration_views[1].recommended_image_rect_width,
            view_configuration_views[1].recommended_image_rect_height,
        );
        let sample_count_left = view_configuration_views[0].recommended_swapchain_sample_count;
        let sample_count_right = view_configuration_views[1].recommended_swapchain_sample_count;
        let swapchain_formats = session.enumerate_swapchain_formats().unwrap();
        if !swapchain_formats.contains(&GL_RGBA8) {
            panic!("XR: Cannot use OpenGL GL_RGBA8 swapchain format");
        }
        // NOTE(review): array_size is 2 even though left/right get
        // separate swapchains — confirm whether one layer goes unused.
        let swapchain_create_info_left: xr::SwapchainCreateInfo<xr::OpenGL> =
            xr::SwapchainCreateInfo {
                create_flags: xr::SwapchainCreateFlags::EMPTY,
                usage_flags: xr::SwapchainUsageFlags::COLOR_ATTACHMENT
                    | xr::SwapchainUsageFlags::SAMPLED,
                format: GL_RGBA8,
                sample_count: sample_count_left,
                width: resolution_left.0,
                height: resolution_left.1,
                face_count: 1,
                array_size: 2,
                mip_count: 1,
            };
        let swapchain_create_info_right: xr::SwapchainCreateInfo<xr::OpenGL> =
            xr::SwapchainCreateInfo {
                create_flags: xr::SwapchainCreateFlags::EMPTY,
                usage_flags: xr::SwapchainUsageFlags::COLOR_ATTACHMENT
                    | xr::SwapchainUsageFlags::SAMPLED,
                format: GL_RGBA8,
                sample_count: sample_count_right,
                width: resolution_right.0,
                height: resolution_right.1,
                face_count: 1,
                array_size: 2,
                mip_count: 1,
            };
        let swapchain_left = session
            .create_swapchain(&swapchain_create_info_left)
            .unwrap();
        let swapchain_right = session
            .create_swapchain(&swapchain_create_info_right)
            .unwrap();
        Self {
            swapchain_left: Some(swapchain_left),
            swapchain_right: Some(swapchain_right),
            resolution_left,
            resolution_right,
        }
    }

    /// NOTE(review): the name is inverted — this returns `true` while the
    /// swapchains have NOT been created yet (both are `None`).
    /// `OpenXR::update` relies on these semantics
    /// (`if is_initialized() { create_swapchains() }`), so a rename (e.g.
    /// to `needs_init`) must touch both sites together.
    pub fn is_initialized(&self) -> bool {
        self.swapchain_left.is_none() && self.swapchain_right.is_none()
    }

    /// Mutable access to both swapchains, or `None` until they exist.
    pub fn get_swapchains(
        &mut self,
    ) -> Option<(
        &mut xr::Swapchain<xr::OpenGL>,
        &mut xr::Swapchain<xr::OpenGL>,
    )> {
        if self.swapchain_left.is_some() && self.swapchain_right.is_some() {
            let mut swapchain_left = self.swapchain_left.as_mut().unwrap();
            let mut swapchain_right = self.swapchain_right.as_mut().unwrap();
            Some((swapchain_left, swapchain_right))
        } else {
            None
        }
    }

    /// Acquires and waits on one image per eye, returning the two OpenGL
    /// texture names; `None` when the swapchains don't exist yet.
    pub fn get_images(&mut self) -> Option<(u32, u32)> {
        let mut swapchains = self.get_swapchains();
        let swapchains = swapchains.as_mut()?;
        let swapchain_image_left = get_swapchain_image(swapchains.0);
        let swapchain_image_right = get_swapchain_image(swapchains.1);
        Some((swapchain_image_left, swapchain_image_right))
    }

    /// Builds the per-eye sub-image descriptors (full-resolution rects)
    /// used when compositing. Panics when the swapchains don't exist.
    pub fn get_subimages(
        &mut self,
    ) -> (
        xr::SwapchainSubImage<xr::OpenGL>,
        xr::SwapchainSubImage<xr::OpenGL>,
    ) {
        let resolution_left = self.resolution_left;
        let resolution_right = self.resolution_right;
        // NOTE(review): leftover debug output on a per-frame path.
        println!("resolution_right {:?}", resolution_right);
        let eye_rect_left = xr::Rect2Di {
            offset: xr::Offset2Di { x: 0, y: 0 },
            extent: xr::Extent2Di {
                width: resolution_left.0 as i32,
                height: resolution_left.1 as i32,
            },
        };
        let eye_rect_right = xr::Rect2Di {
            offset: xr::Offset2Di { x: 0, y: 0 },
            extent: xr::Extent2Di {
                width: resolution_right.0 as i32,
                height: resolution_right.1 as i32,
            },
        };
        let left_subimage: xr::SwapchainSubImage<xr::OpenGL> = openxr::SwapchainSubImage::new()
            .swapchain(&self.swapchain_left.as_ref().unwrap())
            .image_rect(eye_rect_left);
        let right_subimage: xr::SwapchainSubImage<xr::OpenGL> = openxr::SwapchainSubImage::new()
            .swapchain(&self.swapchain_right.as_ref().unwrap())
            .image_rect(eye_rect_right);
        (left_subimage, right_subimage)
    }

    /// Releases the currently-acquired image of each swapchain. Panics
    /// when the swapchains don't exist or nothing was acquired.
    pub fn release_images(&mut self) {
        let swapchains = self.get_swapchains().unwrap();
        swapchains.0.release_image().unwrap();
        swapchains.1.release_image().unwrap();
    }
}

/// Top-level OpenXR state: instance/session/spaces plus the per-eye
/// swapchains and the frame-stream timing handle.
pub struct OpenXR {
    pub entry: xr::Entry,
    pub instance: xr::Instance,
    pub session: xr::Session<xr::OpenGL>,
    pub system: openxr::SystemId,
    pub swapchains: SwapChains,
    // (stage, view) reference spaces; either may be unsupported.
    pub spaces: (Option<xr::Space>, Option<xr::Space>),
    pub session_state: xr::SessionState,
    frame_stream: xr::FrameStream<xr::OpenGL>,
    // Set by `frame_stream_begin`, consumed by `frame_stream_end`.
    predicted_display_time: xr::Time,
}

impl OpenXR {
    /// Creates the instance + session against the given render backend and
    /// begins a PRIMARY_STEREO session. Panics on any unsupported feature
    /// or failed OpenXR call. Swapchains are created later, once the
    /// session reports RUNNING (see `update`).
    pub fn new(backend: &mut crate::render::backend::Backend) -> Self {
        let entry = xr::Entry::linked();
        let extensions = entry
            .enumerate_extensions()
            .expect("Cannot enumerate extensions");
        let app_info = xr::ApplicationInfo::new().application_name("SlashMania");
        if !extensions.khr_opengl_enable {
            panic!("XR: OpenGL extension unsupported");
        }
        let extension_set = xr::ExtensionSet {
            khr_opengl_enable: true,
            ..Default::default()
        };
        let instance = entry.create_instance(app_info, &extension_set).unwrap();
        let instance_props = instance.properties().expect("Cannot load instance props");
        println!(
            "loaded instance: {} v{}",
            instance_props.runtime_name, instance_props.runtime_version
        );
        let system = instance
            .system(xr::FormFactor::HEAD_MOUNTED_DISPLAY)
            .unwrap();
        // SAFETY-relevant: the backend must provide a valid, current GL
        // context for session creation (hence the unsafe calls).
        let info = unsafe { backend.xr_session_create_info() };
        let (session, frame_stream) = unsafe { instance.create_session(system, &info).unwrap() };
        session
            .begin(xr::ViewConfigurationType::PRIMARY_STEREO)
            .unwrap();
        let spaces = init_spaces(&session);
        let view_configuration_views = instance
            .enumerate_view_configuration_views(system, xr::ViewConfigurationType::PRIMARY_STEREO)
            .unwrap();
        // Use the left eye's recommended size as the backend render size.
        let resolution = (
            view_configuration_views[0].recommended_image_rect_width,
            view_configuration_views[0].recommended_image_rect_height,
        );
        backend.dimmensions = resolution;
        OpenXR {
            entry,
            instance,
            session,
            spaces,
            system,
            session_state: xr::SessionState::UNKNOWN,
            frame_stream,
            predicted_display_time: xr::Time::from_raw(0),
            swapchains: SwapChains::empty(),
        }
    }

    /// Drains the OpenXR event queue, tracking session-state changes:
    /// swapchains are created on the transition to RUNNING, and the
    /// session is finished on EXITING/LOSS_PENDING.
    pub fn update(&mut self) {
        let mut buffer = xr::EventDataBuffer::new();
        while let Some(event) = self.instance.poll_event(&mut buffer).unwrap() {
            use xr::Event::*;
            match event {
                SessionStateChanged(session_change) => {
                    println!(
                        "session state changed to {:?} at t={:?}",
                        session_change.state(),
                        session_change.time()
                    );
                    self.session_state = session_change.state();
                    match session_change.state() {
                        xr::SessionState::EXITING | xr::SessionState::LOSS_PENDING => {
                            self.finish_session()
                        }
                        xr::SessionState::RUNNING => {
                            // See the note on `is_initialized`: true means
                            // "not yet created".
                            if self.swapchains.is_initialized() {
                                self.create_swapchains()
                            }
                        }
                        _ => {}
                    }
                }
                _ => {
                    println!("unhandled event");
                }
            }
        }
    }

    /// (Re)creates both eye swapchains from the current session.
    pub fn create_swapchains(&mut self) {
        self.swapchains = SwapChains::new_from_session(&self.session, &self.instance, self.system);
    }

    /// Waits for the next frame, records its predicted display time, and
    /// opens the frame for rendering.
    pub fn frame_stream_begin(&mut self) {
        let state = self.frame_stream.wait().unwrap();
        self.predicted_display_time = state.predicted_display_time;
        self.frame_stream.begin().unwrap();
    }

    /// Locates both eye views at the predicted display time and submits a
    /// single projection layer for compositing.
    pub fn frame_stream_end(&mut self) {
        let subimages = self.swapchains.get_subimages();
        // NOTE(review): `view_flags` is ignored; validity flags of the
        // located views are never checked.
        let (view_flags, views) = self
            .session
            .locate_views(self.predicted_display_time, self.spaces.0.as_ref().unwrap())
            .unwrap();
        let projection_view_left = xr::CompositionLayerProjectionView::new()
            .pose(views[0].pose)
            .fov(views[0].fov)
            .sub_image(subimages.0);
        let projection_view_right = xr::CompositionLayerProjectionView::new()
            .pose(views[1].pose)
            .fov(views[1].fov)
            .sub_image(subimages.1);
        let proj_views = [projection_view_left, projection_view_right];
        let projection = xr::CompositionLayerProjection::new().views(&proj_views);
        self.frame_stream
            .end(
                self.predicted_display_time,
                xr::EnvironmentBlendMode::OPAQUE,
                &[&projection],
            )
            .unwrap();
    }

    // NOTE(review): intentionally(?) a no-op stub — the session is never
    // actually ended here; confirm whether `session.end()` belongs in it.
    pub fn finish_session(&self) {}
}

/// Creates the STAGE and VIEW reference spaces (identity pose), returning
/// `None` for whichever space type the runtime does not support.
pub fn init_spaces(session: &xr::Session<xr::OpenGL>) -> (Option<xr::Space>, Option<xr::Space>) {
    let space_tys = session.enumerate_reference_spaces().unwrap();
    let has_stage = space_tys.contains(&xr::ReferenceSpaceType::STAGE);
    let has_view = space_tys.contains(&xr::ReferenceSpaceType::VIEW);
    let stage = if has_stage {
        Some(
            session
                .create_reference_space(
                    xr::ReferenceSpaceType::STAGE,
                    xr::Posef {
                        position: xr::Vector3f {
                            x: 0.0,
                            y: 0.0,
                            z: 0.0,
                        },
                        orientation: xr::Quaternionf {
                            w: 1.0,
                            x: 0.0,
                            y: 0.0,
                            z: 0.0,
                        },
                    },
                )
                .unwrap(),
        )
    } else {
        None
    };
    let view = if has_view {
        Some(
            session
                .create_reference_space(
                    xr::ReferenceSpaceType::VIEW,
                    xr::Posef {
                        position: xr::Vector3f {
                            x: 0.0,
                            y: 0.0,
                            z: 0.0,
                        },
                        orientation: xr::Quaternionf {
                            w: 1.0,
                            x: 0.0,
                            y: 0.0,
                            z: 0.0,
                        },
                    },
                )
                .unwrap(),
        )
    } else {
        None
    };
    return (stage, view);
}

/// Acquires the next image of `swapchain`, waits until it is usable, and
/// returns its OpenGL texture name.
/// NOTE(review): `enumerate_images` is called on every acquisition — the
/// image list could be cached once per swapchain.
pub fn get_swapchain_image(swapchain: &mut xr::Swapchain<xr::OpenGL>) -> u32 {
    let images = swapchain.enumerate_images().unwrap();
    let image_id = swapchain.acquire_image().unwrap();
    swapchain.wait_image(xr::Duration::INFINITE).unwrap();
    let image = images[image_id as usize];
    image
}
use std::collections::HashMap; use std::collections::HashSet; use crate::wfc::valid_dirs; use crate::wfc::CompatibilityMap; use crate::wfc::CompatibleTile; use crate::wfc::UVec2; use crate::wfc::WeightTable; use crate::Matrix; fn parse_matrix( matrix: &Matrix, weights: &mut WeightTable<char>, compatabilities: &mut CompatibilityMap<char>, ) { let matrix_height = matrix.len(); let matrix_width = matrix[0].len(); for (y, row) in matrix.iter().enumerate() { for (x, col) in row.iter().enumerate() { let weight = weights.entry(*col).or_insert(0f64); *weight += 1f64; for direction in valid_dirs(UVec2(x, y), UVec2(matrix_width, matrix_height)) { let o_x = ((x as isize) + direction.0) as usize; let o_y = ((y as isize) + direction.1) as usize; let other_tile = matrix[o_y][o_x]; let compats = compatabilities .entry(*col) .or_insert(HashSet::<CompatibleTile<char>>::new()); compats.insert((other_tile, direction)); } } } } pub fn provide(matrixes: &[Matrix]) -> (CompatibilityMap<char>, WeightTable<char>) { let mut weights = WeightTable::new(); let mut compatabilities = HashMap::new(); for matrix in matrixes { parse_matrix(matrix, &mut weights, &mut compatabilities); } weights.normalize(); (compatabilities, weights) }
use crate::headers; use crate::AddAsHeader; use http::request::Builder; /// The max number of items in the collection #[derive(Debug, Clone, Copy)] pub struct MaxItemCount(i32); impl MaxItemCount { /// Create a new `MaxItemCount` pub fn new(count: i32) -> Self { Self(count) } } impl AddAsHeader for MaxItemCount { fn add_as_header(&self, builder: Builder) -> Builder { if self.0 <= 0 { builder.header(headers::MAX_ITEM_COUNT, -1) } else { builder.header(headers::MAX_ITEM_COUNT, self.0) } } fn add_as_header2( &self, request: &mut crate::Request, ) -> Result<(), crate::errors::HTTPHeaderError> { let (header_name, header_value) = if self.0 <= 0 { (headers::MAX_ITEM_COUNT, -1) } else { (headers::MAX_ITEM_COUNT, self.0) }; request .headers_mut() .append(header_name, http::HeaderValue::from(header_value)); Ok(()) } }
#![deny(missing_debug_implementations, missing_docs)] //! Data structures shared with multiple crates in the apllodb workspace. #[macro_use] extern crate derive_new; pub(crate) mod database; pub(crate) mod error; pub(crate) mod expression; pub(crate) mod schema; pub(crate) mod session; pub(crate) mod validation_helper; pub(crate) mod value; pub use crate::{ database::database_name::DatabaseName, error::{ session_error::{ApllodbSessionError, ApllodbSessionResult}, sqlstate::SqlState, ApllodbError, ApllodbResult, }, expression::{ boolean_expression::{ comparison_function::ComparisonFunction, logical_function::LogicalFunction, BooleanExpression, }, operator::{BinaryOperator, UnaryOperator}, Expression, }, schema::{r_pos::RPos, schema_index::SchemaIndex, schema_name::SchemaName, Schema}, session::{ session_id::SessionId, with_db::SessionWithDb, with_tx::SessionWithTx, without_db::SessionWithoutDb, Session, }, validation_helper::{ collection::{find_dup, find_dup_slow}, short_name::ShortName, }, value::{ sql_convertible::SqlConvertible, sql_type::{I64LooseType, NumericComparableType, SqlType, StringComparableLoseType}, sql_value::{ nn_sql_value::NnSqlValue, sql_compare_result::SqlCompareResult, sql_value_hash_key::SqlValueHashKey, SqlValue, }, }, }; #[cfg(feature = "test-support")] pub mod test_support; #[cfg(test)] mod tests { use apllodb_test_support::setup::setup_test_logger; use ctor::ctor; #[cfg_attr(test, ctor)] fn test_setup() { setup_test_logger(); } }
use annotate_snippets::{
    display_list::{DisplayList, FormatOptions},
    snippet::{Annotation, AnnotationType, Snippet},
};

use crate::utils::span::peg_error_to_snippet;

pub mod binding;
pub mod expr;
pub mod parsing;
pub mod proof;
pub mod utils;

/// Parses and fully checks an FPL source text.
///
/// Pipeline: PEG parse -> build the math model -> cycle check on proofs ->
/// per-proof correctness check. Each failing stage prints its diagnostics
/// (annotated source snippets) before returning `Err(())`; the result only
/// signals overall success or failure, errors carry no data.
pub fn read_fpl(input: &str) -> Result<(), ()> {
    use crate::{parsing::*, proof::*};

    // Stage 1: grammar-level parse of the raw text.
    let parsed_math = parser::math(&input).map_err(|err| peg_error_to_snippet(err, input))?;

    // Stage 2: semantic construction; may report several independent errors.
    let math = read_math(&parsed_math).map_err(|errs| {
        for err in errs {
            err.print_error_snippet(input);
        }
    })?;

    // Stage 3: reject cyclic proof dependencies (see `proofs_has_cycles`);
    // the error is a plain message, so the snippet is assembled here.
    proofs_has_cycles(&parsed_math).map_err(|text| {
        let snippet = Snippet {
            title: Some(Annotation {
                label: Some(text),
                id: None,
                annotation_type: AnnotationType::Error,
            }),
            opt: FormatOptions {
                color: true,
                ..Default::default()
            },
            ..Snippet::default()
        };
        println!("{}", DisplayList::from(snippet));
    })?;

    // Stage 4: verify every proof against the constructed model.
    is_proofs_correct(&parsed_math, &math).map_err(|errs| {
        for err in errs {
            err.print_error_snippet(input);
        }
    })?;

    Ok(())
}
// Auto-generated register-access shim (svd2rust style) for the RX_FIFO_RD
// register — presumably regenerated from the SVD description; avoid
// hand-editing.

#[doc = "Reader of register RX_FIFO_RD"]
pub type R = crate::R<u32, super::RX_FIFO_RD>;
#[doc = "Reader of field `DATA`"]
pub type DATA_R = crate::R<u16, u16>;
impl R {
    #[doc = "Bits 0:15 - Data read from the receiver FIFO. Reading a data frame will remove the data frame from the FIFO; i.e. behavior is similar to that of a POP operation. Note that when CTRL.BYTE_MODE is '1', only DATA\\[7:0\\] are used. A read from an empty RX FIFO sets INTR_RX.UNDERFLOW to '1'. When this register is read through the debugger, the data frame will not be removed from the FIFO. Similar in operation to RX_FIFO_RD_SILENT"]
    #[inline(always)]
    pub fn data(&self) -> DATA_R {
        // The data frame occupies the low 16 bits of the 32-bit register.
        DATA_R::new((self.bits & 0xffff) as u16)
    }
}
// This file is part of rdma-core. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. No part of rdma-core, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2016 The developers of rdma-core. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. #[repr(C)] pub struct ibv_send_wr { pub wr_id: u64, pub next: *mut ibv_send_wr, pub sg_list: *mut ibv_sge, pub num_sge: c_int, pub opcode: ibv_wr_opcode, pub send_flags: c_uint, pub __bindgen_anon_1: ibv_send_wr__bindgen_ty_1, pub wr: ibv_send_wr__bindgen_ty_2, pub qp_type: ibv_send_wr__bindgen_ty_3, pub __bindgen_anon_2: ibv_send_wr__bindgen_ty_4, } impl Default for ibv_send_wr { #[inline(always)] fn default() -> Self { unsafe { zeroed() } } } impl Debug for ibv_send_wr { #[inline(always)] fn fmt(&self, f: &mut Formatter) -> Result { write!(f, "ibv_send_wr {{ next: {:?}, sg_list: {:?}, opcode: {:?}, __bindgen_anon_1: {:?}, wr: {:?}, qp_type: {:?}, __bindgen_anon_2: {:?} }}", self.next, self.sg_list, self.opcode, self.__bindgen_anon_1, self.wr, self.qp_type, self.__bindgen_anon_2) } }
// Copyright (C) 2019 O.S. Systems Sofware LTDA // // SPDX-License-Identifier: MIT use serde::Deserialize; use std::collections::HashMap; #[derive(Deserialize, Debug, PartialEq)] pub struct Message { #[serde(rename = "type")] pub kind: String, pub destination_email: String, pub destination_name: String, #[serde(default)] pub fields: HashMap<String, String>, pub attachment: Option<Attachment>, } #[derive(Deserialize, Debug, PartialEq)] pub struct Attachment { pub name: String, pub content: String, } #[test] fn valid_message() { use serde_json::json; assert_eq!( Message { kind: "payment_confirm".to_string(), destination_email: "robertosilva@gmail.com".to_string(), destination_name: "Roberto Silva".to_string(), fields: [("owner_uid".to_string(), "ns-1".to_string())] .iter() .cloned() .collect(), attachment: None, }, serde_json::from_str( &json!( { "destination_email": "robertosilva@gmail.com".to_string(), "destination_name": "Roberto Silva".to_string(), "type": "payment_confirm".to_string(), "fields": { "owner_uid": "ns-1".to_string() } } ) .to_string() ) .unwrap() ); assert_eq!( Message { kind: "payment_confirm".to_string(), destination_email: "robertosilva@gmail.com".to_string(), destination_name: "Roberto Silva".to_string(), fields: HashMap::new(), attachment: None, }, serde_json::from_str( &json!( { "destination_email": "robertosilva@gmail.com".to_string(), "destination_name": "Roberto Silva".to_string(), "type": "payment_confirm".to_string() } ) .to_string() ) .unwrap() ); }
use std::{num::ParseIntError, env::current_dir};

use crate::read_lines::read_lines;

/// Advent of Code 2020, day 2 (part 2): counts the lines of
/// `Input/day_2.txt` whose password satisfies its policy, then prints the
/// total.
pub fn run() {
    let root_dir = current_dir().unwrap();
    let puzzle_path = root_dir.join("Input").join("day_2.txt");
    let lines = read_lines(puzzle_path).unwrap();

    let mut count: usize = 0;
    for line in lines {
        // Unreadable lines are skipped rather than aborting the run.
        if let Ok(line) = line {
            if check_password(line.as_str()) {
                count += 1;
            }
        }
    }
    println!("number of correct passwords: {}", count)
}

/// Checks one `"<lower>-<upper> <letter>: <password>"` line: exactly one of
/// the two positions must hold `letter`.
///
/// Subtlety this code depends on: the text after `':'` keeps its leading
/// space, so the puzzle's 1-based positions can be fed directly to 0-based
/// `nth()` — the space shifts every password character right by one.
///
/// # Panics
/// Panics on a structurally malformed line (missing `':'` or bad rule),
/// matching the original behavior.
fn check_password(password: &str) -> bool {
    let mut parts = password.split(':');
    let rule = Rule::from_string(parts.next().unwrap()).unwrap();
    let password = parts.next().unwrap(); // note: still has its leading space

    // `None == Some(_)` is false, so out-of-range positions simply don't match.
    let matches_at = |pos: usize| password.chars().nth(pos) == Some(rule.letter);

    // "Exactly one position matches" is plain XOR; the original
    // `(a || b) && a != b` reduces to `a != b`.
    matches_at(rule.lower) != matches_at(rule.upper)
}

/// Parsed policy: two 1-based character positions and the letter to test.
struct Rule {
    upper: usize,
    lower: usize,
    letter: char,
}

impl Rule {
    /// Parses `"<lower>-<upper> <letter>"` (e.g. `"1-3 a"`).
    ///
    /// # Errors
    /// Returns `ParseIntError` when either bound is not a valid integer.
    ///
    /// # Panics
    /// Panics when the `'-'`, the space, or the letter is missing, matching
    /// the original behavior.
    fn from_string(rule_string: &str) -> Result<Self, ParseIntError> {
        let mut split = rule_string.split('-');
        let lower = split.next().unwrap().parse()?;
        let mut split = split.next().unwrap().split(' ');
        let upper = split.next().unwrap().parse()?;
        let letter = split.next().unwrap().chars().next().unwrap();
        Ok(Rule { upper, lower, letter })
    }
}
// NOTE(review): machine-generated serde models ("generated by AutoRust
// 0.1.0") — prefer regenerating over hand-editing. Conventions used
// throughout: `rename` maps the camelCase wire name; `default` +
// `skip_serializing_if` make a field optional on both read and write; each
// `pub mod <snake_case_struct_name>` only namespaces the enums used by the
// struct of the same name. Types such as `ProxyOnlyResource` are defined
// elsewhere in this crate.
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};

// ---- Application/HTTP logging configuration models ----

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApplicationLogsConfig {
    #[serde(rename = "fileSystem", default, skip_serializing_if = "Option::is_none")]
    pub file_system: Option<FileSystemApplicationLogsConfig>,
    #[serde(rename = "azureTableStorage", default, skip_serializing_if = "Option::is_none")]
    pub azure_table_storage: Option<AzureTableStorageApplicationLogsConfig>,
    #[serde(rename = "azureBlobStorage", default, skip_serializing_if = "Option::is_none")]
    pub azure_blob_storage: Option<AzureBlobStorageApplicationLogsConfig>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureBlobStorageApplicationLogsConfig {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub level: Option<azure_blob_storage_application_logs_config::Level>,
    #[serde(rename = "sasUrl", default, skip_serializing_if = "Option::is_none")]
    pub sas_url: Option<String>,
    #[serde(rename = "retentionInDays", default, skip_serializing_if = "Option::is_none")]
    pub retention_in_days: Option<i32>,
}
pub mod azure_blob_storage_application_logs_config {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Level {
        Off,
        Verbose,
        Information,
        Warning,
        Error,
    }
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureBlobStorageHttpLogsConfig {
    #[serde(rename = "sasUrl", default, skip_serializing_if = "Option::is_none")]
    pub sas_url: Option<String>,
    #[serde(rename = "retentionInDays", default, skip_serializing_if = "Option::is_none")]
    pub retention_in_days: Option<i32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enabled: Option<bool>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureTableStorageApplicationLogsConfig {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub level: Option<azure_table_storage_application_logs_config::Level>,
    // `sasUrl` is required here (no default / no skip), unlike the blob variant.
    #[serde(rename = "sasUrl")]
    pub sas_url: String,
}
pub mod azure_table_storage_application_logs_config {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Level {
        Off,
        Verbose,
        Information,
        Warning,
        Error,
    }
}

// ---- Backup models ----

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BackupItem {
    #[serde(flatten)]
    pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<backup_item::Properties>,
}
pub mod backup_item {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub id: Option<i32>,
        #[serde(rename = "storageAccountUrl", default, skip_serializing_if = "Option::is_none")]
        pub storage_account_url: Option<String>,
        #[serde(rename = "blobName", default, skip_serializing_if = "Option::is_none")]
        pub blob_name: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub name: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub status: Option<properties::Status>,
        #[serde(rename = "sizeInBytes", default, skip_serializing_if = "Option::is_none")]
        pub size_in_bytes: Option<i64>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub created: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub log: Option<String>,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub databases: Vec<DatabaseBackupSetting>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub scheduled: Option<bool>,
        #[serde(rename = "lastRestoreTimeStamp", default, skip_serializing_if = "Option::is_none")]
        pub last_restore_time_stamp: Option<String>,
        #[serde(rename = "finishedTimeStamp", default, skip_serializing_if = "Option::is_none")]
        pub finished_time_stamp: Option<String>,
        #[serde(rename = "correlationId", default, skip_serializing_if = "Option::is_none")]
        pub correlation_id: Option<String>,
        #[serde(rename = "websiteSizeInBytes", default, skip_serializing_if = "Option::is_none")]
        pub website_size_in_bytes: Option<i64>,
    }
    pub mod properties {
        use super::*;
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum Status {
            InProgress,
            Failed,
            Succeeded,
            TimedOut,
            Created,
            Skipped,
            PartiallySucceeded,
            DeleteInProgress,
            DeleteFailed,
            Deleted,
        }
    }
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BackupItemCollection {
    pub value: Vec<BackupItem>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BackupRequest {
    #[serde(flatten)]
    pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<backup_request::Properties>,
}
pub mod backup_request {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        pub name: String,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub enabled: Option<bool>,
        #[serde(rename = "storageAccountUrl")]
        pub storage_account_url: String,
        #[serde(rename = "backupSchedule", default, skip_serializing_if = "Option::is_none")]
        pub backup_schedule: Option<BackupSchedule>,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub databases: Vec<DatabaseBackupSetting>,
        #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
        pub type_: Option<properties::Type>,
    }
    pub mod properties {
        use super::*;
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum Type {
            Default,
            Clone,
            Relocation,
            Snapshot,
        }
    }
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BackupSchedule {
    #[serde(rename = "frequencyInterval")]
    pub frequency_interval: i32,
    #[serde(rename = "frequencyUnit")]
    pub frequency_unit: backup_schedule::FrequencyUnit,
    #[serde(rename = "keepAtLeastOneBackup")]
    pub keep_at_least_one_backup: bool,
    #[serde(rename = "retentionPeriodInDays")]
    pub retention_period_in_days: i32,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "lastExecutionTime", default, skip_serializing_if = "Option::is_none")]
    pub last_execution_time: Option<String>,
}
pub mod backup_schedule {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum FrequencyUnit {
        Day,
        Hour,
    }
}

// ---- Connection-string models ----

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnStringValueTypePair {
    pub value: String,
    #[serde(rename = "type")]
    pub type_: conn_string_value_type_pair::Type,
}
pub mod conn_string_value_type_pair {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        MySql,
        #[serde(rename = "SQLServer")]
        SqlServer,
        #[serde(rename = "SQLAzure")]
        SqlAzure,
        Custom,
        NotificationHub,
        ServiceBus,
        EventHub,
        ApiHub,
        DocDb,
        RedisCache,
        #[serde(rename = "PostgreSQL")]
        PostgreSql,
    }
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectionStringDictionary {
    #[serde(flatten)]
    pub proxy_only_resource: ProxyOnlyResource,
    // Free-form map of connection strings; schema not fixed by this model.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<serde_json::Value>,
}

// ---- WebJob models ----

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContinuousWebJob {
    #[serde(flatten)]
    pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<continuous_web_job::Properties>,
}
pub mod continuous_web_job {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub status: Option<properties::Status>,
        #[serde(rename = "detailedStatus", default, skip_serializing_if = "Option::is_none")]
        pub detailed_status: Option<String>,
        #[serde(rename = "logUrl", default, skip_serializing_if = "Option::is_none")]
        pub log_url: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub name: Option<String>,
        #[serde(rename = "runCommand", default, skip_serializing_if = "Option::is_none")]
        pub run_command: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub url: Option<String>,
        #[serde(rename = "extraInfoUrl", default, skip_serializing_if = "Option::is_none")]
        pub extra_info_url: Option<String>,
        #[serde(rename = "jobType", default, skip_serializing_if = "Option::is_none")]
        pub job_type: Option<properties::JobType>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub error: Option<String>,
        #[serde(rename = "usingSdk", default, skip_serializing_if = "Option::is_none")]
        pub using_sdk: Option<bool>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub settings: Option<serde_json::Value>,
    }
    pub mod properties {
        use super::*;
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum Status {
            Initializing,
            Starting,
            Running,
            PendingRestart,
            Stopped,
        }
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum JobType {
            Continuous,
            Triggered,
        }
    }
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContinuousWebJobCollection {
    pub value: Vec<ContinuousWebJob>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CsmPublishingProfileOptions {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub format: Option<csm_publishing_profile_options::Format>,
}
pub mod csm_publishing_profile_options {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Format {
        FileZilla3,
        WebDeploy,
        Ftp,
    }
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CsmSlotEntity {
    #[serde(rename = "targetSlot")]
    pub target_slot: String,
    #[serde(rename = "preserveVnet")]
    pub preserve_vnet: bool,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CustomHostnameAnalysisResult { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<custom_hostname_analysis_result::Properties>, } pub mod custom_hostname_analysis_result { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "isHostnameAlreadyVerified", default, skip_serializing_if = "Option::is_none")] pub is_hostname_already_verified: Option<bool>, #[serde(rename = "customDomainVerificationTest", default, skip_serializing_if = "Option::is_none")] pub custom_domain_verification_test: Option<properties::CustomDomainVerificationTest>, #[serde(rename = "customDomainVerificationFailureInfo", default, skip_serializing_if = "Option::is_none")] pub custom_domain_verification_failure_info: Option<ErrorEntity>, #[serde(rename = "hasConflictOnScaleUnit", default, skip_serializing_if = "Option::is_none")] pub has_conflict_on_scale_unit: Option<bool>, #[serde(rename = "hasConflictAcrossSubscription", default, skip_serializing_if = "Option::is_none")] pub has_conflict_across_subscription: Option<bool>, #[serde(rename = "conflictingAppResourceId", default, skip_serializing_if = "Option::is_none")] pub conflicting_app_resource_id: Option<String>, #[serde(rename = "cNameRecords", default, skip_serializing_if = "Vec::is_empty")] pub c_name_records: Vec<String>, #[serde(rename = "txtRecords", default, skip_serializing_if = "Vec::is_empty")] pub txt_records: Vec<String>, #[serde(rename = "aRecords", default, skip_serializing_if = "Vec::is_empty")] pub a_records: Vec<String>, #[serde(rename = "alternateCNameRecords", default, skip_serializing_if = "Vec::is_empty")] pub alternate_c_name_records: Vec<String>, #[serde(rename = "alternateTxtRecords", default, skip_serializing_if = "Vec::is_empty")] pub alternate_txt_records: Vec<String>, } pub mod properties { use 
super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum CustomDomainVerificationTest { Passed, Failed, Skipped, } } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct DatabaseBackupSetting { #[serde(rename = "databaseType")] pub database_type: database_backup_setting::DatabaseType, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "connectionStringName", default, skip_serializing_if = "Option::is_none")] pub connection_string_name: Option<String>, #[serde(rename = "connectionString", default, skip_serializing_if = "Option::is_none")] pub connection_string: Option<String>, } pub mod database_backup_setting { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum DatabaseType { SqlAzure, MySql, LocalMySql, PostgreSql, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Deployment { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<deployment::Properties>, } pub mod deployment { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<i32>, #[serde(default, skip_serializing_if = "Option::is_none")] pub message: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub author: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub deployer: Option<String>, #[serde(rename = "authorEmail", default, skip_serializing_if = "Option::is_none")] pub author_email: Option<String>, #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>, #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")] pub end_time: Option<String>, 
#[serde(default, skip_serializing_if = "Option::is_none")] pub active: Option<bool>, #[serde(default, skip_serializing_if = "Option::is_none")] pub details: Option<String>, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct DeploymentCollection { pub value: Vec<Deployment>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct EnabledConfig { #[serde(default, skip_serializing_if = "Option::is_none")] pub enabled: Option<bool>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct FileSystemApplicationLogsConfig { #[serde(default, skip_serializing_if = "Option::is_none")] pub level: Option<file_system_application_logs_config::Level>, } pub mod file_system_application_logs_config { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum Level { Off, Verbose, Information, Warning, Error, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct FileSystemHttpLogsConfig { #[serde(rename = "retentionInMb", default, skip_serializing_if = "Option::is_none")] pub retention_in_mb: Option<i32>, #[serde(rename = "retentionInDays", default, skip_serializing_if = "Option::is_none")] pub retention_in_days: Option<i32>, #[serde(default, skip_serializing_if = "Option::is_none")] pub enabled: Option<bool>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct FunctionEnvelope { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<function_envelope::Properties>, } pub mod function_envelope { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "functionAppId", default, skip_serializing_if = "Option::is_none")] pub 
function_app_id: Option<String>, #[serde(rename = "scriptRootPathHref", default, skip_serializing_if = "Option::is_none")] pub script_root_path_href: Option<String>, #[serde(rename = "scriptHref", default, skip_serializing_if = "Option::is_none")] pub script_href: Option<String>, #[serde(rename = "configHref", default, skip_serializing_if = "Option::is_none")] pub config_href: Option<String>, #[serde(rename = "secretsFileHref", default, skip_serializing_if = "Option::is_none")] pub secrets_file_href: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub href: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub config: Option<serde_json::Value>, #[serde(default, skip_serializing_if = "Option::is_none")] pub files: Option<serde_json::Value>, #[serde(rename = "testData", default, skip_serializing_if = "Option::is_none")] pub test_data: Option<String>, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct FunctionEnvelopeCollection { pub value: Vec<FunctionEnvelope>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct FunctionSecrets { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<function_secrets::Properties>, } pub mod function_secrets { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub key: Option<String>, #[serde(rename = "triggerUrl", default, skip_serializing_if = "Option::is_none")] pub trigger_url: Option<String>, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct HostNameBinding { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: 
Option<host_name_binding::Properties>, } pub mod host_name_binding { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "siteName", default, skip_serializing_if = "Option::is_none")] pub site_name: Option<String>, #[serde(rename = "domainId", default, skip_serializing_if = "Option::is_none")] pub domain_id: Option<String>, #[serde(rename = "azureResourceName", default, skip_serializing_if = "Option::is_none")] pub azure_resource_name: Option<String>, #[serde(rename = "azureResourceType", default, skip_serializing_if = "Option::is_none")] pub azure_resource_type: Option<properties::AzureResourceType>, #[serde(rename = "customHostNameDnsRecordType", default, skip_serializing_if = "Option::is_none")] pub custom_host_name_dns_record_type: Option<properties::CustomHostNameDnsRecordType>, #[serde(rename = "hostNameType", default, skip_serializing_if = "Option::is_none")] pub host_name_type: Option<properties::HostNameType>, #[serde(rename = "sslState", default, skip_serializing_if = "Option::is_none")] pub ssl_state: Option<properties::SslState>, #[serde(default, skip_serializing_if = "Option::is_none")] pub thumbprint: Option<String>, #[serde(rename = "virtualIP", default, skip_serializing_if = "Option::is_none")] pub virtual_ip: Option<String>, } pub mod properties { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum AzureResourceType { Website, TrafficManager, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum CustomHostNameDnsRecordType { CName, A, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum HostNameType { Verified, Managed, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum SslState { Disabled, SniEnabled, IpBasedEnabled, } } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct HostNameBindingCollection { pub value: Vec<HostNameBinding>, #[serde(rename = "nextLink", default, 
skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct HttpLogsConfig { #[serde(rename = "fileSystem", default, skip_serializing_if = "Option::is_none")] pub file_system: Option<FileSystemHttpLogsConfig>, #[serde(rename = "azureBlobStorage", default, skip_serializing_if = "Option::is_none")] pub azure_blob_storage: Option<AzureBlobStorageHttpLogsConfig>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct MsDeploy { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<MsDeployCore>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct MsDeployCore { #[serde(rename = "packageUri", default, skip_serializing_if = "Option::is_none")] pub package_uri: Option<String>, #[serde(rename = "connectionString", default, skip_serializing_if = "Option::is_none")] pub connection_string: Option<String>, #[serde(rename = "dbType", default, skip_serializing_if = "Option::is_none")] pub db_type: Option<String>, #[serde(rename = "setParametersXmlFileUri", default, skip_serializing_if = "Option::is_none")] pub set_parameters_xml_file_uri: Option<String>, #[serde(rename = "setParameters", default, skip_serializing_if = "Option::is_none")] pub set_parameters: Option<serde_json::Value>, #[serde(rename = "skipAppData", default, skip_serializing_if = "Option::is_none")] pub skip_app_data: Option<bool>, #[serde(rename = "appOffline", default, skip_serializing_if = "Option::is_none")] pub app_offline: Option<bool>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct MsDeployLog { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<ms_deploy_log::Properties>, } pub mod ms_deploy_log { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct 
Properties { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub entries: Vec<MsDeployLogEntry>, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct MsDeployLogEntry { #[serde(default, skip_serializing_if = "Option::is_none")] pub time: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<ms_deploy_log_entry::Type>, #[serde(default, skip_serializing_if = "Option::is_none")] pub message: Option<String>, } pub mod ms_deploy_log_entry { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum Type { Message, Warning, Error, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct MsDeployStatus { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<ms_deploy_status::Properties>, } pub mod ms_deploy_status { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub deployer: Option<String>, #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<properties::ProvisioningState>, #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>, #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")] pub end_time: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub complete: Option<bool>, } pub mod properties { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum ProvisioningState { #[serde(rename = "accepted")] Accepted, #[serde(rename = "running")] Running, #[serde(rename = "succeeded")] Succeeded, #[serde(rename = "failed")] Failed, #[serde(rename = "canceled")] Canceled, } } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct MigrateMySqlRequest 
{ #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<migrate_my_sql_request::Properties>, } pub mod migrate_my_sql_request { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "connectionString")] pub connection_string: String, #[serde(rename = "migrationType")] pub migration_type: properties::MigrationType, } pub mod properties { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum MigrationType { LocalToRemote, RemoteToLocal, } } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct MigrateMySqlStatus { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<migrate_my_sql_status::Properties>, } pub mod migrate_my_sql_status { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "migrationOperationStatus", default, skip_serializing_if = "Option::is_none")] pub migration_operation_status: Option<properties::MigrationOperationStatus>, #[serde(rename = "operationId", default, skip_serializing_if = "Option::is_none")] pub operation_id: Option<String>, #[serde(rename = "localMySqlEnabled", default, skip_serializing_if = "Option::is_none")] pub local_my_sql_enabled: Option<bool>, } pub mod properties { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum MigrationOperationStatus { InProgress, Failed, Succeeded, TimedOut, Created, } } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct NetworkFeatures { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<network_features::Properties>, } pub mod network_features { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 
pub struct Properties { #[serde(rename = "virtualNetworkName", default, skip_serializing_if = "Option::is_none")] pub virtual_network_name: Option<String>, #[serde(rename = "virtualNetworkConnection", default, skip_serializing_if = "Option::is_none")] pub virtual_network_connection: Option<VnetInfo>, #[serde(rename = "hybridConnections", default, skip_serializing_if = "Vec::is_empty")] pub hybrid_connections: Vec<RelayServiceConnectionEntity>, #[serde(rename = "hybridConnectionsV2", default, skip_serializing_if = "Vec::is_empty")] pub hybrid_connections_v2: Vec<HybridConnection>, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PerfMonCounterCollection { pub value: Vec<PerfMonResponse>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PerfMonResponse { #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub message: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub data: Option<PerfMonSet>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PerfMonSample { #[serde(default, skip_serializing_if = "Option::is_none")] pub time: Option<String>, #[serde(rename = "instanceName", default, skip_serializing_if = "Option::is_none")] pub instance_name: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub value: Option<f64>, #[serde(rename = "coreCount", default, skip_serializing_if = "Option::is_none")] pub core_count: Option<i32>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PerfMonSet { #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>, #[serde(rename = "endTime", default, 
skip_serializing_if = "Option::is_none")] pub end_time: Option<String>, #[serde(rename = "timeGrain", default, skip_serializing_if = "Option::is_none")] pub time_grain: Option<String>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub values: Vec<PerfMonSample>, }
// Premier add-on: a full ARM resource (flattens `Resource`) with an optional property bag.
// NOTE(review): generated serde model — field order and `rename` strings are part of the
// wire contract; do not reorder or "fix" spellings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PremierAddOn { #[serde(flatten)] pub resource: Resource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<premier_add_on::Properties>, }
// Property bag for `PremierAddOn`; camelCase wire names are mapped via `rename`.
pub mod premier_add_on { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub sku: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub product: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub vendor: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub location: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub tags: Option<serde_json::Value>, #[serde(rename = "marketplacePublisher", default, skip_serializing_if = "Option::is_none")] pub marketplace_publisher: Option<String>, #[serde(rename = "marketplaceOffer", default, skip_serializing_if = "Option::is_none")] pub marketplace_offer: Option<String>, } }
// Process information: ARM proxy-only resource with an optional property bag.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ProcessInfo { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<process_info::Properties>, }
// Property bag for `ProcessInfo`: process identity, profiling flags, memory/CPU counters,
// and nested thread/module/file-handle collections.
pub mod process_info { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<i32>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub href: Option<String>, #[serde(rename = "miniDump", default, skip_serializing_if = "Option::is_none")] pub mini_dump: Option<String>, #[serde(rename = "isProfileRunning", default, skip_serializing_if = "Option::is_none")] pub is_profile_running: Option<bool>, #[serde(rename = "isIisProfileRunning", default, skip_serializing_if = "Option::is_none")] pub is_iis_profile_running: Option<bool>, #[serde(rename = "iisProfileTimeoutInSeconds", default, skip_serializing_if = "Option::is_none")] pub iis_profile_timeout_in_seconds: Option<f64>, #[serde(default, skip_serializing_if = "Option::is_none")] pub parent: Option<String>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub children: Vec<String>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub threads: Vec<ProcessThreadInfo>, #[serde(rename = "openFileHandles", default, skip_serializing_if = "Vec::is_empty")] pub open_file_handles: Vec<String>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub modules: Vec<ProcessModuleInfo>, #[serde(rename = "fileName", default, skip_serializing_if = "Option::is_none")] pub file_name: Option<String>, #[serde(rename = "commandLine", default, skip_serializing_if = "Option::is_none")] pub command_line: Option<String>, #[serde(rename = "userName", default, skip_serializing_if = "Option::is_none")] pub user_name: Option<String>, #[serde(rename = "handleCount", default, skip_serializing_if = "Option::is_none")] pub handle_count: Option<i32>, #[serde(rename = "moduleCount", default, skip_serializing_if = "Option::is_none")] pub module_count: Option<i32>, #[serde(rename = "threadCount", default, skip_serializing_if = "Option::is_none")] pub thread_count: Option<i32>, #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>, #[serde(rename = "totalProcessorTime", default, skip_serializing_if = "Option::is_none")] pub total_processor_time: Option<String>,
// Per-process CPU-time and memory counters below mirror .NET `Process` counter names.
#[serde(rename = "userProcessorTime", default, skip_serializing_if = "Option::is_none")] pub user_processor_time: Option<String>, #[serde(rename = "privilegedProcessorTime", default, skip_serializing_if = "Option::is_none")] pub privileged_processor_time: Option<String>, #[serde(rename = "workingSet64", default, skip_serializing_if = "Option::is_none")] pub working_set64: Option<i64>, #[serde(rename = "peakWorkingSet64", default, skip_serializing_if = "Option::is_none")] pub peak_working_set64: Option<i64>, #[serde(rename = "privateMemorySize64", default, skip_serializing_if = "Option::is_none")] pub private_memory_size64: Option<i64>, #[serde(rename = "virtualMemorySize64", default, skip_serializing_if = "Option::is_none")] pub virtual_memory_size64: Option<i64>, #[serde(rename = "peakVirtualMemorySize64", default, skip_serializing_if = "Option::is_none")] pub peak_virtual_memory_size64: Option<i64>, #[serde(rename = "pagedSystemMemorySize64", default, skip_serializing_if = "Option::is_none")] pub paged_system_memory_size64: Option<i64>, #[serde(rename = "nonpagedSystemMemorySize64", default, skip_serializing_if = "Option::is_none")] pub nonpaged_system_memory_size64: Option<i64>, #[serde(rename = "pagedMemorySize64", default, skip_serializing_if = "Option::is_none")] pub paged_memory_size64: Option<i64>, #[serde(rename = "peakPagedMemorySize64", default, skip_serializing_if = "Option::is_none")] pub peak_paged_memory_size64: Option<i64>, #[serde(rename = "timeStamp", default, skip_serializing_if = "Option::is_none")] pub time_stamp: Option<String>, #[serde(rename = "environmentVariables", default, skip_serializing_if = "Option::is_none")] pub environment_variables: Option<serde_json::Value>, #[serde(rename = "isScmSite", default, skip_serializing_if = "Option::is_none")] pub is_scm_site: Option<bool>, #[serde(rename = "isWebJob", default, skip_serializing_if = "Option::is_none")] pub is_web_job: Option<bool>, #[serde(default, skip_serializing_if =
"Option::is_none")] pub description: Option<String>, } }
// Paged collection wrapper: `value` holds one page, `nextLink` the continuation URL.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ProcessInfoCollection { pub value: Vec<ProcessInfo>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, }
// Loaded-module information for a process: ARM proxy-only resource + optional property bag.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ProcessModuleInfo { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<process_module_info::Properties>, }
// Property bag for `ProcessModuleInfo`: file identity, version metadata, memory size.
pub mod process_module_info { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "baseAddress", default, skip_serializing_if = "Option::is_none")] pub base_address: Option<String>, #[serde(rename = "fileName", default, skip_serializing_if = "Option::is_none")] pub file_name: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub href: Option<String>, #[serde(rename = "filePath", default, skip_serializing_if = "Option::is_none")] pub file_path: Option<String>, #[serde(rename = "moduleMemorySize", default, skip_serializing_if = "Option::is_none")] pub module_memory_size: Option<i32>, #[serde(rename = "fileVersion", default, skip_serializing_if = "Option::is_none")] pub file_version: Option<String>, #[serde(rename = "fileDescription", default, skip_serializing_if = "Option::is_none")] pub file_description: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub product: Option<String>, #[serde(rename = "productVersion", default, skip_serializing_if = "Option::is_none")] pub product_version: Option<String>, #[serde(rename = "isDebug", default, skip_serializing_if = "Option::is_none")] pub is_debug: Option<bool>, #[serde(default, skip_serializing_if = "Option::is_none")] pub language: Option<String>, } }
// Paged collection of `ProcessModuleInfo`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ProcessModuleInfoCollection { pub value: Vec<ProcessModuleInfo>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, }
// Per-thread information for a process: ARM proxy-only resource + optional property bag.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ProcessThreadInfo { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<process_thread_info::Properties>, }
// Property bag for `ProcessThreadInfo`.
// NOTE(review): the wire name "priviledgedProcessorTime" is misspelled by the service API;
// the `rename` must keep that spelling or deserialization of the field breaks.
pub mod process_thread_info { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<i32>, #[serde(default, skip_serializing_if = "Option::is_none")] pub href: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub process: Option<String>, #[serde(rename = "startAddress", default, skip_serializing_if = "Option::is_none")] pub start_address: Option<String>, #[serde(rename = "currentPriority", default, skip_serializing_if = "Option::is_none")] pub current_priority: Option<i32>, #[serde(rename = "priorityLevel", default, skip_serializing_if = "Option::is_none")] pub priority_level: Option<String>, #[serde(rename = "basePriority", default, skip_serializing_if = "Option::is_none")] pub base_priority: Option<i32>, #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>, #[serde(rename = "totalProcessorTime", default, skip_serializing_if = "Option::is_none")] pub total_processor_time: Option<String>, #[serde(rename = "userProcessorTime", default, skip_serializing_if = "Option::is_none")] pub user_processor_time: Option<String>, #[serde(rename = "priviledgedProcessorTime", default, skip_serializing_if = "Option::is_none")] pub priviledged_processor_time: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub state: Option<String>, #[serde(rename = "waitReason", default, skip_serializing_if = "Option::is_none")] pub wait_reason: Option<String>, } }
// Paged collection of `ProcessThreadInfo`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ProcessThreadInfoCollection { pub value: Vec<ProcessThreadInfo>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, }
// Public certificate: ARM proxy-only resource + optional property bag.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PublicCertificate { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<public_certificate::Properties>, }
// Property bag for `PublicCertificate`: certificate blob, store location, thumbprint.
pub mod public_certificate { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub blob: Option<String>, #[serde(rename = "publicCertificateLocation", default, skip_serializing_if = "Option::is_none")] pub public_certificate_location: Option<properties::PublicCertificateLocation>, #[serde(default, skip_serializing_if = "Option::is_none")] pub thumbprint: Option<String>, } pub mod properties { use super::*;
// Certificate store location; variants serialize as their exact PascalCase names.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum PublicCertificateLocation { CurrentUserMy, LocalMachineMy, Unknown, } } }
// Paged collection of `PublicCertificate`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PublicCertificateCollection { pub value: Vec<PublicCertificate>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, }
// Hybrid Connection (relay service) entity: ARM proxy-only resource + optional property bag.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RelayServiceConnectionEntity { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<relay_service_connection_entity::Properties>, }
// Property bag for `RelayServiceConnectionEntity`: entity/resource names, connection
// strings, and target endpoint details.
pub mod relay_service_connection_entity { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "entityName", default, skip_serializing_if = "Option::is_none")] pub entity_name: Option<String>, #[serde(rename =
"entityConnectionString", default, skip_serializing_if = "Option::is_none")] pub entity_connection_string: Option<String>, #[serde(rename = "resourceType", default, skip_serializing_if = "Option::is_none")] pub resource_type: Option<String>, #[serde(rename = "resourceConnectionString", default, skip_serializing_if = "Option::is_none")] pub resource_connection_string: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub hostname: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub port: Option<i32>, #[serde(rename = "biztalkUri", default, skip_serializing_if = "Option::is_none")] pub biztalk_uri: Option<String>, } }
// Backup-restore request: ARM proxy-only resource + optional property bag.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestoreRequest { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<restore_request::Properties>, }
// Property bag for `RestoreRequest`. `storage_account_url` and `overwrite` carry no
// `skip_serializing_if`/`default`, so they are required on the wire.
pub mod restore_request { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "storageAccountUrl")] pub storage_account_url: String, #[serde(rename = "blobName", default, skip_serializing_if = "Option::is_none")] pub blob_name: Option<String>, pub overwrite: bool, #[serde(rename = "siteName", default, skip_serializing_if = "Option::is_none")] pub site_name: Option<String>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub databases: Vec<DatabaseBackupSetting>, #[serde(rename = "ignoreConflictingHostNames", default, skip_serializing_if = "Option::is_none")] pub ignore_conflicting_host_names: Option<bool>, #[serde(rename = "ignoreDatabases", default, skip_serializing_if = "Option::is_none")] pub ignore_databases: Option<bool>, #[serde(rename = "appServicePlan", default, skip_serializing_if = "Option::is_none")] pub app_service_plan: Option<String>, #[serde(rename = "operationType", default, skip_serializing_if = "Option::is_none")] pub operation_type: Option<properties::OperationType>, #[serde(rename = "adjustConnectionStrings", default, skip_serializing_if = "Option::is_none")] pub adjust_connection_strings: Option<bool>, #[serde(rename = "hostingEnvironment", default, skip_serializing_if = "Option::is_none")] pub hosting_environment: Option<String>, } pub mod properties { use super::*;
// Kind of restore operation; variants serialize as their exact PascalCase names.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum OperationType { Default, Clone, Relocation, Snapshot, } } }
// Response to a restore request; carries only the tracking `operationId`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestoreResponse { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<restore_response::Properties>, } pub mod restore_response { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "operationId", default, skip_serializing_if = "Option::is_none")] pub operation_id: Option<String>, } }
// App Service authentication/authorization ("Easy Auth") settings: proxy-only resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SiteAuthSettings { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<site_auth_settings::Properties>, }
// Property bag for `SiteAuthSettings`: global switches plus per-provider (AAD, Google,
// Facebook, Twitter, Microsoft account) client IDs, secrets and OAuth scopes.
pub mod site_auth_settings { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub enabled: Option<bool>, #[serde(rename = "runtimeVersion", default, skip_serializing_if = "Option::is_none")] pub runtime_version: Option<String>, #[serde(rename = "unauthenticatedClientAction", default, skip_serializing_if = "Option::is_none")] pub unauthenticated_client_action: Option<properties::UnauthenticatedClientAction>, #[serde(rename = "tokenStoreEnabled", default, skip_serializing_if = "Option::is_none")] pub token_store_enabled: Option<bool>, #[serde(rename = "allowedExternalRedirectUrls", default, skip_serializing_if = "Vec::is_empty")] pub allowed_external_redirect_urls: Vec<String>, #[serde(rename = "defaultProvider", default, skip_serializing_if = "Option::is_none")] pub default_provider: Option<properties::DefaultProvider>, #[serde(rename = "tokenRefreshExtensionHours", default, skip_serializing_if = "Option::is_none")] pub token_refresh_extension_hours: Option<f64>, #[serde(rename = "clientId", default, skip_serializing_if = "Option::is_none")] pub client_id: Option<String>, #[serde(rename = "clientSecret", default, skip_serializing_if = "Option::is_none")] pub client_secret: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub issuer: Option<String>, #[serde(rename = "allowedAudiences", default, skip_serializing_if = "Vec::is_empty")] pub allowed_audiences: Vec<String>, #[serde(rename = "additionalLoginParams", default, skip_serializing_if = "Vec::is_empty")] pub additional_login_params: Vec<String>, #[serde(rename = "googleClientId", default, skip_serializing_if = "Option::is_none")] pub google_client_id: Option<String>, #[serde(rename = "googleClientSecret", default, skip_serializing_if = "Option::is_none")] pub google_client_secret: Option<String>, #[serde(rename = "googleOAuthScopes", default, skip_serializing_if = "Vec::is_empty")] pub google_o_auth_scopes: Vec<String>, #[serde(rename = "facebookAppId", default, skip_serializing_if = "Option::is_none")] pub facebook_app_id: Option<String>, #[serde(rename = "facebookAppSecret", default, skip_serializing_if = "Option::is_none")] pub facebook_app_secret: Option<String>, #[serde(rename = "facebookOAuthScopes", default, skip_serializing_if = "Vec::is_empty")] pub facebook_o_auth_scopes: Vec<String>, #[serde(rename = "twitterConsumerKey", default, skip_serializing_if = "Option::is_none")] pub twitter_consumer_key: Option<String>, #[serde(rename = "twitterConsumerSecret", default, skip_serializing_if = "Option::is_none")] pub twitter_consumer_secret: Option<String>, #[serde(rename = "microsoftAccountClientId", default, skip_serializing_if = "Option::is_none")] pub microsoft_account_client_id: Option<String>, #[serde(rename = "microsoftAccountClientSecret", default, skip_serializing_if = "Option::is_none")] pub microsoft_account_client_secret: Option<String>, #[serde(rename = "microsoftAccountOAuthScopes", default, skip_serializing_if = "Vec::is_empty")] pub microsoft_account_o_auth_scopes: Vec<String>, } pub mod properties { use super::*;
// Behavior for unauthenticated requests.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum UnauthenticatedClientAction { RedirectToLoginPage, AllowAnonymous, }
// Identity provider used when multiple are configured.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum DefaultProvider { AzureActiveDirectory, Facebook, Google, MicrosoftAccount, Twitter, } } }
// Cloneability verdict for a site, with the feature lists that block or limit cloning.
// Unlike most models here, this is a bare payload (no flattened ARM resource).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SiteCloneability { #[serde(default, skip_serializing_if = "Option::is_none")] pub result: Option<site_cloneability::Result>, #[serde(rename = "blockingFeatures", default, skip_serializing_if = "Vec::is_empty")] pub blocking_features: Vec<SiteCloneabilityCriterion>, #[serde(rename = "unsupportedFeatures", default, skip_serializing_if = "Vec::is_empty")] pub unsupported_features: Vec<SiteCloneabilityCriterion>, #[serde(rename = "blockingCharacteristics", default, skip_serializing_if = "Vec::is_empty")] pub blocking_characteristics: Vec<SiteCloneabilityCriterion>, }
// NOTE(review): this enum is named `Result` and will shadow `std::result::Result` inside
// this module — generated code, intentional; refer to the std type by full path here.
pub mod site_cloneability { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum Result { Cloneable, PartiallyCloneable, NotCloneable, } }
// One named criterion contributing to a cloneability verdict.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SiteCloneabilityCriterion { #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub description: Option<String>, }
// Site configuration as an ARM resource; the payload type `SiteConfig` is defined elsewhere.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SiteConfigResource { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if
= "Option::is_none")] pub properties: Option<SiteConfig>, }
// Paged collection of `SiteConfigResource`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SiteConfigResourceCollection { pub value: Vec<SiteConfigResource>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, }
// Metadata about one point-in-time snapshot of a site's configuration.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SiteConfigurationSnapshotInfo { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<site_configuration_snapshot_info::Properties>, } pub mod site_configuration_snapshot_info { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub time: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<i32>, } }
// Paged collection of `SiteConfigurationSnapshotInfo`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SiteConfigurationSnapshotInfoCollection { pub value: Vec<SiteConfigurationSnapshotInfo>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, }
// Site extension descriptor: ARM proxy-only resource + optional property bag.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SiteExtensionInfo { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<site_extension_info::Properties>, }
// Property bag for `SiteExtensionInfo`: gallery metadata, URLs, version and install state.
pub mod site_extension_info { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub title: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<properties::Type>, #[serde(default, skip_serializing_if = "Option::is_none")] pub summary: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub version: Option<String>, #[serde(rename = "extensionUrl", default, skip_serializing_if = "Option::is_none")] pub extension_url: Option<String>, #[serde(rename = "projectUrl", default, skip_serializing_if = "Option::is_none")] pub project_url: Option<String>, #[serde(rename = "iconUrl", default, skip_serializing_if = "Option::is_none")] pub icon_url: Option<String>, #[serde(rename = "licenseUrl", default, skip_serializing_if = "Option::is_none")] pub license_url: Option<String>, #[serde(rename = "feedUrl", default, skip_serializing_if = "Option::is_none")] pub feed_url: Option<String>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub authors: Vec<String>, #[serde(rename = "installationArgs", default, skip_serializing_if = "Option::is_none")] pub installation_args: Option<String>, #[serde(rename = "publishedDateTime", default, skip_serializing_if = "Option::is_none")] pub published_date_time: Option<String>, #[serde(rename = "downloadCount", default, skip_serializing_if = "Option::is_none")] pub download_count: Option<i32>, #[serde(rename = "localIsLatestVersion", default, skip_serializing_if = "Option::is_none")] pub local_is_latest_version: Option<bool>, #[serde(rename = "localPath", default, skip_serializing_if = "Option::is_none")] pub local_path: Option<String>, #[serde(rename = "installedDateTime", default, skip_serializing_if = "Option::is_none")] pub installed_date_time: Option<String>, #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub comment: Option<String>, } pub mod properties { use super::*;
// Extension source; `type_` maps the reserved wire key "type" via `rename`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum Type { Gallery, WebRoot, } } }
// Paged collection of `SiteExtensionInfo`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SiteExtensionInfoCollection { pub value: Vec<SiteExtensionInfo>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, }
// A single instance (worker) of a site; only the instance name is exposed.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SiteInstance { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<site_instance::Properties>, } pub mod site_instance { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, } }
// Diagnostic-logging configuration for a site, grouping the four log categories.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SiteLogsConfig { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<site_logs_config::Properties>, } pub mod site_logs_config { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "applicationLogs", default, skip_serializing_if = "Option::is_none")] pub application_logs: Option<ApplicationLogsConfig>, #[serde(rename = "httpLogs", default, skip_serializing_if = "Option::is_none")] pub http_logs: Option<HttpLogsConfig>, #[serde(rename = "failedRequestsTracing", default, skip_serializing_if = "Option::is_none")] pub failed_requests_tracing: Option<EnabledConfig>, #[serde(rename = "detailedErrorMessages", default, skip_serializing_if = "Option::is_none")] pub detailed_error_messages: Option<EnabledConfig>, } }
// PATCH payload for a site: every field optional so callers send only what changes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SitePatchResource { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<site_patch_resource::Properties>, }
// Property bag for `SitePatchResource`: hostnames, plan linkage, state flags, quotas,
// clone/snapshot info and slot-swap status.
pub mod site_patch_resource { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if =
"Option::is_none")] pub state: Option<String>, #[serde(rename = "hostNames", default, skip_serializing_if = "Vec::is_empty")] pub host_names: Vec<String>, #[serde(rename = "repositorySiteName", default, skip_serializing_if = "Option::is_none")] pub repository_site_name: Option<String>, #[serde(rename = "usageState", default, skip_serializing_if = "Option::is_none")] pub usage_state: Option<properties::UsageState>, #[serde(default, skip_serializing_if = "Option::is_none")] pub enabled: Option<bool>, #[serde(rename = "enabledHostNames", default, skip_serializing_if = "Vec::is_empty")] pub enabled_host_names: Vec<String>, #[serde(rename = "availabilityState", default, skip_serializing_if = "Option::is_none")] pub availability_state: Option<properties::AvailabilityState>, #[serde(rename = "hostNameSslStates", default, skip_serializing_if = "Vec::is_empty")] pub host_name_ssl_states: Vec<HostNameSslState>, #[serde(rename = "serverFarmId", default, skip_serializing_if = "Option::is_none")] pub server_farm_id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub reserved: Option<bool>, #[serde(rename = "lastModifiedTimeUtc", default, skip_serializing_if = "Option::is_none")] pub last_modified_time_utc: Option<String>, #[serde(rename = "siteConfig", default, skip_serializing_if = "Option::is_none")] pub site_config: Option<SiteConfig>, #[serde(rename = "trafficManagerHostNames", default, skip_serializing_if = "Vec::is_empty")] pub traffic_manager_host_names: Vec<String>, #[serde(rename = "scmSiteAlsoStopped", default, skip_serializing_if = "Option::is_none")] pub scm_site_also_stopped: Option<bool>, #[serde(rename = "targetSwapSlot", default, skip_serializing_if = "Option::is_none")] pub target_swap_slot: Option<String>, #[serde(rename = "hostingEnvironmentProfile", default, skip_serializing_if = "Option::is_none")] pub hosting_environment_profile: Option<HostingEnvironmentProfile>, #[serde(rename = "clientAffinityEnabled", default, skip_serializing_if = "Option::is_none")] pub client_affinity_enabled: Option<bool>, #[serde(rename = "clientCertEnabled", default, skip_serializing_if = "Option::is_none")] pub client_cert_enabled: Option<bool>, #[serde(rename = "hostNamesDisabled", default, skip_serializing_if = "Option::is_none")] pub host_names_disabled: Option<bool>, #[serde(rename = "outboundIpAddresses", default, skip_serializing_if = "Option::is_none")] pub outbound_ip_addresses: Option<String>, #[serde(rename = "possibleOutboundIpAddresses", default, skip_serializing_if = "Option::is_none")] pub possible_outbound_ip_addresses: Option<String>, #[serde(rename = "containerSize", default, skip_serializing_if = "Option::is_none")] pub container_size: Option<i32>, #[serde(rename = "dailyMemoryTimeQuota", default, skip_serializing_if = "Option::is_none")] pub daily_memory_time_quota: Option<i32>, #[serde(rename = "suspendedTill", default, skip_serializing_if = "Option::is_none")] pub suspended_till: Option<String>, #[serde(rename = "maxNumberOfWorkers", default, skip_serializing_if = "Option::is_none")] pub max_number_of_workers: Option<i32>, #[serde(rename = "cloningInfo", default, skip_serializing_if = "Option::is_none")] pub cloning_info: Option<CloningInfo>, #[serde(rename = "snapshotInfo", default, skip_serializing_if = "Option::is_none")] pub snapshot_info: Option<SnapshotRecoveryRequest>, #[serde(rename = "resourceGroup", default, skip_serializing_if = "Option::is_none")] pub resource_group: Option<String>, #[serde(rename = "isDefaultContainer", default, skip_serializing_if = "Option::is_none")] pub is_default_container: Option<bool>, #[serde(rename = "defaultHostName", default, skip_serializing_if = "Option::is_none")] pub default_host_name: Option<String>, #[serde(rename = "slotSwapStatus", default, skip_serializing_if = "Option::is_none")] pub slot_swap_status: Option<SlotSwapStatus>, #[serde(rename = "httpsOnly", default, skip_serializing_if = "Option::is_none")] pub https_only: Option<bool>, } pub mod properties { use super::*;
// Quota usage state of the site.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum UsageState { Normal, Exceeded, }
// Availability state reported by the management service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum AvailabilityState { Normal, Limited, DisasterRecoveryMode, } } }
// PHP error-logging flags; values arrive as strings from the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SitePhpErrorLogFlag { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<site_php_error_log_flag::Properties>, } pub mod site_php_error_log_flag { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "localLogErrors", default, skip_serializing_if = "Option::is_none")] pub local_log_errors: Option<String>, #[serde(rename = "masterLogErrors", default, skip_serializing_if = "Option::is_none")] pub master_log_errors: Option<String>, #[serde(rename = "localLogErrorsMaxLength", default, skip_serializing_if = "Option::is_none")] pub local_log_errors_max_length: Option<String>, #[serde(rename = "masterLogErrorsMaxLength", default, skip_serializing_if = "Option::is_none")] pub master_log_errors_max_length: Option<String>, } }
// Source-control binding for a site: repo URL, branch and integration flags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SiteSourceControl { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<site_source_control::Properties>, } pub mod site_source_control { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "repoUrl", default, skip_serializing_if = "Option::is_none")] pub repo_url: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub branch: Option<String>, #[serde(rename = "isManualIntegration", default, skip_serializing_if = "Option::is_none")] pub is_manual_integration: Option<bool>, #[serde(rename = "deploymentRollbackEnabled", default, skip_serializing_if = "Option::is_none")] pub deployment_rollback_enabled: Option<bool>, #[serde(rename = "isMercurial", default, skip_serializing_if = "Option::is_none")] pub is_mercurial: Option<bool>, } }
// Names of settings/connection strings that stick to a slot during swaps (bare payload).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SlotConfigNames { #[serde(rename = "connectionStringNames", default, skip_serializing_if = "Vec::is_empty")] pub connection_string_names: Vec<String>, #[serde(rename = "appSettingNames", default, skip_serializing_if = "Vec::is_empty")] pub app_setting_names: Vec<String>, }
// ARM wrapper carrying `SlotConfigNames` as its `properties` payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SlotConfigNamesResource { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<SlotConfigNames>, }
// One configuration difference between two deployment slots.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SlotDifference { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<slot_difference::Properties>, }
// Property bag for `SlotDifference`: setting identity plus its value in each slot.
pub mod slot_difference { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, #[serde(rename = "settingType", default, skip_serializing_if = "Option::is_none")] pub setting_type: Option<String>, #[serde(rename = "diffRule", default, skip_serializing_if = "Option::is_none")] pub diff_rule: Option<String>, #[serde(rename = "settingName", default, skip_serializing_if = "Option::is_none")] pub setting_name: Option<String>, #[serde(rename = "valueInCurrentSlot", default, skip_serializing_if = "Option::is_none")] pub value_in_current_slot: Option<String>, #[serde(rename = "valueInTargetSlot", default, skip_serializing_if = "Option::is_none")] pub value_in_target_slot: Option<String>, #[serde(default, skip_serializing_if =
"Option::is_none")] pub description: Option<String>, } }
// Paged collection of `SlotDifference`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SlotDifferenceCollection { pub value: Vec<SlotDifference>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, }
// A restorable site snapshot; only its timestamp string is exposed.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Snapshot { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<snapshot::Properties>, } pub mod snapshot { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub time: Option<String>, } }
// Paged collection of `Snapshot`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SnapshotCollection { pub value: Vec<Snapshot>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, }
// Request payload for migrating site content to Azure Files.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct StorageMigrationOptions { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<storage_migration_options::Properties>, }
// Property bag for `StorageMigrationOptions`. The connection string and share name carry
// no `default`/`skip_serializing_if`, so both are required on the wire.
pub mod storage_migration_options { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "azurefilesConnectionString")] pub azurefiles_connection_string: String, #[serde(rename = "azurefilesShare")] pub azurefiles_share: String, #[serde(rename = "switchSiteAfterMigration", default, skip_serializing_if = "Option::is_none")] pub switch_site_after_migration: Option<bool>, #[serde(rename = "blockWriteAccessToSite", default, skip_serializing_if = "Option::is_none")] pub block_write_access_to_site: Option<bool>, } }
// Response to a storage migration request; carries only the tracking `operationId`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct StorageMigrationResponse { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<storage_migration_response::Properties>, } pub mod storage_migration_response { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "operationId", default, skip_serializing_if = "Option::is_none")] pub operation_id: Option<String>, } }
// Free-form string map resource (e.g. app settings); `properties` is an untyped JSON object.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct StringDictionary { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<serde_json::Value>, }
// Run history for a triggered WebJob: a list of past runs.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct TriggeredJobHistory { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<triggered_job_history::Properties>, } pub mod triggered_job_history { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "triggeredJobRuns", default, skip_serializing_if = "Vec::is_empty")] pub triggered_job_runs: Vec<TriggeredJobRun>, } }
// Paged collection of `TriggeredJobHistory`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct TriggeredJobHistoryCollection { pub value: Vec<TriggeredJobHistory>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, }
// One run of a triggered WebJob: timing, outcome and output/error URLs.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct TriggeredJobRun { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<triggered_job_run::Properties>, }
// Property bag for `TriggeredJobRun`.
pub mod triggered_job_run { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<properties::Status>, #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>, #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")] pub end_time: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub duration: Option<String>, #[serde(rename = "outputUrl", default, skip_serializing_if = "Option::is_none")] pub output_url: Option<String>, #[serde(rename = "errorUrl", default, skip_serializing_if = "Option::is_none")] pub error_url: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub url: Option<String>, #[serde(rename = "jobName", default, skip_serializing_if = "Option::is_none")] pub job_name: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub trigger: Option<String>, } pub mod properties { use super::*;
// Outcome of a run; variants serialize as their exact PascalCase names.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum Status { Success, Failed, Error, } } }
// A triggered (on-demand/scheduled) WebJob definition.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct TriggeredWebJob { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<triggered_web_job::Properties>, }
// Property bag for `TriggeredWebJob`: latest run, log URLs, run command and job settings.
pub mod triggered_web_job { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "latestRun", default, skip_serializing_if = "Option::is_none")] pub latest_run: Option<TriggeredJobRun>, #[serde(rename = "historyUrl", default, skip_serializing_if = "Option::is_none")] pub history_url: Option<String>, #[serde(rename = "schedulerLogsUrl", default, skip_serializing_if = "Option::is_none")] pub scheduler_logs_url: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "runCommand", default, skip_serializing_if = "Option::is_none")] pub run_command: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub url: Option<String>, #[serde(rename = "extraInfoUrl", default, skip_serializing_if = "Option::is_none")] pub extra_info_url: Option<String>, #[serde(rename = "jobType", default, skip_serializing_if = "Option::is_none")] pub job_type: Option<properties::JobType>, #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<String>, #[serde(rename = "usingSdk", default, skip_serializing_if = "Option::is_none")] pub using_sdk: Option<bool>, #[serde(default, skip_serializing_if = "Option::is_none")] pub settings: Option<serde_json::Value>, } pub mod properties { use super::*;
// WebJob kind: continuously running vs. triggered on demand/schedule.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum JobType { Continuous, Triggered, } } }
// Paged collection of `TriggeredWebJob`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct TriggeredWebJobCollection { pub value: Vec<TriggeredWebJob>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, }
// Paged collection of `SiteInstance` (instances of a web app).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WebAppInstanceCollection { pub value: Vec<SiteInstance>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, }
// Generic WebJob definition (continuous or triggered).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WebJob { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<web_job::Properties>, }
// Property bag for `WebJob` (definition continues past this chunk).
pub mod web_job { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "runCommand", default, skip_serializing_if = "Option::is_none")] pub run_command: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub url: Option<String>, #[serde(rename = "extraInfoUrl", default, skip_serializing_if =
"Option::is_none")] pub extra_info_url: Option<String>, #[serde(rename = "jobType", default, skip_serializing_if = "Option::is_none")] pub job_type: Option<properties::JobType>, #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<String>, #[serde(rename = "usingSdk", default, skip_serializing_if = "Option::is_none")] pub using_sdk: Option<bool>, #[serde(default, skip_serializing_if = "Option::is_none")] pub settings: Option<serde_json::Value>, } pub mod properties { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum JobType { Continuous, Triggered, } } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WebJobCollection { pub value: Vec<WebJob>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WebAppCollection { pub value: Vec<Site>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Site { #[serde(flatten)] pub resource: Resource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<site::Properties>, #[serde(default, skip_serializing_if = "Option::is_none")] pub identity: Option<ManagedServiceIdentity>, } pub mod site { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub state: Option<String>, #[serde(rename = "hostNames", default, skip_serializing_if = "Vec::is_empty")] pub host_names: Vec<String>, #[serde(rename = "repositorySiteName", default, skip_serializing_if = "Option::is_none")] pub repository_site_name: Option<String>, #[serde(rename = "usageState", default, skip_serializing_if = "Option::is_none")] pub usage_state: Option<properties::UsageState>, #[serde(default, skip_serializing_if 
= "Option::is_none")] pub enabled: Option<bool>, #[serde(rename = "enabledHostNames", default, skip_serializing_if = "Vec::is_empty")] pub enabled_host_names: Vec<String>, #[serde(rename = "availabilityState", default, skip_serializing_if = "Option::is_none")] pub availability_state: Option<properties::AvailabilityState>, #[serde(rename = "hostNameSslStates", default, skip_serializing_if = "Vec::is_empty")] pub host_name_ssl_states: Vec<HostNameSslState>, #[serde(rename = "serverFarmId", default, skip_serializing_if = "Option::is_none")] pub server_farm_id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub reserved: Option<bool>, #[serde(rename = "lastModifiedTimeUtc", default, skip_serializing_if = "Option::is_none")] pub last_modified_time_utc: Option<String>, #[serde(rename = "siteConfig", default, skip_serializing_if = "Option::is_none")] pub site_config: Option<SiteConfig>, #[serde(rename = "trafficManagerHostNames", default, skip_serializing_if = "Vec::is_empty")] pub traffic_manager_host_names: Vec<String>, #[serde(rename = "scmSiteAlsoStopped", default, skip_serializing_if = "Option::is_none")] pub scm_site_also_stopped: Option<bool>, #[serde(rename = "targetSwapSlot", default, skip_serializing_if = "Option::is_none")] pub target_swap_slot: Option<String>, #[serde(rename = "hostingEnvironmentProfile", default, skip_serializing_if = "Option::is_none")] pub hosting_environment_profile: Option<HostingEnvironmentProfile>, #[serde(rename = "clientAffinityEnabled", default, skip_serializing_if = "Option::is_none")] pub client_affinity_enabled: Option<bool>, #[serde(rename = "clientCertEnabled", default, skip_serializing_if = "Option::is_none")] pub client_cert_enabled: Option<bool>, #[serde(rename = "hostNamesDisabled", default, skip_serializing_if = "Option::is_none")] pub host_names_disabled: Option<bool>, #[serde(rename = "outboundIpAddresses", default, skip_serializing_if = "Option::is_none")] pub outbound_ip_addresses: 
Option<String>, #[serde(rename = "possibleOutboundIpAddresses", default, skip_serializing_if = "Option::is_none")] pub possible_outbound_ip_addresses: Option<String>, #[serde(rename = "containerSize", default, skip_serializing_if = "Option::is_none")] pub container_size: Option<i32>, #[serde(rename = "dailyMemoryTimeQuota", default, skip_serializing_if = "Option::is_none")] pub daily_memory_time_quota: Option<i32>, #[serde(rename = "suspendedTill", default, skip_serializing_if = "Option::is_none")] pub suspended_till: Option<String>, #[serde(rename = "maxNumberOfWorkers", default, skip_serializing_if = "Option::is_none")] pub max_number_of_workers: Option<i32>, #[serde(rename = "cloningInfo", default, skip_serializing_if = "Option::is_none")] pub cloning_info: Option<CloningInfo>, #[serde(rename = "snapshotInfo", default, skip_serializing_if = "Option::is_none")] pub snapshot_info: Option<SnapshotRecoveryRequest>, #[serde(rename = "resourceGroup", default, skip_serializing_if = "Option::is_none")] pub resource_group: Option<String>, #[serde(rename = "isDefaultContainer", default, skip_serializing_if = "Option::is_none")] pub is_default_container: Option<bool>, #[serde(rename = "defaultHostName", default, skip_serializing_if = "Option::is_none")] pub default_host_name: Option<String>, #[serde(rename = "slotSwapStatus", default, skip_serializing_if = "Option::is_none")] pub slot_swap_status: Option<SlotSwapStatus>, #[serde(rename = "httpsOnly", default, skip_serializing_if = "Option::is_none")] pub https_only: Option<bool>, } pub mod properties { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum UsageState { Normal, Exceeded, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum AvailabilityState { Normal, Limited, DisasterRecoveryMode, } } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct HostNameSslState { #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, 
#[serde(rename = "sslState", default, skip_serializing_if = "Option::is_none")] pub ssl_state: Option<host_name_ssl_state::SslState>, #[serde(rename = "virtualIP", default, skip_serializing_if = "Option::is_none")] pub virtual_ip: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub thumbprint: Option<String>, #[serde(rename = "toUpdate", default, skip_serializing_if = "Option::is_none")] pub to_update: Option<bool>, #[serde(rename = "hostType", default, skip_serializing_if = "Option::is_none")] pub host_type: Option<host_name_ssl_state::HostType>, } pub mod host_name_ssl_state { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum SslState { Disabled, SniEnabled, IpBasedEnabled, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum HostType { Standard, Repository, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SiteConfig { #[serde(rename = "numberOfWorkers", default, skip_serializing_if = "Option::is_none")] pub number_of_workers: Option<i32>, #[serde(rename = "defaultDocuments", default, skip_serializing_if = "Vec::is_empty")] pub default_documents: Vec<String>, #[serde(rename = "netFrameworkVersion", default, skip_serializing_if = "Option::is_none")] pub net_framework_version: Option<String>, #[serde(rename = "phpVersion", default, skip_serializing_if = "Option::is_none")] pub php_version: Option<String>, #[serde(rename = "pythonVersion", default, skip_serializing_if = "Option::is_none")] pub python_version: Option<String>, #[serde(rename = "nodeVersion", default, skip_serializing_if = "Option::is_none")] pub node_version: Option<String>, #[serde(rename = "linuxFxVersion", default, skip_serializing_if = "Option::is_none")] pub linux_fx_version: Option<String>, #[serde(rename = "requestTracingEnabled", default, skip_serializing_if = "Option::is_none")] pub request_tracing_enabled: Option<bool>, #[serde(rename = "requestTracingExpirationTime", default, 
skip_serializing_if = "Option::is_none")] pub request_tracing_expiration_time: Option<String>, #[serde(rename = "remoteDebuggingEnabled", default, skip_serializing_if = "Option::is_none")] pub remote_debugging_enabled: Option<bool>, #[serde(rename = "remoteDebuggingVersion", default, skip_serializing_if = "Option::is_none")] pub remote_debugging_version: Option<String>, #[serde(rename = "httpLoggingEnabled", default, skip_serializing_if = "Option::is_none")] pub http_logging_enabled: Option<bool>, #[serde(rename = "logsDirectorySizeLimit", default, skip_serializing_if = "Option::is_none")] pub logs_directory_size_limit: Option<i32>, #[serde(rename = "detailedErrorLoggingEnabled", default, skip_serializing_if = "Option::is_none")] pub detailed_error_logging_enabled: Option<bool>, #[serde(rename = "publishingUsername", default, skip_serializing_if = "Option::is_none")] pub publishing_username: Option<String>, #[serde(rename = "appSettings", default, skip_serializing_if = "Vec::is_empty")] pub app_settings: Vec<NameValuePair>, #[serde(rename = "connectionStrings", default, skip_serializing_if = "Vec::is_empty")] pub connection_strings: Vec<ConnStringInfo>, #[serde(rename = "machineKey", default, skip_serializing_if = "Option::is_none")] pub machine_key: Option<SiteMachineKey>, #[serde(rename = "handlerMappings", default, skip_serializing_if = "Vec::is_empty")] pub handler_mappings: Vec<HandlerMapping>, #[serde(rename = "documentRoot", default, skip_serializing_if = "Option::is_none")] pub document_root: Option<String>, #[serde(rename = "scmType", default, skip_serializing_if = "Option::is_none")] pub scm_type: Option<site_config::ScmType>, #[serde(rename = "use32BitWorkerProcess", default, skip_serializing_if = "Option::is_none")] pub use32_bit_worker_process: Option<bool>, #[serde(rename = "webSocketsEnabled", default, skip_serializing_if = "Option::is_none")] pub web_sockets_enabled: Option<bool>, #[serde(rename = "alwaysOn", default, skip_serializing_if = 
"Option::is_none")] pub always_on: Option<bool>, #[serde(rename = "javaVersion", default, skip_serializing_if = "Option::is_none")] pub java_version: Option<String>, #[serde(rename = "javaContainer", default, skip_serializing_if = "Option::is_none")] pub java_container: Option<String>, #[serde(rename = "javaContainerVersion", default, skip_serializing_if = "Option::is_none")] pub java_container_version: Option<String>, #[serde(rename = "appCommandLine", default, skip_serializing_if = "Option::is_none")] pub app_command_line: Option<String>, #[serde(rename = "managedPipelineMode", default, skip_serializing_if = "Option::is_none")] pub managed_pipeline_mode: Option<site_config::ManagedPipelineMode>, #[serde(rename = "virtualApplications", default, skip_serializing_if = "Vec::is_empty")] pub virtual_applications: Vec<VirtualApplication>, #[serde(rename = "loadBalancing", default, skip_serializing_if = "Option::is_none")] pub load_balancing: Option<site_config::LoadBalancing>, #[serde(default, skip_serializing_if = "Option::is_none")] pub experiments: Option<Experiments>, #[serde(default, skip_serializing_if = "Option::is_none")] pub limits: Option<SiteLimits>, #[serde(rename = "autoHealEnabled", default, skip_serializing_if = "Option::is_none")] pub auto_heal_enabled: Option<bool>, #[serde(rename = "autoHealRules", default, skip_serializing_if = "Option::is_none")] pub auto_heal_rules: Option<AutoHealRules>, #[serde(rename = "tracingOptions", default, skip_serializing_if = "Option::is_none")] pub tracing_options: Option<String>, #[serde(rename = "vnetName", default, skip_serializing_if = "Option::is_none")] pub vnet_name: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub cors: Option<CorsSettings>, #[serde(default, skip_serializing_if = "Option::is_none")] pub push: Option<PushSettings>, #[serde(rename = "apiDefinition", default, skip_serializing_if = "Option::is_none")] pub api_definition: Option<ApiDefinitionInfo>, #[serde(rename = 
"autoSwapSlotName", default, skip_serializing_if = "Option::is_none")] pub auto_swap_slot_name: Option<String>, #[serde(rename = "localMySqlEnabled", default, skip_serializing_if = "Option::is_none")] pub local_my_sql_enabled: Option<bool>, #[serde(rename = "ipSecurityRestrictions", default, skip_serializing_if = "Vec::is_empty")] pub ip_security_restrictions: Vec<IpSecurityRestriction>, #[serde(rename = "http20Enabled", default, skip_serializing_if = "Option::is_none")] pub http20_enabled: Option<bool>, #[serde(rename = "minTlsVersion", default, skip_serializing_if = "Option::is_none")] pub min_tls_version: Option<site_config::MinTlsVersion>, } pub mod site_config { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum ScmType { None, Dropbox, Tfs, LocalGit, GitHub, CodePlexGit, CodePlexHg, BitbucketGit, BitbucketHg, ExternalGit, ExternalHg, OneDrive, #[serde(rename = "VSO")] Vso, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum ManagedPipelineMode { Integrated, Classic, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum LoadBalancing { WeightedRoundRobin, LeastRequests, LeastResponseTime, WeightedTotalTraffic, RequestHash, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum MinTlsVersion { #[serde(rename = "1.0")] N1_0, #[serde(rename = "1.1")] N1_1, #[serde(rename = "1.2")] N1_2, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct NameValuePair { #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub value: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ConnStringInfo { #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "connectionString", default, skip_serializing_if = "Option::is_none")] pub connection_string: Option<String>, #[serde(rename = "type", default, 
skip_serializing_if = "Option::is_none")] pub type_: Option<conn_string_info::Type>, } pub mod conn_string_info { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum Type { MySql, #[serde(rename = "SQLServer")] SqlServer, #[serde(rename = "SQLAzure")] SqlAzure, Custom, NotificationHub, ServiceBus, EventHub, ApiHub, DocDb, RedisCache, #[serde(rename = "PostgreSQL")] PostgreSql, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SiteMachineKey { #[serde(default, skip_serializing_if = "Option::is_none")] pub validation: Option<String>, #[serde(rename = "validationKey", default, skip_serializing_if = "Option::is_none")] pub validation_key: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub decryption: Option<String>, #[serde(rename = "decryptionKey", default, skip_serializing_if = "Option::is_none")] pub decryption_key: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct HandlerMapping { #[serde(default, skip_serializing_if = "Option::is_none")] pub extension: Option<String>, #[serde(rename = "scriptProcessor", default, skip_serializing_if = "Option::is_none")] pub script_processor: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub arguments: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct VirtualApplication { #[serde(rename = "virtualPath", default, skip_serializing_if = "Option::is_none")] pub virtual_path: Option<String>, #[serde(rename = "physicalPath", default, skip_serializing_if = "Option::is_none")] pub physical_path: Option<String>, #[serde(rename = "preloadEnabled", default, skip_serializing_if = "Option::is_none")] pub preload_enabled: Option<bool>, #[serde(rename = "virtualDirectories", default, skip_serializing_if = "Vec::is_empty")] pub virtual_directories: Vec<VirtualDirectory>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct VirtualDirectory { 
#[serde(rename = "virtualPath", default, skip_serializing_if = "Option::is_none")] pub virtual_path: Option<String>, #[serde(rename = "physicalPath", default, skip_serializing_if = "Option::is_none")] pub physical_path: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Experiments { #[serde(rename = "rampUpRules", default, skip_serializing_if = "Vec::is_empty")] pub ramp_up_rules: Vec<RampUpRule>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RampUpRule { #[serde(rename = "actionHostName", default, skip_serializing_if = "Option::is_none")] pub action_host_name: Option<String>, #[serde(rename = "reroutePercentage", default, skip_serializing_if = "Option::is_none")] pub reroute_percentage: Option<f64>, #[serde(rename = "changeStep", default, skip_serializing_if = "Option::is_none")] pub change_step: Option<f64>, #[serde(rename = "changeIntervalInMinutes", default, skip_serializing_if = "Option::is_none")] pub change_interval_in_minutes: Option<i32>, #[serde(rename = "minReroutePercentage", default, skip_serializing_if = "Option::is_none")] pub min_reroute_percentage: Option<f64>, #[serde(rename = "maxReroutePercentage", default, skip_serializing_if = "Option::is_none")] pub max_reroute_percentage: Option<f64>, #[serde(rename = "changeDecisionCallbackUrl", default, skip_serializing_if = "Option::is_none")] pub change_decision_callback_url: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SiteLimits { #[serde(rename = "maxPercentageCpu", default, skip_serializing_if = "Option::is_none")] pub max_percentage_cpu: Option<f64>, #[serde(rename = "maxMemoryInMb", default, skip_serializing_if = "Option::is_none")] pub max_memory_in_mb: Option<i64>, #[serde(rename = "maxDiskSizeInMb", default, skip_serializing_if = "Option::is_none")] pub max_disk_size_in_mb: Option<i64>, } #[derive(Clone, 
Debug, PartialEq, Serialize, Deserialize)] pub struct AutoHealRules { #[serde(default, skip_serializing_if = "Option::is_none")] pub triggers: Option<AutoHealTriggers>, #[serde(default, skip_serializing_if = "Option::is_none")] pub actions: Option<AutoHealActions>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct AutoHealTriggers { #[serde(default, skip_serializing_if = "Option::is_none")] pub requests: Option<RequestsBasedTrigger>, #[serde(rename = "privateBytesInKB", default, skip_serializing_if = "Option::is_none")] pub private_bytes_in_kb: Option<i32>, #[serde(rename = "statusCodes", default, skip_serializing_if = "Vec::is_empty")] pub status_codes: Vec<StatusCodesBasedTrigger>, #[serde(rename = "slowRequests", default, skip_serializing_if = "Option::is_none")] pub slow_requests: Option<SlowRequestsBasedTrigger>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RequestsBasedTrigger { #[serde(default, skip_serializing_if = "Option::is_none")] pub count: Option<i32>, #[serde(rename = "timeInterval", default, skip_serializing_if = "Option::is_none")] pub time_interval: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct StatusCodesBasedTrigger { #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<i32>, #[serde(rename = "subStatus", default, skip_serializing_if = "Option::is_none")] pub sub_status: Option<i32>, #[serde(rename = "win32Status", default, skip_serializing_if = "Option::is_none")] pub win32_status: Option<i32>, #[serde(default, skip_serializing_if = "Option::is_none")] pub count: Option<i32>, #[serde(rename = "timeInterval", default, skip_serializing_if = "Option::is_none")] pub time_interval: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SlowRequestsBasedTrigger { #[serde(rename = "timeTaken", default, skip_serializing_if = "Option::is_none")] pub time_taken: Option<String>, #[serde(default, 
skip_serializing_if = "Option::is_none")] pub count: Option<i32>, #[serde(rename = "timeInterval", default, skip_serializing_if = "Option::is_none")] pub time_interval: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct AutoHealActions { #[serde(rename = "actionType", default, skip_serializing_if = "Option::is_none")] pub action_type: Option<auto_heal_actions::ActionType>, #[serde(rename = "customAction", default, skip_serializing_if = "Option::is_none")] pub custom_action: Option<AutoHealCustomAction>, #[serde(rename = "minProcessExecutionTime", default, skip_serializing_if = "Option::is_none")] pub min_process_execution_time: Option<String>, } pub mod auto_heal_actions { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum ActionType { Recycle, LogEvent, CustomAction, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct AutoHealCustomAction { #[serde(default, skip_serializing_if = "Option::is_none")] pub exe: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub parameters: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CorsSettings { #[serde(rename = "allowedOrigins", default, skip_serializing_if = "Vec::is_empty")] pub allowed_origins: Vec<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PushSettings { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<push_settings::Properties>, } pub mod push_settings { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "isPushEnabled")] pub is_push_enabled: bool, #[serde(rename = "tagWhitelistJson", default, skip_serializing_if = "Option::is_none")] pub tag_whitelist_json: Option<String>, #[serde(rename = "tagsRequiringAuth", default, skip_serializing_if = "Option::is_none")] pub 
tags_requiring_auth: Option<String>, #[serde(rename = "dynamicTagsJson", default, skip_serializing_if = "Option::is_none")] pub dynamic_tags_json: Option<String>, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ProxyOnlyResource { #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub kind: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ApiDefinitionInfo { #[serde(default, skip_serializing_if = "Option::is_none")] pub url: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct IpSecurityRestriction { #[serde(rename = "ipAddress")] pub ip_address: String, #[serde(rename = "subnetMask", default, skip_serializing_if = "Option::is_none")] pub subnet_mask: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct HostingEnvironmentProfile { #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CloningInfo { #[serde(rename = "correlationId", default, skip_serializing_if = "Option::is_none")] pub correlation_id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub overwrite: Option<bool>, #[serde(rename = "cloneCustomHostNames", default, skip_serializing_if = "Option::is_none")] pub clone_custom_host_names: Option<bool>, #[serde(rename = "cloneSourceControl", default, skip_serializing_if = "Option::is_none")] pub clone_source_control: Option<bool>, 
#[serde(rename = "sourceWebAppId")] pub source_web_app_id: String, #[serde(rename = "hostingEnvironment", default, skip_serializing_if = "Option::is_none")] pub hosting_environment: Option<String>, #[serde(rename = "appSettingsOverrides", default, skip_serializing_if = "Option::is_none")] pub app_settings_overrides: Option<serde_json::Value>, #[serde(rename = "configureLoadBalancing", default, skip_serializing_if = "Option::is_none")] pub configure_load_balancing: Option<bool>, #[serde(rename = "trafficManagerProfileId", default, skip_serializing_if = "Option::is_none")] pub traffic_manager_profile_id: Option<String>, #[serde(rename = "trafficManagerProfileName", default, skip_serializing_if = "Option::is_none")] pub traffic_manager_profile_name: Option<String>, #[serde(rename = "ignoreQuotas", default, skip_serializing_if = "Option::is_none")] pub ignore_quotas: Option<bool>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SnapshotRecoveryRequest { #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<snapshot_recovery_request::Properties>, } pub mod snapshot_recovery_request { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "snapshotTime", default, skip_serializing_if = "Option::is_none")] pub snapshot_time: Option<String>, #[serde(rename = "recoveryTarget", default, skip_serializing_if = "Option::is_none")] pub recovery_target: Option<SnapshotRecoveryTarget>, pub overwrite: bool, #[serde(rename = "recoverConfiguration", default, skip_serializing_if = "Option::is_none")] pub recover_configuration: Option<bool>, #[serde(rename = "ignoreConflictingHostNames", default, skip_serializing_if = "Option::is_none")] pub ignore_conflicting_host_names: Option<bool>, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SnapshotRecoveryTarget { #[serde(default, 
skip_serializing_if = "Option::is_none")] pub location: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, }

// NOTE(review): the fields above complete a struct whose opening lies earlier in the file.

/// Record of a slot swap: when it happened (UTC) and the source/destination slot names.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SlotSwapStatus {
    #[serde(rename = "timestampUtc", default, skip_serializing_if = "Option::is_none")] pub timestamp_utc: Option<String>,
    #[serde(rename = "sourceSlotName", default, skip_serializing_if = "Option::is_none")] pub source_slot_name: Option<String>,
    #[serde(rename = "destinationSlotName", default, skip_serializing_if = "Option::is_none")] pub destination_slot_name: Option<String>,
}

/// Managed service identity block: identity type plus tenant and principal ids.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagedServiceIdentity {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<managed_service_identity::Type>,
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")] pub tenant_id: Option<String>,
    #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")] pub principal_id: Option<String>,
}

/// Nested types for [`ManagedServiceIdentity`].
pub mod managed_service_identity {
    use super::*;
    /// Identity kind; only `SystemAssigned` is modelled here.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        SystemAssigned,
    }
}

/// Common resource envelope. `location` is the only required (non-optional) field.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub kind: Option<String>,
    pub location: String,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub tags: Option<serde_json::Value>,
}

/// Publishing user resource; payload lives in [`user::Properties`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct User {
    #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<user::Properties>,
}

/// Nested types for [`User`].
pub mod user {
    use super::*;
    /// `publishing_user_name` is required; password, hash and salt are optional.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>,
        #[serde(rename = "publishingUserName")] pub publishing_user_name: String,
        #[serde(rename = "publishingPassword", default, skip_serializing_if = "Option::is_none")] pub publishing_password: Option<String>,
        #[serde(rename = "publishingPasswordHash", default, skip_serializing_if = "Option::is_none")] pub publishing_password_hash: Option<String>,
        #[serde(rename = "publishingPasswordHashSalt", default, skip_serializing_if = "Option::is_none")] pub publishing_password_hash_salt: Option<String>,
    }
}

/// Page of [`Identifier`] values plus optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IdentifierCollection {
    pub value: Vec<Identifier>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>,
}

/// Identifier resource; payload lives in [`identifier::Properties`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Identifier {
    #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<identifier::Properties>,
}

/// Nested types for [`Identifier`].
pub mod identifier {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>,
    }
}

/// Hybrid connection resource; payload lives in [`hybrid_connection::Properties`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HybridConnection {
    #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<hybrid_connection::Properties>,
}

/// Nested types for [`HybridConnection`].
pub mod hybrid_connection {
    use super::*;
    /// Service Bus relay endpoint and send-key credentials, all optional.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "serviceBusNamespace", default, skip_serializing_if = "Option::is_none")] pub service_bus_namespace: Option<String>,
        #[serde(rename = "relayName", default, skip_serializing_if = "Option::is_none")] pub relay_name: Option<String>,
        #[serde(rename = "relayArmUri", default, skip_serializing_if = "Option::is_none")] pub relay_arm_uri: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")] pub hostname: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")] pub port: Option<i32>,
        #[serde(rename = "sendKeyName", default, skip_serializing_if = "Option::is_none")] pub send_key_name: Option<String>,
        #[serde(rename = "sendKeyValue", default, skip_serializing_if = "Option::is_none")] pub send_key_value: Option<String>,
        #[serde(rename = "serviceBusSuffix", default, skip_serializing_if = "Option::is_none")] pub service_bus_suffix: Option<String>,
    }
}

/// Hybrid connection key resource; payload lives in [`hybrid_connection_key::Properties`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HybridConnectionKey {
    #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<hybrid_connection_key::Properties>,
}

/// Nested types for [`HybridConnectionKey`].
pub mod hybrid_connection_key {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "sendKeyName", default, skip_serializing_if = "Option::is_none")] pub send_key_name: Option<String>,
        #[serde(rename = "sendKeyValue", default, skip_serializing_if = "Option::is_none")] pub send_key_value: Option<String>,
    }
}

/// Page of [`ResourceMetricDefinition`] values plus optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceMetricDefinitionCollection {
    pub value: Vec<ResourceMetricDefinition>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>,
}

/// Metric definition resource; payload lives in [`resource_metric_definition::Properties`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceMetricDefinition {
    #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<resource_metric_definition::Properties>,
}

/// Nested types for [`ResourceMetricDefinition`].
pub mod resource_metric_definition {
    use super::*;
    /// Metric metadata: name, unit, aggregation type and availability windows.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<ResourceMetricName>,
        #[serde(default, skip_serializing_if = "Option::is_none")] pub unit: Option<String>,
        #[serde(rename = "primaryAggregationType", default, skip_serializing_if = "Option::is_none")] pub primary_aggregation_type: Option<String>,
        #[serde(rename = "metricAvailabilities", default, skip_serializing_if = "Vec::is_empty")] pub metric_availabilities: Vec<ResourceMetricAvailability>,
        #[serde(rename = "resourceUri", default, skip_serializing_if = "Option::is_none")] pub resource_uri: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<serde_json::Value>,
    }
}

/// Metric name with an optional localized display form.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceMetricName {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub value: Option<String>,
    #[serde(rename = "localizedValue", default, skip_serializing_if = "Option::is_none")] pub localized_value: Option<String>,
}

/// Time grain / retention pair describing when a metric is available.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceMetricAvailability {
    #[serde(rename = "timeGrain", default, skip_serializing_if = "Option::is_none")] pub time_grain: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub retention: Option<String>,
}

/// Page of [`ResourceMetric`] values plus optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceMetricCollection {
    pub value: Vec<ResourceMetric>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>,
}

/// A metric time series: identity, time window, and the sampled values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceMetric {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<ResourceMetricName>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub unit: Option<String>,
    #[serde(rename = "timeGrain", default, skip_serializing_if = "Option::is_none")] pub time_grain: Option<String>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")] pub end_time: Option<String>,
    #[serde(rename = "resourceId", default, skip_serializing_if = "Option::is_none")] pub resource_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>,
    #[serde(rename = "metricValues", default, skip_serializing_if = "Vec::is_empty")] pub metric_values: Vec<ResourceMetricValue>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub properties: Vec<ResourceMetricProperty>,
}

/// One sample point: timestamp plus average/min/max/total/count aggregates.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceMetricValue {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub timestamp: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub average: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub minimum: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub maximum: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub total: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub count: Option<f32>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub properties: Vec<ResourceMetricProperty>,
}

/// Free-form key/value attachment used by the metric types above.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceMetricProperty {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub key: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub value: Option<String>,
}

/// Long-running operation descriptor: status, errors and lifecycle timestamps.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<operation::Status>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub errors: Vec<ErrorEntity>,
    #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")] pub created_time: Option<String>,
    #[serde(rename = "modifiedTime", default, skip_serializing_if = "Option::is_none")] pub modified_time: Option<String>,
    #[serde(rename = "expirationTime", default, skip_serializing_if = "Option::is_none")] pub expiration_time: Option<String>,
    #[serde(rename = "geoMasterOperationId", default, skip_serializing_if = "Option::is_none")] pub geo_master_operation_id: Option<String>,
}

/// Nested types for [`Operation`].
pub mod operation {
    use super::*;
    /// Lifecycle states of an operation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        InProgress,
        Failed,
        Succeeded,
        TimedOut,
        Created,
    }
}

/// Error payload; recursive via `inner_errors`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorEntity {
    #[serde(rename = "extendedCode", default, skip_serializing_if = "Option::is_none")] pub extended_code: Option<String>,
    #[serde(rename = "messageTemplate", default, skip_serializing_if = "Option::is_none")] pub message_template: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub parameters: Vec<String>,
    #[serde(rename = "innerErrors", default, skip_serializing_if = "Vec::is_empty")] pub inner_errors: Vec<ErrorEntity>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub message: Option<String>,
}

/// Page of [`CsmUsageQuota`] values plus optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CsmUsageQuotaCollection {
    pub value: Vec<CsmUsageQuota>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>,
}

/// Usage quota: current value against a limit, with unit and reset time.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CsmUsageQuota {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub unit: Option<String>,
    #[serde(rename = "nextResetTime", default, skip_serializing_if = "Option::is_none")] pub next_reset_time: Option<String>,
    #[serde(rename = "currentValue", default, skip_serializing_if = "Option::is_none")] pub current_value: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub limit: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<LocalizableString>,
}

/// String with an optional localized display form.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LocalizableString {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub value: Option<String>,
    #[serde(rename = "localizedValue", default, skip_serializing_if = "Option::is_none")] pub localized_value: Option<String>,
}

/// Virtual network info resource; payload lives in [`vnet_info::Properties`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VnetInfo {
    #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<vnet_info::Properties>,
}

/// Nested types for [`VnetInfo`].
pub mod vnet_info {
    use super::*;
    /// VNet connection details: target resource, certificate material, routes and DNS.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "vnetResourceId", default, skip_serializing_if = "Option::is_none")] pub vnet_resource_id: Option<String>,
        #[serde(rename = "certThumbprint", default, skip_serializing_if = "Option::is_none")] pub cert_thumbprint: Option<String>,
        #[serde(rename = "certBlob", default, skip_serializing_if = "Option::is_none")] pub cert_blob: Option<String>,
        #[serde(default, skip_serializing_if = "Vec::is_empty")] pub routes: Vec<VnetRoute>,
        #[serde(rename = "resyncRequired", default, skip_serializing_if = "Option::is_none")] pub resync_required: Option<bool>,
        #[serde(rename = "dnsServers", default, skip_serializing_if = "Option::is_none")] pub dns_servers: Option<String>,
    }
}

/// Virtual network route resource; payload lives in [`vnet_route::Properties`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VnetRoute {
    #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<vnet_route::Properties>,
}

/// Nested types for [`VnetRoute`].
pub mod vnet_route {
    use super::*;
    /// Route definition: address range plus route kind.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>,
        #[serde(rename = "startAddress", default, skip_serializing_if = "Option::is_none")] pub start_address: Option<String>,
        #[serde(rename = "endAddress", default, skip_serializing_if = "Option::is_none")] pub end_address: Option<String>,
        #[serde(rename = "routeType", default, skip_serializing_if = "Option::is_none")] pub route_type: Option<properties::RouteType>,
    }
    /// Nested types for [`Properties`].
    pub mod properties {
        use super::*;
        /// Route kind; wire format is upper-case ("DEFAULT"/"INHERITED"/"STATIC").
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum RouteType {
            #[serde(rename = "DEFAULT")]
            Default,
            #[serde(rename = "INHERITED")]
            Inherited,
            #[serde(rename = "STATIC")]
            Static,
        }
    }
}

/// Virtual network gateway resource; payload lives in [`vnet_gateway::Properties`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VnetGateway {
    #[serde(flatten)] pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<vnet_gateway::Properties>,
}

/// Nested types for [`VnetGateway`].
pub mod vnet_gateway {
    use super::*;
    /// `vpn_package_uri` is required; the VNet name is optional.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "vnetName", default, skip_serializing_if = "Option::is_none")] pub vnet_name: Option<String>,
        #[serde(rename = "vpnPackageUri")] pub vpn_package_uri: String,
    }
}
use day_20::{
    check_monsters, create_picture, frames_to_picture, match_neighbours, print_picture, Frame,
};
use std::cell::RefCell;
use std::collections::HashMap;
use std::io::{self};

/// Solves both puzzle parts for each listed input file and asserts the known answers.
fn main() -> io::Result<()> {
    let cases = vec![
        ("test.txt", 20899048083289_usize, 273),
        ("input.txt", 20913499394191, 2209),
    ];

    for (path, expected_part_1, expected_part_2) in cases.iter() {
        println!("{}", path);

        let grid_lines: Vec<Vec<char>> = std::fs::read_to_string(path)?
            .lines()
            .map(|l| l.chars().collect())
            .collect();

        // Accumulate tile rows until a blank separator line, then seal the tile.
        // NOTE(review): assumes every tile (including the last) is followed by a
        // blank line — TODO confirm against the input format.
        let mut pending: Vec<Vec<char>> = Vec::new();
        let mut tiles: HashMap<usize, RefCell<Frame>> = HashMap::new();
        for row in grid_lines.iter() {
            if row.is_empty() {
                let tile = Frame::new(&pending);
                tiles.insert(tile.frame_no, RefCell::new(tile));
                pending.clear();
            } else {
                pending.push(row.to_vec());
            }
        }

        match_neighbours(&tiles);

        // Part 1: corner tiles are the ones with exactly two neighbours;
        // the answer is the product of their ids.
        let part_1: usize = tiles
            .values()
            .map(|cell| cell.borrow())
            .filter(|tile| tile.get_neighbours_len() == 2)
            .map(|tile| tile.frame_no)
            .product();
        assert_eq!(part_1, *expected_part_1);

        // Part 2: assemble the full picture and count sea monsters in it.
        let assembled = create_picture(&tiles);
        print_picture(&assembled);
        let picture = frames_to_picture(assembled);

        let monster: Vec<Vec<char>> = std::fs::read_to_string("monster.txt")?
            .lines()
            .map(|l| l.chars().collect())
            .collect();
        let part_2 = check_monsters(&picture, &monster);
        assert_eq!(part_2, *expected_part_2);
    }
    Ok(())
}
/// Integer square root, exposed as a trait so it can be implemented per numeric type.
pub trait Sqrt<T> {
    fn sqrt(&self) -> Self;
}

/// Primality test, exposed as a trait so it can be implemented per numeric type.
pub trait Prime<T> {
    fn is_prime(&self) -> bool;
}

impl Sqrt<u32> for u32 {
    /// Floor of the square root.
    ///
    /// Goes through `f64` rather than `f32`: the `f64` mantissa (53 bits)
    /// represents every `u32` exactly, while `f32` (24 bits) does not, so the
    /// truncated `f32` root could be off by one for large inputs and make the
    /// trial-division bound in `is_prime` wrong.
    fn sqrt(&self) -> u32 {
        (*self as f64).sqrt() as u32
    }
}

impl Prime<u32> for u32 {
    /// Trial division by every integer from 2 up to the integer square root.
    ///
    /// 0 and 1 are explicitly rejected; previously the divisor range was empty
    /// for them, so `all` vacuously returned `true` and both were reported prime.
    fn is_prime(&self) -> bool {
        *self >= 2 && (2..=self.sqrt()).all(|n| self % n != 0)
    }
}

/// Returns the n-th prime, 0-indexed: `nth(0) == 2`, `nth(1) == 3`, …
pub fn nth(n: u32) -> u32 {
    // The candidate range is unbounded, so `nth` always yields a value;
    // `unwrap_or(0)` is kept only as a defensive default.
    (2..).filter(u32::is_prime).nth(n as usize).unwrap_or(0)
}
// Copyright 2019, 2020 Wingchain
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use log::trace;
use std::borrow::Cow;
use std::collections::VecDeque;
use std::error;
use std::fmt;
use std::task::{Context, Poll};
use std::time::Duration;

use futures::FutureExt;
use futures::StreamExt;
use futures_codec::BytesMut;
use futures_timer::Delay;
use libp2p::core::ConnectedPoint;
use libp2p::swarm::protocols_handler::{InboundUpgradeSend, OutboundUpgradeSend};
use libp2p::swarm::{
    IntoProtocolsHandler, KeepAlive, ProtocolsHandler, ProtocolsHandlerEvent,
    ProtocolsHandlerUpgrErr, SubstreamProtocol,
};
use libp2p::PeerId;

use crate::protocol::upgrade::{InProtocol, InSubstream, OutProtocol, OutSubstream};
use std::fmt::Formatter;

// Maximum time allowed for the in/out substream pair to be established.
const OPEN_TIMEOUT: Duration = Duration::from_secs(20);

/// Prototype from which one [`Handler`] is built per established connection.
pub struct HandlerProto {
    local_peer_id: PeerId,
    protocol_name: Cow<'static, [u8]>,
}

impl HandlerProto {
    /// Creates a handler prototype for the given local peer and protocol name.
    pub fn new(local_peer_id: PeerId, protocol_name: Cow<'static, [u8]>) -> Self {
        Self {
            local_peer_id,
            protocol_name,
        }
    }
}

impl IntoProtocolsHandler for HandlerProto {
    type Handler = Handler;
    // Instantiates the per-connection handler in the `Init` state.
    fn into_handler(
        self,
        remote_peer_id: &PeerId,
        connected_point: &ConnectedPoint,
    ) -> Self::Handler {
        Handler {
            local_peer_id: self.local_peer_id.clone(),
            remote_peer_id: remote_peer_id.clone(),
            connected_point: connected_point.clone(),
            protocol_name: self.protocol_name,
            state: State::Init,
            events_queue: VecDeque::with_capacity(16),
        }
    }
    fn inbound_protocol(&self) -> <Self::Handler as ProtocolsHandler>::InboundProtocol {
        InProtocol::new(self.protocol_name.clone())
    }
}

/// Commands the behaviour can send into the handler.
#[derive(Clone)]
pub enum HandlerIn {
    /// Open the protocol, sending `handshake` to the remote.
    Open { handshake: Vec<u8> },
    /// Close the protocol.
    Close,
    /// Send a message over the opened outbound substream.
    SendMessage { message: Vec<u8> },
}

/// Connection state machine.
///
/// `Locked` is a transient placeholder installed by `std::mem::replace` while a
/// transition computes the next state; observing it outside a transition is a bug.
#[allow(clippy::large_enum_variant)]
pub enum State {
    Init,
    // Waiting for the in/out substream pair; `deadline` bounds the wait.
    Opening {
        in_substream: Option<InSubstream>,
        out_substream: Option<OutSubstream>,
        deadline: Delay,
    },
    // Both substreams negotiated; protocol is live.
    Opened {
        in_substream: InSubstream,
        out_substream: OutSubstream,
    },
    Closed,
    Locked,
}

/// Events the handler reports back to the behaviour.
pub enum HandlerOut {
    ProtocolOpen {
        handshake: Vec<u8>,
    },
    ProtocolClose {
        reason: Cow<'static, str>,
    },
    ProtocolError {
        should_disconnect: bool,
        error: Box<dyn error::Error + Send + Sync>,
    },
    Message {
        message: BytesMut,
    },
}

// Uninhabited: the handler never fails with a fatal error of its own.
#[derive(Debug, derive_more::Error, derive_more::Display)]
pub enum HandlerError {}

/// Per-connection protocol handler driving the substream state machine.
pub struct Handler {
    local_peer_id: PeerId,
    remote_peer_id: PeerId,
    connected_point: ConnectedPoint,
    protocol_name: Cow<'static, [u8]>,
    state: State,
    // Events queued for delivery from `poll`.
    events_queue: VecDeque<ProtocolsHandlerEvent<OutProtocol, (), HandlerOut, HandlerError>>,
}

impl Handler {
    // Requests an outbound substream and moves to `Opening` (no-op once opened/closed).
    fn open(&mut self, handshake: Vec<u8>) {
        self.state = match std::mem::replace(&mut self.state, State::Locked) {
            State::Init => {
                let upgrade = OutProtocol::new(self.protocol_name.clone(), handshake);
                self.events_queue
                    .push_back(ProtocolsHandlerEvent::OutboundSubstreamRequest {
                        protocol: SubstreamProtocol::new(upgrade, ()),
                    });
                State::Opening {
                    in_substream: None,
                    out_substream: None,
                    deadline: Delay::new(OPEN_TIMEOUT),
                }
            }
            // NOTE(review): an `Open` while already opening issues a second
            // outbound substream request and restarts the deadline — presumably
            // intentional retry behaviour; confirm against the upgrade logic.
            State::Opening {
                in_substream,
                out_substream,
                ..
            } => {
                let upgrade = OutProtocol::new(self.protocol_name.clone(), handshake);
                self.events_queue
                    .push_back(ProtocolsHandlerEvent::OutboundSubstreamRequest {
                        protocol: SubstreamProtocol::new(upgrade, ()),
                    });
                State::Opening {
                    in_substream,
                    out_substream,
                    deadline: Delay::new(OPEN_TIMEOUT),
                }
            }
            State::Opened {
                in_substream,
                out_substream,
            } => State::Opened {
                in_substream,
                out_substream,
            },
            State::Closed => State::Closed,
            State::Locked => unreachable!(),
        };
    }

    // Moves to `Closed`; emits `ProtocolClose` only if the protocol was open.
    fn close(&mut self) {
        self.state = match std::mem::replace(&mut self.state, State::Locked) {
            State::Init => State::Closed,
            State::Opening { .. } => State::Closed,
            State::Opened { .. } => {
                self.events_queue.push_back(ProtocolsHandlerEvent::Custom(
                    HandlerOut::ProtocolClose {
                        reason: "Closed by handler".into(),
                    },
                ));
                State::Closed
            }
            State::Closed => State::Closed,
            State::Locked => unreachable!(),
        };
    }

    // Queues a message on the outbound substream; reports a non-fatal error otherwise.
    fn send_message(&mut self, message: Vec<u8>) {
        match &mut self.state {
            State::Opened { out_substream, .. } => {
                out_substream.send_message(message);
            }
            _ => {
                self.events_queue.push_back(ProtocolsHandlerEvent::Custom(
                    HandlerOut::ProtocolError {
                        should_disconnect: false,
                        error: "Send message when not opened".into(),
                    },
                ));
            }
        }
    }
}

impl ProtocolsHandler for Handler {
    type InEvent = HandlerIn;
    type OutEvent = HandlerOut;
    type Error = HandlerError;
    type InboundProtocol = InProtocol;
    type OutboundProtocol = OutProtocol;
    type InboundOpenInfo = ();
    type OutboundOpenInfo = ();

    fn listen_protocol(&self) -> SubstreamProtocol<Self::InboundProtocol, Self::InboundOpenInfo> {
        let upgrade = InProtocol::new(self.protocol_name.clone());
        SubstreamProtocol::new(upgrade, ())
    }

    // An inbound substream finished negotiating. If the outbound half is
    // already present, the protocol becomes `Opened` and `ProtocolOpen` is
    // emitted with the remote's handshake.
    fn inject_fully_negotiated_inbound(
        &mut self,
        mut protocol: <Self::InboundProtocol as InboundUpgradeSend>::Output,
        _info: Self::InboundOpenInfo,
    ) {
        trace!(
            "inject_fully_negotiated_inbound: \
             state: {:?}, \
             local_peer_id: {}, \
             remote_peer_id: {}, \
             connected_point: {:?}",
            self.state,
            self.local_peer_id,
            self.remote_peer_id,
            self.connected_point
        );
        self.state = match std::mem::replace(&mut self.state, State::Locked) {
            // Inbound arrived before we asked to open: remember it and wait.
            State::Init => State::Opening {
                in_substream: Some(protocol),
                out_substream: None,
                deadline: Delay::new(OPEN_TIMEOUT),
            },
            State::Opening {
                out_substream,
                deadline,
                ..
            } => match out_substream {
                Some(out_substream) => {
                    let handshake = protocol.take_received_handshake().expect("qed");
                    self.events_queue.push_back(ProtocolsHandlerEvent::Custom(
                        HandlerOut::ProtocolOpen { handshake },
                    ));
                    State::Opened {
                        in_substream: protocol,
                        out_substream,
                    }
                }
                None => State::Opening {
                    in_substream: Some(protocol),
                    out_substream: None,
                    deadline,
                },
            },
            // Already opened/closed: the new substream is dropped.
            State::Opened {
                in_substream,
                out_substream,
            } => State::Opened {
                in_substream,
                out_substream,
            },
            State::Closed => State::Closed,
            State::Locked => unreachable!(),
        };
    }

    // Mirror of the inbound case: an outbound substream finished negotiating.
    fn inject_fully_negotiated_outbound(
        &mut self,
        protocol: <Self::OutboundProtocol as OutboundUpgradeSend>::Output,
        _info: Self::OutboundOpenInfo,
    ) {
        trace!(
            "inject_fully_negotiated_outbound: \
             state: {:?}, \
             local_peer_id: {}, \
             remote_peer_id: {}, \
             connected_point: {:?}",
            self.state,
            self.local_peer_id,
            self.remote_peer_id,
            self.connected_point
        );
        self.state = match std::mem::replace(&mut self.state, State::Locked) {
            State::Init => State::Opening {
                in_substream: None,
                out_substream: Some(protocol),
                deadline: Delay::new(OPEN_TIMEOUT),
            },
            State::Opening {
                in_substream,
                deadline,
                ..
            } => match in_substream {
                Some(mut in_substream) => {
                    let handshake = in_substream.take_received_handshake().expect("qed");
                    self.events_queue.push_back(ProtocolsHandlerEvent::Custom(
                        HandlerOut::ProtocolOpen { handshake },
                    ));
                    State::Opened {
                        in_substream,
                        out_substream: protocol,
                    }
                }
                None => State::Opening {
                    in_substream: None,
                    out_substream: Some(protocol),
                    deadline,
                },
            },
            State::Opened {
                in_substream,
                out_substream,
            } => State::Opened {
                in_substream,
                out_substream,
            },
            State::Closed => State::Closed,
            State::Locked => unreachable!(),
        };
    }

    // Dispatches behaviour commands to the state-machine helpers above.
    fn inject_event(&mut self, event: HandlerIn) {
        match event {
            HandlerIn::Open { handshake } => self.open(handshake),
            HandlerIn::Close => self.close(),
            HandlerIn::SendMessage { message } => self.send_message(message),
        }
    }

    // Upgrade failure on dial: only an `Upgrade` error warrants disconnecting.
    fn inject_dial_upgrade_error(
        &mut self,
        _info: Self::OutboundOpenInfo,
        error: ProtocolsHandlerUpgrErr<<Self::OutboundProtocol as OutboundUpgradeSend>::Error>,
    ) {
        let should_disconnect = matches!(error, ProtocolsHandlerUpgrErr::Upgrade(_));
        let event = HandlerOut::ProtocolError {
            should_disconnect,
            error: Box::new(error),
        };
        self.events_queue
            .push_back(ProtocolsHandlerEvent::Custom(event));
    }

    // Keep the connection alive while the protocol is in use or being set up.
    fn connection_keep_alive(&self) -> KeepAlive {
        match self.state {
            State::Init | State::Opening { .. } | State::Opened { .. } => KeepAlive::Yes,
            _ => KeepAlive::No,
        }
    }

    // Drains the event queue, enforces the open deadline, and pumps both
    // substreams, translating stream events into `HandlerOut` events.
    fn poll(
        &mut self,
        cx: &mut Context,
    ) -> Poll<
        ProtocolsHandlerEvent<
            Self::OutboundProtocol,
            Self::OutboundOpenInfo,
            Self::OutEvent,
            Self::Error,
        >,
    > {
        // Queued events take priority over stream progress.
        if let Some(event) = self.events_queue.pop_front() {
            return Poll::Ready(event);
        }
        match std::mem::replace(&mut self.state, State::Locked) {
            State::Init => self.state = State::Init,
            State::Opening {
                in_substream,
                out_substream,
                mut deadline,
            } => {
                match deadline.poll_unpin(cx) {
                    Poll::Ready(_) => {
                        // Timer fired: rearm it, keep the state valid, and
                        // surface a disconnect-worthy error.
                        deadline.reset(OPEN_TIMEOUT);
                        self.state = State::Opening {
                            in_substream,
                            out_substream,
                            deadline,
                        };
                        return Poll::Ready(ProtocolsHandlerEvent::Custom(
                            HandlerOut::ProtocolError {
                                should_disconnect: true,
                                error: "Timeout when opening protocol".to_string().into(),
                            },
                        ));
                    }
                    Poll::Pending => (),
                }
                self.state = State::Opening {
                    in_substream,
                    out_substream,
                    deadline,
                };
            }
            State::Opened {
                mut in_substream,
                mut out_substream,
            } => {
                // Drive the outbound side; errors or EOF close the protocol.
                match out_substream.poll_next_unpin(cx) {
                    Poll::Ready(Some(Err(e))) => {
                        self.state = State::Closed;
                        return Poll::Ready(ProtocolsHandlerEvent::Custom(
                            HandlerOut::ProtocolClose {
                                reason: format!("Outbound substream encountered error: {}", e)
                                    .into(),
                            },
                        ));
                    }
                    Poll::Ready(None) => {
                        self.state = State::Closed;
                        return Poll::Ready(ProtocolsHandlerEvent::Custom(
                            HandlerOut::ProtocolClose {
                                reason: "Outbound substream closed by the remote".into(),
                            },
                        ));
                    }
                    Poll::Pending => (),
                    Poll::Ready(Some(Ok(_))) => (),
                }
                // Drive the inbound side; incoming frames become `Message` events.
                match in_substream.poll_next_unpin(cx) {
                    Poll::Ready(Some(Err(e))) => {
                        self.state = State::Closed;
                        return Poll::Ready(ProtocolsHandlerEvent::Custom(
                            HandlerOut::ProtocolClose {
                                reason: format!("Inbound substream encountered error: {}", e)
                                    .into(),
                            },
                        ));
                    }
                    Poll::Ready(None) => {
                        self.state = State::Closed;
                        return Poll::Ready(ProtocolsHandlerEvent::Custom(
                            HandlerOut::ProtocolClose {
                                reason: "Inbound substream closed by the remote".into(),
                            },
                        ));
                    }
                    Poll::Pending => {
                        self.state = State::Opened {
                            in_substream,
                            out_substream,
                        };
                    }
                    Poll::Ready(Some(Ok(message))) => {
                        self.state = State::Opened {
                            in_substream,
                            out_substream,
                        };
                        return Poll::Ready(ProtocolsHandlerEvent::Custom(HandlerOut::Message {
                            message,
                        }));
                    }
                }
            }
            State::Closed => {
                self.state = State::Closed;
            }
            State::Locked => unreachable!(),
        };
        Poll::Pending
    }
}

impl fmt::Debug for State {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match &self {
            State::Init => write!(f, "Init"),
            State::Opening { .. } => write!(f, "Opening"),
            State::Opened { .. } => write!(f, "Opened"),
            State::Closed => write!(f, "Closed"),
            State::Locked => write!(f, "Locked"),
        }
    }
}
#![no_std]
#![cfg_attr(
    all(feature = "async", feature = "nightly"),
    allow(incomplete_features),
    feature(async_fn_in_trait, impl_trait_projections)
)]

//! A generic display interface
//!
//! This crate contains an error type and traits to implement for bus interface drivers to
//! be consumed by display drivers. It abstracts over the different communication methods available
//! to drive a display and allows a driver writer to focus on driving the display itself and only
//! have to implement a single interface.

// `async` without nightly goes through `async_trait`, which boxes futures and
// therefore needs an allocator.
#[cfg(all(feature = "async", not(feature = "nightly")))]
extern crate alloc;

pub mod prelude;

/// A ubiquitous error type for all kinds of problems which could happen when communicating with a
/// display
#[derive(Clone, Debug)]
#[non_exhaustive]
pub enum DisplayError {
    /// Invalid data format selected for the interface
    InvalidFormatError,
    /// Unable to write to bus
    BusWriteError,
    /// Unable to assert or de-assert data/command switching signal
    DCError,
    /// Unable to assert chip select signal
    CSError,
    /// The requested DataFormat is not implemented by this display interface implementation
    DataFormatNotImplemented,
    /// Unable to assert or de-assert reset signal
    RSError,
    /// Attempted to write to a non-existing pixel outside the display's bounds
    OutOfBoundsError,
}

/// DI specific data format wrapper around slices of various widths
/// Display drivers need to implement non-trivial conversions (e.g. with padding)
/// as the hardware requires.
#[non_exhaustive]
pub enum DataFormat<'a> {
    /// Slice of unsigned bytes
    U8(&'a [u8]),
    /// Slice of unsigned 16bit values with the same endianness as the system, not recommended
    U16(&'a [u16]),
    /// Slice of unsigned 16bit values to be sent in big endian byte order
    U16BE(&'a mut [u16]),
    /// Slice of unsigned 16bit values to be sent in little endian byte order
    U16LE(&'a mut [u16]),
    /// Iterator over unsigned bytes
    U8Iter(&'a mut dyn Iterator<Item = u8>),
    /// Iterator over unsigned 16bit values to be sent in big endian byte order
    U16BEIter(&'a mut dyn Iterator<Item = u16>),
    /// Iterator over unsigned 16bit values to be sent in little endian byte order
    U16LEIter(&'a mut dyn Iterator<Item = u16>),
}

/// This trait implements a write-only interface for a display which has separate data and command
/// modes. It is the responsibility of implementations to activate the correct mode in their
/// implementation when corresponding method is called.
pub trait WriteOnlyDataCommand {
    /// Send a batch of commands to display
    fn send_commands(&mut self, cmd: DataFormat<'_>) -> Result<(), DisplayError>;

    /// Send pixel data to display
    fn send_data(&mut self, buf: DataFormat<'_>) -> Result<(), DisplayError>;
}

// `async_trait` desugars to boxed futures, so `Box` must be in scope.
#[cfg(all(feature = "async", not(feature = "nightly")))]
use alloc::boxed::Box;

/// This trait implements a write-only interface for a display which has separate data and command
/// modes. It is the responsibility of implementations to activate the correct mode in their
/// implementation when corresponding method is called.
#[cfg(feature = "async")]
#[cfg_attr(not(feature = "nightly"), async_trait::async_trait(?Send))]
pub trait AsyncWriteOnlyDataCommand {
    /// Send a batch of commands to display
    async fn send_commands(&mut self, cmd: DataFormat<'_>) -> Result<(), DisplayError>;

    /// Send pixel data to display
    async fn send_data(&mut self, buf: DataFormat<'_>) -> Result<(), DisplayError>;
}
use futures::FutureExt;
use log::*;
use smol::Executor;
use std::sync::Arc;

use crate::net::error::{NetError, NetResult};
use crate::net::message_subscriber::MessageSubscription;
use crate::net::messages;
use crate::net::utility::sleep;
use crate::net::{ChannelPtr, SettingsPtr};

/// Implements the protocol version handshake sent out by nodes at the beginning
/// of a connection.
pub struct ProtocolVersion {
    // Channel the handshake runs over.
    channel: ChannelPtr,
    // Subscription delivering the remote node's version message.
    version_sub: MessageSubscription<messages::VersionMessage>,
    // Subscription delivering the remote node's version acknowledgement.
    verack_sub: MessageSubscription<messages::VerackMessage>,
    // Network settings; supplies `channel_handshake_seconds` for the timeout.
    settings: SettingsPtr,
}

impl ProtocolVersion {
    /// Create a new version protocol. Makes a version and version
    /// acknowledgement subscription, then adds them to a version protocol
    /// instance.
    pub async fn new(channel: ChannelPtr, settings: SettingsPtr) -> Arc<Self> {
        // Creates a version subscription.
        let version_sub = channel
            .clone()
            .subscribe_msg::<messages::VersionMessage>()
            .await
            .expect("Missing version dispatcher!");

        // Creates a version acknowledgement subscription.
        let verack_sub = channel
            .clone()
            .subscribe_msg::<messages::VerackMessage>()
            .await
            .expect("Missing verack dispatcher!");

        Arc::new(Self {
            channel,
            version_sub,
            verack_sub,
            settings,
        })
    }

    /// Start version information exchange. Start the timer. Send version info
    /// and wait for version acknowledgement. Wait for version info and send
    /// version acknowledgement.
    pub async fn run(self: Arc<Self>, executor: Arc<Executor<'_>>) -> NetResult<()> {
        debug!(target: "net", "ProtocolVersion::run() [START]");
        // Race the full exchange against the configured handshake timeout;
        // whichever branch completes first decides the result.
        let result = futures::select! {
            _ = self.clone().exchange_versions(executor).fuse() => Ok(()),
            _ = sleep(self.settings.channel_handshake_seconds).fuse() => Err(NetError::ChannelTimeout)
        };
        debug!(target: "net", "ProtocolVersion::run() [END]");
        result
    }

    /// Send and receive version information.
    async fn exchange_versions(self: Arc<Self>, executor: Arc<Executor<'_>>) -> NetResult<()> {
        debug!(target: "net", "ProtocolVersion::exchange_versions() [START]");

        // Both directions are spawned first so they progress concurrently;
        // the awaits below only collect their results.
        let send = executor.spawn(self.clone().send_version());
        let recv = executor.spawn(self.recv_version());

        send.await.and(recv.await)?;

        debug!(target: "net", "ProtocolVersion::exchange_versions() [END]");
        Ok(())
    }

    /// Send version info and wait for version acknowledgement.
    async fn send_version(self: Arc<Self>) -> NetResult<()> {
        debug!(target: "net", "ProtocolVersion::send_version() [START]");
        let version = messages::VersionMessage {};
        self.channel.clone().send(version).await?;

        // Wait for version acknowledgement
        let _verack_msg = self.verack_sub.receive().await?;

        debug!(target: "net", "ProtocolVersion::send_version() [END]");
        Ok(())
    }

    /// Receive version info, check the message is okay and send version
    /// acknowledgement.
    async fn recv_version(self: Arc<Self>) -> NetResult<()> {
        debug!(target: "net", "ProtocolVersion::recv_version() [START]");
        // Receive the remote node's version message.
        let _version_msg = self.version_sub.receive().await?;

        // Check the message is OK
        // NOTE(review): no validation is performed yet — the message is
        // acknowledged unconditionally.

        // Send version acknowledgement
        let verack = messages::VerackMessage {};
        self.channel.clone().send(verack).await?;

        debug!(target: "net", "ProtocolVersion::recv_version() [END]");
        Ok(())
    }
}
use std::sync::RwLock;
use std::collections::{BTreeMap, HashMap};

use rocksdb::{self, DB, WriteBatch};
use kite::doc_id_set::DocIdSet;
use kite::document::DocRef;
use byteorder::{ByteOrder, BigEndian, WriteBytesExt};

use key_builder::KeyBuilder;
use segment_ops::SegmentMergeError;

/// Manages the index's "document index"
///
/// Maps primary keys (raw byte strings) to `DocRef`s, held in memory behind an
/// `RwLock` and mirrored in RocksDB under keys prefixed with `b'k'` so the map
/// can be rebuilt by `open`.
pub struct DocumentIndexManager {
    // In-memory mirror of the on-disk primary-key index; the on-disk copy is
    // only consulted when reopening the index.
    primary_key_index: RwLock<BTreeMap<Vec<u8>, DocRef>>,
}

impl DocumentIndexManager {
    /// Generates a new document index
    ///
    /// Starts with an empty primary-key map; nothing is written to `_db` yet.
    pub fn new(_db: &DB) -> Result<DocumentIndexManager, rocksdb::Error> {
        Ok(DocumentIndexManager {
            primary_key_index: RwLock::new(BTreeMap::new()),
        })
    }

    /// Loads the document index from an index
    ///
    /// Rebuilds the in-memory map by scanning all RocksDB keys with the `b'k'`
    /// prefix. Each value is 6 bytes: big-endian u32 segment + u16 ordinal.
    pub fn open(db: &DB) -> Result<DocumentIndexManager, rocksdb::Error> {
        // Read primary key index
        let mut primary_key_index = BTreeMap::new();
        let mut iter = db.iterator();
        iter.seek(b"k");
        while iter.next() {
            let k = iter.key().unwrap();

            // Prefix scan: stop at the first key outside the b'k' namespace.
            if k[0] != b'k' {
                break;
            }

            let v = iter.value().unwrap();
            let segment = BigEndian::read_u32(&v[0..4]);
            let ord = BigEndian::read_u16(&v[4..6]);
            let doc_ref = DocRef::from_segment_ord(segment, ord);

            // Strip the prefix byte; the rest is the primary key itself.
            primary_key_index.insert(k[1..].to_vec(), doc_ref);
        }

        Ok(DocumentIndexManager {
            primary_key_index: RwLock::new(primary_key_index),
        })
    }

    /// Queues a tombstone for `doc_ref` into `write_batch` without checking
    /// that the document exists ("unchecked"). Relies on RocksDB merge
    /// operators: appends the doc ordinal to the segment's deletion list and
    /// increments the segment's `deleted_docs` stat. The caller commits the
    /// batch.
    fn delete_document_by_ref_unchecked(&self, write_batch: &mut WriteBatch, doc_ref: DocRef) -> Result<(), rocksdb::Error> {
        let kb = KeyBuilder::segment_del_list(doc_ref.segment());
        let mut previous_doc_id_bytes = [0; 2];
        BigEndian::write_u16(&mut previous_doc_id_bytes, doc_ref.ord());
        try!(write_batch.merge(&kb.key(), &previous_doc_id_bytes));

        // Increment deleted docs
        let kb = KeyBuilder::segment_stat(doc_ref.segment(), b"deleted_docs");
        let mut inc_bytes = [0; 8];
        BigEndian::write_i64(&mut inc_bytes, 1);
        try!(write_batch.merge(&kb.key(), &inc_bytes));

        Ok(())
    }

    /// Points `key` at `doc_ref`, tombstoning any document the key previously
    /// referenced. Returns the previous `DocRef`, if any. The in-memory map
    /// and the RocksDB batch are updated together; the batch is committed
    /// atomically at the end.
    pub fn insert_or_replace_key(&self, db: &DB, key: &Vec<u8>, doc_ref: DocRef) -> Result<Option<DocRef>, rocksdb::Error> {
        // Update primary_key_index
        let mut write_batch = WriteBatch::default();
        let previous_doc_ref = self.primary_key_index.write().unwrap().insert(key.clone(), doc_ref);

        let kb = KeyBuilder::primary_key_index(key);
        let mut doc_ref_bytes = [0; 6];
        BigEndian::write_u32(&mut doc_ref_bytes, doc_ref.segment());
        BigEndian::write_u16(&mut doc_ref_bytes[4..], doc_ref.ord());
        try!(write_batch.put(&kb.key(), &doc_ref_bytes));

        // If there was a document there previously, delete it
        if let Some(previous_doc_ref) = previous_doc_ref {
            try!(self.delete_document_by_ref_unchecked(&mut write_batch, previous_doc_ref));
        }

        // Write document data
        try!(db.write(write_batch));

        Ok(previous_doc_ref)
    }

    /// Removes `key` from the index and tombstones the document it referenced.
    /// Returns the removed `DocRef`, or `None` if the key was absent (in which
    /// case nothing is written to disk).
    pub fn delete_document_by_key(&self, db: &DB, key: &Vec<u8>) -> Result<Option<DocRef>, rocksdb::Error> {
        // Remove document from index
        let doc_ref = self.primary_key_index.write().unwrap().remove(key);

        if let Some(doc_ref) = doc_ref {
            let mut write_batch = WriteBatch::default();
            try!(self.delete_document_by_ref_unchecked(&mut write_batch, doc_ref));

            try!(db.write(write_batch));
        }

        Ok(doc_ref)
    }

    /// True if `key` currently maps to a live document (in-memory check only).
    pub fn contains_document_key(&self, key: &Vec<u8>) -> bool {
        self.primary_key_index.read().unwrap().contains_key(key)
    }

    /// Finalises a segment merge: repoints primary keys from `source_segments`
    /// into `dest_segment` using `doc_ref_mapping` (old DocRef -> new ordinal),
    /// translates the source segments' deletion lists into the destination
    /// segment, and commits everything in `write_batch`.
    pub fn commit_segment_merge(&self, db: &DB, mut write_batch: WriteBatch, source_segments: &Vec<u32>, dest_segment: u32, doc_ref_mapping: &HashMap<DocRef, u16>) -> Result<(), SegmentMergeError> {
        // Lock the primary key index
        // Held for the whole method: keeps key updates and deletion-list
        // translation atomic with respect to concurrent inserts/deletes.
        let mut primary_key_index = self.primary_key_index.write().unwrap();

        // Update primary keys to point to their new locations
        // Two passes because we cannot mutate the map while iterating it:
        // first collect affected keys, then rewrite them.
        let mut keys_to_update: HashMap<Vec<u8>, DocRef> = HashMap::with_capacity(doc_ref_mapping.len());
        for (key, doc_ref) in primary_key_index.iter() {
            if doc_ref_mapping.contains_key(&doc_ref) {
                keys_to_update.insert(key.clone(), *doc_ref);
            }
        }

        for (key, doc_ref) in keys_to_update {
            let new_doc_ord = doc_ref_mapping.get(&doc_ref).unwrap();
            let new_doc_ref = DocRef::from_segment_ord(dest_segment, *new_doc_ord);

            let kb = KeyBuilder::primary_key_index(&key);
            let mut doc_ref_bytes = [0; 6];
            BigEndian::write_u32(&mut doc_ref_bytes, new_doc_ref.segment());
            BigEndian::write_u16(&mut doc_ref_bytes[4..], new_doc_ref.ord());
            try!(write_batch.put(&kb.key(), &doc_ref_bytes));

            primary_key_index.insert(key, new_doc_ref);
        }

        // Merge deletion lists
        // Must be done while the primary_key_index is locked as this prevents any more documents being deleted
        let mut deletion_list = Vec::new();
        for source_segment in source_segments {
            let kb = KeyBuilder::segment_del_list(*source_segment);
            match try!(db.get(&kb.key())) {
                Some(docid_set) => {
                    let doc_id_set = DocIdSet::from_bytes(docid_set.to_vec());
                    for doc_id in doc_id_set.iter() {
                        let doc_ref = DocRef::from_segment_ord(*source_segment, doc_id);
                        let new_doc_id = doc_ref_mapping.get(&doc_ref).unwrap();
                        // Vec<u8> implements io::Write, so this appends the
                        // big-endian u16 to the serialized deletion list.
                        deletion_list.write_u16::<BigEndian>(*new_doc_id).unwrap();
                    }
                }
                None => {},
            }
        }

        let kb = KeyBuilder::segment_del_list(dest_segment);
        try!(db.put(&kb.key(), &deletion_list));

        // Commit!
        // WAL is skipped: a crash here loses only the merge, which can be
        // redone from the still-present source segments.
        try!(db.write_without_wal(write_batch));

        Ok(())
    }
}
use aead::{
    generic_array::{
        typenum::{U16, U8},
        ArrayLength, GenericArray,
    },
    Error,
};
use cipher::BlockCipher;

/// A cipher block viewed as a pair of big-endian counters, sized by the
/// cipher's block size (two u64s for 128-bit blocks, two u32s for 64-bit).
pub type Counter<C> = [<<C as BlockCipher>::BlockSize as Sealed>::Counter; 2];

/// Sealed helper trait implemented for the supported block sizes (`U16`,
/// `U8`). Provides conversions between a cipher block and its two-counter
/// representation, plus counter arithmetic used by the AEAD mode.
pub trait Sealed: ArrayLength<u8> {
    /// Integer type of each counter half (u64 for 16-byte blocks, u32 for 8-byte).
    type Counter;

    /// Split a block into its two big-endian counter halves.
    fn block2ctr(block: &GenericArray<u8, Self>) -> [Self::Counter; 2];
    /// Reassemble a block from its two counter halves (big-endian).
    fn ctr2block(ctr: &[Self::Counter; 2]) -> GenericArray<u8, Self>;
    /// Wrapping-increment the left (first) counter half.
    fn incr_l(ctr: &mut [Self::Counter; 2]);
    /// Wrapping-increment the right (second) counter half.
    fn incr_r(ctr: &mut [Self::Counter; 2]);
    /// Encode the associated-data and message lengths (converted to bits) as a
    /// block; errors if either bit length overflows the counter type.
    fn lengths2block(adata_len: usize, data_len: usize) -> Result<GenericArray<u8, Self>, Error>;
}

impl Sealed for U16 {
    type Counter = u64;

    #[inline(always)]
    fn block2ctr(block: &GenericArray<u8, Self>) -> [Self::Counter; 2] {
        // split_at(8) yields two 8-byte halves, so try_into cannot fail.
        let (a, b) = block.split_at(8);
        [
            u64::from_be_bytes(a.try_into().unwrap()),
            u64::from_be_bytes(b.try_into().unwrap()),
        ]
    }

    #[inline(always)]
    fn ctr2block(ctr: &[Self::Counter; 2]) -> GenericArray<u8, Self> {
        let a = ctr[0].to_be_bytes();
        let b = ctr[1].to_be_bytes();
        let mut block = GenericArray::<u8, Self>::default();
        block[..8].copy_from_slice(&a);
        block[8..].copy_from_slice(&b);
        block
    }

    #[inline(always)]
    fn incr_l(ctr: &mut [Self::Counter; 2]) {
        ctr[0] = ctr[0].wrapping_add(1);
    }

    #[inline(always)]
    fn incr_r(ctr: &mut [Self::Counter; 2]) {
        ctr[1] = ctr[1].wrapping_add(1);
    }

    #[inline(always)]
    fn lengths2block(adata_len: usize, data_len: usize) -> Result<GenericArray<u8, Self>, Error> {
        // Lengths are stored in bits (*8); checked_mul guards usize overflow
        // and try_into guards the usize -> u64 narrowing on exotic targets.
        let adata_len = adata_len
            .checked_mul(8)
            .ok_or(Error)?
            .try_into()
            .map_err(|_| Error)?;
        let data_len = data_len
            .checked_mul(8)
            .ok_or(Error)?
            .try_into()
            .map_err(|_| Error)?;
        Ok(Self::ctr2block(&[adata_len, data_len]))
    }
}

impl Sealed for U8 {
    type Counter = u32;

    #[inline(always)]
    fn block2ctr(block: &GenericArray<u8, Self>) -> [Self::Counter; 2] {
        // split_at(4) yields two 4-byte halves, so try_into cannot fail.
        let (a, b) = block.split_at(4);
        [
            u32::from_be_bytes(a.try_into().unwrap()),
            u32::from_be_bytes(b.try_into().unwrap()),
        ]
    }

    #[inline(always)]
    fn ctr2block(ctr: &[Self::Counter; 2]) -> GenericArray<u8, Self> {
        let a = ctr[0].to_be_bytes();
        let b = ctr[1].to_be_bytes();
        let mut block = GenericArray::<u8, Self>::default();
        block[..4].copy_from_slice(&a);
        block[4..].copy_from_slice(&b);
        block
    }

    #[inline(always)]
    fn incr_l(ctr: &mut [Self::Counter; 2]) {
        ctr[0] = ctr[0].wrapping_add(1);
    }

    #[inline(always)]
    fn incr_r(ctr: &mut [Self::Counter; 2]) {
        ctr[1] = ctr[1].wrapping_add(1);
    }

    #[inline(always)]
    fn lengths2block(adata_len: usize, data_len: usize) -> Result<GenericArray<u8, Self>, Error> {
        // Lengths are stored in bits (*8); the usize -> u32 narrowing is the
        // common failure path here on 64-bit targets.
        let adata_len = adata_len
            .checked_mul(8)
            .ok_or(Error)?
            .try_into()
            .map_err(|_| Error)?;
        let data_len = data_len
            .checked_mul(8)
            .ok_or(Error)?
            .try_into()
            .map_err(|_| Error)?;
        Ok(Self::ctr2block(&[adata_len, data_len]))
    }
}
use std::sync::Arc; use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering; use std::thread::sleep; use std::time::Duration; /// Used for benchmarking the controller pub struct ControllerBench { controller_cycle_counter: Arc<AtomicUsize>, average: Arc<AtomicUsize>, total: usize, total_secs: usize, } impl ControllerBench { /// Constructs pub fn new(controller_cycle_counter: Arc<AtomicUsize>, average: Arc<AtomicUsize>) -> Self { Self { controller_cycle_counter, average, total: 0, total_secs: 0, } } /// Launches the benchmarking thread, taking over this thread. pub fn launch(mut self) { loop { sleep(Duration::from_secs(1)); let new_count = self.controller_cycle_counter.load(Ordering::SeqCst); self.controller_cycle_counter.store(0, Ordering::SeqCst); self.total += new_count; self.total_secs += 1; let average = self.total / self.total_secs; self.average.store(average, Ordering::SeqCst); info!("Current cycle rate: {}", new_count); info!("Total cycle count: {}", self.total); info!("Average cycle rate: {}", average); } } }
use super::error::PolarResult;
use super::events::*;
use super::kb::*;
use super::messages::*;
use super::parser;
use super::rewrites::*;
use super::rules::*;
use super::runnable::Runnable;
use super::sources::*;
use super::terms::*;
use super::vm::*;
use super::warnings::check_singletons;

use std::collections::{HashMap, HashSet};
use std::sync::{Arc, RwLock};

/// A single in-flight Polar query: the VM executing the query term plus a
/// stack of nested `Runnable`s spawned by `Run` events.
pub struct Query {
    runnable_stack: Vec<(Box<dyn Runnable>, u64)>, // Tuple of Runnable + call_id.
    vm: PolarVirtualMachine,
    term: Term,
    done: bool,
}

impl Query {
    pub fn new(vm: PolarVirtualMachine, term: Term) -> Self {
        Self {
            runnable_stack: vec![],
            vm,
            term,
            done: false,
        }
    }

    #[cfg(target_arch = "wasm32")]
    pub fn set_logging_options(&mut self, rust_log: Option<String>, polar_log: Option<String>) {
        self.vm.set_logging_options(rust_log, polar_log);
    }

    /// Runnable lifecycle
    ///
    /// 1. Get Runnable A from the top of the Runnable stack, defaulting to the VM.
    /// 2. If Runnable A emits a Run event containing Runnable B, push Runnable B onto the stack.
    /// 3. Immediately request the next event, which will execute Runnable B.
    /// 4. When Runnable B emits a Done event, pop Runnable B off the stack and return its result as
    ///    an answer to Runnable A.
    // Recurses on Run/Done events; depth is bounded by runnable nesting.
    pub fn next_event(&mut self) -> PolarResult<QueryEvent> {
        let mut counter = self.vm.id_counter();
        match self.top_runnable().run(Some(&mut counter))? {
            QueryEvent::Run { runnable, call_id } => {
                self.push_runnable(runnable, call_id);
                self.next_event()
            }
            QueryEvent::Done { result } => {
                if let Some((_, result_call_id)) = self.pop_runnable() {
                    // Feed the child's result back to its parent as a
                    // question answer, then continue with the parent.
                    self.top_runnable()
                        .external_question_result(result_call_id, result)?;
                    self.next_event()
                } else {
                    // VM is done.
                    assert!(self.runnable_stack.is_empty());
                    Ok(QueryEvent::Done { result })
                }
            }
            ev => Ok(ev),
        }
    }

    // Topmost runnable on the stack, or the VM itself when the stack is empty.
    fn top_runnable(&mut self) -> &mut (dyn Runnable) {
        self.runnable_stack
            .last_mut()
            .map(|b| b.0.as_mut())
            .unwrap_or(&mut self.vm)
    }

    fn push_runnable(&mut self, runnable: Box<dyn Runnable>, call_id: u64) {
        self.runnable_stack.push((runnable, call_id));
    }

    fn pop_runnable(&mut self) -> Option<(Box<dyn Runnable>, u64)> {
        self.runnable_stack.pop()
    }

    /// Host answer to an external call: the next value (or `None` = exhausted).
    pub fn call_result(&mut self, call_id: u64, value: Option<Term>) -> PolarResult<()> {
        self.top_runnable().external_call_result(call_id, value)
    }

    /// Host answer to a yes/no external question.
    pub fn question_result(&mut self, call_id: u64, result: bool) -> PolarResult<()> {
        self.top_runnable()
            .external_question_result(call_id, result)
    }

    /// Report an application-side error into the VM (not the top runnable).
    pub fn application_error(&mut self, message: String) -> PolarResult<()> {
        self.vm.external_error(message)
    }

    pub fn debug_command(&mut self, command: &str) -> PolarResult<()> {
        self.top_runnable().debug_command(command)
    }

    pub fn next_message(&self) -> Option<Message> {
        self.vm.messages.next()
    }

    /// Source text of the query term, with location info.
    pub fn source_info(&self) -> String {
        self.vm.term_source(&self.term, true)
    }

    /// Pre-bind a variable in the VM before the query runs.
    pub fn bind(&mut self, name: Symbol, value: Term) -> PolarResult<()> {
        self.vm.bind(&name, value)
    }
}

// Query as an iterator returns `None` after the first time `Done` is seen
impl Iterator for Query {
    type Item = PolarResult<QueryEvent>;
    fn next(&mut self) -> Option<PolarResult<QueryEvent>> {
        if self.done {
            return None;
        }
        let event = self.next_event();
        if let Ok(QueryEvent::Done { .. }) = event {
            self.done = true;
        }
        Some(event)
    }
}

/// Top-level Polar engine: owns the knowledge base and tracks which files and
/// file contents have been loaded so duplicates are rejected.
pub struct Polar {
    pub kb: Arc<RwLock<KnowledgeBase>>,
    messages: MessageQueue,
    /// Set of filenames already loaded
    loaded_files: Arc<RwLock<HashSet<String>>>,
    /// Map from source code loaded to the filename it was loaded as
    loaded_content: Arc<RwLock<HashMap<String, String>>>,
}

impl Default for Polar {
    fn default() -> Self {
        Self::new()
    }
}

impl Polar {
    pub fn new() -> Self {
        Self {
            kb: Arc::new(RwLock::new(KnowledgeBase::new())),
            messages: MessageQueue::new(),
            loaded_content: Arc::new(RwLock::new(HashMap::new())), // file content -> file name
            loaded_files: Arc::new(RwLock::new(HashSet::new())),   // set of file names
        }
    }

    /// Reject duplicate loads: same file twice, same name with different
    /// contents, or same contents under a different name. On success, records
    /// `src`/`filename` in the dedup maps.
    // NOTE(review): the `error::RuntimeError` paths below rely on an `error`
    // module being in scope; only `super::error::PolarResult` is imported in
    // this chunk — confirm a `use super::error;` exists upstream.
    fn check_file(&self, src: &str, filename: &str) -> PolarResult<()> {
        match (
            self.loaded_content.read().unwrap().get(src),
            self.loaded_files.read().unwrap().contains(filename),
        ) {
            (Some(other_file), true) if other_file == filename => {
                return Err(error::RuntimeError::FileLoading {
                    msg: format!("File {} has already been loaded.", filename),
                }
                .into())
            }
            (_, true) => {
                return Err(error::RuntimeError::FileLoading {
                    msg: format!(
                        "A file with the name {}, but different contents has already been loaded.",
                        filename
                    ),
                }
                .into());
            }
            (Some(other_file), _) => {
                return Err(error::RuntimeError::FileLoading {
                    msg: format!(
                        "A file with the same contents as {} named {} has already been loaded.",
                        filename, other_file
                    ),
                }
                .into());
            }
            _ => {}
        }
        self.loaded_content
            .write()
            .unwrap()
            .insert(src.to_string(), filename.to_string());
        self.loaded_files
            .write()
            .unwrap()
            .insert(filename.to_string());
        Ok(())
    }

    /// Parse `src` and load its rules and inline queries into the knowledge
    /// base. Duplicate-file checks only apply when a `filename` is given.
    /// Singleton-variable warnings are queued on the message channel.
    pub fn load(&self, src: &str, filename: Option<String>) -> PolarResult<()> {
        if let Some(ref filename) = filename {
            self.check_file(src, filename)?;
        }

        let source = Source {
            filename,
            src: src.to_owned(),
        };
        let mut kb = self.kb.write().unwrap();
        let src_id = kb.new_id();
        // reverse + pop processes lines in original source order.
        let mut lines =
            parser::parse_lines(src_id, src).map_err(|e| e.set_context(Some(&source), None))?;
        lines.reverse();
        kb.sources.add_source(source, src_id);
        let mut warnings = vec![];
        while let Some(line) = lines.pop() {
            match line {
                parser::Line::Rule(rule) => {
                    let mut rule_warnings = check_singletons(&rule, &kb);
                    warnings.append(&mut rule_warnings);
                    let rule = rewrite_rule(rule, &mut kb);
                    let name = rule.name.clone();
                    let generic_rule = kb
                        .rules
                        .entry(name.clone())
                        .or_insert_with(|| GenericRule::new(name, vec![]));
                    generic_rule.add_rule(Arc::new(rule));
                }
                parser::Line::Query(term) => {
                    kb.inline_queries.push(term);
                }
            }
        }

        self.messages.extend(warnings.iter().map(|m| Message {
            kind: MessageKind::Warning,
            msg: m.to_owned(),
        }));

        Ok(())
    }

    // Used in integration tests
    pub fn load_str(&self, src: &str) -> PolarResult<()> {
        self.load(src, None)
    }

    /// Clear rules from the knowledge base
    ///
    /// Also resets sources, pending inline queries, and the duplicate-load
    /// bookkeeping, so previously loaded files may be loaded again.
    pub fn clear_rules(&self) {
        let mut kb = self.kb.write().unwrap();
        kb.rules.clear();
        kb.sources = Sources::default();
        kb.inline_queries.clear();
        self.loaded_content.write().unwrap().clear();
        self.loaded_files.write().unwrap().clear();
    }

    /// Pop the next inline (`?=`) query gathered during `load`, if any.
    // The write lock is scoped to the pop so query construction below does
    // not hold the KB lock.
    pub fn next_inline_query(&self, trace: bool) -> Option<Query> {
        let term = { self.kb.write().unwrap().inline_queries.pop() };
        term.map(|t| self.new_query_from_term(t, trace))
    }

    /// Parse `src` as a query term and build a `Query` for it.
    pub fn new_query(&self, src: &str, trace: bool) -> PolarResult<Query> {
        let source = Source {
            filename: None,
            src: src.to_owned(),
        };
        let term = {
            let mut kb = self.kb.write().unwrap();
            let src_id = kb.new_id();
            let term =
                parser::parse_query(src_id, src).map_err(|e| e.set_context(Some(&source), None))?;
            kb.sources.add_source(source, src_id);
            term
        };
        Ok(self.new_query_from_term(term, trace))
    }

    /// Build a `Query` directly from an already-parsed term, rewriting it
    /// against the knowledge base first.
    pub fn new_query_from_term(&self, mut term: Term, trace: bool) -> Query {
        {
            let mut kb = self.kb.write().unwrap();
            term = rewrite_term(term, &mut kb);
        }
        let query = Goal::Query { term: term.clone() };
        let vm =
            PolarVirtualMachine::new(self.kb.clone(), trace, vec![query], self.messages.clone());
        Query::new(vm, term)
    }

    // @TODO: Direct load_rules endpoint.

    /// Fresh id for host-registered external instances.
    pub fn get_external_id(&self) -> u64 {
        self.kb.read().unwrap().new_id()
    }

    /// Register a global constant in the knowledge base.
    pub fn register_constant(&self, name: Symbol, value: Term) {
        self.kb.write().unwrap().constant(name, value)
    }

    pub fn next_message(&self) -> Option<Message> {
        self.messages.next()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn can_load_and_query() {
        let polar = Polar::new();
        let _query = polar.new_query("1 = 1", false);
        let _ = polar.load_str("f(_);");
    }
}