text
stringlengths
8
4.13M
//! Asynchronous TCP (and optionally TLS) connection establishment, exposed as a
//! single `ConnectingStream` future that resolves to an `InnerStream`.

use std::{
    future::Future,
    io,
    pin::Pin,
    task::{Context, Poll},
};
use futures_util::future::BoxFuture;
#[cfg(feature = "tls")]
use futures_util::FutureExt;
use futures_util::future::{select_ok, SelectOk, TryFutureExt};
#[cfg(feature = "tls")]
use native_tls::TlsConnector;
// Exactly one of the two runtime features is expected to be enabled; both
// provide a `TcpStream` with the same async-connect surface.
#[cfg(feature = "tokio_io")]
use tokio::net::TcpStream;
#[cfg(feature = "async_std")]
use async_std::net::TcpStream;
use pin_project::pin_project;
use url::Url;
use crate::{errors::ConnectionError, io::Stream as InnerStream, Options};
#[cfg(feature = "tls")]
use tokio_native_tls::TlsStream;

/// Local result alias: all connection failures surface as `ConnectionError`.
type Result<T> = std::result::Result<T, ConnectionError>;
/// A boxed, `'static` future producing a connection result.
type ConnectingFuture<T> = BoxFuture<'static, Result<T>>;

/// Plain-TCP connection state: either racing candidate sockets, or a stored
/// error to be yielded on the next poll.
#[pin_project(project = TcpStateProj)]
enum TcpState {
    // `select_ok` resolves with the first candidate address that connects.
    Wait(#[pin] SelectOk<ConnectingFuture<TcpStream>>),
    // `Option` so the error can be moved out exactly once in `poll`.
    Fail(Option<ConnectionError>),
}

/// TLS connection state, mirroring `TcpState`.
#[cfg(feature = "tls")]
#[pin_project(project = TlsStateProj)]
enum TlsState {
    Wait(#[pin] ConnectingFuture<TlsStream<TcpStream>>),
    Fail(Option<ConnectionError>),
}

/// Top-level state: plain TCP always available, TLS only behind the feature.
#[pin_project(project = StateProj)]
enum State {
    Tcp(#[pin] TcpState),
    #[cfg(feature = "tls")]
    Tls(#[pin] TlsState),
}

impl TcpState {
    /// Drive the TCP race to completion, wrapping the winner as
    /// `InnerStream::Plain`. A stored failure is taken and returned once;
    /// polling a `Fail` state again would panic on the `unwrap` (the future
    /// contract forbids polling after `Ready`).
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<InnerStream>> {
        match self.project() {
            TcpStateProj::Wait(inner) => match inner.poll(cx) {
                // `select_ok` also yields the remaining futures; they are dropped here.
                Poll::Ready(Ok((tcp, _))) => Poll::Ready(Ok(InnerStream::Plain(tcp))),
                Poll::Ready(Err(err)) => Poll::Ready(Err(err)),
                Poll::Pending => Poll::Pending,
            },
            TcpStateProj::Fail(ref mut err) => {
                Poll::Ready(Err(err.take().unwrap()))
            },
        }
    }
}

#[cfg(feature = "tls")]
impl TlsState {
    /// Same shape as `TcpState::poll`, for the TLS handshake future.
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<InnerStream>> {
        match self.project() {
            // The boxed future is `Unpin`, so `poll_unpin` is sound here.
            TlsStateProj::Wait(ref mut inner) => match inner.poll_unpin(cx) {
                Poll::Ready(Ok(tls)) => Poll::Ready(Ok(InnerStream::Secure(tls))),
                Poll::Ready(Err(err)) => Poll::Ready(Err(err)),
                Poll::Pending => Poll::Pending,
            },
            TlsStateProj::Fail(ref mut err) => {
                let e = err.take().unwrap();
                Poll::Ready(Err(e))
            },
        }
    }
}

impl State {
    /// Dispatch to the active sub-state's poll.
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<InnerStream>> {
        match self.project() {
            StateProj::Tcp(inner) => inner.poll(cx),
            #[cfg(feature = "tls")]
            StateProj::Tls(inner) => inner.poll(cx),
        }
    }

    /// Build a state that immediately fails with the given I/O error.
    fn tcp_err(e: io::Error) -> Self {
        let conn_error = ConnectionError::IoError(e);
        State::Tcp(TcpState::Fail(Some(conn_error)))
    }

    /// Build a state that fails because the URL carried no host for SNI.
    #[cfg(feature = "tls")]
    fn tls_host_err() -> Self {
        State::Tls(TlsState::Fail(Some(ConnectionError::TlsHostNotProvided)))
    }

    /// Build a state waiting on the TCP address race.
    fn tcp_wait(socket: SelectOk<ConnectingFuture<TcpStream>>) -> Self {
        State::Tcp(TcpState::Wait(socket))
    }

    /// Build a state waiting on the TLS handshake.
    #[cfg(feature = "tls")]
    fn tls_wait(s: ConnectingFuture<TlsStream<TcpStream>>) -> Self {
        State::Tls(TlsState::Wait(s))
    }
}

/// Future that resolves to an established plain or TLS stream.
#[pin_project]
pub(crate) struct ConnectingStream {
    #[pin]
    state: State,
}

impl ConnectingStream {
    /// Start connecting to `addr`.
    ///
    /// Resolves the URL to socket addresses, races a TCP connect against every
    /// address, and — when the `tls` feature is on and `options.secure` is set —
    /// chains a TLS handshake on top of the winning socket. Resolution failure
    /// or an empty address list becomes an immediately-failing state rather
    /// than an error return, so the caller always gets a pollable future.
    #[allow(unused_variables)]
    pub(crate) fn new(addr: &Url, options: &Options) -> Self {
        match addr.socket_addrs(|| None) {
            Ok(addresses) => {
                let streams: Vec<_> = addresses
                    .iter()
                    .copied()
                    .map(|address| -> ConnectingFuture<TcpStream> {
                        Box::pin(TcpStream::connect(address).map_err(ConnectionError::IoError))
                    })
                    .collect();
                if streams.is_empty() {
                    // `select_ok` on an empty set would be meaningless; report
                    // the resolution failure explicitly.
                    let err = io::Error::new(
                        io::ErrorKind::InvalidInput,
                        "Could not resolve to any address.",
                    );
                    return Self { state: State::tcp_err(err), };
                }
                let socket = select_ok(streams);
                #[cfg(feature = "tls")]
                {
                    if options.secure {
                        return ConnectingStream::new_tls_connection(addr, socket, options);
                    }
                }
                Self { state: State::tcp_wait(socket), }
            }
            Err(err) => Self { state: State::tcp_err(err), },
        }
    }

    /// Wrap the pending TCP race in a TLS handshake against the URL's host.
    ///
    /// The host string is owned up front so the async block can be `'static`.
    /// Certificate verification can be disabled via `options.skip_verify`, and
    /// an optional extra root certificate from `options.certificate` is added
    /// to the trust store.
    #[cfg(feature = "tls")]
    fn new_tls_connection(addr: &Url, socket: SelectOk<ConnectingFuture<TcpStream>>, options: &Options) -> Self {
        match addr.host_str().map(|host| host.to_owned()) {
            // No host in the URL: TLS cannot verify a peer name.
            None => {
                Self { state: State::tls_host_err(), }
            },
            Some(host) => {
                let mut builder = TlsConnector::builder();
                builder.danger_accept_invalid_certs(options.skip_verify);
                if let Some(certificate) = options.certificate.clone() {
                    // Conversion from the crate's certificate type to
                    // `native_tls::Certificate` is provided elsewhere in the crate.
                    let native_cert = native_tls::Certificate::from(certificate);
                    builder.add_root_certificate(native_cert);
                }
                Self {
                    state: State::tls_wait(
                        Box::pin(async move {
                            // First await the TCP race, then handshake on the winner.
                            let (s, _) = socket.await?;
                            let cx = builder.build()?;
                            let cx = tokio_native_tls::TlsConnector::from(cx);
                            Ok(cx.connect(&host, s).await?)
                        })
                    )
                }
            },
        }
    }
}

impl Future for ConnectingStream {
    type Output = Result<InnerStream>;

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        self.project().state.poll(cx)
    }
}
use crate::{ websocket::push_messages::InternalMessage, }; use actix::{ Actor, Message, Handler, Context, AsyncContext, WrapFuture, Recipient, ActorContext, ActorFuture, fut, ResponseActFuture, }; use log::{debug, error}; use redis::{ aio::{PubSub, Connection}, Msg, }; use std::{ collections::{HashMap, HashSet}, convert::Infallible, result::Result, sync::Arc, }; use tokio::{ stream::StreamExt, sync::RwLock, }; #[derive(Message)] #[rtype(result = "Result<(), Infallible>")] pub struct UpdateSubscribe { pub client: Recipient<InternalMessage>, pub subjects: HashSet<String>, } #[derive(Message)] #[rtype(result = "Result<(), Infallible>")] pub struct RedisMessage(Msg); pub struct MainSubscriber { publisher: Arc<RwLock<Connection>>, subscriber: Arc<RwLock<PubSub>>, subject2client: HashMap<String, HashSet<Recipient<InternalMessage>>>, client2subject: HashMap<Recipient<InternalMessage>, HashSet<String>>, } impl MainSubscriber { pub fn new(publisher: Connection, subscriber: PubSub) -> Self { Self { publisher: Arc::new(RwLock::new(publisher)), subscriber: Arc::new(RwLock::new(subscriber)), subject2client: HashMap::new(), client2subject: HashMap::new(), } } } impl Actor for MainSubscriber { type Context = Context<Self>; fn started(&mut self, ctx: &mut Context<Self>) { let redis = self.subscriber.clone(); let addr = ctx.address(); ctx.spawn(async move { let mut redis_mut = redis.write().await; if let Err(e) = redis_mut.subscribe(crate::constants::CHANNEL_NAME).await { error!("subscribe redis error: {}", e); return; } let mut stream = redis_mut.on_message(); while let Some(msg) = stream.next().await { if let Err(e) = addr.send(RedisMessage(msg)).await { error!("send RedisMessage error: {}", e); } } } .into_actor(self) .then(|_, _, ctx| { ctx.stop(); fut::ready(()) })); } } impl Handler<RedisMessage> for MainSubscriber { type Result = Result<(), Infallible>; fn handle(&mut self, msg: RedisMessage, ctx: &mut Context<Self>) -> Self::Result { let origin_msg: InternalMessage = match 
serde_json::from_slice(msg.0.get_payload_bytes()) { Ok(r) => r, Err(e) => { error!("RedisMessage deserialize error: {}", e); return Ok(()); } }; let mut deliver_msgs: HashMap<Recipient<InternalMessage>, InternalMessage> = HashMap::new(); for msg in origin_msg.messages.iter() { let subject = inflector::cases::kebabcase::to_kebab_case(msg.as_ref()); if let Some(clients) = self.subject2client.get(&subject) { for client in clients { let mailbox = &mut deliver_msgs.entry((*client).clone()) .or_insert_with(|| InternalMessage { sender_uid: origin_msg.sender_uid, sender_jti: origin_msg.sender_jti, messages: Vec::new(), created_at: origin_msg.created_at, }).messages; mailbox.push(msg.clone()) } } } for (client, msg) in deliver_msgs.into_iter() { ctx.spawn(client.send(msg.clone()) .into_actor(self) .then(|result, _, _| { if let Err(e) = result { error!("send InternalMessage to client error {}", e); } fut::ready(()) }) ); } Ok(()) } } impl Handler<UpdateSubscribe> for MainSubscriber { type Result = Result<(), Infallible>; fn handle(&mut self, msg: UpdateSubscribe, _ctx: &mut Context<Self>) -> Self::Result { let empty_subjects = HashSet::new(); let old_subjects = self.client2subject.get(&msg.client) .unwrap_or(&empty_subjects); if *old_subjects == msg.subjects { return Ok(()); } // Remove old let to_remove = old_subjects - &msg.subjects; for subject in to_remove.iter() { let clients = self.subject2client.get_mut(subject).unwrap(); clients.remove(&msg.client); if clients.is_empty() { self.subject2client.remove(subject); } } let to_add = &msg.subjects - old_subjects; for subject in to_add.iter() { let clients = self.subject2client.entry(subject.clone()) .or_insert_with(HashSet::new); clients.insert(msg.client.clone()); } if msg.subjects.is_empty() { self.client2subject.remove(&msg.client); } else { self.client2subject.insert(msg.client, msg.subjects); } // Debug debug!("subject2client:"); for (subject, clients) in self.subject2client.iter() { debug!(" {}: {}", subject, 
clients.len()); } debug!("client2subject:"); for (i, (_client, subjects)) in self.client2subject.iter().enumerate() { debug!(" {:?}: {}", i, subjects.iter() .map(|x| &x[..]) .collect::<Vec<_>>() .join(", ")); } Ok(()) } } impl Handler<InternalMessage> for MainSubscriber { type Result = ResponseActFuture<Self, Result<(), Infallible>>; fn handle(&mut self, msg: InternalMessage, _ctx: &mut Self::Context) -> Self::Result { let redis = self.publisher.clone(); Box::new(async move { let msg = match serde_json::to_string(&msg) { Ok(msg) => msg, Err(e) => { error!("InternalMessage serialize error: {}", e); return Ok(()); } }; let mut redis_mut = redis.write().await; if let Err(e) = redis::cmd("PUBLISH").arg(&[crate::constants::CHANNEL_NAME, &msg]) .query_async::<Connection, ()>(&mut *redis_mut).await { error!("send InternalMessage to redis error {}", e); } Ok(()) }.into_actor(self)) } }
use crate::coin::Coin;

/// Number of columns on the board.
pub const X_BOARD_LENGTH: usize = 7;
/// Number of rows per column.
pub const Y_BOARD_LENGTH: usize = 6;

/// Column-major grid: `board[col][row]`, where row 0 is the bottom cell.
pub type Board = [[Coin; Y_BOARD_LENGTH]; X_BOARD_LENGTH];
/// A `(column, row)` position on the board.
pub type Coord = (usize, usize);

/// Create a board with every cell set to `Coin::Empty`.
pub fn new_board() -> Board {
    [[Coin::Empty; Y_BOARD_LENGTH]; X_BOARD_LENGTH]
}

/// Drop `coin` into column `col`, filling the lowest empty cell.
///
/// Returns `(true, (col, row))` on success. Returns `(false, (0, 0))` when the
/// column is full or — fix — when `col` is out of range; the previous version
/// panicked on an out-of-range column even though a failure return existed.
pub fn drop_coin(board: &mut Board, coin: Coin, col: usize) -> (bool, Coord) {
    // Guard the column index instead of indexing (which would panic).
    let column = match board.get_mut(col) {
        Some(column) => column,
        None => return (false, (0, 0)),
    };
    // Scan bottom-up for the first empty cell.
    for (row, cell) in column.iter_mut().enumerate() {
        if *cell == Coin::Empty {
            *cell = coin;
            return (true, (col, row));
        }
    }
    // Column is full: same sentinel result as the original contract.
    (false, (0, 0))
}
#![cfg_attr(not(feature = "std"), no_std)]

//! Custody pallet: holds team allocations under custody, pays out vested
//! coins on a schedule, and lets custodians bond/stake custody funds and
//! manage governance proxies until the custody periods end.

use frame_support::traits::{Currency, Get, EnsureOrigin, fungible::Inspect};
use frame_support::{
    decl_event, decl_error, decl_module, decl_storage,
    dispatch::DispatchResult, ensure,
};
use sp_runtime::traits::{Convert};
use frame_system::{ensure_root, ensure_signed};
use weights::WeightInfo;
use sp_std::prelude::*;

pub mod custody;
pub mod weights;

#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;
#[cfg(feature = "runtime-benchmarks")]
pub mod benchmarking;

/// Balance type of the configured currency, resolved through the Currency trait.
pub type BalanceOf<T> =
    <<T as Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::Balance;

pub trait Config: frame_system::Config + pallet_proxy::Config + pallet_staking::Config {
    /// The Event type.
    type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;

    /// The currency mechanism.
    type Currency: Currency<Self::AccountId> + Inspect<Self::AccountId>;

    //---------------- CUSTODY ----------------//

    /// The payout frequency of vested coins under custody.
    type PayoutFrequency: Get<Self::BlockNumber>;

    /// The custody duration.
    type CustodyDuration: Get<Self::BlockNumber>;

    /// The governance custody duration.
    type GovernanceCustodyDuration: Get<Self::BlockNumber>;

    /// The getter for the proxy type to use for custody accounts
    type CustodyProxy: Get<<Self as pallet_proxy::Config>::ProxyType>;

    /// Convert the block number into a balance.
    type BlockNumberToBalance: Convert<Self::BlockNumber, BalanceOf<Self>>;

    //---------------- ADMIN ----------------//

    /// The admin origin for the pallet (Tech Committee unanimity).
    type AdminOrigin: EnsureOrigin<Self::Origin>;

    /// Weight information for extrinsics in this pallet.
    type WeightInfo: WeightInfo;
}

decl_storage! {
    trait Store for Module<T: Config> as XXCustody {
        /// Keep track of team members' accounts custody info
        pub TeamAccounts get(fn team_accounts): map hasher(twox_64_concat) T::AccountId => custody::CustodyInfo<T::AccountId, BalanceOf<T>>;

        /// Keep track of custody accounts
        pub CustodyAccounts get(fn custody_accounts): map hasher(twox_64_concat) T::AccountId => ();

        /// Keep track of custodians
        pub Custodians get(fn custodians) config(): map hasher(twox_64_concat) T::AccountId => ();

        /// Total amount under custody
        pub TotalCustody get(fn total_custody): BalanceOf<T>;
    }
    add_extra_genesis {
        // Per-team-member initial allocations placed under custody at genesis.
        config(team_allocations): Vec<(T::AccountId, BalanceOf<T>)>;
        build(|config| {
            for &(ref who, balance) in &config.team_allocations {
                // Initialized custody for this member
                <Module<T>>::initialize_custody(who, balance);
            }
        });
    }
}

decl_event! {
    pub enum Event<T> where
        Balance = BalanceOf<T>,
        <T as frame_system::Config>::AccountId,
    {
        //---------------- CUSTODY ----------------//

        /// Team payout was given from custody
        PayoutFromCustody(AccountId, Balance),
        /// Team payout was given from reserve
        PayoutFromReserve(AccountId, Balance),
        /// Custody finished for the given team account
        CustodyDone(AccountId),

        //---------------- ADMIN ----------------//

        /// Custodian added
        CustodianAdded(AccountId),
        /// Custodian removed
        CustodianRemoved(AccountId),
        /// Team member updated
        TeamMemberUpdated(AccountId, AccountId),
    }
}

decl_error! {
    pub enum Error for Module<T: Config> {
        /// Invalid team member account
        InvalidTeamMember,
        /// Invalid custody account
        InvalidCustodyAccount,
        /// Must be custodian to call this function
        MustBeCustodian,
        /// Payout not available yet
        PayoutNotAvailable,
        /// Payout failed due to insufficient custody + reserve funds
        PayoutFailedInsufficientFunds,
        /// Custody period ended, custodian can't call this function anymore
        CustodyPeriodEnded,
        /// Governance custody ongoing, team member can't call this function yet
        GovernanceCustodyActive,
        /// Governance custody period ended, custodian can't call this function anymore
        GovernanceCustodyPeriodEnded,
        /// This team member account already exists
        TeamMemberExists,
    }
}

decl_module! {
    pub struct Module<T: Config> for enum Call where origin: T::Origin {
        type Error = Error<T>;

        fn deposit_event() = default;

        //---------------- CUSTODY ----------------//

        const PayoutFrequency: T::BlockNumber = T::PayoutFrequency::get();
        const CustodyDuration: T::BlockNumber = T::CustodyDuration::get();
        const GovernanceCustodyDuration: T::BlockNumber = T::GovernanceCustodyDuration::get();

        /// Payout the amount already vested to the given team member account
        ///
        /// Anyone can call this function since it is deterministic
        ///
        #[weight = <T as Config>::WeightInfo::payout()]
        pub fn payout(origin, who: T::AccountId) {
            ensure_signed(origin)?;
            ensure!(Self::is_team_member(&who), Error::<T>::InvalidTeamMember);
            Self::try_payout(who)?;
        }

        /// Bond the given amount from the given custody account, with the specified controller
        ///
        /// During the Custody period, the function is callable by Custodians only. After the
        /// Custody ends, the function is not callable anymore.
        ///
        #[weight = <T as Config>::WeightInfo::custody_bond()]
        pub fn custody_bond(origin,
            custody: T::AccountId,
            controller: T::AccountId,
            #[compact] value: pallet_staking::BalanceOf<T>,
        ) {
            let who = ensure_signed(origin)?;
            ensure!(Self::is_custodian(&who), Error::<T>::MustBeCustodian);
            ensure!(Self::is_custody(&custody), Error::<T>::InvalidCustodyAccount);
            Self::try_custody_bond(custody, controller, value)?;
        }

        /// Bond extra amount from the given custody account
        ///
        /// During the Custody period, the function is callable by Custodians only. After the
        /// Custody ends, the function is not callable anymore.
        ///
        #[weight = <T as Config>::WeightInfo::custody_bond_extra()]
        pub fn custody_bond_extra(origin,
            custody: T::AccountId,
            #[compact] value: pallet_staking::BalanceOf<T>,
        ) {
            let who = ensure_signed(origin)?;
            ensure!(Self::is_custodian(&who), Error::<T>::MustBeCustodian);
            ensure!(Self::is_custody(&custody), Error::<T>::InvalidCustodyAccount);
            Self::try_custody_bond_extra(custody, value)?;
        }

        /// Set the controller of a given custody account
        ///
        /// During the Custody period, the function is callable by Custodians only. After the
        /// Custody ends, the function is not callable anymore.
        ///
        #[weight = <T as Config>::WeightInfo::custody_set_controller()]
        pub fn custody_set_controller(origin,
            custody: T::AccountId,
            controller: T::AccountId,
        ) {
            let who = ensure_signed(origin)?;
            ensure!(Self::is_custodian(&who), Error::<T>::MustBeCustodian);
            ensure!(Self::is_custody(&custody), Error::<T>::InvalidCustodyAccount);
            Self::try_custody_set_controller(custody, controller)?;
        }

        /// Set the governance proxy of a given custody account
        ///
        /// Only one proxy account is allowed per custody account, so this function
        /// removes any proxies first, and then adds the new proxy
        ///
        /// During the Governance Custody period, the function is callable by Custodians only.
        /// After the Governance Custody ends, the function is not callable anymore.
        ///
        #[weight = <T as Config>::WeightInfo::custody_set_proxy()]
        pub fn custody_set_proxy(origin,
            custody: T::AccountId,
            proxy: T::AccountId,
        ) {
            let who = ensure_signed(origin)?;
            ensure!(Self::is_custodian(&who), Error::<T>::MustBeCustodian);
            ensure!(Self::is_custody(&custody), Error::<T>::InvalidCustodyAccount);
            Self::try_custody_set_proxy(custody, proxy)?;
        }

        /// Allow the team member to set a governance proxy of their own custody account
        ///
        /// During the Governance Custody period, the function is not callable.
        /// After the Governance Custody ends, the function is callable by team members only.
        ///
        #[weight = <T as Config>::WeightInfo::team_custody_set_proxy()]
        pub fn team_custody_set_proxy(origin, proxy: T::AccountId) {
            let who = ensure_signed(origin)?;
            ensure!(Self::is_team_member(&who), Error::<T>::InvalidTeamMember);
            Self::try_team_custody_set_proxy(who, proxy)?;
        }

        //---------------- ADMIN ----------------//

        /// Add a custodian account
        ///
        /// The dispatch origin must be AdminOrigin.
        ///
        #[weight = <T as Config>::WeightInfo::add_custodian()]
        pub fn add_custodian(origin, custodian: T::AccountId) {
            Self::ensure_admin(origin)?;
            <Custodians<T>>::insert(&custodian, ());
            Self::deposit_event(RawEvent::CustodianAdded(custodian));
        }

        /// Remove a custodian account
        ///
        /// The dispatch origin must be AdminOrigin.
        ///
        #[weight = <T as Config>::WeightInfo::remove_custodian()]
        pub fn remove_custodian(origin, custodian: T::AccountId) {
            Self::ensure_admin(origin)?;
            <Custodians<T>>::remove(&custodian);
            Self::deposit_event(RawEvent::CustodianRemoved(custodian));
        }

        /// Replace an existing team member account with a new account
        ///
        /// The dispatch origin must be AdminOrigin.
        ///
        #[weight = <T as Config>::WeightInfo::replace_team_member()]
        pub fn replace_team_member(origin, who: T::AccountId, new: T::AccountId) {
            Self::ensure_admin(origin)?;
            ensure!(Self::is_team_member(&who), Error::<T>::InvalidTeamMember);
            ensure!(!Self::is_team_member(&new), Error::<T>::TeamMemberExists);
            Self::update_team_member(who.clone(), new.clone());
            Self::deposit_event(RawEvent::TeamMemberUpdated(who, new));
        }
    }
}

impl<T: Config> Module<T> {
    /// Check if given account is a team member
    fn is_team_member(who: &T::AccountId) -> bool {
        <TeamAccounts<T>>::contains_key(who)
    }

    /// Check if given account is a custody account
    fn is_custody(who: &T::AccountId) -> bool {
        <CustodyAccounts<T>>::contains_key(who)
    }

    /// Check if given account is a custodian
    fn is_custodian(who: &T::AccountId) -> bool {
        <Custodians<T>>::contains_key(who)
    }

    /// Check if origin is admin
    ///
    /// Accepts either the configured AdminOrigin or root.
    fn ensure_admin(o: T::Origin) -> DispatchResult {
        <T as Config>::AdminOrigin::try_origin(o)
            .map(|_| ())
            .or_else(ensure_root)?;
        Ok(())
    }
}

/// Implement CustodianHandler trait
impl<T: Config> pallet_staking::CustodianHandler<T::AccountId, BalanceOf<T>> for Module<T> {
    fn is_custody_account(who: &T::AccountId) -> bool {
        Self::is_custody(who)
    }

    fn total_custody() -> BalanceOf<T> {
        // Resolves to the inherent `total_custody` storage getter generated by
        // `decl_storage!` (inherent associated functions take precedence over
        // trait methods), so this is not a recursive call.
        Self::total_custody()
    }
}
use eos::types::*; use serde::Serialize; use serde_json; use stdweb::Value; use yew::prelude::*; #[derive(Serialize, Deserialize, Debug, Default)] pub struct ScatterRequiredFields { #[serde(skip_serializing_if = "Option::is_none")] pub accounts: Option<Vec<ScatterNetwork>>, } impl PartialEq for ScatterRequiredFields { fn eq(&self, other: &ScatterRequiredFields) -> bool { self.accounts == other.accounts } } #[derive(Serialize, Deserialize, Debug, Default)] #[serde(rename_all = "camelCase")] pub struct ScatterNetwork { #[serde(skip_serializing_if = "Option::is_none")] pub chain_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] pub protocol: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] pub blockchain: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] pub host: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] pub port: Option<u16>, } impl PartialEq for ScatterNetwork { fn eq(&self, other: &ScatterNetwork) -> bool { self.chain_id == other.chain_id && self.protocol == other.protocol && self.blockchain == other.blockchain && self.host == other.host && self.port == other.port } } #[derive(Serialize, Deserialize, Debug, Default, Clone)] pub struct ScatterAccount { pub name: AccountName, pub authority: String, pub blockchain: String, } impl PartialEq for ScatterAccount { fn eq(&self, other: &ScatterAccount) -> bool { self.name == other.name && self.authority == other.authority && self.blockchain == other.blockchain } } #[derive(Serialize, Deserialize, Debug, Default, Clone)] #[serde(default)] #[serde(rename_all = "camelCase")] pub struct ScatterIdentity { pub hash: String, pub kyc: bool, pub name: String, pub public_key: PublicKey, pub accounts: Vec<ScatterAccount>, } impl ScatterIdentity { pub fn account_name(&self) -> Option<AccountName> { match self.accounts.first() { Some(account) => Some(account.name.clone()), None => None, } } } impl PartialEq for ScatterIdentity { fn eq(&self, other: 
&ScatterIdentity) -> bool { self.hash == other.hash && self.kyc == other.kyc && self.name == other.name && self.public_key == other.public_key && self.accounts == other.accounts } } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub enum ScatterError { NotConnected, Locked, Rejected, Unknown(String), } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ScatterTransaction { pub actions: Vec<serde_json::Value>, } impl<Data> From<Action<Data>> for ScatterTransaction where Data: Serialize, { fn from(action: Action<Data>) -> ScatterTransaction { let serialized_action = serde_json::to_value(&action).unwrap(); ScatterTransaction { actions: vec![serialized_action], } } } impl<Data> From<Vec<Action<Data>>> for ScatterTransaction where Data: Serialize, { fn from(actions: Vec<Action<Data>>) -> ScatterTransaction { let mut serialized_actions = Vec::new(); for action in &actions { serialized_actions.push(serde_json::to_value(&action).unwrap()); } ScatterTransaction { actions: serialized_actions, } } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PushedTransaction { pub transaction_id: String, }
// NOTE(review): svd2rust-generated register accessors for SPI_I2SCFGR (I2S
// configuration register). Generated code — regenerate from the SVD instead of
// hand-editing; left byte-identical below.
#[doc = "Register `SPI_I2SCFGR` reader"] pub type R = crate::R<SPI_I2SCFGR_SPEC>; #[doc = "Register `SPI_I2SCFGR` writer"] pub type W = crate::W<SPI_I2SCFGR_SPEC>; #[doc = "Field `I2SMOD` reader - I2SMOD"] pub type I2SMOD_R = crate::BitReader; #[doc = "Field `I2SMOD` writer - I2SMOD"] pub type I2SMOD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `I2SCFG` reader - I2SCFG"] pub type I2SCFG_R = crate::FieldReader; #[doc = "Field `I2SCFG` writer - I2SCFG"] pub type I2SCFG_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>; #[doc = "Field `I2SSTD` reader - I2SSTD"] pub type I2SSTD_R = crate::FieldReader; #[doc = "Field `I2SSTD` writer - I2SSTD"] pub type I2SSTD_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>; #[doc = "Field `PCMSYNC` reader - PCMSYNC"] pub type PCMSYNC_R = crate::BitReader; #[doc = "Field `PCMSYNC` writer - PCMSYNC"] pub type PCMSYNC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `DATLEN` reader - DATLEN"] pub type DATLEN_R = crate::FieldReader; #[doc = "Field `DATLEN` writer - DATLEN"] pub type DATLEN_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>; #[doc = "Field `CHLEN` reader - CHLEN"] pub type CHLEN_R = crate::BitReader; #[doc = "Field `CHLEN` writer - CHLEN"] pub type CHLEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `CKPOL` reader - CKPOL"] pub type CKPOL_R = crate::BitReader; #[doc = "Field `CKPOL` writer - CKPOL"] pub type CKPOL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `FIXCH` reader - FIXCH"] pub type FIXCH_R = crate::BitReader; #[doc = "Field `FIXCH` writer - FIXCH"] pub type FIXCH_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `WSINV` reader - WSINV"] pub type WSINV_R = crate::BitReader; #[doc = "Field `WSINV` writer - WSINV"] pub type WSINV_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `DATFMT` reader - DATFMT"] pub type DATFMT_R = crate::BitReader; #[doc
= "Field `DATFMT` writer - DATFMT"] pub type DATFMT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `I2SDIV` reader - I2SDIV"] pub type I2SDIV_R = crate::FieldReader; #[doc = "Field `I2SDIV` writer - I2SDIV"] pub type I2SDIV_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>; #[doc = "Field `ODD` reader - ODD"] pub type ODD_R = crate::BitReader; #[doc = "Field `ODD` writer - ODD"] pub type ODD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `MCKOE` reader - MCKOE"] pub type MCKOE_R = crate::BitReader; #[doc = "Field `MCKOE` writer - MCKOE"] pub type MCKOE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; impl R { #[doc = "Bit 0 - I2SMOD"] #[inline(always)] pub fn i2smod(&self) -> I2SMOD_R { I2SMOD_R::new((self.bits & 1) != 0) } #[doc = "Bits 1:3 - I2SCFG"] #[inline(always)] pub fn i2scfg(&self) -> I2SCFG_R { I2SCFG_R::new(((self.bits >> 1) & 7) as u8) } #[doc = "Bits 4:5 - I2SSTD"] #[inline(always)] pub fn i2sstd(&self) -> I2SSTD_R { I2SSTD_R::new(((self.bits >> 4) & 3) as u8) } #[doc = "Bit 7 - PCMSYNC"] #[inline(always)] pub fn pcmsync(&self) -> PCMSYNC_R { PCMSYNC_R::new(((self.bits >> 7) & 1) != 0) } #[doc = "Bits 8:9 - DATLEN"] #[inline(always)] pub fn datlen(&self) -> DATLEN_R { DATLEN_R::new(((self.bits >> 8) & 3) as u8) } #[doc = "Bit 10 - CHLEN"] #[inline(always)] pub fn chlen(&self) -> CHLEN_R { CHLEN_R::new(((self.bits >> 10) & 1) != 0) } #[doc = "Bit 11 - CKPOL"] #[inline(always)] pub fn ckpol(&self) -> CKPOL_R { CKPOL_R::new(((self.bits >> 11) & 1) != 0) } #[doc = "Bit 12 - FIXCH"] #[inline(always)] pub fn fixch(&self) -> FIXCH_R { FIXCH_R::new(((self.bits >> 12) & 1) != 0) } #[doc = "Bit 13 - WSINV"] #[inline(always)] pub fn wsinv(&self) -> WSINV_R { WSINV_R::new(((self.bits >> 13) & 1) != 0) } #[doc = "Bit 14 - DATFMT"] #[inline(always)] pub fn datfmt(&self) -> DATFMT_R { DATFMT_R::new(((self.bits >> 14) & 1) != 0) } #[doc = "Bits 16:23 - I2SDIV"] #[inline(always)] pub fn i2sdiv(&self) ->
I2SDIV_R { I2SDIV_R::new(((self.bits >> 16) & 0xff) as u8) } #[doc = "Bit 24 - ODD"] #[inline(always)] pub fn odd(&self) -> ODD_R { ODD_R::new(((self.bits >> 24) & 1) != 0) } #[doc = "Bit 25 - MCKOE"] #[inline(always)] pub fn mckoe(&self) -> MCKOE_R { MCKOE_R::new(((self.bits >> 25) & 1) != 0) } } impl W { #[doc = "Bit 0 - I2SMOD"] #[inline(always)] #[must_use] pub fn i2smod(&mut self) -> I2SMOD_W<SPI_I2SCFGR_SPEC, 0> { I2SMOD_W::new(self) } #[doc = "Bits 1:3 - I2SCFG"] #[inline(always)] #[must_use] pub fn i2scfg(&mut self) -> I2SCFG_W<SPI_I2SCFGR_SPEC, 1> { I2SCFG_W::new(self) } #[doc = "Bits 4:5 - I2SSTD"] #[inline(always)] #[must_use] pub fn i2sstd(&mut self) -> I2SSTD_W<SPI_I2SCFGR_SPEC, 4> { I2SSTD_W::new(self) } #[doc = "Bit 7 - PCMSYNC"] #[inline(always)] #[must_use] pub fn pcmsync(&mut self) -> PCMSYNC_W<SPI_I2SCFGR_SPEC, 7> { PCMSYNC_W::new(self) } #[doc = "Bits 8:9 - DATLEN"] #[inline(always)] #[must_use] pub fn datlen(&mut self) -> DATLEN_W<SPI_I2SCFGR_SPEC, 8> { DATLEN_W::new(self) } #[doc = "Bit 10 - CHLEN"] #[inline(always)] #[must_use] pub fn chlen(&mut self) -> CHLEN_W<SPI_I2SCFGR_SPEC, 10> { CHLEN_W::new(self) } #[doc = "Bit 11 - CKPOL"] #[inline(always)] #[must_use] pub fn ckpol(&mut self) -> CKPOL_W<SPI_I2SCFGR_SPEC, 11> { CKPOL_W::new(self) } #[doc = "Bit 12 - FIXCH"] #[inline(always)] #[must_use] pub fn fixch(&mut self) -> FIXCH_W<SPI_I2SCFGR_SPEC, 12> { FIXCH_W::new(self) } #[doc = "Bit 13 - WSINV"] #[inline(always)] #[must_use] pub fn wsinv(&mut self) -> WSINV_W<SPI_I2SCFGR_SPEC, 13> { WSINV_W::new(self) } #[doc = "Bit 14 - DATFMT"] #[inline(always)] #[must_use] pub fn datfmt(&mut self) -> DATFMT_W<SPI_I2SCFGR_SPEC, 14> { DATFMT_W::new(self) } #[doc = "Bits 16:23 - I2SDIV"] #[inline(always)] #[must_use] pub fn i2sdiv(&mut self) -> I2SDIV_W<SPI_I2SCFGR_SPEC, 16> { I2SDIV_W::new(self) } #[doc = "Bit 24 - ODD"] #[inline(always)] #[must_use] pub fn odd(&mut self) -> ODD_W<SPI_I2SCFGR_SPEC, 24> { ODD_W::new(self) } #[doc = "Bit 25 - MCKOE"]
#[inline(always)] #[must_use] pub fn mckoe(&mut self) -> MCKOE_W<SPI_I2SCFGR_SPEC, 25> { MCKOE_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } } #[doc = "All documented bits in this register must be configured when the I2S is disabled (SPE = 0).These bits are not used in SPI mode except for I2SMOD which needs to be set to 0 in SPI mode.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`spi_i2scfgr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`spi_i2scfgr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct SPI_I2SCFGR_SPEC; impl crate::RegisterSpec for SPI_I2SCFGR_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`spi_i2scfgr::R`](R) reader structure"] impl crate::Readable for SPI_I2SCFGR_SPEC {} #[doc = "`write(|w| ..)` method takes [`spi_i2scfgr::W`](W) writer structure"] impl crate::Writable for SPI_I2SCFGR_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets SPI_I2SCFGR to value 0"] impl crate::Resettable for SPI_I2SCFGR_SPEC { const RESET_VALUE: Self::Ux = 0; }
use itertools::Itertools;

/// Number of letters in the lowercase ASCII alphabet.
/// FIX: the histogram array was sized 28, leaving two slots permanently unused.
const ALPHABET_SIZE: usize = 26;

/// Part 1: a passphrase is valid when no word repeats.
fn is_valid(passphrase: &&str) -> bool {
    let words = passphrase.split_whitespace();
    let unique_words = words.clone().unique();
    words.count() == unique_words.count()
}

/// Letter-frequency histogram of `word`, indexed by `c - 'a'`.
///
/// NOTE(review): any character outside `a..=z` panics on the index —
/// acceptable for this puzzle's all-lowercase input, but worth confirming.
fn count_letters(word: &str) -> [usize; ALPHABET_SIZE] {
    let mut counts = [0; ALPHABET_SIZE];
    for c in word.chars() {
        counts[c as usize - 'a' as usize] += 1;
    }
    counts
}

/// Part 2: a passphrase is valid when no two words are anagrams of each other,
/// i.e. all letter histograms are pairwise distinct.
fn is_valid_p2(passphrase: &&str) -> bool {
    let counts = passphrase.split_whitespace().map(count_letters);
    let unique = counts.clone().unique();
    counts.count() == unique.count()
}

/// Solve both parts against the bundled puzzle input.
#[allow(unused)]
pub fn run() {
    let input = include_str!("../input/4");
    println!("{}", input.lines().filter(is_valid).count());
    println!("{}", input.lines().filter(is_valid_p2).count());
}
/// A 2-D point with signed integer coordinates.
struct Point {
    x: i32,
    y: i32,
}

/// Advance both coordinates of the given point by one.
fn test_function(x: &mut Point) {
    // Destructure into mutable references instead of two field assignments.
    let Point { x: px, y: py } = x;
    *px += 1;
    *py += 1;
}

fn main() {
    // Underscored literal and plain literal denote the same value.
    let x: u32 = 1_000_000;
    assert_eq!(1000000, x);
}
// Copyright 2018 Vlad Yermakov
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use super::{Complex, Integer, Natural, Rational, Real};
use std::cmp::{Ordering, PartialEq, PartialOrd};
use std::fmt::{self, Display, Formatter};
use std::ops::{Add, Div, Mul, Neg, Sub, Rem};
use std::str::FromStr;

/// A dynamically-typed number: the variant records which numeric tower level
/// the value lives on. Arithmetic between variants promotes toward the wider
/// type (Natural -> Integer -> Rational -> Real -> Complex), see the macro
/// below.
#[derive(Debug, Copy, Clone)]
pub enum Number {
    Natural(Natural),
    Integer(Integer),
    Rational(Rational),
    Real(Real),
    Complex(Complex),
}

impl Number {
    /// Parses a decimal string into a `Natural`. Errors come from `i128` parsing.
    pub fn natural(s: String) -> Result<Number, <i128 as FromStr>::Err> {
        let nat = <i128>::from_str(s.as_str())?;
        Ok(Number::Natural(natural!(nat)))
    }
    /// Parses numerator and denominator strings into a `Rational`.
    // NOTE(review): `rational!(num / den)` presumably pattern-matches the
    // `/` token inside the macro rather than performing i128 division —
    // confirm against the `rational!` definition.
    pub fn rational(n: String, d: String) -> Result<Number, <i128 as FromStr>::Err> {
        let num = <i128>::from_str(n.as_str())?;
        let den = <i128>::from_str(d.as_str())?;
        Ok(Number::Rational(rational!(num / den)))
    }
    /// Parses a float string into a pure-imaginary `Complex`.
    // NOTE(review): the `.i` suffix is presumably consumed by the `complex!`
    // macro to mark an imaginary literal (f64 has no `i` field) — confirm
    // against the macro definition.
    pub fn complex(s: String) -> Result<Number, <f64 as FromStr>::Err> {
        let com = <f64>::from_str(s.as_str())?;
        Ok(Number::Complex(complex!(com.i)))
    }
    /// Parses a float string into a `Real`.
    pub fn real(s: String) -> Result<Number, <f64 as FromStr>::Err> {
        let real = <f64>::from_str(s.as_str())?;
        Ok(Number::Real(real!(real)))
    }
}

impl Display for Number {
    // Delegates formatting to the wrapped value's own `Display`.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        match *self {
            Number::Natural(val) => val.fmt(f),
            Number::Integer(val) => val.fmt(f),
            Number::Rational(val) => val.fmt(f),
            Number::Real(val) => val.fmt(f),
            Number::Complex(val) => val.fmt(f),
        }
    }
}

impl Rem for Number {
    type Output = Number;

    // Remainder is only defined for the integral variants; any combination
    // involving Rational/Real/Complex panics via `unimplemented!()` at
    // runtime — callers must guard against that.
    fn rem(self, other: Number) -> Number {
        match (self, other) {
            (Number::Natural(a), Number::Natural(b)) => Number::Natural(a % b),
            (Number::Natural(a), Number::Integer(b)) => Number::Integer(a % b),
            (Number::Integer(a), Number::Natural(b)) => Number::Integer(a % b),
            (Number::Integer(a), Number::Integer(b)) => Number::Integer(a % b),
            _ => unimplemented!()
        }
    }
}

impl Neg for Number {
    type Output = Number;

    // Negating a Natural leaves the naturals, so the result is an Integer;
    // all other variants negate within their own type.
    fn neg(self) -> Number {
        match self {
            Number::Natural(nat) => Number::Integer(-nat),
            Number::Integer(int) => Number::Integer(-int),
            Number::Rational(rat) => Number::Rational(-rat),
            Number::Real(rea) => Number::Real(-rea),
            Number::Complex(com) => Number::Complex(-com),
        }
    }
}

// Generates Add/Sub/Mul/Div for `Number`. The 5x5 variant table encodes the
// promotion rules: the result variant is the wider of the two operands
// (e.g. Natural op Real -> Real). The inner ops rely on the corresponding
// `impl Add<Integer> for Natural` etc. on the wrapped types.
macro_rules! impl_ops_for_number {
    () => {impl_ops_for_number! {
        (Add; add; +)
        (Sub; sub; -)
        (Mul; mul; *)
        (Div; div; /)
    } };
    ($(($tr:ty; $name:ident; $op:tt))*) => {
        $(impl_ops_for_number! { @impl $tr; $name; $op } )*
    };
    (@impl $tr:ty; $name:ident; $op:tt) => {
        impl $tr for Number {
            type Output = Number;

            fn $name(self, other: Number) -> Number {
                match self {
                    Number::Natural(nat) => match other {
                        Number::Natural(oth) => Number::Natural(nat $op oth),
                        Number::Integer(int) => Number::Integer(nat $op int),
                        Number::Rational(rat) => Number::Rational(nat $op rat),
                        Number::Real(rea) => Number::Real(nat $op rea),
                        Number::Complex(com) => Number::Complex(nat $op com),
                    },
                    Number::Integer(int) => match other {
                        Number::Natural(nat) => Number::Integer(int $op nat),
                        Number::Integer(oth) => Number::Integer(int $op oth),
                        Number::Rational(rat) => Number::Rational(int $op rat),
                        Number::Real(rea) => Number::Real(int $op rea),
                        Number::Complex(com) => Number::Complex(int $op com),
                    },
                    Number::Rational(rat) => match other {
                        Number::Natural(nat) => Number::Rational(rat $op nat),
                        Number::Integer(int) => Number::Rational(rat $op int),
                        Number::Rational(oth) => Number::Rational(rat $op oth),
                        Number::Real(rea) => Number::Real(rat $op rea),
                        Number::Complex(com) => Number::Complex(rat $op com),
                    },
                    Number::Real(rea) => match other {
                        Number::Natural(nat) => Number::Real(rea $op nat),
                        Number::Integer(int) => Number::Real(rea $op int),
                        Number::Rational(rat) => Number::Real(rea $op rat),
                        Number::Real(oth) => Number::Real(rea $op oth),
                        Number::Complex(com) => Number::Complex(rea $op com),
                    },
                    Number::Complex(com) => match other {
                        Number::Natural(nat) => Number::Complex(com $op nat),
                        Number::Integer(int) => Number::Complex(com $op int),
                        Number::Rational(rat) => Number::Complex(com $op rat),
                        Number::Real(rea) => Number::Complex(com $op rea),
                        Number::Complex(oth) => Number::Complex(com $op oth),
                    },
                }
            }
        }
    }
}
impl_ops_for_number!{}

impl PartialOrd for Number {
    // Comparison widens both operands to `Complex` (every variant converts
    // via `Into`, per the unannotated `.into()` calls) and compares there.
    fn partial_cmp(&self, other: &Number) -> Option<Ordering> {
        let self_ = match *self {
            Number::Natural(nat) => nat.into(),
            Number::Integer(int) => int.into(),
            Number::Rational(rat) => rat.into(),
            Number::Real(rea) => rea.into(),
            Number::Complex(com) => com,
        };
        let other_ = match *other {
            Number::Natural(nat) => nat.into(),
            Number::Integer(int) => int.into(),
            Number::Rational(rat) => rat.into(),
            Number::Real(rea) => rea.into(),
            Number::Complex(com) => com,
        };
        self_.partial_cmp(&other_)
    }
}

impl PartialEq for Number {
    // Equality uses the same widen-to-Complex strategy as `partial_cmp`,
    // so e.g. Natural(2) == Real(2.0) iff the Complex conversions agree.
    fn eq(&self, other: &Number) -> bool {
        let self_ = match *self {
            Number::Natural(nat) => nat.into(),
            Number::Integer(int) => int.into(),
            Number::Rational(rat) => rat.into(),
            Number::Real(rea) => rea.into(),
            Number::Complex(com) => com,
        };
        let other_ = match *other {
            Number::Natural(nat) => nat.into(),
            Number::Integer(int) => int.into(),
            Number::Rational(rat) => rat.into(),
            Number::Real(rea) => rea.into(),
            Number::Complex(com) => com,
        };
        self_.eq(&other_)
    }
}

//macro_rules! product {
//    ($first:tt) => { product! { @product $first; $first } };
//    (@product [$($first:ident);*]; $second:tt ) => {{
//        let mut cnt = 0;
//        $(
//            cnt = cnt + 1;
//            product! { @first $first; cnt; $second }
//        )*
//    }};
//    (@first $first:ident; $cnt_first:expr; [$($second:ident);*] ) => {{
//        let mut cnt = 0;
//        $(
//            cnt = cnt + 1;
//            product! { @second $first; $cnt_first; $second; cnt }
//        )*
//    }};
//    (@second $first:ident; $cnt_first:expr; $second:ident; $cnt_second:expr) => {
//        if $cnt_first <= $cnt_second {
//            ($first, $second)
//        }
//    }
//}

//#[test]
//fn test_product() {
//    let a = 1;
//    let b = 2;
//    let c = 3;
//    let d = 4;
//    let e = 5;
//    product!([a; b; c; d; e]);
//}
//! `x-www-form-urlencoded` meets Serde

extern crate dtoa;
extern crate itoa;

pub mod de;
pub mod ser;

// Re-export the crate's public entry points at the root.
#[doc(inline)]
pub use self::de::{from_bytes, from_reader, from_str, Deserializer};
#[doc(inline)]
pub use self::ser::{to_string, Serializer};

#[cfg(test)]
mod tests {
    // NOTE(review): the derives below need serde's derive macros in scope;
    // presumably a `#[macro_use] extern crate serde_derive;` (or equivalent)
    // exists elsewhere in the crate — not visible in this file.

    // Round-trip checks: keys deserialize in document order as (String, T) pairs.
    #[test]
    fn deserialize_bytes() {
        let result = vec![("first".to_owned(), 23), ("last".to_owned(), 42)];
        assert_eq!(super::from_bytes(b"first=23&last=42"), Ok(result));
    }

    #[test]
    fn deserialize_str() {
        let result = vec![("first".to_owned(), 23), ("last".to_owned(), 42)];
        assert_eq!(super::from_str("first=23&last=42"), Ok(result));
    }

    #[test]
    fn deserialize_reader() {
        let result = vec![("first".to_owned(), 23), ("last".to_owned(), 42)];
        assert_eq!(super::from_reader(b"first=23&last=42" as &[_]), Ok(result));
    }

    #[test]
    fn deserialize_option() {
        let result = vec![
            ("first".to_owned(), Some(23)),
            ("last".to_owned(), Some(42)),
        ];
        assert_eq!(super::from_str("first=23&last=42"), Ok(result));
    }

    // Empty input (and bare separators) deserialize to unit; any actual pair
    // makes unit deserialization fail.
    #[test]
    fn deserialize_unit() {
        assert_eq!(super::from_str(""), Ok(()));
        assert_eq!(super::from_str("&"), Ok(()));
        assert_eq!(super::from_str("&&"), Ok(()));
        assert!(super::from_str::<()>("first=23").is_err());
    }

    #[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
    enum X {
        A,
        B,
        C,
    }

    // Unit enum variants map to their variant names as values.
    #[test]
    fn deserialize_unit_enum() {
        let result = vec![
            ("one".to_owned(), X::A),
            ("two".to_owned(), X::B),
            ("three".to_owned(), X::C),
        ];
        assert_eq!(super::from_str("one=A&two=B&three=C"), Ok(result));
    }

    // `None` values are omitted entirely from the serialized output.
    #[test]
    fn serialize_option_map_int() {
        let params = &[("first", Some(23)), ("middle", None), ("last", Some(42))];
        assert_eq!(super::to_string(params), Ok("first=23&last=42".to_owned()));
    }

    #[test]
    fn serialize_option_map_string() {
        let params = &[
            ("first", Some("hello")),
            ("middle", None),
            ("last", Some("world")),
        ];
        assert_eq!(
            super::to_string(params),
            Ok("first=hello&last=world".to_owned())
        );
    }

    #[test]
    fn serialize_option_map_bool() {
        let params = &[("one", Some(true)), ("two", Some(false))];
        assert_eq!(
            super::to_string(params),
            Ok("one=true&two=false".to_owned())
        );
    }

    #[test]
    fn serialize_map_bool() {
        let params = &[("one", true), ("two", false)];
        assert_eq!(
            super::to_string(params),
            Ok("one=true&two=false".to_owned())
        );
    }

    #[test]
    fn serialize_unit_enum() {
        let params = &[("one", X::A), ("two", X::B), ("three", X::C)];
        assert_eq!(
            super::to_string(params),
            Ok("one=A&two=B&three=C".to_owned())
        );
    }
}
/// Program entry point: writes "Hello World!" to standard output.
fn main() {
    // `println!` appends a trailing newline to the message.
    println!("Hello World!");
}
use std::env; use std::fs; use std::str::Lines; use std::collections::HashMap; pub struct VaultConfiguration { pub map: HashMap<String, String>, } impl VaultConfiguration { ///Creates a new structure containing key-value pairs from a file. ///The name of the virtual environment parameter, which contains the path to the file, is taken as an input parameter. /// ///# Example /// /// ``` /// let VAULT_CONF: VaultConfiguration = VaultConfiguration::new("PATH_TO_FILE"); /// ``` pub fn new(name_key_path: &str) -> Self{ let map = match env::var(&name_key_path) { Ok(path) => get_map(&path), Err(_) => HashMap::new() }; Self{map} } /// The value is increased by the key stored in the structure, otherwise the value stored in the virtual environment by the key. /// ///# Examlpe /// /// ``` /// use std::collections::HashMap; /// use std::env; /// /// let mut config = VaultConfiguration{map: HashMap::new()} /// /// config.map.insert("db_port".to_string(), "8000".to_string()) /// /// assert_eq!(config.get_val("db_port"), "8000".to_string()) /// ``` pub fn get_val(&self, name: &str) -> String{ match self.map.get(name) { Some(value) => value.to_string(), None => env::var(name).unwrap().to_string() } } } fn get_map(path: &str) -> HashMap<String, String>{ parse_text_to_map(fs::read_to_string(path).unwrap().lines()) } fn parse_text_to_map(array_text: Lines) -> HashMap<String, String> { array_text.map(|l| l.split(": ").collect::<Vec<&str>>()).map(|x| (x[0].to_string(), x[1].to_string())).collect() }
use crate::event::moisture::Measurement;

/// Deserialized configuration for the ADC chip used to read analog sensors.
#[derive(Debug, Deserialize, PartialEq, Eq)]
pub struct ADCSettings {
    // Device path/identifier as given in the config file.
    pub device: String,
    // Which ADC model this is; interpreted by the driver.
    pub device_type: String,
    // GPIO pin numbers for chip-select and enable lines.
    pub chip_select_gpio: u8,
    pub enable_gpio: u8,
    // NOTE(review): presumably an update interval — units (ms/s) not visible
    // here; confirm at the consumer of this field.
    pub update: u64
}

/// Deserialized configuration for a single moisture sensor channel.
#[derive(Clone, Debug, Deserialize, PartialEq, Eq)]
pub struct MoistureSensorSettings {
    // Human-readable sensor name used in events.
    pub name: String,
    // NOTE(review): likely the socket/connector the sensor is plugged into —
    // confirm against the wiring config.
    pub socket: String,
    // ADC channel index this sensor is attached to.
    pub channel: u8,
    // Calibration bounds: raw readings corresponding to dry/wet extremes.
    pub min_reading: Measurement,
    pub max_reading: Measurement
}
use reqwest::blocking::Client;

use crate::event::{Event, ToInfluxDB};
use crate::event::moisture::Measurement;
use crate::settings::DatabaseSettings;

/// Blocking InfluxDB v2 client: writes events via the line protocol.
pub struct Database {
    client: Client,
    // Fully-formed write endpoint including org/bucket/precision query params.
    write_url: String,
    // Pre-rendered "Token <token>" header value.
    auth_header: String
}

impl Database {
    /// Builds the write URL and auth header once from settings.
    pub fn new(settings: &DatabaseSettings) -> Self {
        let write_url = format!(
            "{}/api/v2/write?org={}&bucket={}&precision=s",
            settings.url, settings.organisation, settings.bucket
        );
        let auth_header = format!("Token {}", settings.token);
        Database {
            client: Client::new(),
            write_url,
            auth_header
        }
    }

    /// Dispatches a domain event to InfluxDB; event kinds without a line
    /// protocol mapping are deliberately ignored (`_ => ()`).
    pub fn store_event(&self, event: &Event) {
        match event {
            Event::ButtonEvent(b) => self.write_event(b),
            Event::WeatherEvent(w) => self.write_event(w),
            Event::MoistureEvent(m) => self.write_event(m),
            Event::IrrigatedEvent(i) => self.write_event(i),
            _ => ()
        };
    }

    /// POSTs one event in line protocol. Failures are logged, never
    /// propagated — writes are best-effort by design.
    fn write_event<E: ToInfluxDB>(&self, event: &E) {
        let rsp = self.client.post(&self.write_url)
            .header("Authorization", &self.auth_header)
            .body(event.to_line())
            .send();
        debug!("influxdb>> {:?}", rsp);
        match rsp {
            Err(e) => warn!("Failed to write to influxdb: {}", e),
            // Any non-2xx status is logged as a warning.
            Ok(e) if e.status().as_u16() > 299 => warn!("influxdb response code {}", e.status()),
            _ => {}
        }
    }

    // TODO(review): stub — always returns Ok(0) and ignores `sensor`; the
    // InfluxDB query is not implemented yet. (Note: `Ok(0)` compiling implies
    // `Measurement` is an integer type here.)
    pub fn get_min_moisture_in_last_hour(&self, sensor: &str) -> Result<Measurement, Box<dyn std::error::Error>> {
        Ok(0)
    }
}
use clap::{App, AppSettings, Arg, SubCommand};
use std::path::Path;

mod dockerignore;
mod executor;
mod hasher;
mod types;

use dockerignore::*;
use executor::{annotate_component, CommandConfig, CommandRegistry};
use hasher::*;
use types::CustomError;

// Direction of a transitive closure over the component graph.
enum Deps {
    Dependencies,
    Dependents,
}

/// CLI entry point: parses one of five subcommands (hash-components,
/// gen-dockerignore, toposort, transitive-dependencies, transitive-dependents)
/// and dispatches to the matching `run_*` helper.
fn main() -> Result<(), anyhow::Error> {
    env_logger::init();
    let version = env!("CARGO_PKG_VERSION");
    // NOTE(review): clap 2.x `short()` strips leading '-' characters, so
    // `short("-p")` and `short("p")` behave the same — confirm if upgrading clap.
    let matches = App::new("Build Helper")
        .version(version)
        .setting(AppSettings::SubcommandRequiredElseHelp)
        .subcommand(
            SubCommand::with_name("hash-components")
                .about("Annotate components.json with hashes")
                .arg(
                    Arg::with_name("directory")
                        .required(false)
                        .index(1)
                        .default_value("."),
                )
                .arg(
                    Arg::with_name("pretty-print")
                        .short("-p")
                        .required(false)
                        .takes_value(false),
                )
                .arg(
                    Arg::with_name("short-shas")
                        .long("short")
                        .short("-s")
                        .required(false)
                        .takes_value(false),
                )
                .arg(
                    Arg::with_name("remove-dependencies")
                        .short("-r")
                        .required(false)
                        .takes_value(false),
                )
                // The three --add-*-prop flags are repeatable, one value each.
                .arg(
                    Arg::with_name("add-exec-prop")
                        .long("add-exec-prop")
                        .required(false)
                        .takes_value(true)
                        .multiple(true)
                        .number_of_values(1),
                )
                .arg(
                    Arg::with_name("add-sh-prop")
                        .long("add-sh-prop")
                        .required(false)
                        .takes_value(true)
                        .multiple(true)
                        .number_of_values(1),
                )
                .arg(
                    Arg::with_name("add-prop")
                        .long("add-prop")
                        .required(false)
                        .takes_value(true)
                        .multiple(true)
                        .number_of_values(1),
                ),
        )
        .subcommand(
            SubCommand::with_name("gen-dockerignore")
                .about("Generate .dockerignore file")
                .arg(
                    Arg::with_name("directory")
                        .short("d")
                        .required(false)
                        .default_value("."),
                )
                .arg(
                    Arg::with_name("overwrite")
                        .short("f")
                        .required(false)
                        .takes_value(false),
                )
                .arg(
                    Arg::with_name("no-include-ignore")
                        .short("n")
                        .required(false)
                        .takes_value(false),
                )
                .arg(Arg::with_name("component").required(true).index(1)),
        )
        .subcommand(
            SubCommand::with_name("toposort")
                .about("Topologically sort components")
                .arg(
                    Arg::with_name("directory")
                        .required(false)
                        .index(1)
                        .default_value("."),
                ),
        )
        .subcommand(
            SubCommand::with_name("transitive-dependencies")
                .about("List all transitive dependencies of component (topologically sorted)")
                .arg(
                    Arg::with_name("directory")
                        .short("d")
                        .required(false)
                        .default_value("."),
                )
                .arg(
                    Arg::with_name("no-include-self")
                        .short("n")
                        .required(false)
                        .takes_value(false),
                )
                .arg(
                    Arg::with_name("reverse-topological-order")
                        .short("r")
                        .required(false)
                        .takes_value(false),
                )
                .arg(
                    Arg::with_name("component")
                        .required(true)
                        .index(1)
                        .multiple(true),
                ),
        )
        .subcommand(
            SubCommand::with_name("transitive-dependents")
                .about("List all transitive dependents of component (topologically sorted)")
                .arg(
                    Arg::with_name("directory")
                        .short("d")
                        .required(false)
                        .default_value("."),
                )
                .arg(
                    Arg::with_name("no-include-self")
                        .short("n")
                        .required(false)
                        .takes_value(false),
                )
                .arg(
                    Arg::with_name("component")
                        .required(true)
                        .index(1)
                        .multiple(true),
                ),
        )
        .get_matches();
    if let Some(m) = matches.subcommand_matches("hash-components") {
        let mut reg = CommandRegistry::new();
        let p: &Path = m.value_of_os("directory").unwrap().as_ref();
        let path = p.canonicalize()?;
        let short = m.is_present("short-shas");
        // Register user-supplied property generators of each flavor.
        if let Some(cmds) = m.values_of("add-exec-prop") {
            register_added_props(&mut reg, cmds, CommandConfig::new_exec_command())?;
        }
        if let Some(cmds) = m.values_of("add-sh-prop") {
            register_added_props(&mut reg, cmds, CommandConfig::new_shell_command())?;
        }
        if let Some(cmds) = m.values_of("add-prop") {
            register_added_props(&mut reg, cmds, CommandConfig::new_template())?;
        }
        run_hasher(
            &path,
            m.is_present("pretty-print"),
            m.is_present("remove-dependencies"),
            short,
            |mut c| annotate_component(&reg, &mut c),
        )
    } else if let Some(m) = matches.subcommand_matches("gen-dockerignore") {
        let p: &Path = m.value_of_os("directory").unwrap().as_ref();
        let path = p.canonicalize()?;
        let d = m.value_of("component").unwrap();
        let overwrite = m.is_present("overwrite");
        let noinclude = m.is_present("no-include-ignore");
        run_dockerignore_creator(&path, d, overwrite, noinclude)
    } else if let Some(m) = matches.subcommand_matches("toposort") {
        let p: &Path = m.value_of_os("directory").unwrap().as_ref();
        let path = p.canonicalize()?;
        run_topo(&path)
    } else if let Some(m) = matches.subcommand_matches("transitive-dependencies") {
        let p: &Path = m.value_of_os("directory").unwrap().as_ref();
        let path = p.canonicalize()?;
        let components: Vec<_> = m.values_of("component").unwrap().collect();
        let noinclude = m.is_present("no-include-self");
        let reverse = m.is_present("reverse-topological-order");
        run_listdeps(
            &path,
            Deps::Dependencies,
            !noinclude,
            Some(reverse),
            components,
        )
    } else if let Some(m) = matches.subcommand_matches("transitive-dependents") {
        let p: &Path = m.value_of_os("directory").unwrap().as_ref();
        let path = p.canonicalize()?;
        let components: Vec<_> = m.values_of("component").unwrap().collect();
        let noinclude = m.is_present("no-include-self");
        run_listdeps(&path, Deps::Dependents, !noinclude, None, components)
    } else {
        // Unreachable: SubcommandRequiredElseHelp guarantees one of the above.
        panic!("unexpected subcommand")
    }
}

/// Parses `name=command` property arguments and registers them.
/// A `name?=command` form (for command flavors) marks the property as boolean.
/// Arguments without '=' yield `PropMissingEqualsError`.
fn register_added_props<A: Iterator<Item = T>, T: AsRef<str>>(
    reg: &mut CommandRegistry,
    props: A,
    config: CommandConfig,
) -> Result<(), CustomError> {
    for cmd_ref in props {
        let cmd = cmd_ref.as_ref();
        if let Some(p) = cmd.find('=') {
            let mut x = &cmd[..p];
            let is_command = config.is_command();
            let conf = if is_command && x.ends_with('?') {
                // Drop the trailing '?' from the name (x has length p).
                x = &x[..p - 1];
                config.set_bool()
            } else {
                config
            };
            let y = &cmd[p + 1..];
            reg.add_command(x, y, conf)?;
        } else {
            return Err(CustomError::PropMissingEqualsError {
                argument: cmd.to_owned(),
            });
        }
    }
    Ok(())
}

/// Prints all component directories in topological order.
fn run_topo(path: &Path) -> anyhow::Result<()> {
    let x = types::load_components(path);
    for component in types::toposort_components(x)?.iter() {
        println!("{}", component.dir);
    }
    Ok(())
}

/// Prints the transitive dependency/dependent closure of `components`.
/// `reverse_order` is only meaningful for the Dependencies direction and is
/// ignored (None) for Dependents.
fn run_listdeps(
    path: &Path,
    deps: Deps,
    include_self: bool,
    reverse_order: Option<bool>,
    components: Vec<&str>,
) -> anyhow::Result<()> {
    let data = types::load_components(&path);
    let r = match deps {
        Deps::Dependencies => types::transitive_dependencies(
            data,
            &components[..],
            include_self,
            reverse_order.unwrap_or(false),
        )?,
        Deps::Dependents => types::transitive_dependents(data, &components[..], include_self)?,
    };
    for component in r.iter() {
        println!("{}", component.dir);
    }
    Ok(())
}
use error_chain::error_chain; use crate::concurrency::explicit_threads::walk_dir; error_chain! { foreign_links { IO(std::io::Error); WalkDir(walkdir::Error); SystemTime(std::time::SystemTimeError); Glob(glob::GlobError); Pattern(glob::PatternError); } } pub fn find_readed_in_24hours() -> Result<()> { use walkdir::WalkDir; use std::fs::Metadata; let current_dir = std::env::current_dir()?; println!( "Entries modified in the last 24 hours in {:?}:", current_dir ); for entry in WalkDir::new(current_dir) { let entry = entry?; let path = entry.path(); if path.is_dir() { continue; } let metadata: Metadata = path.metadata()?; let last_modified = metadata.modified()?.elapsed()?.as_secs(); if last_modified > 24 * 60 * 60 { println!( "Last modified: {:?} hours, is read only: {:?}, size: {:?} bytes, filename: {:?}", last_modified as f32 / 3600 as f32, metadata.permissions().readonly(), metadata.len() / 1000, path.file_name().ok_or("No filename")? ); } } Ok(()) } pub fn contains_loop() -> Result<()> { use std::path::{Path, PathBuf}; fn is_contains_loop<P: AsRef<Path>>(path: P) -> Result<Option<(PathBuf, PathBuf)>> { let path = path.as_ref(); let mut path_buf = path.to_path_buf(); println!("{:?}", path); while path_buf.pop() { println!("{:?}", path_buf); if same_file::is_same_file(path_buf.as_path(), path)? { return Ok(Some((path.to_path_buf(), path_buf))); } else if let Some(looped_paths) = is_contains_loop(&path_buf)? { return Ok(Some(looped_paths)); } } return Ok(None); } if let Some((path, path_buf)) = is_contains_loop("/tmp/foo/bar/baz/qux/bar/baz")? 
{ println!("{:?} == {:?}", path, path_buf); } Ok(()) } pub fn contains_repeat() -> Result<()> { use std::collections::HashMap; use std::path::Path; use walkdir::WalkDir; fn is_contains_repeat<P: AsRef<Path>>(path: P) -> Result<()> { let mut filenames = HashMap::new(); for entry in WalkDir::new(path) .into_iter() .filter_map(|entry| entry.ok()) .filter(|e| !e.file_type().is_dir()) { let f_name = entry.file_name().to_string_lossy().to_string(); let counter = filenames.entry(f_name.clone()).or_insert(0); *counter += 1; if *counter == 2 { println!("{}", f_name); } } let mut sorted_hashmap = filenames.into_iter().collect::<Vec<(String, i32)>>(); // println!("{:?}", sorted_hashmap); sorted_hashmap.sort_by(|a, b| a.1.cmp(&b.1)); println!("{:?}", sorted_hashmap); Ok(()) } is_contains_repeat(std::env::current_dir()?)?; Ok(()) } pub fn skip_hiddens() -> Result<()> { use walkdir::{DirEntry, WalkDir}; fn is_not_hidden(entry: &DirEntry) -> bool { entry.file_name().to_string_lossy().starts_with('.') } for entry in WalkDir::new(".") .min_depth(1) // 确定递归深度 .max_depth(3) .follow_links(true) // 确保软连接也会被查询 .into_iter() .filter_entry(|e| is_not_hidden(e)) // filter_entry 判断是否为隐藏 .filter_map(|e| e.ok()) { let f_name = entry.file_name().to_string_lossy(); let sec = entry.metadata()?.modified()?; if f_name.ends_with(".json") && sec.elapsed()?.as_secs() < 86400 { println!("{}", f_name); } } Ok(()) } pub fn find_all_png() -> Result<()> { for entry in glob::glob("**/*.png")? { println!("{}", entry?.display()); } let option = glob::MatchOptions { case_sensitive: false, // 大小写不敏感 ..Default::default() // 其他默认 }; // glob_with能自定义参数 for entry in glob::glob_with("**/OUT.*.png", option)? 
{ println!("{}", entry?.display()); } for entry in walkdir::WalkDir::new(".") { let entry = entry?; if entry.file_name().to_string_lossy().ends_with(".png") { println!("{}", entry.path().display()) } } Ok(()) } #[test] pub fn test() { // find_readed_in_24hours(); // contains_repeat(); // if let Err(errors) = contains_loop() { // errors.iter() // .enumerate() // .for_each(|(index, error)| println!("└> {} - {}", index, error)); // } find_all_png(); }
// Crate module layout.
pub mod generic;
pub mod specialized;
pub mod algorithms;
pub mod bitvec;
/// Socket address the server binds to: all interfaces, port 80.
pub const ADDRESS: &str = "0.0.0.0:80";

pub mod index;
extern crate orbclient;
extern crate tetrahedrane;

use tetrahedrane::vid::*;
use tetrahedrane::start;

/// Demo: spins a single triangle in a 1280x720 window at ~30 fps until a
/// quit event arrives.
fn main() {
    let mut window = start::Window::new(1280, 720, "Hello!", 1 as usize);
    // Clear to a dark blue background.
    window.window.set(Color::new(20, 40, 60).orb_color());

    // Triangle vertices in normalized device-ish coordinates, placed at z=2.0.
    let point1 = DepthPoint::new(0.0, -0.5, 0.0);
    let point2 = DepthPoint::new(0.5, 0.5, 0.0);
    let point3 = DepthPoint::new(-0.5, 0.5, 0.0);
    let mut triangle = Triangle::new(point1, point2, point3, 0.0, 0.0, 2.0, Color::new(200, 200, 200));

    'game_loop: loop {
        // Re-clear the frame before drawing.
        window.window.set(Color::new(20, 40, 60).orb_color());

        let mut events = window.window.events();
        // NOTE(review): `events.next()` yields at most ONE event per frame;
        // `for event in events` would drain them all — confirm intent.
        for event in events.next() {
            // NOTE(review): code 3 presumably means quit/window-close in
            // orbclient — confirm against the orbclient event codes.
            if event.code == 3 {
                break 'game_loop;
            }
        }

        //window.camera_z += 0.01;

        // Rotate around all three axis pairs by differing increments per frame.
        triangle.coord_rotate_x_y(0.0, 0.0, 0.01);
        triangle.coord_rotate_x_z(0.0, 0.0, 0.02);
        triangle.coord_rotate_y_z(0.0, 0.0, 0.03);

        window.render_queue.push(triangle);
        window.render();
        window.window.sync();

        // ~30 frames per second.
        std::thread::sleep(std::time::Duration::from_millis(33));
    }
}
//! Hyper SSL support via modern versions of OpenSSL.
//!
//! Hyper's built in OpenSSL support depends on version 0.7 of `openssl`. This crate provides
//! SSL support using version 0.9 of `openssl`.
//!
//! # Usage
//!
//! Hyper's `ssl` feature is enabled by default, so it must be explicitly turned off in your
//! Cargo.toml:
//!
//! ```toml
//! [dependencies]
//! hyper = { version = "0.9", default_features = false }
//! hyper-openssl = "0.1"
//! ```
//!
//! Then on the client side:
//!
//! ```
//! extern crate hyper;
//! extern crate hyper_openssl;
//!
//! use hyper::Client;
//! use hyper::net::HttpsConnector;
//! use hyper_openssl::OpensslClient;
//! use std::io::Read;
//!
//! fn main() {
//!     let ssl = OpensslClient::new().unwrap();
//!     let connector = HttpsConnector::new(ssl);
//!     let client = Client::with_connector(connector);
//!
//!     let mut resp = client.get("https://google.com").send().unwrap();
//!     let mut body = vec![];
//!     resp.read_to_end(&mut body).unwrap();
//!     println!("{}", String::from_utf8_lossy(&body));
//! }
//! ```
//!
//! Or on the server side:
//!
//! ```no_run
//! extern crate hyper;
//! extern crate hyper_openssl;
//! extern crate openssl;
//!
//! use hyper::Server;
//! use hyper_openssl::OpensslServer;
//! use openssl::ssl::{SslMethod, SslAcceptorBuilder};
//! use openssl::pkcs12::Pkcs12;
//! use std::io::Read;
//! use std::fs::File;
//!
//! fn main() {
//!     let mut pkcs12 = vec![];
//!     File::open("identity.pfx")
//!         .unwrap()
//!         .read_to_end(&mut pkcs12)
//!         .unwrap();
//!     let pkcs12 = Pkcs12::from_der(&pkcs12)
//!         .unwrap()
//!         .parse("hunter2")
//!         .unwrap();
//!
//!     let acceptor = SslAcceptorBuilder::mozilla_intermediate(SslMethod::tls(),
//!                                                             &pkcs12.pkey,
//!                                                             &pkcs12.cert,
//!                                                             pkcs12.chain)
//!         .unwrap()
//!         .build();
//!     let ssl = OpensslServer::from(acceptor);
//!
//!     let server = Server::https("0.0.0.0:8443", ssl).unwrap();
//! }
//! ```
#![warn(missing_docs)]
#![doc(html_root_url="https://docs.rs/hyper-openssl/0.1.0")]

extern crate antidote;
extern crate hyper;
extern crate openssl;

use antidote::Mutex;
use hyper::net::{SslClient, SslServer, NetworkStream};
use openssl::error::ErrorStack;
use openssl::ssl::{self, SslMethod, SslConnector, SslConnectorBuilder, SslAcceptor};
use std::io::{self, Read, Write};
use std::net::SocketAddr;
use std::sync::Arc;
use std::time::Duration;
use std::fmt::Debug;

/// An `SslClient` implementation using OpenSSL.
#[derive(Clone)]
pub struct OpensslClient(SslConnector);

impl OpensslClient {
    /// Creates a new `OpenSslClient` with default settings.
    pub fn new() -> Result<OpensslClient, ErrorStack> {
        let connector = try!(SslConnectorBuilder::new(SslMethod::tls())).build();
        Ok(OpensslClient(connector))
    }
}

impl From<SslConnector> for OpensslClient {
    // Wraps a pre-configured connector (e.g. with custom CA certs).
    fn from(connector: SslConnector) -> OpensslClient {
        OpensslClient(connector)
    }
}

impl<T> SslClient<T> for OpensslClient
    where T: NetworkStream + Clone + Sync + Send + Debug
{
    type Stream = SslStream<T>;

    // Performs the TLS handshake against `host` over the given stream.
    // Handshake/verification failures are surfaced as `hyper::Error::Ssl`.
    fn wrap_client(&self, stream: T, host: &str) -> hyper::Result<SslStream<T>> {
        match self.0.connect(host, stream) {
            Ok(stream) => Ok(SslStream(Arc::new(Mutex::new(stream)))),
            Err(err) => Err(hyper::Error::Ssl(Box::new(err))),
        }
    }
}

/// An `SslServer` implementation using OpenSSL.
#[derive(Clone)]
pub struct OpensslServer(SslAcceptor);

impl From<SslAcceptor> for OpensslServer {
    fn from(acceptor: SslAcceptor) -> OpensslServer {
        OpensslServer(acceptor)
    }
}

impl<T> SslServer<T> for OpensslServer
    where T: NetworkStream + Clone + Sync + Send + Debug
{
    type Stream = SslStream<T>;

    // Accepts a client handshake on an incoming connection.
    fn wrap_server(&self, stream: T) -> hyper::Result<SslStream<T>> {
        match self.0.accept(stream) {
            Ok(stream) => Ok(SslStream(Arc::new(Mutex::new(stream)))),
            Err(err) => Err(hyper::Error::Ssl(Box::new(err))),
        }
    }
}

/// A Hyper SSL stream.
// NOTE(review): cloning shares the SAME underlying TLS stream behind one
// mutex — clones are handles, not independent connections. Each I/O call
// acquires the lock for its duration.
#[derive(Clone)]
pub struct SslStream<T>(Arc<Mutex<ssl::SslStream<T>>>);

impl<T: Read + Write> Read for SslStream<T> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.0.lock().read(buf)
    }
}

impl<T: Read + Write> Write for SslStream<T> {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.0.lock().write(buf)
    }

    fn flush(&mut self) -> io::Result<()> {
        self.0.lock().flush()
    }
}

impl<T: NetworkStream> NetworkStream for SslStream<T> {
    // These delegate to the plaintext transport underneath the TLS layer.
    fn peer_addr(&mut self) -> io::Result<SocketAddr> {
        self.0.lock().get_mut().peer_addr()
    }

    fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
        self.0.lock().get_ref().set_read_timeout(dur)
    }

    fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
        self.0.lock().get_ref().set_write_timeout(dur)
    }
}

#[cfg(test)]
mod test {
    use hyper::{Client, Server};
    use hyper::server::{Request, Response, Fresh};
    use hyper::net::HttpsConnector;
    use openssl::ssl::{SslMethod, SslAcceptorBuilder, SslConnectorBuilder};
    use openssl::pkey::PKey;
    use openssl::x509::X509;
    use std::io::Read;
    use std::mem;

    use {OpensslClient, OpensslServer};

    // Network-dependent smoke test against a live host.
    #[test]
    fn google() {
        let ssl = OpensslClient::new().unwrap();
        let connector = HttpsConnector::new(ssl);
        let client = Client::with_connector(connector);

        let mut resp = client.get("https://google.com").send().unwrap();
        assert!(resp.status.is_success());
        let mut body = vec![];
        resp.read_to_end(&mut body).unwrap();
    }

    // End-to-end loopback: serve HTTPS with a test cert, then connect with a
    // client that trusts that same cert.
    #[test]
    fn server() {
        let cert = include_bytes!("../test/cert.pem");
        let key = include_bytes!("../test/key.pem");
        let cert = X509::from_pem(cert).unwrap();
        let key = PKey::private_key_from_pem(key).unwrap();

        let acceptor = SslAcceptorBuilder::mozilla_intermediate(SslMethod::tls(),
                                                                &key,
                                                                &cert,
                                                                None::<X509>)
            .unwrap()
            .build();
        let ssl = OpensslServer::from(acceptor);
        let server = Server::https("127.0.0.1:0", ssl).unwrap();
        let listening = server.handle(|_: Request, resp: Response<Fresh>| {
            resp.send(b"hello").unwrap()
        }).unwrap();
        let port = listening.socket.port();
        // Deliberately leak the listening handle so the server thread keeps
        // running for the duration of the test.
        mem::forget(listening);

        let mut connector = SslConnectorBuilder::new(SslMethod::tls()).unwrap();
        connector.builder_mut().cert_store_mut().add_cert(cert).unwrap();
        let ssl = OpensslClient::from(connector.build());
        let connector = HttpsConnector::new(ssl);
        let client = Client::with_connector(connector);
        let mut resp = client.get(&format!("https://localhost:{}", port))
            .send()
            .unwrap();
        let mut body = vec![];
        resp.read_to_end(&mut body).unwrap();
        assert_eq!(body, b"hello");
    }
}
// Run-time: // status: signal use std::process; fn main() { unsafe { let ptr = std::ptr::null::<usize>(); *ptr + 1; } }
#[macro_use]
extern crate log;

use azure_core::prelude::*;
use azure_storage::core::prelude::*;
use azure_storage_queues::prelude::*;
use std::error::Error;

/// Example: puts a single timestamped message on an Azure Storage queue.
/// Credentials come from environment variables; the queue name from argv[1].
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error + Send + Sync>> {
    // First we retrieve the account name and master key from environment variables.
    let account =
        std::env::var("STORAGE_ACCOUNT").expect("Set env variable STORAGE_ACCOUNT first!");
    let master_key =
        std::env::var("STORAGE_MASTER_KEY").expect("Set env variable STORAGE_MASTER_KEY first!");

    let queue_name = std::env::args()
        .nth(1)
        .expect("Please pass the queue name as first parameter");

    let http_client = new_http_client();

    // Shared-key (master key) authentication against the storage account.
    let storage_account =
        StorageAccountClient::new_access_key(http_client.clone(), &account, &master_key);

    let queue = storage_account.as_queue_client(queue_name);

    trace!("putting message");

    let response = queue
        .put_message()
        .client_request_id("optional correlation token")
        .execute(format!("Azure SDK for Rust rocks! {}", chrono::Utc::now()))
        .await?;

    println!("response == {:#?}", response);

    Ok(())
}
use std::collections::HashMap;
use std::fmt;
use std::ops::Deref;
use std::borrow::{Cow};
use tuple::{TupleElements, Map};
use decorum::R32;
use indexmap::set::IndexSet;
use crate::{R, FontError};
use crate::parsers::{token, Token, comment, space, hex_string};

// Slot-map backend: the plain SlotMap needs unstable features; fall back to
// DenseSlotMap on stable under the same name.
#[cfg(feature="unstable")]
use slotmap::SlotMap;
#[cfg(not(feature="unstable"))]
use slotmap::DenseSlotMap as SlotMap;

// Typed keys into the VM's dictionary/array/string slot maps.
new_key_type! {
    pub struct DictKey;
    pub struct ArrayKey;
    pub struct StringKey;
}

// Index into the VM's interned-literal table.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct LitKey(usize);

/// A PostScript-style VM value. Compound values (Dict/Array/String) are
/// stored by key into the VM's slot maps, which keeps `Item` small and `Copy`.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum Item {
    Null,
    Bool(bool),
    Int(i32),
    // R32 (decorum) gives total ordering/Eq/Hash for the float payload.
    Real(R32),
    Dict(DictKey),
    Array(ArrayKey),
    String(StringKey),
    Name(LitKey),
    Literal(LitKey),
    Operator(Operator),
    Mark,
    File
}

// Guards Debug-formatting of (possibly cyclic) containers: tracks the
// addresses currently being printed in a thread-local stack and prints "..."
// on re-entry instead of recursing forever.
// NOTE(review): if `func(f)` returns Err, the early `?` skips the pop and
// leaves `ptr` on the stack — presumably benign for fmt errors, but confirm.
#[cfg(feature="unstable")]
fn recursive_trap(ptr: usize, f: &mut fmt::Formatter<'_>, func: impl FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result) -> fmt::Result {
    use std::cell::RefCell;
    #[thread_local]
    static STACK: RefCell<Vec<usize>> = RefCell::new(Vec::new());

    {
        let stack = &mut *STACK.borrow_mut();
        if stack.contains(&ptr) {
            return write!(f, "...");
        }
        stack.push(ptr);
    }
    func(f)?;
    let ptr2 = STACK.borrow_mut().pop().unwrap();
    assert_eq!(ptr, ptr2);
    Ok(())
}

/// Borrowed view of a dictionary, bundled with the VM needed to resolve the
/// keys its entries contain.
#[derive(Copy, Clone)]
pub struct RefDict<'a> {
    vm: &'a Vm,
    dict: &'a Dictionary
}

impl<'a> RefDict<'a> {
    /// Iterates entries as resolved (key, value) `RefItem` pairs.
    pub fn iter(&self) -> impl Iterator<Item=(RefItem<'a>, RefItem<'a>)> {
        let vm = self.vm;
        self.dict.iter().map(move |(k, v)| (RefItem::new(vm, *k), RefItem::new(vm, *v)))
    }

    /// Looks up a string key: it must already be interned in the VM's literal
    /// table, and the dictionary key must be the `Literal` form of it.
    pub fn get(&self, key: &str) -> Option<RefItem<'a>> {
        self.vm.literals.get_full(key.as_bytes())
            .and_then(|(index, _)| self.dict.get(&Item::Literal(LitKey(index))))
            .map(|&item| RefItem::new(self.vm, item))
    }

    // Typed convenience getters; None if absent or of the wrong variant.
    pub fn get_int(&self, key: &str) -> Option<i32> {
        self.get(key).and_then(|i| i.as_int())
    }
    pub fn get_str(&self, key: &str) -> Option<&'a str> {
        self.get(key).and_then(|i| i.as_str())
    }
    pub fn get_dict(&self, key: &str) -> Option<RefDict<'a>> {
        self.get(key).and_then(|i| i.as_dict())
    }
    pub fn get_array(&self, key: &str) -> Option<RefArray<'a>> {
        self.get(key).and_then(|i| i.as_array())
    }
    pub fn len(&self) -> usize {
        self.dict.len()
    }

    /// Entries whose keys are UTF-8 literals, yielded as (&str, value).
    /// Non-literal or non-UTF-8 keys are silently skipped.
    pub fn string_entries(&self) -> impl Iterator<Item=(&'a str, RefItem<'a>)> {
        self.iter().filter_map(|(key, val)| {
            match key {
                RefItem::Literal(s) => std::str::from_utf8(s).ok().map(|s| (s, val)),
                _ => None
            }
        })
    }
}

#[cfg(feature="unstable")]
impl<'a> fmt::Debug for RefDict<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let vm = self.vm;
        // Cycle-safe: keyed on the dictionary's address.
        recursive_trap(self.dict as *const _ as usize, f, move |f| {
            f.debug_map().entries(
                self.dict.iter()
                    .map(move |(&k, &v)| (RefItem::new(vm, k), RefItem::new(vm, v)))
            )
            .finish()
        })
    }
}

/// Borrowed view of an array, bundled with the VM needed to resolve elements.
#[derive(Copy, Clone)]
pub struct RefArray<'a> {
    vm: &'a Vm,
    array: &'a Array
}

impl<'a> RefArray<'a> {
    pub fn iter(&self) -> impl Iterator<Item=RefItem<'a>> {
        let vm = self.vm;
        self.array.iter()
            .map(move |&item| (RefItem::new(vm, item)))
    }
    pub fn get(&self, index: usize) -> Option<RefItem<'a>> {
        self.array.get(index)
            .map(|&item| RefItem::new(self.vm, item))
    }
    pub fn len(&self) -> usize {
        self.array.len()
    }
}

#[cfg(feature="unstable")]
impl<'a> fmt::Debug for RefArray<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let vm = self.vm;
        // Cycle-safe: keyed on the array's address.
        recursive_trap(self.array as *const _ as usize, f, move |f| {
            f.debug_list().entries(self.array.iter().map(move |&item| RefItem::new(vm, item)))
                .finish()
        })
    }
}

/// Fully-resolved counterpart of `Item`: slot-map keys are replaced by
/// borrowed views into the VM, suitable for inspection and printing.
pub enum RefItem<'a> {
    Recursive,
    Null,
    Bool(bool),
    Int(i32),
    Real(f32),
    Dict(RefDict<'a>),
    Array(RefArray<'a>),
    String(&'a [u8]),
    Name(&'a [u8]),
    Literal(&'a [u8]),
    Operator(Operator),
    Mark,
    File
}

// Lossy preview of a byte string, truncated to at most 100 bytes for display.
fn print_string(s: &[u8]) -> Cow<str> { String::from_utf8_lossy(&s[..
s.len().min(100)]) } impl<'a> RefItem<'a> { fn new(vm: &'a Vm, item: Item) -> RefItem<'a> { match item { Item::Null => RefItem::Null, Item::Bool(b) => RefItem::Bool(b), Item::Int(i) => RefItem::Int(i), Item::Real(r) => RefItem::Real(r.into()), Item::Dict(key) => RefItem::Dict(RefDict { vm, dict: vm.get_dict(key) }), Item::Array(key) => RefItem::Array(RefArray { vm, array: vm.get_array(key) }), Item::String(key) => RefItem::String(vm.get_string(key)), Item::Name(key) => RefItem::Name(vm.get_lit(key)), Item::Literal(key) => RefItem::Literal(vm.get_lit(key)), Item::Operator(op) => RefItem::Operator(op), Item::Mark => RefItem::Mark, Item::File => RefItem::File } } pub fn as_dict(&self) -> Option<RefDict<'a>> { match *self { RefItem::Dict(dict) => Some(dict), _ => None } } pub fn as_array(&self) -> Option<RefArray<'a>> { match *self { RefItem::Array(array) => Some(array), _ => None } } pub fn as_bytes(&self) -> Option<&'a [u8]> { match *self { RefItem::String(bytes) | RefItem::Name(bytes) | RefItem::Literal(bytes) => Some(bytes), _ => None } } pub fn as_str(&self) -> Option<&'a str> { self.as_bytes().and_then(|b| std::str::from_utf8(b).ok()) } pub fn as_f32(&self) -> Option<f32> { match *self { RefItem::Int(i) => Some(i as f32), RefItem::Real(r) => Some(r.into()), _ => None } } pub fn as_int(&self) -> Option<i32> { match *self { RefItem::Int(i) => Some(i), _ => None } } } impl<'a> fmt::Debug for RefItem<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { RefItem::Recursive => write!(f, "..."), RefItem::Null => write!(f, "Null"), RefItem::Mark => write!(f, "Mark"), RefItem::File => write!(f, "File"), RefItem::Operator(op) => op.fmt(f), RefItem::Bool(b) => b.fmt(f), RefItem::Int(i) => i.fmt(f), RefItem::Real(r) => r.fmt(f), #[cfg(feature="unstable")] RefItem::Dict(dict) => dict.fmt(f), #[cfg(feature="unstable")] RefItem::Array(array) => array.fmt(f), RefItem::String(s) => write!(f, "({:?})", print_string(s)), RefItem::Literal(s) => write!(f, 
"/{:?}", print_string(s)), RefItem::Name(s) => write!(f, "{:?}", print_string(s)), _ => Ok(()) } } } type Array = Vec<Item>; type Dictionary = HashMap<Item, Item>; #[derive(Debug)] struct Mode { write: bool, execute: bool, read: bool } impl Mode { fn all() -> Mode { Mode { write: true, execute: true, read: true } } fn read_only(&mut self) { self.write = false; } fn execute_only(&mut self) { self.read = false; self.write = false; } fn noaccess(&mut self) { self.write = false; self.execute = false; self.read = false; } } macro_rules! operators { ($($key:expr => $name:ident),*; $($key2:expr => $name2:ident),*) => ( #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] pub enum Operator { $($name),* } const OPERATOR_MAP: &[(&'static str, Operator)] = &[ $( ($key, Operator::$name), )* $( ($key2, Operator::$name2), )* ]; ) } operators!{ "abs" => Abs, "add" => Add, "array" => Array, "begin" => Begin, "currentdict" => CurrentDict, "currentfile" => CurrentFile, "cleartomark" => ClearToMark, "closefile" => CloseFile, "count" => Count, "copy" => Copy, "cvx" => Cvx, "definefont" => DefineFont, "for" => For, "def" => Def, "dict" => Dict, "dup" => Dup, "end" => End, "exch" => Exch, "executeonly" => ExecuteOnly, "eexec" => Eexec, "exec" => Exec, "eq" => Eq, "false" => False, "get" => Get, "if" => If, "ifelse" => IfElse, "index" => Index, "internaldict" => InternalDict, "known" => Known, "length" => Length, "maxlength" => MaxLength, "mark" => Mark, "mul" => Mul, "noaccess" => NoAccess, "not" => Not, "pop" => Pop, "put" => Put, "readonly" => ReadOnly, "readstring" => ReadString, "string" => String, "sub" => Sub, "true" => True, "]" => EndArray; "[" => Mark } pub struct Input<'a> { data: &'a [u8], open: bool } impl<'a> Input<'a> { pub fn new(data: &'a [u8]) -> Input<'a> { Input { data, open: true } } pub fn len(&self) -> usize { self.data.len() } fn take(&mut self, n: usize) -> &'a [u8] { let (first, second) = self.data.split_at(n); self.data = second; first } fn advance(&mut self, n: 
usize) { self.data = self.data.get(n ..).unwrap_or(&[]); } // true if buf.len() bytes were read // false if EOF (buf will be truncated) fn read_to(&mut self, buf: &mut Vec<u8>) -> bool { if self.len() >= buf.len() { let len = buf.len(); // normal case buf.copy_from_slice(self.take(len)); true } else { let len = self.len(); buf.truncate(len); buf.copy_from_slice(self.take(len)); false } } fn try_parse<T, E>(&mut self, parser: impl Fn(&'a [u8]) -> Result<(&'a [u8], T), E>) -> Option<T> { match parser(self.data) { Ok((i, t)) => { let n = self.data.len() - i.len(); self.advance(n); Some(t) }, Err(_) => { let slice = &self.data[.. self.data.len().min(20)]; trace!("input: {:?} {:?}", String::from_utf8_lossy(slice), slice); None } } } } impl<'a> Deref for Input<'a> { type Target = [u8]; fn deref(&self) -> &[u8] { self.data } } pub struct Vm { dicts: SlotMap<DictKey, (Dictionary, Mode)>, arrays: SlotMap<ArrayKey, (Array, Mode)>, strings: SlotMap<StringKey, (Vec<u8>, Mode)>, literals: IndexSet<Vec<u8>>, fonts: HashMap<String, DictKey>, dict_stack: Vec<DictKey>, stack: Vec<Item>, internal_dict: DictKey } impl Vm { pub fn new() -> Vm { let mut dicts = SlotMap::with_key(); let error_dict_key = dicts.insert((Dictionary::new(), Mode { read: true, write: true, execute: false })); let internal_dict_key = dicts.insert((Dictionary::new(), Mode { read: true, write: true, execute: false })); let mut vm = Vm { dicts, arrays: SlotMap::with_key(), strings: SlotMap::with_key(), literals: IndexSet::new(), fonts: HashMap::new(), dict_stack: Vec::new(), stack: Vec::new(), internal_dict: internal_dict_key }; let mut system_dict: Dictionary = OPERATOR_MAP.iter() .map(|&(name, op)| (Item::Literal(vm.make_lit(name.as_bytes())), Item::Operator(op))) .collect(); let mut user_dict = Dictionary::new(); { let font_dict = vm.make_dict(Dictionary::new(), Mode { write: true, execute: false, read: true }); user_dict.insert(Item::Literal(vm.make_lit(b"FontDirectory")), Item::Dict(font_dict)); // 
StandardEncoding … use crate::cff::{STANDARD_STRINGS, STANDARD_ENCODING}; let arr = STANDARD_ENCODING.iter().map(|&sid| Item::Literal(vm.make_lit(STANDARD_STRINGS[sid as usize].as_bytes())) ).collect(); let standard_encoding = vm.make_array(arr, Mode { write: false, execute: false, read: true }); user_dict.insert(Item::Literal(vm.make_lit(b"StandardEncoding")), Item::Array(standard_encoding)); } let user_dict_key = vm.make_dict(user_dict, Mode { write: true, execute: false, read: true }); { let key = vm.make_lit(b"userdict"); system_dict.insert(Item::Literal(key), Item::Dict(user_dict_key)); let key = vm.make_lit(b"errordict"); system_dict.insert(Item::Literal(key), Item::Dict(error_dict_key)); } // systemdict fuckery { let system_dict_key = vm.make_dict(system_dict, Mode { write: false, execute: false, read: true }); vm.push_dict(system_dict_key); let lit = vm.make_lit(b"systemdict"); vm.dicts[system_dict_key].0.insert(Item::Literal(lit), Item::Dict(system_dict_key)); } vm.push_dict(user_dict_key); vm } pub fn fonts<'a>(&'a self) -> impl Iterator<Item=(&'a str, RefDict<'a>)> { self.fonts.iter().map(move |(key, &dict)| ( key.as_str(), RefDict { vm: self, dict: self.get_dict(dict) } )) } fn pop_tuple<T>(&mut self) -> Result<T, FontError> where T: TupleElements<Element=Item> { let range = self.stack.len() - T::N ..; Ok(expect!(T::from_iter(self.stack.drain(range)), "not enough data on the stack")) } fn pop(&mut self) -> Item { self.stack.pop().expect("empty stack") } fn push(&mut self, item: Item) { self.stack.push(item); } fn push_dict(&mut self, dict: DictKey) { self.dict_stack.push(dict); } fn make_lit(&mut self, lit: &[u8]) -> LitKey { if let Some((index, _)) = self.literals.get_full(lit) { return LitKey(index); } let (index, _) = self.literals.insert_full(lit.into()); LitKey(index) } fn get_lit(&self, LitKey(index): LitKey) -> &[u8] { self.literals.get_index(index).expect("no such key").as_slice() } fn make_array(&mut self, array: Array, mode: Mode) -> ArrayKey 
{ self.arrays.insert((array, mode)) } fn make_string(&mut self, s: Vec<u8>) -> StringKey { self.strings.insert((s, Mode::all())) } fn make_dict(&mut self, dict: Dictionary, mode: Mode) -> DictKey { self.dicts.insert((dict, mode)) } fn get_string(&self, key: StringKey) -> &[u8] { &self.strings.get(key).unwrap().0 } fn get_string_mut(&mut self, key: StringKey) -> &mut Vec<u8> { &mut self.strings.get_mut(key).unwrap().0 } fn get_array(&self, key: ArrayKey) -> &Array { match self.arrays.get(key).expect("no item for key") { (ref array, _) => array } } fn get_array_mut(&mut self, key: ArrayKey) -> &mut Array { match self.arrays.get_mut(key).expect("no item for key") { (ref mut array, Mode { write: true, .. }) => array, _ => panic!("array is locked") } } fn exec_array(&mut self, key: ArrayKey, input: &mut Input) { let array = match self.arrays.get(key).expect("no item for key") { (ref array, Mode { execute: true, .. } ) => array.clone(), _ => panic!("not executable") }; for item in &array { self.exec(item.clone(), input); } } fn get_dict(&self, key: DictKey) -> &Dictionary { match self.dicts.get(key).expect("no item for key") { (ref dict, _) => dict } } fn get_dict_mut(&mut self, key: DictKey) -> &mut Dictionary { match self.dicts.get_mut(key).expect("no item for key") { (ref mut dict, Mode { write: true, .. }) => dict, _ => panic!("dict is locked") } } fn pop_dict(&mut self) { self.dict_stack.pop(); } fn current_dict_mut(&mut self) -> &mut Dictionary { let &key = self.dict_stack.last().expect("no current dict"); self.get_dict_mut(key) } pub fn stack(&self) -> &[Item] { &self.stack } // resolve name items. 
or keep them unchanged if unresolved fn resolve(&self, item: Item) -> Option<Item> { for &dict_key in self.dict_stack.iter().rev() { let dict = self.get_dict(dict_key); if let Some(&val) = dict.get(&item) { return Some(val.clone()); } } None } fn transform_token(&mut self, token: Token) -> Item { match token { Token::Int(i) => Item::Int(i), Token::Real(r) => Item::Real(r), Token::Literal(name) => Item::Literal(self.make_lit(name)), Token::Name(name) => Item::Name(self.make_lit(name)), Token::String(vec) => Item::String(self.make_string(vec)), Token::Procedure(tokens) => { let array = tokens.into_iter().map(|t| self.transform_token(t)).collect(); Item::Array(self.make_array(array, Mode::all())) } } } pub fn exec_token(&mut self, token: Token, input: &mut Input) -> Result<(), FontError> { let item = self.transform_token(token); trace!("exec_token {:?}", self.display(item)); match item { Item::Operator(op) => self.exec_operator(op, input)?, Item::Name(key) => { let item = match self.resolve(Item::Literal(key)) { Some(item) => item, None => error!("unimplemented token {:?}", String::from_utf8_lossy(self.get_lit(key))) }; self.exec_expand(item, input)?; } item => self.push(item) } Ok(()) } fn exec_expand(&mut self, item: Item, input: &mut Input) -> Result<(), FontError> { trace!("exec_expand {:?}", self.display(item)); match item { Item::Operator(op) => { self.exec_operator(op, input)?; } Item::Name(key) => { let item = expect!(self.resolve(Item::Literal(key)), "undefined"); self.exec(item, input)?; } Item::Array(key) => { // check that the array is executable if !expect!(self.arrays.get(key), "no item for key").1.execute { self.push(item); } else { let mut pos = 0; loop { match self.arrays.get(key).expect("no item for key") { (ref items, Mode { execute: true, .. 
}) => { match items.get(pos) { Some(&item) => self.exec(item, input)?, None => break } }, _ => error!("exec: array is not executable") } pos += 1; } } } item => { self.push(item); } } Ok(()) } fn exec(&mut self, item: Item, input: &mut Input) -> Result<(), FontError> { trace!("exec {:?}", self.display(item)); /* loop { let mut s = String::new(); std::io::stdin().read_line(&mut s); let s = s.trim(); match s { "s" => self.print_stack(), "d" => self.print_current_dict(), "" => break, _ => println!("unknown command. known are: 's' for print stack, 'd' for print current dict, empty to continue") } } */ match item { Item::Operator(op) => self.exec_operator(op, input)?, Item::Name(key) => { let item = self.resolve(Item::Literal(key)).expect("undefined"); self.exec_expand(item, input)?; } item => self.push(item) } Ok(()) } #[deny(unreachable_patterns)] fn exec_operator(&mut self, op: Operator, input: &mut Input) -> Result<(), FontError> { match op { Operator::Array => { match self.pop() { Item::Int(i) if i >= 0 => { let key = self.make_array(vec![Item::Null; i as usize], Mode::all()); self.push(Item::Array(key)); } i => error!("array: invalid count: {:?}", self.display(i)) } } Operator::Begin => { match self.pop() { Item::Dict(dict) => self.push_dict(dict), item => error!("begin: unespected item {:?}", self.display(item)) } } Operator::CurrentDict => { let &key = self.dict_stack.last().expect("no current dictionary"); self.push(Item::Dict(key)); } Operator::DefineFont => { match self.pop_tuple()? 
{ (Item::Literal(lit), Item::Dict(dict_key)) => { let font_name = String::from_utf8(self.get_lit(lit).to_owned()) .expect("Font name is not valid UTF-8"); let (_, ref mut mode) = self.dicts.get_mut(dict_key).unwrap(); mode.read_only(); self.fonts.insert(font_name, dict_key); self.push(Item::Dict(dict_key)); } args => error!("definefont: invalid args {:?}", self.display_tuple(args)) } } Operator::InternalDict => { match self.pop() { Item::Int(1183615869) => { let dict = Item::Dict(self.internal_dict); self.push(dict); } i => error!("internaldict: invalid argument: {:?}", self.display(i)) } } Operator::For => { match self.pop_tuple()? { (Item::Int(initial), Item::Int(increment), Item::Int(limit), Item::Array(procedure)) => { match increment { i if i > 0 => require!(limit > initial), i if i < 0 => require!(limit < initial), _ => error!("zero increment") } // proc would be allowed to modify the procedure array… let proc_array = self.get_array(procedure).clone(); let mut val = initial; while val < limit { self.push(Item::Int(val)); for item in &proc_array { self.exec(item.clone(), input); } val += increment; } }, args => error!("for: invalid args {:?}", self.display_tuple(args)) } } Operator::If => { match self.pop_tuple()? { (Item::Bool(cond), Item::Array(proc)) => { if cond { self.exec_array(proc, input); } } args => error!("if: invalid args {:?}", self.display_tuple(args)) } } Operator::IfElse => { match self.pop_tuple()? 
{ (Item::Bool(cond), Item::Array(proc_a), Item::Array(proc_b)) => { let proc = if cond { proc_a } else { proc_b }; self.exec_array(proc, input); } args => error!("ifelse: invalid args {:?}", self.display_tuple(args)) } } Operator::Exec => { let item = self.pop(); self.exec(item, input); } Operator::Eq => { let (a, b) = self.pop_tuple()?; self.push(Item::Bool(a == b)); } Operator::Cvx => { let item = self.pop(); let item = self.resolve(item).unwrap_or(item); self.push(item); } Operator::Def => { let (key, val) = self.pop_tuple()?; self.current_dict_mut().insert(key, val); } Operator::Dict => { match self.pop() { Item::Int(n) if n >= 0 => { let dict = self.make_dict(Dictionary::with_capacity(n as usize), Mode::all()); self.push(Item::Dict(dict)); } arg => error!("dict: unsupported {:?}", self.display(arg)) } } Operator::Known => { match self.pop_tuple()? { (Item::Dict(dict), key) => { let dict = self.get_dict(dict); let known = dict.contains_key(&key); self.push(Item::Bool(known)) }, args => error!("known: invalid args {:?}", self.display_tuple(args)) } } Operator::String => { match self.pop() { Item::Int(n) if n >= 0 => { let string = self.make_string(vec![0; n as usize]); self.push(Item::String(string)); }, len => error!("string: unsupported {:?}", self.display(len)) } }, Operator::ReadString => { match self.pop_tuple()? { (Item::File, Item::String(key)) => { let string = self.get_string_mut(key); let flag = input.read_to(string); expect!(input.try_parse(space), "Failed to parse space"); self.push(Item::String(key)); self.push(Item::Bool(flag)); }, args => error!("readstring: invalid arguments {:?}", self.display_tuple(args)) } } Operator::Dup => { let v = self.pop(); self.push(v.clone()); self.push(v); }, Operator::Copy => { let last = self.pop(); match last { Item::Int(i) if i >= 0 => { let n = i as usize; let len = self.stack.len(); let start = self.stack.len() - n; for i in start .. 
len { let item = self.stack[i]; self.push(item); } }, _ => { let first = self.pop(); match (first, last) { (Item::Array(a), Item::Array(b)) => { // ugly, but avoids RefCells let a = self.get_array(a).clone(); self.get_array_mut(b)[.. a.len()].copy_from_slice(&a); self.push(last); } (Item::Dict(a), Item::Dict(b)) => { let a = self.get_dict(a).clone(); self.get_dict_mut(b).extend(a); self.push(last); } (Item::String(a), Item::String(b)) => { let a = self.get_string(a).to_owned(); expect!(self.get_string_mut(b).get_mut(.. a.len()), "out of bounds").copy_from_slice(&a); self.push(last); } args => error!("copy: invalid arguments {:?}", self.display_tuple(args)) } } } } Operator::Pop => { self.pop(); } Operator::End => self.pop_dict(), Operator::Exch => { let (a, b) = self.pop_tuple()?; self.push(b); self.push(a); } Operator::False => self.push(Item::Bool(false)), Operator::True => self.push(Item::Bool(true)), Operator::Not => { match self.pop() { Item::Bool(b) => self.push(Item::Bool(!b)), Item::Int(i) => self.push(Item::Int(!i)), arg => error!("not: invalid argument {:?}", self.display(arg)) } } Operator::Index => match self.pop() { Item::Int(idx) if idx >= 0 => { let n = self.stack.len(); let item = self.stack.get(n - idx as usize - 1).expect("out of bounds").clone(); self.push(item); }, arg => error!("index: invalid argument {:?}", self.display(arg)) } Operator::Get => match self.pop_tuple()? 
{ (Item::Array(key), Item::Int(index)) if index >= 0 => { let &item = self.get_array(key).get(index as usize).expect("out of bounds"); self.push(item); } (Item::String(key), Item::Int(index)) if index >= 0 => { let &byte = self.get_string(key).get(index as usize).expect("out of bounds"); self.push(Item::Int(byte as i32)); } (Item::Dict(dict_key), key) => { let &item = self.get_dict(dict_key).get(&key).expect("no such entry"); self.push(item); } args => error!("get: invalid arguments {:?}", self.display_tuple(args)) } Operator::Put => { let (a, b, c) = self.pop_tuple()?; let a = self.resolve(a).unwrap_or(a); match (a, b, c) { (Item::Array(array), Item::Int(idx), any) => { *expect!(self.get_array_mut(array).get_mut(idx as usize), "out of bounds") = any; } (Item::Dict(dict), key, any) => { self.get_dict_mut(dict).insert(key, any); } args => error!("put: unsupported args {:?})", self.display_tuple(args)) } } Operator::Count => { let n = self.stack.len(); self.push(Item::Int(n as i32)); } Operator::Length => { let len = match self.pop() { Item::Array(key) => self.get_array(key).len(), Item::Dict(key) => self.get_dict(key).len(), Item::String(key) => self.get_string(key).len(), Item::Name(lit) => self.get_lit(lit).len(), arg => error!("length: invalid argument {:?}", self.display(arg)) }; self.push(Item::Int(len as i32)); } Operator::MaxLength => { match self.pop() { Item::Dict(key) => { let cap = self.get_dict(key).capacity(); self.push(Item::Int(cap as i32)); } arg => error!("maxlength: invalid argument {:?}", self.display(arg)) } } Operator::ReadOnly => { let item = self.pop(); match item { Item::Array(key) => self.arrays[key].1.read_only(), Item::Dict(key) => self.dicts[key].1.read_only(), Item::String(key) => self.strings[key].1.read_only(), i => error!("can't make {:?} readonly", self.display(i)) } self.push(item); }, Operator::ExecuteOnly => { let item = self.pop(); match item { Item::Array(key) => self.arrays[key].1.execute_only(), Item::Dict(key) => 
self.dicts[key].1.execute_only(), Item::String(key) => self.strings[key].1.execute_only(), i => error!("can't make {:?} executeonly", self.display(i)) } self.push(item); }, Operator::NoAccess => { let item = self.pop(); match item { Item::Array(key) => self.arrays[key].1.noaccess(), Item::Dict(key) => self.dicts[key].1.noaccess(), Item::String(key) => self.strings[key].1.noaccess(), i => error!("can't make {:?} executeonly", self.display(i)) } self.push(item); } Operator::EndArray => { let start = expect!( self.stack.iter().rposition(|item| *item == Item::Mark), "unmatched ]" ); let array = self.stack.drain(start ..).skip(1).collect(); // skip the Mark let key = self.make_array(array, Mode::all()); self.push(Item::Array(key)); }, Operator::Mark => self.push(Item::Mark), Operator::ClearToMark => { let start = expect!( self.stack.iter().rposition(|item| *item == Item::Mark), "unmatched mark" ); self.stack.drain(start ..); } Operator::CurrentFile => self.push(Item::File), Operator::CloseFile => { match self.pop() { Item::File => { input.open = false; }, arg => error!("closefile: invalid arg {:?})", self.display(arg)) } } Operator::Eexec => { match self.pop() { Item::File => { use crate::eexec::Decoder; match input.try_parse(hex_string) { Some(mut data) if data.len() > 4 => { Decoder::file().decode_inline(&mut data); debug!("data: {}", String::from_utf8_lossy(&data)); self.parse_and_exec(slice!(data, 4..)); } _ => { let decoded = Decoder::file().decode(input.data, 4); let skip = self.parse_and_exec(&decoded)? 
+ 4; input.advance(skip); } }; }, Item::String(_) => { unimplemented!() // let mut input = Input::new(self.get_string(key)); // self.parse_and_exec(&mut input); }, arg => error!("eexec: unsupported arg {:?})", self.display(arg)) } } Operator::Abs => { let out = match self.pop() { Item::Real(r) => Item::Real(R32::from(r.into_inner().abs())), Item::Int(i32::MIN) => Item::Real(-R32::from(i32::MIN as f32)), Item::Int(i) => Item::Int(i.abs()), arg => error!("abs: unsupported arg {:?})", self.display(arg)) }; self.push(out); } Operator::Add => { let out = match self.pop_tuple()? { (Item::Int(a), Item::Int(b)) => match a.checked_add(b) { Some(c) => Item::Int(c), None => Item::Real(R32::from(a as f32) + R32::from(b as f32)) }, (Item::Real(a), Item::Real(b)) => Item::Real(a + b), (Item::Int(a), Item::Real(b)) | (Item::Real(b), Item::Int(a)) => Item::Real(R32::from(a as f32) + b), (arg1, arg2) => error!("add: unsupported args {:?} {:?})", self.display(arg1), self.display(arg2)) }; self.push(out); } Operator::Sub => { let out = match self.pop_tuple()? { (Item::Int(a), Item::Int(b)) => match a.checked_sub(b) { Some(c) => Item::Int(c), None => Item::Real(R32::from(a as f32) - R32::from(b as f32)) }, (Item::Real(a), Item::Real(b)) => Item::Real(a - b), (Item::Int(a), Item::Real(b)) => Item::Real(R32::from(a as f32) - b), (Item::Real(a), Item::Int(b)) => Item::Real(a - R32::from(b as f32)), (arg1, arg2) => error!("sub: unsupported args {:?} {:?})", self.display(arg1), self.display(arg2)) }; self.push(out); } Operator::Mul => { let out = match self.pop_tuple()? 
{ (Item::Int(a), Item::Int(b)) => match a.checked_mul(b) { Some(c) => Item::Int(c), None => Item::Real(R32::from(a as f32) * R32::from(b as f32)) }, (Item::Real(a), Item::Real(b)) => Item::Real(a * b), (Item::Int(a), Item::Real(b)) | (Item::Real(b), Item::Int(a)) => Item::Real(R32::from(a as f32) * b), (arg1, arg2) => error!("mul: unsupported args {:?} {:?})", self.display(arg1), self.display(arg2)) }; self.push(out); } } Ok(()) } pub fn display(&self, item: Item) -> RefItem { RefItem::new(self, item) } pub fn display_tuple<'a, T>(&'a self, tuple: T) -> T::Output where T: TupleElements<Element=Item>, T: Map<RefItem<'a>> { tuple.map(|item| RefItem::new(self, item)) } pub fn print_stack(&self) { for (i, &item) in self.stack.iter().rev().enumerate().rev() { println!("stack[{}]: {:?}", i, self.display(item)); } } pub fn step(&mut self, input: &mut Input) -> Result<(), FontError> { expect!(input.try_parse(space), "Failed to parse space"); if let Some(_) = input.try_parse(comment) { return Ok(()); } if input.len() == 0 { return Ok(()); } let tk = expect!(input.try_parse(token), "Failed to parse token"); trace!("token: {:?}", tk); self.exec_token(tk, input) } // returns the number of bytes processed pub fn parse_and_exec(&mut self, data: &[u8]) -> Result<usize, FontError> { let input_size = data.len(); let mut input = Input::new(data); // skip leading whitespace while input.len() > 0 && input.open { self.step(&mut input)?; } Ok(input_size - input.len()) } }
use rand::Rng; use serde::{Deserialize, Serialize}; use std::iter; use crate::blueprints::Blueprint; use crate::vectors; #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct Component { pub activity: usize, pub label: String, pub binding_sites: Vec<Vec<f32>>, // express new components pub binding_sites_groups: Vec<usize>, pub weak_binding_sites: Vec<Vec<f32>>, // must connect to already-expressed components pub weak_binding_sites_groups: Vec<usize>, } impl Component { fn activity_vector(&self, total_activities: usize) -> Vec<f32> { let mut result = vec![0.; total_activities]; result[self.activity] = 1.; result } pub fn from_blueprint(blueprint: &Blueprint, total_activities: usize) -> Component { let binding_sites_groups = blueprint.binding_sites.clone(); let binding_sites = random_binding_sites(&blueprint.binding_sites, total_activities); let weak_binding_sites_groups = blueprint.weak_binding_sites.clone(); let weak_binding_sites = random_binding_sites(&blueprint.weak_binding_sites, total_activities); Component { activity: blueprint.activity, label: blueprint.label.clone(), binding_sites, binding_sites_groups, weak_binding_sites, weak_binding_sites_groups, } } pub fn profile(&self, total_activities: usize, input_bias: f32) -> Vec<f32> { let activity_vector = self.activity_vector(total_activities); let binding_sites_both: Vec<Vec<f32>> = self .binding_sites .iter() .cloned() .chain(self.weak_binding_sites.iter().cloned()) .collect(); if binding_sites_both.is_empty() { activity_vector } else { let activity_scaled = vectors::scale(&activity_vector, 1. 
- input_bias); let bindings_average = vectors::average(&binding_sites_both); let bindings_scaled = vectors::scale(&bindings_average, input_bias); vectors::sum(&activity_scaled, &bindings_scaled) } } } fn random_binding_sites(from: &Vec<usize>, total_activities: usize) -> Vec<Vec<f32>> { let mut rng = rand::thread_rng(); from.iter() .map(|_| { iter::repeat_with(|| rng.gen::<f32>()) .take(total_activities) .collect() }) .collect() }
#[doc = "Register `HDPLSR` reader"] pub type R = crate::R<HDPLSR_SPEC>; #[doc = "Field `HDPL` reader - temporal isolation level This bitfield returns the current temporal isolation level."] pub type HDPL_R = crate::FieldReader; impl R { #[doc = "Bits 0:7 - temporal isolation level This bitfield returns the current temporal isolation level."] #[inline(always)] pub fn hdpl(&self) -> HDPL_R { HDPL_R::new((self.bits & 0xff) as u8) } } #[doc = "SBS temporal isolation status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`hdplsr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct HDPLSR_SPEC; impl crate::RegisterSpec for HDPLSR_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`hdplsr::R`](R) reader structure"] impl crate::Readable for HDPLSR_SPEC {} #[doc = "`reset()` method sets HDPLSR to value 0"] impl crate::Resettable for HDPLSR_SPEC { const RESET_VALUE: Self::Ux = 0; }
mod service; pub use service::*;
use crate::types::{DateTime, NonZeroU64, StdRng}; pub trait NanoSecondGenerator { fn gen_ns(&mut self, rng: &mut StdRng, dt: DateTime) -> Option<NonZeroU64>; }
use std::fs::create_dir; use std::path::PathBuf; use indicatif::ProgressBar; use walkdir::DirEntry; use crate::errors::{BoilrError, StandardResult}; use crate::utils::types::FileContent; use crate::utils::{create_and_write_file, prompt_overwrite_if_exist}; use crate::TEMPLATE_DIR_NAME; pub fn reconstruct( from_path: &PathBuf, path: &PathBuf, folders: &[DirEntry], ) -> StandardResult<()> { prompt_overwrite_if_exist(&path, true)?; create_dir(path).map_err(|source| BoilrError::WriteError { source, path: path.clone(), })?; let progress = ProgressBar::new_spinner(); progress.set_message("[3/4] Reconstructing template directories..."); for folder in progress.wrap_iter(folders.iter()) { let new_path = path.join( folder .path() .strip_prefix(from_path.join(TEMPLATE_DIR_NAME))?, ); create_dir(&new_path).map_err(|source| BoilrError::WriteError { source, path: new_path.to_path_buf(), })?; } progress.finish_and_clear(); Ok(()) } pub fn write(path: &PathBuf, files: &[(PathBuf, FileContent)]) -> StandardResult<()> { let progress = ProgressBar::new_spinner(); progress.set_message("[4/4] Writing files to output..."); for (file_path, file_content) in progress.wrap_iter(files.iter()) { let path = path.join(file_path); create_and_write_file(&path, file_content)?; } progress.finish_and_clear(); Ok(()) }
pub use self::parsing::parse; mod lex; mod preproc; mod parsing; #[deriving(Show)] enum Error { IOError(::std::io::IoError), BadCharacter(char), SyntaxError(String), } #[must_use] type ParseResult<T> = Result<T,Error>; // vim: ft=rust
/* * Datadog API V1 Collection * * Collection of all Datadog Public endpoints. * * The version of the OpenAPI document: 1.0 * Contact: support@datadoghq.com * Generated by: https://openapi-generator.tech */ /// SyntheticsCiTestMetadataGit : Git information. #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SyntheticsCiTestMetadataGit { /// Branch name. #[serde(rename = "branch", skip_serializing_if = "Option::is_none")] pub branch: Option<String>, /// Commit SHA. #[serde(rename = "commit_sha", skip_serializing_if = "Option::is_none")] pub commit_sha: Option<String>, } impl SyntheticsCiTestMetadataGit { /// Git information. pub fn new() -> SyntheticsCiTestMetadataGit { SyntheticsCiTestMetadataGit { branch: None, commit_sha: None, } } }
//! An element-tree style XML library //! //! # Examples //! //! ## Reading //! //! ``` //! use treexml::Document; //! //! let doc_raw = r#" //! <?xml version="1.1" encoding="UTF-8"?> //! <table> //! <fruit type="apple">worm</fruit> //! <vegetable /> //! </table> //! "#; //! //! let doc = Document::parse(doc_raw.as_bytes()).unwrap(); //! let root = doc.root.unwrap(); //! //! let fruit = root.find_child(|tag| tag.name == "fruit").unwrap().clone(); //! println!("{} [{:?}] = {}", fruit.name, fruit.attributes, fruit.text().unwrap()); //! ``` //! //! ## Writing //! //! ``` //! use treexml::{Document, Element, Node}; //! //! let mut root = Element::new("root"); //! let mut child = Element::new("child"); //! child.children.push(Node::Text("contents".to_owned())); //! root.children.push(Node::Element(child)); //! //! let doc = Document{ //! root: Some(root), //! .. Document::default() //! }; //! //! println!("{}", doc); //! ``` //! //! extern crate linked_hash_map; extern crate xml; use std::borrow::Cow; use std::fmt; use std::io::{Read, Write}; use std::iter::Filter; use std::str::FromStr; use std::string::ToString; use linked_hash_map::LinkedHashMap; use xml::common::XmlVersion as BaseXmlVersion; pub use builder::*; pub use errors::*; mod errors; mod builder; /// Enumeration of XML versions /// /// This exists solely because `xml-rs`'s `XmlVersion` doesn't implement Debug #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum XmlVersion { /// XML Version 1.0 Version10, /// XML Version 1.1 Version11, } impl From<BaseXmlVersion> for XmlVersion { fn from(value: BaseXmlVersion) -> XmlVersion { match value { BaseXmlVersion::Version10 => XmlVersion::Version10, BaseXmlVersion::Version11 => XmlVersion::Version11, } } } impl From<XmlVersion> for BaseXmlVersion { fn from(value: XmlVersion) -> BaseXmlVersion { match value { XmlVersion::Version10 => BaseXmlVersion::Version10, XmlVersion::Version11 => BaseXmlVersion::Version11, } } } #[derive(Debug, Clone, PartialEq, Eq)] pub enum 
Node {
    /// A child element
    Element(Element),
    /// Raw character data
    Text(String),
    /// A CDATA section
    CData(String),
    /// An XML comment
    Comment(String),
}

/// An XML element
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Element {
    /// Tag prefix, used for namespacing: `xsl` in `xsl:for-each`
    pub prefix: Option<String>,
    /// Tag name: `for-each` in `xsl:for-each`
    pub name: String,
    /// Tag attributes
    pub attributes: LinkedHashMap<String, String>,
    /// A vector of child elements
    pub children: Vec<Node>,
}

// Default element: an empty `<tag>` with no prefix, attributes, or children.
impl Default for Element {
    fn default() -> Self {
        Element {
            prefix: None,
            name: "tag".to_owned(),
            attributes: LinkedHashMap::new(),
            children: Vec::new(),
        }
    }
}

/// Iterator over the element (non-text, non-comment) children of a node.
#[derive(Clone)]
pub struct ElementIter<'a> {
    iter: std::slice::Iter<'a, Node>
}

impl<'a> Iterator for ElementIter<'a> {
    type Item = &'a Element;

    #[inline]
    fn next(&mut self) -> Option<&'a Element> {
        // Skip over Text/CData/Comment nodes until the next Element.
        for node in self.iter.by_ref() {
            if let &Node::Element(ref element) = node {
                return Some(element);
            }
        }
        None
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Lower bound is 0 since every remaining node may be a non-element.
        let (_, upper) = self.iter.size_hint();
        (0, upper)
    }
}

/// Mutable counterpart of [`ElementIter`].
pub struct ElementIterMut<'a> {
    iter: std::slice::IterMut<'a, Node>
}

impl<'a> Iterator for ElementIterMut<'a> {
    type Item = &'a mut Element;

    #[inline]
    fn next(&mut self) -> Option<&'a mut Element> {
        for node in self.iter.by_ref() {
            if let &mut Node::Element(ref mut element) = node {
                return Some(element);
            }
        }
        None
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let (_, upper) = self.iter.size_hint();
        (0, upper)
    }
}

impl Element {
    /// Create a new `Element` with the tag name `name`
    pub fn new<S: ToString>(name: S) -> Element {
        Element {
            name: name.to_string(),
            ..Element::default()
        }
    }

    /// Parse the contents of an element
    ///
    /// Consumes events up to (and including) this element's matching
    /// end tag, recursing for nested elements.
    fn parse<R: Read>(
        &mut self,
        mut reader: &mut xml::reader::EventReader<R>,
    ) -> Result<(), Error> {
        use xml::reader::XmlEvent;

        loop {
            let ev = reader.next()?;
            match ev {
                XmlEvent::StartElement { name, attributes, .. } => {
                    // Re-key attributes as "prefix:local" strings, preserving
                    // document order via the LinkedHashMap.
                    let mut attr_map = LinkedHashMap::new();
                    for attr in attributes {
                        let attr_name = match attr.name.prefix {
                            Some(prefix) => format!("{}:{}", prefix, attr.name.local_name),
                            None => attr.name.local_name,
                        };
                        attr_map.insert(attr_name, attr.value);
                    }

                    let mut child = Element {
                        prefix: name.prefix,
                        name: name.local_name,
                        attributes: attr_map,
                        ..Element::default()
                    };
                    child.parse(&mut reader)?;
                    self.children.push(Node::Element(child));
                }
                XmlEvent::EndElement { name } => {
                    if name.prefix == self.prefix && name.local_name == self.name {
                        return Ok(());
                    } else {
                        // This should never happen, since the base xml library will panic first
                        panic!("Unexpected closing tag: {}, expected {}", name, self.name);
                    }
                }
                XmlEvent::Characters(s) => {
                    self.children.push(Node::Text(s));
                }
                XmlEvent::CData(s) => {
                    self.children.push(Node::CData(s));
                }
                XmlEvent::Comment(s) => {
                    self.children.push(Node::Comment(s));
                }
                // Document-level and whitespace events are ignored inside
                // an element.
                XmlEvent::StartDocument { .. } |
                XmlEvent::EndDocument |
                XmlEvent::ProcessingInstruction { .. } |
                XmlEvent::Whitespace(_) => {}
            }
        }
    }

    /// Write an element and its contents to `writer`
    pub fn write<W: Write>(&self, writer: &mut xml::writer::EventWriter<W>) -> Result<(), Error> {
        use xml::attribute::Attribute;
        use xml::name::Name;
        use xml::namespace::Namespace;
        use xml::writer::XmlEvent;

        // NOTE(review): only the local name is written here — prefixes and
        // namespaces are dropped on output. Confirm this asymmetry with
        // `parse` is intended.
        let name = Name::local(&self.name);
        let mut attributes = Vec::with_capacity(self.attributes.len());
        for (k, v) in &self.attributes {
            attributes.push(Attribute {
                name: Name::local(k),
                value: v,
            });
        }

        let namespace = Namespace::empty();

        writer.write(XmlEvent::StartElement {
            name,
            attributes: Cow::Owned(attributes),
            namespace: Cow::Owned(namespace),
        })?;

        for child in &self.children {
            match *child {
                Node::Text(ref s) => writer.write(XmlEvent::Characters(&s[..]))?,
                Node::CData(ref s) => writer.write(XmlEvent::CData(&s[..]))?,
                Node::Comment(ref s) => writer.write(XmlEvent::Comment(&s[..]))?,
                Node::Element(ref e) => e.write(writer)?,
            }
        }

        writer.write(XmlEvent::EndElement { name: Some(name) })?;

        Ok(())
    }
    /// Iterate over the child `Element`s (non-element nodes are skipped).
    #[inline]
    pub fn iter_child_elements<'a>(&'a self) -> ElementIter<'a> {
        ElementIter { iter: self.children.iter() }
    }

    /// Mutable counterpart of [`iter_child_elements`].
    #[inline]
    pub fn iter_child_elements_mut<'a>(&'a mut self) -> ElementIterMut<'a> {
        ElementIterMut { iter: self.children.iter_mut() }
    }

    /// Find a single child of the current `Element`, given a predicate
    pub fn find_child<P>(&self, predicate: P) -> Option<&Element>
    where
        P: for<'r> Fn(&'r &Element) -> bool,
    {
        self.iter_child_elements().find(predicate)
    }

    /// Find a single child of the current `Element`, given a predicate; returns a mutable borrow
    pub fn find_child_mut<P>(&mut self, predicate: P) -> Option<&mut Element>
    where
        P: for<'r> FnMut(&'r &mut Element) -> bool,
    {
        self.iter_child_elements_mut().find(predicate)
    }

    /// Traverse element using an xpath-like string: root/child/a
    pub fn find(&self, path: &str) -> Result<&Element, Error> {
        // "" and "/" both refer to the current element.
        if path == "" || path == "/" {
            Ok(self)
        } else {
            Self::find_path(&path.split('/').collect::<Vec<&str>>(), path, self)
        }
    }

    /// Traverse element using an xpath-like string: root/child/a
    pub fn find_mut(&mut self, path: &str) -> Result<&mut Element, Error> {
        if path == "" || path == "/" {
            Ok(self)
        } else {
            Self::find_path_mut(&path.split('/').collect::<Vec<&str>>(), path, self)
        }
    }

    /// Find an element via `find`, then parse its text content as `T`.
    ///
    /// Returns `Ok(None)` when the element has no text at all.
    pub fn find_value<T: FromStr>(&self, path: &str) -> Result<Option<T>, Error> {
        let el = self.find(path)?;
        match el.text() {
            Some(text) => {
                match T::from_str(text.as_str()) {
                    Err(_) => Err(errors::Error::ValueFromStr {
                        t: text.to_string(),
                    }.into()),
                    Ok(value) => Ok(Some(value)),
                }
            }
            None => Ok(None)
        }
    }

    // Recursive helper for `find`: consume one path segment per step.
    // `original` is the full path, kept only for error reporting.
    fn find_path<'a>(
        path: &[&str],
        original: &str,
        tree: &'a Element,
    ) -> Result<&'a Element, Error> {
        if path.is_empty() {
            return Ok(tree);
        }

        match tree.find_child(|t| t.name == path[0]) {
            Some(element) => Self::find_path(&path[1..], original, element),
            None => Err(errors::Error::ElementNotFound { t: original.into() }.into()),
        }
    }

    // Mutable counterpart of `find_path`.
    fn find_path_mut<'a>(path: &[&str], original: &str, tree: &'a mut Element)
        -> Result<&'a mut Element, Error> {
        if path.is_empty() {
            return Ok(tree);
        }

        match tree.find_child_mut(|t| t.name == path[0]) {
            Some(element) => Self::find_path_mut(&path[1..], original, element),
            None => Err(errors::Error::ElementNotFound { t: original.into() }.into()),
        }
    }

    /// Filters the children of the current `Element`, given a predicate
    pub fn filter_children<P>(&self, predicate: P) -> Filter<ElementIter, P>
    where
        P: for<'r> Fn(&'r &Element) -> bool,
    {
        self.iter_child_elements().filter(predicate)
    }

    /// Filters the children of the current `Element`, given a predicate; returns a mutable iterator
    pub fn filter_children_mut<P>(&mut self, predicate: P) -> Filter<ElementIterMut, P>
    where
        P: for<'r> FnMut(&'r &mut Element) -> bool,
    {
        self.iter_child_elements_mut().filter(predicate)
    }

    /// Concatenation of all direct `Text` children; `None` if there is no
    /// text. CData and nested element text are NOT included.
    pub fn text(&self) -> Option<String> {
        let mut text = String::new();
        for node in &self.children {
            match node {
                &Node::Text(ref t) => text.push_str(t.as_str()),
                _ => {}
            }
        }
        if text.is_empty() {
            None
        } else {
            Some(text)
        }
    }
}

// Render the element by wrapping it in a Document without a declaration.
impl fmt::Display for Element {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let doc = Document {
            root: Some(self.clone()),
            ..Document::default()
        };
        let mut v = Vec::<u8>::new();
        doc.write_with(&mut v, false, " ", true).unwrap();
        let s = String::from_utf8(v).unwrap();
        f.write_str(&s[..])
    }
}

/// An XML document
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Document {
    /// Version of the XML document
    pub version: XmlVersion,
    /// Encoding of the XML document
    pub encoding: String,
    /// Root tag of the XML document
    pub root: Option<Element>,
}

// Default document: XML 1.0, UTF-8, no root element yet.
impl Default for Document {
    fn default() -> Self {
        Document {
            version: XmlVersion::Version10,
            encoding: "UTF-8".to_owned(),
            root: None,
        }
    }
}

impl Document {
    /// Create a new `Document` with default values
    pub fn new() -> Document {
        Document {
            ..Document::default()
        }
    }

    /// Create a new `Document` with an Element or ElementBuilder at its root.
    pub fn build(root: &mut ElementBuilder) -> Self {
        Document {
            root: Some(root.element()),
            ..Self::default()
        }
    }

    /// Parse data from a reader to construct an XML document
    ///
    /// # Failures
    ///
    /// Passes any errors that the `xml-rs` library returns up the stack
    pub fn parse<R: Read>(r: R) -> Result<Document, Error> {
        use xml::reader::{EventReader, ParserConfig, XmlEvent};

        // Keep comments so that documents round-trip through parse/write.
        let mut config = ParserConfig::new();
        config.ignore_comments = false;
        let mut reader = EventReader::new_with_config(r, config);

        let mut doc = Document::new();

        loop {
            let ev = reader.next()?;
            match ev {
                XmlEvent::StartDocument { version, encoding, .. } => {
                    doc.version = XmlVersion::from(version);
                    doc.encoding = encoding;
                }
                XmlEvent::StartElement { name, attributes, .. } => {
                    // Start of the root element
                    let mut attr_map = LinkedHashMap::new();
                    for attr in attributes {
                        // Same "prefix:local" attribute keying as Element::parse.
                        let attr_name = match attr.name.prefix {
                            Some(prefix) => format!("{}:{}", prefix, attr.name.local_name),
                            None => attr.name.local_name,
                        };
                        attr_map.insert(attr_name, attr.value);
                    }

                    let mut root = Element {
                        prefix: name.prefix,
                        name: name.local_name,
                        attributes: attr_map,
                        ..Element::default()
                    };
                    root.parse(&mut reader)?;
                    doc.root = Some(root);
                }
                XmlEvent::EndDocument => break,
                _ => {}
            }
        }

        Ok(doc)
    }

    /// Write the document (with XML declaration, indented) to `w`.
    pub fn write<W: Write>(&self, mut w: &mut W) -> Result<(), Error> {
        self.write_with(&mut w, true, " ", true)
    }

    /// Writes a document to `w`
    ///
    /// `document_decl` controls whether the `<?xml ...?>` declaration is
    /// emitted; `indent_str`/`indent` control pretty-printing.
    pub fn write_with<W: Write>(
        &self,
        w: &mut W,
        document_decl: bool,
        indent_str: &'static str,
        indent: bool,
    ) -> Result<(), Error> {
        use xml::writer::{EmitterConfig, XmlEvent};

        let mut writer = EmitterConfig::new()
            .perform_indent(indent)
            .write_document_declaration(document_decl)
            .indent_string(indent_str)
            .create_writer(w);

        if document_decl {
            writer.write(XmlEvent::StartDocument {
                version: self.version.into(),
                encoding: Some(&self.encoding),
                standalone: None,
            })?;
        }

        if let Some(ref e) = self.root {
            e.write(&mut writer)?;
        }

        Ok(())
    }
}

// Display renders the full document, declaration included.
impl fmt::Display for Document {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut v = Vec::<u8>::new();
        self.write(&mut v).unwrap();
        let s = String::from_utf8(v).unwrap();
        f.write_str(&s[..])
    }
}
/// Token categories produced by the scanner (Lox-style grammar).
#[derive(Debug)]
pub enum TokenType {
    // Single-character tokens.
    LPAREN, RPAREN, LBRACE, RBRACE,
    COMMA, DOT, MINUS, PLUS, SEMI, SLASH, STAR,

    // One or two character tokens.
    EX, EXEQ,
    EQ, EQEQ,
    GT, GTEQ,
    LT, LTEQ,

    // Literals.
    ID(String), STR(String), NUM(i32),

    // Keywords.
    AND, CLASS, ELSE, FALSE, FUN, FOR, IF, NIL, OR,
    PRINT, RETURN, SUPER, THIS, TRUE, VAR, WHILE,

    EOF
}

/// A scanned token together with the source line it came from.
pub struct Token {
    tt: TokenType,
    line: usize,
}

impl Token {
    /// Build a token of type `tt` found on source line `line`.
    pub fn new(tt: TokenType, line: usize) -> Self {
        // Field-init shorthand instead of `tt: tt, line: line`.
        Self { tt, line }
    }
}

// Implementing `Display` (instead of the former inherent `ToString` impl)
// renders the same "<type> <line>" text while also making `Token` usable
// with `format!`/`{}`; `to_string()` remains available via the blanket
// `ToString` impl, so callers are unaffected.
impl std::fmt::Display for Token {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{:?} {}", self.tt, self.line)
    }
}
use axum::body::Bytes;
use axum::http::StatusCode;
use axum::{routing::get, Router};
use std::net::SocketAddr;

/// Crate version, baked in at compile time.
const VERSION: &str = env!("CARGO_PKG_VERSION");
/// Crate name, baked in at compile time.
const PACKAGE_NAME: &str = env!("CARGO_PKG_NAME");
/// Port used when the `PORT` environment variable is absent or invalid.
const DEFAULT_PORT: u16 = 3000;

/// `GET /` — identification endpoint: "<name> <version>".
async fn hello() -> String {
    format!("{} {}", PACKAGE_NAME, VERSION)
}

/// `POST /` — render the ASCII-art request body to an SVG document.
///
/// Returns `400 Bad Request` when the body is not valid UTF-8.
async fn text_to_svgbob(body: Bytes) -> Result<String, StatusCode> {
    // Validate in place with `str::from_utf8` instead of copying the whole
    // body into a Vec/String first (`String::from_utf8(body.to_vec())`).
    match std::str::from_utf8(&body) {
        Ok(input) => Ok(svgbob::to_svg(input)),
        Err(_) => Err(StatusCode::BAD_REQUEST),
    }
}

/// Entry point: bind 0.0.0.0:$PORT (default 3000) and serve the router.
#[tokio::main]
async fn main() {
    let port = std::env::var("PORT")
        .ok()
        .and_then(|port| port.parse::<u16>().ok())
        .unwrap_or(DEFAULT_PORT);

    let app = Router::new().route("/", get(hello).post(text_to_svgbob));
    let socket: SocketAddr = ([0, 0, 0, 0], port).into();
    println!("Running {} {} at: http://{}", PACKAGE_NAME, VERSION, socket);

    axum::Server::bind(&socket)
        .serve(app.into_make_service())
        .await
        .expect("Error starting server");
}
use std::panic;

use rocket::{self, http::{ContentType, Header, Status}, local::Client};
use diesel::connection::SimpleConnection;

use horus_server::{self, routes::paste::*};
use test::{run_test, sql::*};

/// GET /<id> should render the seeded paste as HTML containing its data.
#[test]
fn does_show() {
    run(|| {
        let client = get_client();
        let req = client.get(String::from("/") + PASTE_ID);
        let mut response = req.dispatch();

        assert_eq!(
            response.content_type(),
            Some(ContentType::HTML),
            "Bad content-type. Expected HTML, got {:?}",
            response.content_type()
        );
        assert_eq!(
            response.status(),
            Status::Ok,
            "Bad response status, expected 200 OK, got {}",
            response.status()
        );
        assert!(
            response.body_string().unwrap().contains(PASTE_DATA),
            "Body did not contain paste data."
        );
    });
}

/// GET /<user>/list/0 should return the user's pastes as JSON.
#[test]
fn does_list() {
    run(|| {
        let client = get_client();
        let mut url = String::from("/");
        url.push_str(USER_ID.to_string().as_str());
        url.push_str("/list/0");
        let req = client.get(url.as_str()).header(api_key_header());
        let mut response = req.dispatch();

        assert_eq!(
            response.status(),
            Status::Ok,
            "Bad response status, expected 200 OK, got {}",
            response.status()
        );
        assert_eq!(
            response.content_type(),
            Some(ContentType::JSON),
            "Bad content-type. Expected JSON, got {:?}",
            response.content_type()
        );
        let res = response.body_string().unwrap();
        assert!(
            res.contains(PASTE_DATA),
            "Couldn't find {} in JSON response.
Response was: \n{}",
            PASTE_DATA,
            res
        );
    });
}

/// POST /new should create a paste, report its location, and serve it back.
#[test]
fn creates_new() {
    run(|| {
        let body = r#"{"is_expiry":false, "paste_data":"test_paste","title":"Example Title"}"#;
        let client = get_client();
        let req = client
            .post("/new")
            .header(api_key_header())
            .header(Header::new("content-type", "application/json"))
            .body(body);
        let res = req.dispatch();

        assert_eq!(res.status(), Status::Created);

        let loc = res.headers().get_one("location").unwrap();
        let id = loc.replace("/paste/", "");
        // NOTE(review): `id` is derived from `loc`, so this assertion is
        // effectively always true (it only exercises the "/paste/" prefix),
        // and the failure message prints `body` rather than `loc` —
        // TODO confirm the intended check.
        assert_eq!(
            loc,
            String::from("/paste/") + &id,
            "Got unexpected body: {}",
            body
        );

        // Now run a get on it
        let req = client.get(String::from("/") + &id);
        let mut response = req.dispatch();

        assert_eq!(
            response.content_type(),
            Some(ContentType::HTML),
            "Bad content-type. Expected HTML, got {:?}",
            response.content_type()
        );
        assert_eq!(
            response.status(),
            Status::Ok,
            "Bad response status, expected 200 OK, got {}",
            response.status()
        );
        let body = response.body_string().unwrap();
        assert!(
            body.contains("test_paste"),
            "Body did not contain paste data."
        );
        assert!(body.contains("Example Title"));
    });
}

/// DELETE /<id> should remove the paste; a follow-up GET must 404.
#[test]
fn deletes_correctly() {
    run(|| {
        let client = get_client();
        let req = client
            .delete(String::from("/") + PASTE_ID)
            .header(api_key_header());
        let res = req.dispatch();

        assert_eq!(
            res.status(),
            Status::Ok,
            "Got bad status: expected 200 OK, got {}",
            res.status()
        );

        let req = client.get(String::from("/") + PASTE_ID);
        let res = req.dispatch();

        assert_eq!(
            res.status(),
            Status::NotFound,
            "Got bad status:: expected 404 NOT FOUND, got {}",
            res.status()
        );
    });
}

/// PUT /<id> should update the paste data in place.
#[test]
fn updates_correctly() {
    run(|| {
        let body = format!(
            r#"{{"id":"{id}", "paste_data": "new_data", "duration_type": "days", "duration_val": -1}}"#,
            id = PASTE_ID);
        let client = get_client();
        let req = client
            .put(String::from("/") + PASTE_ID)
            .header(api_key_header())
            .header(Header::new("content-type", "application/json"))
            .body(body);
        let response = req.dispatch();

        assert_eq!(response.status(), Status::Accepted);

        let req = client.get(String::from("/") + PASTE_ID);
        let mut response = req.dispatch();

        assert_eq!(response.status(), Status::Ok);
        assert!(response.body_string().unwrap().contains("new_data"));
    });
}

// Wrap a test body with database setup/teardown so each test sees a
// freshly-seeded user, session, license, and paste.
fn run<T>(test: T) -> ()
where
    T: FnOnce() -> () + panic::UnwindSafe,
{
    run_test(test, setup_db, unsetup_db);
}

// Seed the fixtures the tests above rely on (user, session, license, paste).
fn setup_db() {
    let conn = horus_server::dbtools::get_db_conn_requestless().unwrap();
    let mut setup_sql = String::new();
    setup_sql.push_str(sql_insert_user().as_str());
    setup_sql.push_str(sql_insert_session().as_str());
    setup_sql.push_str(sql_insert_license().as_str());
    setup_sql.push_str(sql_insert_paste().as_str());
    conn.batch_execute(&setup_sql).unwrap();
}

fn unsetup_db() {
    let conn = horus_server::dbtools::get_db_conn_requestless().unwrap();
    // No need to delete everything, a user delete cascades.
    let unsetup_sql = sql_delete_user();
    conn.batch_execute(&unsetup_sql).unwrap();
}

// Build a local Rocket client with the paste routes and a DB pool attached.
fn get_client() -> Client {
    use rocket_contrib::Template;
    let rocket = rocket::ignite()
        .attach(Template::fairing())
        .mount("/", routes![show, list, new, delete, update])
        .manage(horus_server::dbtools::init_pool());
    Client::new(rocket).expect("valid rocket instance")
}
use super::AtomicLocation; use std::path; impl AtomicLocation { pub fn get_path(&self) -> &path::Path { path::Path::new(self.get_str()) } // FIXME: This needs to be returned in two parts or as a path pub fn get_str(&self) -> &str { match self { AtomicLocation::Base => "atomic", AtomicLocation::CreateComplete => "cc", AtomicLocation::CreateWorking => "cw", AtomicLocation::ReplaceWorking => "rw", AtomicLocation::ReplaceComplete => "rc", AtomicLocation::ReplacePrevious => "rp", AtomicLocation::ReplaceRemove => "rr", AtomicLocation::StoreWorking => "sw", AtomicLocation::StoreComplete => "sc", } } }
extern crate bindgen;
#[macro_use]
extern crate log;
extern crate docopt;
#[macro_use]
extern crate rustc_serialize;

use bindgen::{Builder, LinkType, Logger};
use std::io::{self, Write};
use std::fs::File;
use std::process::exit;

/// `Logger` implementation forwarding bindgen diagnostics to the `log` crate.
#[derive(Debug)]
struct StdLogger;

impl Logger for StdLogger {
    fn error(&self, msg: &str) {
        error!("{}", msg);
    }

    fn warn(&self, msg: &str) {
        warn!("{}", msg);
    }
}

// docopt usage text: doubles as the CLI argument-parser specification, so
// its wording/structure is load-bearing.
const USAGE: &'static str = " Generate C bindings for Rust. Usage: bindgen [options] <file> bindgen (-h | --help) Options: -h, --help Display this help message. --link=<library> Link to a dynamic library, can be provided multiple times. <library> is in the format `[kind=]lib`, where `kind` is one of `static`, `dynamic` or `framework`. --output=<output> Write bindings to <output> (- is stdout). [default: -] --match=<name> Only output bindings for definitions from files whose name contains <name> If multiple -match options are provided, files matching any rule are bound to. --builtins Output bindings for builtin definitions (for example __builtin_va_list) --emit-clang-ast Output the ast (for debugging purposes) --override-enum-type=<type> Override enum type, type name could be uchar schar ushort sshort uint sint ulong slong ulonglong slonglong --clang-options=<opts> Options to clang. 
";

/// Command-line arguments decoded from `USAGE` by docopt.
#[derive(Debug, RustcDecodable)]
struct Args {
    arg_file: String,
    flag_link: String,
    flag_output: String,
    flag_match: Option<String>,
    flag_builtins: bool,
    flag_emit_clang_ast: bool,
    flag_override_enum_type: String,
    flag_clang_options: String,
}

/// Transfer the decoded CLI flags onto the bindgen `Builder`.
fn args_to_opts(args: Args, builder: &mut Builder) {
    builder.header(args.arg_file)
           .emit_ast(args.flag_emit_clang_ast)
           .override_enum_ty(args.flag_override_enum_type)
           .clang_arg(args.flag_clang_options);
    if let Some(s) = args.flag_match {
        builder.match_pat(s);
    }
    if args.flag_builtins {
        builder.builtins();
    }
    // `--link` is `[kind=]lib`; a bare library name defaults to dynamic.
    let mut parts = args.flag_link.split('=');
    let (lib, kind) = match (parts.next(), parts.next()) {
        (Some(lib), None) => (lib, LinkType::Dynamic),
        (Some(kind), Some(lib)) => {
            (lib,
             match kind {
                 "static" => LinkType::Static,
                 "dynamic" => LinkType::Dynamic,
                 "framework" => LinkType::Framework,
                 _ => {
                     println!("Link type unknown: {}", kind);
                     exit(1);
                 }
             })
        }
        _ => {
            println!("Wrong link format: {}", args.flag_link);
            exit(1);
        }
    };
    builder.link(lib, kind);
}

/// Open the sink the generated bindings are written to: stdout for `-`,
/// otherwise the named file.
fn get_output(o: &str) -> Box<Write> {
    if o == "-" {
        Box::new(io::stdout())
    } else {
        // BUG FIX: this previously used `File::open`, which opens an existing
        // file READ-ONLY (and fails outright for nonexistent paths), so
        // `--output=<file>` could never work. The output file must be
        // created/truncated for writing.
        Box::new(File::create(o).expect(&format!("\"{}\" unwritable", o)))
    }
}

/// Entry point: decode args, configure the builder, generate and write
/// the bindings. Exits nonzero on any failure.
pub fn main() {
    let args: Args = docopt::Docopt::new(USAGE)
        .and_then(|d| d.decode())
        .unwrap_or_else(|e| e.exit());
    debug!("{:?}", args);

    let output = get_output(&args.flag_output);

    let logger = StdLogger;
    let mut builder = Builder::default();
    builder.log(&logger);
    args_to_opts(args, &mut builder);
    debug!("{:?}", builder);

    match builder.generate() {
        Ok(bindings) => {
            match bindings.write(output) {
                Ok(()) => (),
                Err(e) => {
                    logger.error(&format!("Unable to write bindings to file. {}", e)[..]);
                    exit(-1);
                }
            }
        }
        Err(()) => exit(-1),
    }
}
use addressing; use sphinx::route::NodeAddressBytes; use std::error::Error; use std::net::SocketAddr; use tokio::prelude::*; #[derive(Debug)] pub struct MixPeer { connection: SocketAddr, } impl MixPeer { // note that very soon `next_hop_address` will be changed to `next_hop_metadata` pub fn new(next_hop_address: NodeAddressBytes) -> MixPeer { let next_hop_socket_address = addressing::socket_address_from_encoded_bytes(next_hop_address.to_bytes()); MixPeer { connection: next_hop_socket_address, } } pub async fn send(&self, bytes: Vec<u8>) -> Result<(), Box<dyn Error>> { let next_hop_address = self.connection.clone(); let mut stream = tokio::net::TcpStream::connect(next_hop_address).await?; stream.write_all(&bytes).await?; Ok(()) } pub fn to_string(&self) -> String { self.connection.to_string() } }
extern crate futures;
extern crate tokio_core;
extern crate websocket;

use websocket::async::Server;
use websocket::message::OwnedMessage;
use websocket::server::InvalidConnection;

use futures::{Future, Sink, Stream};
use tokio_core::reactor::Core;

// WebSocket echo server (tokio-core era): accepts every incoming connection
// and echoes messages back until the client sends a close frame.
fn main() {
    let mut core = Core::new().unwrap();
    let handle = core.handle();
    // bind to the server
    let server = Server::bind("127.0.0.1:9002", &handle).unwrap();

    // time to build the server's future
    // this will be a struct containing everything the server is going to do

    // a stream of incoming connections
    let f = server.incoming()
        // we don't wanna save the stream if it drops
        .map_err(|InvalidConnection { error, .. }| error)
        .for_each(|(upgrade, addr)| {
            // accept the request to be a ws connection
            println!("Got a connection from: {}", addr);
            let f = upgrade
                .accept()
                .and_then(|(s, _)| {
                    // simple echo server impl
                    let (sink, stream) = s.split();
                    stream
                        // stop (without forwarding) once a close frame arrives
                        .take_while(|m| Ok(!m.is_close()))
                        .filter_map(|m| {
                            match m {
                                // answer pings, swallow pongs, echo everything else
                                OwnedMessage::Ping(p) => Some(OwnedMessage::Pong(p)),
                                OwnedMessage::Pong(_) => None,
                                _ => Some(m),
                            }
                        })
                        .forward(sink)
                        // after the echo stream ends, close the connection cleanly
                        .and_then(|(_, sink)| {
                            sink.send(OwnedMessage::Close(None))
                        })
                });

            // run each client conversation on the reactor, logging the outcome
            handle.spawn(f.map_err(move |e| println!("{}: '{:?}'", addr, e))
                .map(move |_| println!("{} closed.", addr)));
            Ok(())
        });

    core.run(f).unwrap();
}
//! //! This is a Rust implementation of the [bip39][bip39-standard] standard for Bitcoin HD wallet //! mnemonic phrases. //! //! //! [bip39-standard]: https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki //! //! ## Quickstart //! //! ```rust //! use bip39::{Mnemonic, MnemonicType, Language, Seed}; //! //! /// create a new randomly generated mnemonic phrase //! let mnemonic = Mnemonic::new(MnemonicType::Words12, Language::English); //! //! /// get the phrase //! let phrase: &str = mnemonic.phrase(); //! println!("phrase: {}", phrase); //! //! /// get the HD wallet seed //! let seed = Seed::new(&mnemonic, ""); //! //! // get the HD wallet seed as raw bytes //! let seed_bytes: &[u8] = seed.as_bytes(); //! //! // print the HD wallet seed as a hex string //! println!("{:X}", seed); //! ``` //! mod error; mod language; mod mnemonic; mod mnemonic_type; mod seed; mod util; mod crypto; pub use error::ErrorKind; pub use language::Language; pub use mnemonic::Mnemonic; pub use mnemonic_type::MnemonicType; pub use seed::Seed;
use crate::custom_types::exceptions::{arithmetic_error, index_error};
use crate::from_bool::FromBool;
use crate::int_var::IntVar;
use crate::operator::Operator;
use crate::rational_var::RationalVar;
use crate::runtime::Runtime;
use crate::string_var::StringVar;
use crate::variable::{InnerVar, Variable};
use num::traits::Pow;
use num::{BigRational, One, ToPrimitive, Zero};

/// Result type for the fast-path operators; the error payload is `()`
/// because the thrown exception is recorded on the `Runtime` itself.
pub type QuickResult = Result<Variable, ()>;

/// Fast-path `+`: dispatches on the concrete variant of `this`.
///
/// Builtin numeric/string cases are computed inline; user-defined types
/// (`Standard`/`Custom`/`Union`) fall back to invoking their
/// `Operator::Add` implementation and popping the runtime's return value.
pub fn quick_add(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult {
    match this {
        Variable::Normal(InnerVar::Null()) => unimplemented!(),
        // bool participates in arithmetic as 0/1
        Variable::Normal(InnerVar::Bool(b)) => {
            Result::Ok((IntVar::from(other) + u8::from_bool(b)).into())
        }
        Variable::Normal(InnerVar::Bigint(i)) => Result::Ok((i + IntVar::from(other)).into()),
        // string + x concatenates with x's string representation
        Variable::Normal(InnerVar::String(s)) => {
            let result = s.as_owned() + &other.str(runtime)?;
            QuickResult::Ok(StringVar::from(result).into())
        }
        Variable::Normal(InnerVar::Decimal(d1)) => {
            if let Variable::Normal(InnerVar::Decimal(d2)) = other {
                QuickResult::Ok((d1 + d2).into())
            } else {
                unimplemented!()
            }
        }
        Variable::Normal(InnerVar::Char(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Type(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Standard(v)) => {
            v.call_operator(Operator::Add, vec![other], runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Method(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Function(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Custom(c)) => {
            c.into_inner()
                .call_op(Operator::Add, vec![other], runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Normal(InnerVar::Union(u)) => {
            u.call_operator(Operator::Add, vec![other], runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Option(_) => unimplemented!(),
    }
}

/// Fast-path `-`: same dispatch shape as [`quick_add`].
pub fn quick_sub(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult {
    match this {
        Variable::Normal(InnerVar::Null()) => unimplemented!(),
        // NOTE(review): the `false` branch yields `other` rather than
        // `-other` (i.e. `false - x == x`) — TODO confirm this is intended.
        Variable::Normal(InnerVar::Bool(b)) => Result::Ok(
            if b {
                IntVar::from(1) - IntVar::from(other)
            } else {
                IntVar::from(other)
            }
            .into(),
        ),
        Variable::Normal(InnerVar::Bigint(i)) => Result::Ok((i - IntVar::from(other)).into()),
        Variable::Normal(InnerVar::String(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Decimal(d1)) => {
            if let Variable::Normal(InnerVar::Decimal(d2)) = other {
                QuickResult::Ok((d1 - d2).into())
            } else {
                unimplemented!()
            }
        }
        Variable::Normal(InnerVar::Char(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Type(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Standard(v)) => {
            v.call_operator(Operator::Subtract, vec![other], runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Method(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Function(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Custom(c)) => {
            c.into_inner()
                .call_op(Operator::Subtract, vec![other], runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Normal(InnerVar::Union(u)) => {
            u.call_operator(Operator::Subtract, vec![other], runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Option(_) => unimplemented!(),
    }
}

/// Fast-path unary `-`.
pub fn quick_u_minus(this: Variable, runtime: &mut Runtime) -> QuickResult {
    match this {
        Variable::Normal(InnerVar::Null()) => unimplemented!(),
        // NOTE(review): no negation is applied here (`-true` yields
        // `from_bool(true)`) — TODO confirm intended semantics.
        Variable::Normal(InnerVar::Bool(b)) => Result::Ok(IntVar::from_bool(b).into()),
        Variable::Normal(InnerVar::Bigint(i)) => Result::Ok((-i).into()),
        Variable::Normal(InnerVar::String(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Decimal(d)) => Result::Ok((-d).into()),
        Variable::Normal(InnerVar::Char(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Type(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Standard(v)) => {
            v.call_operator(Operator::USubtract, Vec::new(), runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Method(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Function(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Custom(c)) => {
            c.into_inner()
                .call_op(Operator::USubtract, Vec::new(), runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Normal(InnerVar::Union(u)) => {
            u.call_operator(Operator::USubtract, Vec::new(), runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Option(_) => unimplemented!(),
    }
}

/// Fast-path `*`; `string * n` repeats the string `n` times.
pub fn quick_mul(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult {
    match this {
        Variable::Normal(InnerVar::Null()) => unimplemented!(),
        Variable::Normal(InnerVar::Bool(b)) => {
            Result::Ok(if b { IntVar::from(other) } else { Zero::zero() }.into())
        }
        Variable::Normal(InnerVar::Bigint(i)) => Result::Ok((i * IntVar::from(other)).into()),
        Variable::Normal(InnerVar::String(s)) => {
            // Repetition count must fit in usize, else raise ArithmeticError.
            let big_var = IntVar::from(other);
            let result = match big_var.to_usize() {
                Option::Some(val) => val,
                Option::None => return mul_err(big_var, runtime),
            };
            Result::Ok(StringVar::from(s.repeat(result)).into())
        }
        Variable::Normal(InnerVar::Decimal(d1)) => {
            if let Variable::Normal(InnerVar::Decimal(d2)) = other {
                QuickResult::Ok((d1 * d2).into())
            } else {
                unimplemented!()
            }
        }
        Variable::Normal(InnerVar::Char(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Type(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Standard(v)) => {
            v.call_operator(Operator::Multiply, vec![other], runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Method(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Function(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Custom(c)) => {
            c.into_inner()
                .call_op(Operator::Multiply, vec![other], runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Normal(InnerVar::Union(u)) => {
            u.call_operator(Operator::Multiply, vec![other], runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Option(_) => unimplemented!(),
    }
}

// Raise ArithmeticError for an over-large string repetition count.
// NOTE(review): the message says "shifts" — looks copy-pasted from a shift
// operator's error text; TODO confirm intended wording.
fn mul_err(big_var: IntVar, runtime: &mut Runtime) -> QuickResult {
    runtime.throw_quick_native(
        arithmetic_error(),
        format!(
            "Too many string repetitions: max number of shifts \
             for a non-empty string is {}, attempted to shift by {}",
            usize::MAX, big_var,
        ),
    )
}

/// Fast-path true division `/`; integer operands produce a rational.
pub fn quick_div(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult {
    match this {
        Variable::Normal(InnerVar::Null()) => unimplemented!(),
        // NOTE(review): unlike quick_floor_div/quick_mod, no zero-divisor
        // check is made here before `BigRational::new` — confirm behavior
        // on division by zero.
        Variable::Normal(InnerVar::Bool(b)) => Result::Ok(
            RationalVar::from(BigRational::new(
                if b { 1 } else { 0 }.into(),
                IntVar::from(other).into(),
            ))
            .into(),
        ),
        Variable::Normal(InnerVar::Bigint(i)) => Result::Ok(
            RationalVar::from(BigRational::new(i.into(), IntVar::from(other).into())).into(),
        ),
        Variable::Normal(InnerVar::String(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Decimal(d1)) => {
            if let Variable::Normal(InnerVar::Decimal(d2)) = other {
                QuickResult::Ok((d1 / d2).into())
            } else {
                unimplemented!()
            }
        }
        Variable::Normal(InnerVar::Char(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Type(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Standard(v)) => {
            v.call_operator(Operator::Divide, vec![other], runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Method(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Function(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Custom(c)) => {
            c.into_inner()
                .call_op(Operator::Divide, vec![other], runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Normal(InnerVar::Union(u)) => {
            u.call_operator(Operator::Divide, vec![other], runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Option(_) => unimplemented!(),
    }
}

/// Fast-path floor division `//`; throws ArithmeticError on zero divisor.
pub fn quick_floor_div(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult {
    match this {
        Variable::Normal(InnerVar::Null()) => unimplemented!(),
        // true // x: 1//x is 1, -1, or 0 depending on x; false // x is 0.
        Variable::Normal(InnerVar::Bool(b)) => Result::Ok(
            if b {
                let var = IntVar::from(other);
                if var.is_zero() {
                    return div_zero_error(runtime);
                } else if var.is_one() {
                    One::one()
                } else if (-var).is_one() {
                    -IntVar::one()
                } else {
                    Zero::zero()
                }
            } else {
                Zero::zero()
            }
            .into(),
        ),
        Variable::Normal(InnerVar::Bigint(i)) => {
            let other = IntVar::from(other);
            if other.is_zero() {
                div_zero_error(runtime)
            } else {
                Result::Ok((i / other).into())
            }
        }
        Variable::Normal(InnerVar::String(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Decimal(d1)) => {
            if let Variable::Normal(InnerVar::Decimal(d2)) = other {
                // Truncate the rational quotient to an integer.
                QuickResult::Ok(IntVar::from((d1 / d2).to_integer()).into())
            } else {
                unimplemented!()
            }
        }
        Variable::Normal(InnerVar::Char(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Type(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Standard(v)) => {
            v.call_operator(Operator::FloorDiv, vec![other], runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Method(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Function(_)) => unimplemented!(),
        Variable::Normal(InnerVar::Custom(c)) => {
            c.into_inner()
                .call_op(Operator::FloorDiv, vec![other], runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Normal(InnerVar::Union(u)) => {
            u.call_operator(Operator::FloorDiv, vec![other], runtime)?;
            QuickResult::Ok(runtime.pop_return())
        }
        Variable::Option(_) => unimplemented!(),
    }
}

// Throw ArithmeticError for a zero divisor; generic so it fits any result.
fn div_zero_error<T>(runtime: &mut Runtime) -> Result<T, ()> {
    runtime.throw_quick_native(arithmetic_error(), "Cannot divide by zero")
}

/// Fast-path `%`; throws on zero modulus.
pub fn quick_mod(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult {
    match this {
        Variable::Normal(InnerVar::Null()) => unimplemented!(),
        Variable::Normal(InnerVar::Bool(b)) => {
            let other = IntVar::from(other);
            if !other.is_zero() {
                Result::Ok(IntVar::from_bool(b).into())
            } else {
                mod_zero_error(runtime)
            }
        }
        Variable::Normal(InnerVar::Bigint(i)) => {
            let other = IntVar::from(other);
            if other.is_zero() {
                mod_zero_error(runtime)
            } else {
                Result::Ok((i % other).into())
            }
        }
        Variable::Normal(InnerVar::String(_)) => 
unimplemented!(), Variable::Normal(InnerVar::Decimal(d1)) => { if let Variable::Normal(InnerVar::Decimal(d2)) = other { if d2.is_zero() { mod_zero_error(runtime) } else { QuickResult::Ok((d1 % d2).into()) } } else { unimplemented!() } } Variable::Normal(InnerVar::Char(_)) => unimplemented!(), Variable::Normal(InnerVar::Type(_)) => unimplemented!(), Variable::Normal(InnerVar::Standard(v)) => { v.call_operator(Operator::Modulo, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(), Variable::Normal(InnerVar::Method(_)) => unimplemented!(), Variable::Normal(InnerVar::Function(_)) => unimplemented!(), Variable::Normal(InnerVar::Custom(c)) => { c.into_inner() .call_op(Operator::Modulo, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Union(u)) => { u.call_operator(Operator::Modulo, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Option(_) => unimplemented!(), } } fn mod_zero_error<T>(runtime: &mut Runtime) -> Result<T, ()> { runtime.throw_quick_native(arithmetic_error(), "Cannot modulo by zero") } pub fn quick_subscript(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult { match this { Variable::Normal(InnerVar::Null()) => unimplemented!(), Variable::Normal(InnerVar::Bool(_)) => unimplemented!(), Variable::Normal(InnerVar::Bigint(_)) => unimplemented!(), Variable::Normal(InnerVar::String(val)) => { let index = IntVar::from(other).to_usize().unwrap(); match val.chars().nth(index) { Option::None => { runtime.push_native(); runtime.throw_quick_native(index_error(), "Index out of bounds") } Option::Some(value) => Result::Ok(value.into()), } } Variable::Normal(InnerVar::Decimal(_)) => unimplemented!(), Variable::Normal(InnerVar::Char(_)) => unimplemented!(), Variable::Normal(InnerVar::Type(t)) => { if let Variable::Normal(InnerVar::Type(_)) = other { Result::Ok(t.into()) // FIXME } else { panic!("Type indexing only 
supported for other types") } } Variable::Normal(InnerVar::Standard(v)) => { v.call_operator(Operator::GetAttr, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(), Variable::Normal(InnerVar::Method(_)) => unimplemented!(), Variable::Normal(InnerVar::Function(_)) => unimplemented!(), Variable::Normal(InnerVar::Custom(c)) => { c.into_inner() .call_op(Operator::GetAttr, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Union(u)) => { u.call_operator(Operator::GetAttr, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Option(_) => unimplemented!(), } } pub fn quick_power(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult { match this { Variable::Normal(InnerVar::Null()) => unimplemented!(), Variable::Normal(InnerVar::Bool(b)) => { IntVar::from(other); // Since this can be only 1 or 0, no Result::Ok(IntVar::from(if b { 1 } else { 0 }).into()) } Variable::Normal(InnerVar::Bigint(i)) => Result::Ok(i.pow(other.into()).into()), Variable::Normal(InnerVar::String(_)) => unimplemented!(), Variable::Normal(InnerVar::Decimal(_)) => unimplemented!(), Variable::Normal(InnerVar::Char(_)) => unimplemented!(), Variable::Normal(InnerVar::Type(_)) => unimplemented!(), Variable::Normal(InnerVar::Standard(v)) => { v.call_operator(Operator::Power, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(), Variable::Normal(InnerVar::Method(_)) => unimplemented!(), Variable::Normal(InnerVar::Function(_)) => unimplemented!(), Variable::Normal(InnerVar::Custom(c)) => { c.into_inner() .call_op(Operator::Power, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Union(u)) => { u.call_operator(Operator::Power, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Option(_) => unimplemented!(), } } pub fn 
quick_left_bitshift(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult { match this { Variable::Normal(InnerVar::Null()) => unimplemented!(), Variable::Normal(InnerVar::Bool(b)) => { q_lshift(IntVar::from_bool(b), other.into(), runtime) } Variable::Normal(InnerVar::Bigint(i)) => q_lshift(i, other.into(), runtime), Variable::Normal(InnerVar::String(_)) => unimplemented!(), Variable::Normal(InnerVar::Decimal(_)) => unimplemented!(), Variable::Normal(InnerVar::Char(_)) => unimplemented!(), Variable::Normal(InnerVar::Type(_)) => unimplemented!(), Variable::Normal(InnerVar::Standard(v)) => { v.call_operator(Operator::LeftBitshift, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(), Variable::Normal(InnerVar::Method(_)) => unimplemented!(), Variable::Normal(InnerVar::Function(_)) => unimplemented!(), Variable::Normal(InnerVar::Custom(c)) => { c.into_inner() .call_op(Operator::LeftBitshift, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Union(u)) => { u.call_operator(Operator::LeftBitshift, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Option(_) => unimplemented!(), } } fn q_lshift(this: IntVar, other: IntVar, runtime: &mut Runtime) -> QuickResult { let other_usize = shift_to_usize(other, runtime)?; Result::Ok((this << other_usize).into()) } pub fn quick_right_bitshift(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult { match this { Variable::Normal(InnerVar::Null()) => unimplemented!(), Variable::Normal(InnerVar::Bool(b)) => { q_rshift(IntVar::from_bool(b), other.into(), runtime) } Variable::Normal(InnerVar::Bigint(i)) => q_rshift(i, other.into(), runtime), Variable::Normal(InnerVar::String(_)) => unimplemented!(), Variable::Normal(InnerVar::Decimal(_)) => unimplemented!(), Variable::Normal(InnerVar::Char(_)) => unimplemented!(), Variable::Normal(InnerVar::Type(_)) => unimplemented!(), 
Variable::Normal(InnerVar::Standard(v)) => { v.call_operator(Operator::RightBitshift, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(), Variable::Normal(InnerVar::Method(_)) => unimplemented!(), Variable::Normal(InnerVar::Function(_)) => unimplemented!(), Variable::Normal(InnerVar::Custom(c)) => { c.into_inner() .call_op(Operator::RightBitshift, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Union(u)) => { u.call_operator(Operator::RightBitshift, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Option(_) => unimplemented!(), } } fn q_rshift(this: IntVar, other: IntVar, runtime: &mut Runtime) -> QuickResult { let other_usize = shift_to_usize(other, runtime)?; Result::Ok((this >> other_usize).into()) } fn shift_to_usize(big_var: IntVar, runtime: &mut Runtime) -> Result<usize, ()> { Result::Ok(match big_var.to_usize() { Option::Some(val) => val, Option::None => runtime.throw_quick_native(arithmetic_error(), shift_err(big_var))?, }) } fn shift_err(big_val: IntVar) -> StringVar { format!( "Attempted bitshift of {}, which is more than the max allowed shift {}", big_val, usize::MAX ) .into() } pub fn quick_bitwise_and(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult { match this { Variable::Normal(InnerVar::Null()) => unimplemented!(), Variable::Normal(InnerVar::Bool(b)) => Result::Ok((b & other.into_bool(runtime)?).into()), Variable::Normal(InnerVar::Bigint(i)) => Result::Ok((i & IntVar::from(other)).into()), Variable::Normal(InnerVar::String(_)) => unimplemented!(), Variable::Normal(InnerVar::Decimal(_)) => unimplemented!(), Variable::Normal(InnerVar::Char(_)) => unimplemented!(), Variable::Normal(InnerVar::Type(_)) => unimplemented!(), Variable::Normal(InnerVar::Standard(v)) => { v.call_operator(Operator::BitwiseAnd, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } 
Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(), Variable::Normal(InnerVar::Method(_)) => unimplemented!(), Variable::Normal(InnerVar::Function(_)) => unimplemented!(), Variable::Normal(InnerVar::Custom(c)) => { c.into_inner() .call_op(Operator::BitwiseAnd, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Union(u)) => { u.call_operator(Operator::BitwiseAnd, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Option(_) => unimplemented!(), } } pub fn quick_bitwise_or(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult { match this { Variable::Normal(InnerVar::Null()) => unimplemented!(), Variable::Normal(InnerVar::Bool(b)) => Result::Ok((b | other.into_bool(runtime)?).into()), Variable::Normal(InnerVar::Bigint(i)) => Result::Ok((i | IntVar::from(other)).into()), Variable::Normal(InnerVar::String(_)) => unimplemented!(), Variable::Normal(InnerVar::Decimal(_)) => unimplemented!(), Variable::Normal(InnerVar::Char(_)) => unimplemented!(), Variable::Normal(InnerVar::Type(_)) => unimplemented!(), Variable::Normal(InnerVar::Standard(v)) => { v.call_operator(Operator::BitwiseOr, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(), Variable::Normal(InnerVar::Method(_)) => unimplemented!(), Variable::Normal(InnerVar::Function(_)) => unimplemented!(), Variable::Normal(InnerVar::Custom(c)) => { c.into_inner() .call_op(Operator::BitwiseOr, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Union(u)) => { u.call_operator(Operator::BitwiseOr, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Option(_) => unimplemented!(), } } pub fn quick_bitwise_xor(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult { match this { Variable::Normal(InnerVar::Null()) => unimplemented!(), Variable::Normal(InnerVar::Bool(b)) => Result::Ok((b ^ 
other.into_bool(runtime)?).into()), Variable::Normal(InnerVar::Bigint(i)) => Result::Ok((i ^ IntVar::from(other)).into()), Variable::Normal(InnerVar::String(_)) => unimplemented!(), Variable::Normal(InnerVar::Decimal(_)) => unimplemented!(), Variable::Normal(InnerVar::Char(_)) => unimplemented!(), Variable::Normal(InnerVar::Type(_)) => unimplemented!(), Variable::Normal(InnerVar::Standard(v)) => { v.call_operator(Operator::BitwiseXor, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(), Variable::Normal(InnerVar::Method(_)) => unimplemented!(), Variable::Normal(InnerVar::Function(_)) => unimplemented!(), Variable::Normal(InnerVar::Custom(c)) => { c.into_inner() .call_op(Operator::BitwiseXor, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Union(u)) => { u.call_operator(Operator::BitwiseXor, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Option(_) => unimplemented!(), } } pub fn quick_bitwise_not(this: Variable, runtime: &mut Runtime) -> QuickResult { match this { Variable::Normal(InnerVar::Null()) => unimplemented!(), Variable::Normal(InnerVar::Bool(b)) => QuickResult::Ok((!b).into()), Variable::Normal(InnerVar::Bigint(i)) => QuickResult::Ok((!i).into()), Variable::Normal(InnerVar::String(_)) => unimplemented!(), Variable::Normal(InnerVar::Decimal(_)) => unimplemented!(), Variable::Normal(InnerVar::Char(_)) => unimplemented!(), Variable::Normal(InnerVar::Type(_)) => unimplemented!(), Variable::Normal(InnerVar::Standard(v)) => { v.call_operator(Operator::BitwiseNot, Vec::new(), runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(), Variable::Normal(InnerVar::Method(_)) => unimplemented!(), Variable::Normal(InnerVar::Function(_)) => unimplemented!(), Variable::Normal(InnerVar::Custom(c)) => { c.into_inner() .call_op(Operator::BitwiseNot, Vec::new(), runtime)?; 
QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Union(u)) => { u.call_operator(Operator::BitwiseNot, Vec::new(), runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Option(_) => unimplemented!(), } } pub fn quick_equals(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult { match this { Variable::Normal(InnerVar::Standard(v)) => { v.call_operator(Operator::Equals, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Custom(c)) => { c.into_inner() .call_op(Operator::Equals, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Union(u)) => { u.call_operator(Operator::Equals, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Option(a) => { if let Variable::Option(b) = other { match (a.into(), b.into()) { (Option::Some(this), Option::Some(other)) => quick_equals(this, other, runtime), (Option::None, Option::None) => QuickResult::Ok(true.into()), _ => QuickResult::Ok(false.into()), } } else { QuickResult::Ok(false.into()) } } Variable::Normal(InnerVar::Tuple(t)) => { if let Variable::Normal(InnerVar::Tuple(t2)) = other { if t.len() != t2.len() { return QuickResult::Ok(false.into()); } for (x, y) in t.iter().zip(&t2) { if !x.clone().equals(y.clone(), runtime)? 
{ return QuickResult::Ok(false.into()); } } QuickResult::Ok(true.into()) } else { QuickResult::Ok(false.into()) } } _ => QuickResult::Ok((this == other).into()), } } pub fn quick_less_than(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult { match this { Variable::Normal(InnerVar::Null()) => unimplemented!(), Variable::Normal(InnerVar::Bool(b)) => { Result::Ok((IntVar::from(if b { 1 } else { 0 }) < IntVar::from(other)).into()) } Variable::Normal(InnerVar::Bigint(i)) => Result::Ok((i < IntVar::from(other)).into()), Variable::Normal(InnerVar::String(_)) => unimplemented!(), Variable::Normal(InnerVar::Decimal(d1)) => { if let Variable::Normal(InnerVar::Decimal(d2)) = other { QuickResult::Ok((d1 < d2).into()) } else { unimplemented!() } } Variable::Normal(InnerVar::Char(_)) => unimplemented!(), Variable::Normal(InnerVar::Type(_)) => unimplemented!(), Variable::Normal(InnerVar::Standard(v)) => { v.call_operator(Operator::LessThan, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(), Variable::Normal(InnerVar::Method(_)) => unimplemented!(), Variable::Normal(InnerVar::Function(_)) => unimplemented!(), Variable::Normal(InnerVar::Custom(c)) => { c.into_inner() .call_op(Operator::LessThan, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Union(u)) => { u.call_operator(Operator::LessThan, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Option(_) => unimplemented!(), } } pub fn quick_greater_than(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult { match this { Variable::Normal(InnerVar::Null()) => unimplemented!(), Variable::Normal(InnerVar::Bool(b)) => { Result::Ok((IntVar::from(if b { 1 } else { 0 }) > IntVar::from(other)).into()) } Variable::Normal(InnerVar::Bigint(i)) => Result::Ok((i > IntVar::from(other)).into()), Variable::Normal(InnerVar::String(_)) => unimplemented!(), 
Variable::Normal(InnerVar::Decimal(d1)) => { if let Variable::Normal(InnerVar::Decimal(d2)) = other { QuickResult::Ok((d1 > d2).into()) } else { unimplemented!() } } Variable::Normal(InnerVar::Char(_)) => unimplemented!(), Variable::Normal(InnerVar::Type(_)) => unimplemented!(), Variable::Normal(InnerVar::Standard(v)) => { v.call_operator(Operator::GreaterThan, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(), Variable::Normal(InnerVar::Method(_)) => unimplemented!(), Variable::Normal(InnerVar::Function(_)) => unimplemented!(), Variable::Normal(InnerVar::Custom(c)) => { c.into_inner() .call_op(Operator::GreaterThan, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Union(u)) => { u.call_operator(Operator::GreaterThan, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Option(_) => unimplemented!(), } } pub fn quick_less_equal(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult { match this { Variable::Normal(InnerVar::Null()) => unimplemented!(), Variable::Normal(InnerVar::Bool(b)) => { Result::Ok((IntVar::from(if b { 1 } else { 0 }) <= IntVar::from(other)).into()) } Variable::Normal(InnerVar::Bigint(i)) => Result::Ok((i <= IntVar::from(other)).into()), Variable::Normal(InnerVar::String(_)) => unimplemented!(), Variable::Normal(InnerVar::Decimal(d1)) => { if let Variable::Normal(InnerVar::Decimal(d2)) = other { QuickResult::Ok((d1 <= d2).into()) } else { unimplemented!() } } Variable::Normal(InnerVar::Char(_)) => unimplemented!(), Variable::Normal(InnerVar::Type(_)) => unimplemented!(), Variable::Normal(InnerVar::Standard(v)) => { v.call_operator(Operator::LessEqual, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(), Variable::Normal(InnerVar::Method(_)) => unimplemented!(), Variable::Normal(InnerVar::Function(_)) => unimplemented!(), 
Variable::Normal(InnerVar::Custom(c)) => { c.into_inner() .call_op(Operator::LessEqual, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Union(u)) => { u.call_operator(Operator::LessEqual, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Option(_) => unimplemented!(), } } pub fn quick_greater_equal(this: Variable, other: Variable, runtime: &mut Runtime) -> QuickResult { match this { Variable::Normal(InnerVar::Null()) => unimplemented!(), Variable::Normal(InnerVar::Bool(b)) => { Result::Ok((IntVar::from(if b { 1 } else { 0 }) >= IntVar::from(other)).into()) } Variable::Normal(InnerVar::Bigint(i)) => Result::Ok((i >= IntVar::from(other)).into()), Variable::Normal(InnerVar::String(_)) => unimplemented!(), Variable::Normal(InnerVar::Decimal(d1)) => { if let Variable::Normal(InnerVar::Decimal(d2)) = other { QuickResult::Ok((d1 >= d2).into()) } else { unimplemented!() } } Variable::Normal(InnerVar::Char(_)) => unimplemented!(), Variable::Normal(InnerVar::Type(_)) => unimplemented!(), Variable::Normal(InnerVar::Standard(v)) => { v.call_operator(Operator::GreaterEqual, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Tuple(_)) => unimplemented!(), Variable::Normal(InnerVar::Method(_)) => unimplemented!(), Variable::Normal(InnerVar::Function(_)) => unimplemented!(), Variable::Normal(InnerVar::Custom(c)) => { c.into_inner() .call_op(Operator::GreaterEqual, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Normal(InnerVar::Union(u)) => { u.call_operator(Operator::GreaterEqual, vec![other], runtime)?; QuickResult::Ok(runtime.pop_return()) } Variable::Option(_) => unimplemented!(), } }
/// Maps between 2-D tile coordinates and linear indices into a row-major
/// tile buffer.
pub trait CoordinateEncoder {
    /// Converts a linear `index` back into `(x, y)` tile coordinates.
    fn decode(&self, index: usize) -> (u32, u32);
    /// Converts `(x, y)` coordinates into a linear index, or `None` when the
    /// coordinates cannot be represented by this encoder.
    fn encode(&self, x: i32, y: i32) -> Option<usize>;
}

/// Encoder that wraps out-of-range coordinates around the map edges
/// (toroidal topology): every `(x, y)` maps to some valid index.
#[derive(Clone)]
pub struct LoopingEncoder {
    /// Map size as `(width, height)` in tiles.
    pub dimensions: (u32, u32),
}

impl CoordinateEncoder for LoopingEncoder {
    fn decode(&self, index: usize) -> (u32, u32) {
        // Row-major layout: x advances fastest.
        (
            index as u32 % self.dimensions.0,
            index as u32 / self.dimensions.0,
        )
    }

    /// Wraps both coordinates into the map and returns the linear index.
    ///
    /// Panics if either dimension is zero (`rem_euclid` by zero).
    fn encode(&self, x: i32, y: i32) -> Option<usize> {
        let (w, h) = (self.dimensions.0 as i32, self.dimensions.1 as i32);
        // `rem_euclid` always yields a value in `0..w` / `0..h`, so arbitrarily
        // large negative (or positive) coordinates wrap correctly. The previous
        // implementation computed `(w + x) as u32` for negative `x`, which
        // produced a bogus huge index (and an overflow panic in debug builds)
        // whenever `x < -w`; same for `y < -h`. For all previously-valid inputs
        // the result is unchanged: the old trailing `% (w * h)` on the linear
        // index is equivalent to wrapping `y` by rows up front.
        let x = x.rem_euclid(w) as u32;
        let y = y.rem_euclid(h) as u32;
        Some((y * self.dimensions.0 + x) as usize)
    }
}

/// Encoder that only accepts coordinates inside the map bounds.
#[derive(Clone)]
pub struct FlatEncoder {
    /// Map size as `(width, height)` in tiles.
    pub dimensions: (u32, u32),
}

impl CoordinateEncoder for FlatEncoder {
    fn decode(&self, index: usize) -> (u32, u32) {
        (
            index as u32 % self.dimensions.0,
            index as u32 / self.dimensions.0,
        )
    }

    fn encode(&self, x: i32, y: i32) -> Option<usize> {
        // Reject anything outside `[0, width) x [0, height)`.
        if x >= 0 && x < self.dimensions.0 as i32 && y >= 0 && y < self.dimensions.1 as i32 {
            Some((y as u32 * self.dimensions.0 + x as u32) as usize)
        } else {
            None
        }
    }
}

/// Converts between normalized device coordinates (`[-1, 1]`) and screen
/// space (`[0, width) x [0, height)`).
#[derive(Copy, Clone)]
pub struct ScreenSpaceEncoder {
    /// Screen size as `(width, height)` in pixels.
    pub dimensions: (u32, u32),
}

impl ScreenSpaceEncoder {
    /// Replaces the stored screen dimensions.
    // Keeps the original non-snake-case name: callers depend on it.
    #[allow(non_snake_case)]
    pub fn updateDimensions(&mut self, width: u32, height: u32) {
        self.dimensions = (width, height);
    }

    /// Normalized space `[-1, 1]` -> screen space `[0, dimension)`.
    pub fn decode(&self, x: f32, y: f32) -> (f32, f32) {
        (
            map(x, (-1.0, 1.0), (0.0, self.dimensions.0 as f32)),
            map(y, (-1.0, 1.0), (0.0, self.dimensions.1 as f32)),
        )
    }

    /// Screen space `[0, dimension)` -> normalized space `[-1, 1]`.
    pub fn encode(&self, x: f32, y: f32) -> (f32, f32) {
        (
            map(x, (0.0, self.dimensions.0 as f32), (-1.0, 1.0)),
            map(y, (0.0, self.dimensions.1 as f32), (-1.0, 1.0)),
        )
    }
}

/// Linearly remaps `index` from source range `a` to target range `b`.
fn map(index: f32, a: (f32, f32), b: (f32, f32)) -> f32 {
    (index - a.0) / (a.1 - a.0) * (b.1 - b.0) + b.0
}
mod api; mod connection; mod utils;
extern crate clap;
extern crate ctrlc;
extern crate fatfs;
extern crate tempfile;
extern crate wait_timeout;

use std::io::Write;
use std::path::PathBuf;
use std::process::{Child, Command};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::time::Duration;

use wait_timeout::ChildExt;

/// Entry point: builds a temporary FAT disk image containing the given EFI
/// executable (as `run.efi`) plus a `startup.nsh` that launches it, boots
/// the image in qemu, and exits with qemu's exit code.
fn main() {
    let matches = clap::App::new("uefi-run")
        .version(env!("CARGO_PKG_VERSION"))
        .author("Richard Wiedenhöft <richard@wiedenhoeft.xyz>")
        .about("Runs UEFI executables in qemu.")
        // TrailingVarArg + DontDelimitTrailingValues let everything after the
        // named options be collected verbatim into `qemu_args`.
        .setting(clap::AppSettings::TrailingVarArg)
        .setting(clap::AppSettings::DontDelimitTrailingValues)
        .arg(
            clap::Arg::with_name("efi_exe")
                .value_name("FILE")
                .required(true)
                .help("EFI executable"),
        )
        .arg(
            clap::Arg::with_name("bios_path")
                .value_name("bios_path")
                .default_value("OVMF.fd")
                .help("BIOS image")
                .short("b")
                .long("bios"),
        )
        .arg(
            clap::Arg::with_name("qemu_path")
                .value_name("qemu_path")
                .default_value("qemu-system-x86_64")
                .help("Path to qemu executable")
                .short("q")
                .long("qemu"),
        )
        .arg(
            clap::Arg::with_name("size")
                .value_name("size")
                .default_value("10")
                .help("Size of the image in MiB")
                .short("s")
                .long("size"),
        )
        .arg(
            clap::Arg::with_name("qemu_args")
                .value_name("qemu_args")
                .required(false)
                .help("Additional arguments for qemu")
                .multiple(true),
        )
        .get_matches();

    // Parse options
    let efi_exe = matches.value_of("efi_exe").unwrap();
    let bios_path = matches.value_of("bios_path").unwrap();
    let qemu_path = matches.value_of("qemu_path").unwrap();
    let size: u64 = matches
        .value_of("size")
        .map(|v| v.parse().expect("Failed to parse --size argument"))
        .unwrap();
    let user_qemu_args = matches.values_of("qemu_args").unwrap_or_default();

    // Install termination signal handler. This ensures that the destructor of
    // `temp_dir` which is constructed in the next step is really called and
    // the files are cleaned up properly.
    let terminating = Arc::new(AtomicBool::new(false));
    {
        let term = terminating.clone();
        ctrlc::set_handler(move || {
            println!("uefi-run terminating...");
            // Tell the main thread to stop waiting.
            term.store(true, Ordering::SeqCst);
        })
        .expect("Error setting termination handler");
    }

    // Create temporary dir for the image file.
    let temp_dir = tempfile::tempdir().expect("Unable to create temporary directory");
    let temp_dir_path = PathBuf::from(temp_dir.path());

    // Path to the image file
    let image_file_path = {
        let mut path_buf = temp_dir_path;
        path_buf.push("image.fat");
        path_buf
    };

    // Scope so the image file and filesystem handles are closed before qemu
    // opens the image.
    {
        // Create image file
        let image_file = std::fs::OpenOptions::new()
            .read(true)
            .write(true)
            .create_new(true)
            .open(&image_file_path)
            .expect("Image file creation failed");
        // Truncate image to `size` MiB
        image_file
            .set_len(size * 0x10_0000)
            .expect("Truncating image file failed");
        // Format file as FAT
        fatfs::format_volume(&image_file, fatfs::FormatVolumeOptions::new())
            .expect("Formatting image file failed");

        // Open the FAT fs.
        let fs = fatfs::FileSystem::new(&image_file, fatfs::FsOptions::new())
            .expect("Failed to open filesystem");

        // Create run.efi
        let efi_exe_contents = std::fs::read(efi_exe).unwrap();
        let mut run_efi = fs.root_dir().create_file("run.efi").unwrap();
        run_efi.truncate().unwrap();
        run_efi.write_all(&efi_exe_contents).unwrap();

        // Create startup.nsh (bundled at compile time next to this source file)
        let mut startup_nsh = fs.root_dir().create_file("startup.nsh").unwrap();
        startup_nsh.truncate().unwrap();
        startup_nsh
            .write_all(include_bytes!("startup.nsh"))
            .unwrap();
    }

    // Base qemu invocation; user-supplied trailing args are appended last so
    // they can override these defaults.
    let mut qemu_args = vec![
        "-drive".into(),
        format!(
            "file={},index=0,media=disk,format=raw",
            image_file_path.display()
        ),
        "-bios".into(),
        bios_path.into(),
        "-net".into(),
        "none".into(),
    ];
    qemu_args.extend(user_qemu_args.map(|x| x.into()));

    // Run qemu.
    let mut child = Command::new(qemu_path)
        .args(qemu_args)
        .spawn()
        .expect("Failed to start qemu");

    // Wait for qemu to exit or signal. Polling in 500 ms slices keeps the
    // loop responsive to the Ctrl-C flag set by the signal handler above.
    let mut qemu_exit_code;
    loop {
        qemu_exit_code = wait_qemu(&mut child, Duration::from_millis(500));
        if qemu_exit_code.is_some() || terminating.load(Ordering::SeqCst) {
            break;
        }
    }

    // The above loop may have been broken by a signal
    if qemu_exit_code.is_none() {
        // In this case we wait for qemu to exit for one second
        qemu_exit_code = wait_qemu(&mut child, Duration::from_secs(1));
    }

    // Qemu may still be running
    if qemu_exit_code.is_none() {
        // In this case we need to kill it
        child
            .kill()
            .or_else(|e| match e.kind() {
                // Not running anymore
                std::io::ErrorKind::InvalidInput => Ok(()),
                _ => Err(e),
            })
            .expect("Unable to kill qemu process");
        qemu_exit_code = wait_qemu(&mut child, Duration::from_secs(1));
    }

    let exit_code = qemu_exit_code.expect("qemu should have exited by now but did not");
    std::process::exit(exit_code);
}

/// Wait for the process to exit for `duration`.
///
/// Returns `Some(exit_code)` if the process exited within the timeout and
/// `None` if the timeout expired while the child was still running. A child
/// terminated without an exit code (e.g. by a signal) is reported as `Some(0)`.
fn wait_qemu(child: &mut Child, duration: Duration) -> Option<i32> {
    let wait_result = child
        .wait_timeout(duration)
        .expect("Failed to wait on child process");
    match wait_result {
        None => {
            // Child still alive.
            None
        }
        Some(exit_status) => Some(exit_status.code().unwrap_or(0)),
    }
}
// Copyright (c) Facebook, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. // // A test that visits the Place::Projection case of Visitor::visit_place // and the ProjectionElem::Index case of Visitor::visit_projection_elem. pub fn foo(arr: &mut [i32], i: usize) { arr[i] = 123; //~ possible array index out of bounds // If we get here i is known to be within bounds, so no warning below. bar(arr, i); } fn bar(arr: &mut [i32], i: usize) { arr[i] = 123; debug_assert!(arr[i] == 123); }
use std::time::Duration;

use js_sys::{Promise};
use web_sys::{window};
use wasm_bindgen_futures::JsFuture;
use wasm_bindgen::prelude::*;

/// This is the wasm version of the sleep function.
/// For now it is the only way to sleep.
/// The precision of this function is 1ms.
pub async fn sleep(duration: Duration) {
    // Build a JS Promise whose resolve callback (`yes`) is scheduled by the
    // browser via setTimeout; awaiting it yields back to the JS event loop.
    // NOTE(review): `as_millis()` is u128, cast to i32 — durations longer than
    // ~24.8 days would truncate; presumably never hit in practice, but worth
    // confirming at call sites.
    let promise = Promise::new(&mut |yes, _| {
        window().unwrap().set_timeout_with_callback_and_timeout_and_arguments_0(&yes, duration.as_millis() as i32).unwrap();
    });
    let js_fut = JsFuture::from(promise);
    // Panics if the promise rejects; setTimeout-driven resolution never
    // rejects here, so the unwrap only guards against a broken JS environment.
    js_fut.await.unwrap();
}

// Raw bindings to the browser's `console.log` / `console.error`.
#[wasm_bindgen]
extern "C" {
    #[wasm_bindgen(js_namespace = console)]
    pub fn log(s: &str);
    #[wasm_bindgen(js_namespace = console)]
    pub fn error(s: &str);
}

#[macro_export]
/// A println-like macro.
/// **Warning**: This is very slow.
macro_rules! log {
    ($($t:tt)*) => ($crate::system::log(&format_args!($($t)*).to_string()))
}

#[macro_export]
/// An eprintln-like macro.
/// **Warning**: This is **extremely** slow.
macro_rules! elog {
    ($($t:tt)*) => ($crate::system::error(&format_args!($($t)*).to_string()))
}
extern crate pulldown_cmark;
use pulldown_cmark::{html, Parser, Event, Tag};
use std::borrow::Cow::{Owned};

/// Renders a small Markdown document to HTML, downgrading every link's
/// scheme from `https` to `http` on the way through the event stream.
fn main() {
    let markdown_str = r#"# Hello 人間は愚かな生物。 [俺のブログ](https://blog.himanoa.net) "#;

    // Rewrite link-start events in place; every other event passes through.
    let events = Parser::new(markdown_str).map(|event| {
        if let Event::Start(Tag::Link(url, title)) = event {
            let downgraded = url.replace("https", "http");
            Event::Start(Tag::Link(Owned(downgraded), title))
        } else {
            event
        }
    });

    let mut rendered = String::new();
    html::push_html(&mut rendered, events);
    println!("{}", rendered);
}
/// An enum to represent all characters in the AlphabeticPresentationForms block. #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum AlphabeticPresentationForms { /// \u{fb00}: 'ff' LatinSmallLigatureFf, /// \u{fb01}: 'fi' LatinSmallLigatureFi, /// \u{fb02}: 'fl' LatinSmallLigatureFl, /// \u{fb03}: 'ffi' LatinSmallLigatureFfi, /// \u{fb04}: 'ffl' LatinSmallLigatureFfl, /// \u{fb05}: 'ſt' LatinSmallLigatureLongST, /// \u{fb06}: 'st' LatinSmallLigatureSt, /// \u{fb13}: 'ﬓ' ArmenianSmallLigatureMenNow, /// \u{fb14}: 'ﬔ' ArmenianSmallLigatureMenEch, /// \u{fb15}: 'ﬕ' ArmenianSmallLigatureMenIni, /// \u{fb16}: 'ﬖ' ArmenianSmallLigatureVewNow, /// \u{fb17}: 'ﬗ' ArmenianSmallLigatureMenXeh, /// \u{fb1d}: 'יִ' HebrewLetterYodWithHiriq, /// \u{fb1e}: 'ﬞ' HebrewPointJudeoDashSpanishVarika, /// \u{fb1f}: 'ײַ' HebrewLigatureYiddishYodYodPatah, /// \u{fb20}: 'ﬠ' HebrewLetterAlternativeAyin, /// \u{fb21}: 'ﬡ' HebrewLetterWideAlef, /// \u{fb22}: 'ﬢ' HebrewLetterWideDalet, /// \u{fb23}: 'ﬣ' HebrewLetterWideHe, /// \u{fb24}: 'ﬤ' HebrewLetterWideKaf, /// \u{fb25}: 'ﬥ' HebrewLetterWideLamed, /// \u{fb26}: 'ﬦ' HebrewLetterWideFinalMem, /// \u{fb27}: 'ﬧ' HebrewLetterWideResh, /// \u{fb28}: 'ﬨ' HebrewLetterWideTav, /// \u{fb29}: '﬩' HebrewLetterAlternativePlusSign, /// \u{fb2a}: 'שׁ' HebrewLetterShinWithShinDot, /// \u{fb2b}: 'שׂ' HebrewLetterShinWithSinDot, /// \u{fb2c}: 'שּׁ' HebrewLetterShinWithDageshAndShinDot, /// \u{fb2d}: 'שּׂ' HebrewLetterShinWithDageshAndSinDot, /// \u{fb2e}: 'אַ' HebrewLetterAlefWithPatah, /// \u{fb2f}: 'אָ' HebrewLetterAlefWithQamats, /// \u{fb30}: 'אּ' HebrewLetterAlefWithMapiq, /// \u{fb31}: 'בּ' HebrewLetterBetWithDagesh, /// \u{fb32}: 'גּ' HebrewLetterGimelWithDagesh, /// \u{fb33}: 'דּ' HebrewLetterDaletWithDagesh, /// \u{fb34}: 'הּ' HebrewLetterHeWithMapiq, /// \u{fb35}: 'וּ' HebrewLetterVavWithDagesh, /// \u{fb36}: 'זּ' HebrewLetterZayinWithDagesh, /// \u{fb38}: 'טּ' HebrewLetterTetWithDagesh, /// \u{fb39}: 'יּ' HebrewLetterYodWithDagesh, /// 
\u{fb3a}: 'ךּ' HebrewLetterFinalKafWithDagesh, /// \u{fb3b}: 'כּ' HebrewLetterKafWithDagesh, /// \u{fb3c}: 'לּ' HebrewLetterLamedWithDagesh, /// \u{fb3e}: 'מּ' HebrewLetterMemWithDagesh, /// \u{fb40}: 'נּ' HebrewLetterNunWithDagesh, /// \u{fb41}: 'סּ' HebrewLetterSamekhWithDagesh, /// \u{fb43}: 'ףּ' HebrewLetterFinalPeWithDagesh, /// \u{fb44}: 'פּ' HebrewLetterPeWithDagesh, /// \u{fb46}: 'צּ' HebrewLetterTsadiWithDagesh, /// \u{fb47}: 'קּ' HebrewLetterQofWithDagesh, /// \u{fb48}: 'רּ' HebrewLetterReshWithDagesh, /// \u{fb49}: 'שּ' HebrewLetterShinWithDagesh, /// \u{fb4a}: 'תּ' HebrewLetterTavWithDagesh, /// \u{fb4b}: 'וֹ' HebrewLetterVavWithHolam, /// \u{fb4c}: 'בֿ' HebrewLetterBetWithRafe, /// \u{fb4d}: 'כֿ' HebrewLetterKafWithRafe, /// \u{fb4e}: 'פֿ' HebrewLetterPeWithRafe, } impl Into<char> for AlphabeticPresentationForms { fn into(self) -> char { match self { AlphabeticPresentationForms::LatinSmallLigatureFf => 'ff', AlphabeticPresentationForms::LatinSmallLigatureFi => 'fi', AlphabeticPresentationForms::LatinSmallLigatureFl => 'fl', AlphabeticPresentationForms::LatinSmallLigatureFfi => 'ffi', AlphabeticPresentationForms::LatinSmallLigatureFfl => 'ffl', AlphabeticPresentationForms::LatinSmallLigatureLongST => 'ſt', AlphabeticPresentationForms::LatinSmallLigatureSt => 'st', AlphabeticPresentationForms::ArmenianSmallLigatureMenNow => 'ﬓ', AlphabeticPresentationForms::ArmenianSmallLigatureMenEch => 'ﬔ', AlphabeticPresentationForms::ArmenianSmallLigatureMenIni => 'ﬕ', AlphabeticPresentationForms::ArmenianSmallLigatureVewNow => 'ﬖ', AlphabeticPresentationForms::ArmenianSmallLigatureMenXeh => 'ﬗ', AlphabeticPresentationForms::HebrewLetterYodWithHiriq => 'יִ', AlphabeticPresentationForms::HebrewPointJudeoDashSpanishVarika => 'ﬞ', AlphabeticPresentationForms::HebrewLigatureYiddishYodYodPatah => 'ײַ', AlphabeticPresentationForms::HebrewLetterAlternativeAyin => 'ﬠ', AlphabeticPresentationForms::HebrewLetterWideAlef => 'ﬡ', 
AlphabeticPresentationForms::HebrewLetterWideDalet => 'ﬢ', AlphabeticPresentationForms::HebrewLetterWideHe => 'ﬣ', AlphabeticPresentationForms::HebrewLetterWideKaf => 'ﬤ', AlphabeticPresentationForms::HebrewLetterWideLamed => 'ﬥ', AlphabeticPresentationForms::HebrewLetterWideFinalMem => 'ﬦ', AlphabeticPresentationForms::HebrewLetterWideResh => 'ﬧ', AlphabeticPresentationForms::HebrewLetterWideTav => 'ﬨ', AlphabeticPresentationForms::HebrewLetterAlternativePlusSign => '﬩', AlphabeticPresentationForms::HebrewLetterShinWithShinDot => 'שׁ', AlphabeticPresentationForms::HebrewLetterShinWithSinDot => 'שׂ', AlphabeticPresentationForms::HebrewLetterShinWithDageshAndShinDot => 'שּׁ', AlphabeticPresentationForms::HebrewLetterShinWithDageshAndSinDot => 'שּׂ', AlphabeticPresentationForms::HebrewLetterAlefWithPatah => 'אַ', AlphabeticPresentationForms::HebrewLetterAlefWithQamats => 'אָ', AlphabeticPresentationForms::HebrewLetterAlefWithMapiq => 'אּ', AlphabeticPresentationForms::HebrewLetterBetWithDagesh => 'בּ', AlphabeticPresentationForms::HebrewLetterGimelWithDagesh => 'גּ', AlphabeticPresentationForms::HebrewLetterDaletWithDagesh => 'דּ', AlphabeticPresentationForms::HebrewLetterHeWithMapiq => 'הּ', AlphabeticPresentationForms::HebrewLetterVavWithDagesh => 'וּ', AlphabeticPresentationForms::HebrewLetterZayinWithDagesh => 'זּ', AlphabeticPresentationForms::HebrewLetterTetWithDagesh => 'טּ', AlphabeticPresentationForms::HebrewLetterYodWithDagesh => 'יּ', AlphabeticPresentationForms::HebrewLetterFinalKafWithDagesh => 'ךּ', AlphabeticPresentationForms::HebrewLetterKafWithDagesh => 'כּ', AlphabeticPresentationForms::HebrewLetterLamedWithDagesh => 'לּ', AlphabeticPresentationForms::HebrewLetterMemWithDagesh => 'מּ', AlphabeticPresentationForms::HebrewLetterNunWithDagesh => 'נּ', AlphabeticPresentationForms::HebrewLetterSamekhWithDagesh => 'סּ', AlphabeticPresentationForms::HebrewLetterFinalPeWithDagesh => 'ףּ', AlphabeticPresentationForms::HebrewLetterPeWithDagesh => 'פּ', 
AlphabeticPresentationForms::HebrewLetterTsadiWithDagesh => 'צּ', AlphabeticPresentationForms::HebrewLetterQofWithDagesh => 'קּ', AlphabeticPresentationForms::HebrewLetterReshWithDagesh => 'רּ', AlphabeticPresentationForms::HebrewLetterShinWithDagesh => 'שּ', AlphabeticPresentationForms::HebrewLetterTavWithDagesh => 'תּ', AlphabeticPresentationForms::HebrewLetterVavWithHolam => 'וֹ', AlphabeticPresentationForms::HebrewLetterBetWithRafe => 'בֿ', AlphabeticPresentationForms::HebrewLetterKafWithRafe => 'כֿ', AlphabeticPresentationForms::HebrewLetterPeWithRafe => 'פֿ', } } } impl std::convert::TryFrom<char> for AlphabeticPresentationForms { type Error = (); fn try_from(c: char) -> Result<Self, Self::Error> { match c { 'ff' => Ok(AlphabeticPresentationForms::LatinSmallLigatureFf), 'fi' => Ok(AlphabeticPresentationForms::LatinSmallLigatureFi), 'fl' => Ok(AlphabeticPresentationForms::LatinSmallLigatureFl), 'ffi' => Ok(AlphabeticPresentationForms::LatinSmallLigatureFfi), 'ffl' => Ok(AlphabeticPresentationForms::LatinSmallLigatureFfl), 'ſt' => Ok(AlphabeticPresentationForms::LatinSmallLigatureLongST), 'st' => Ok(AlphabeticPresentationForms::LatinSmallLigatureSt), 'ﬓ' => Ok(AlphabeticPresentationForms::ArmenianSmallLigatureMenNow), 'ﬔ' => Ok(AlphabeticPresentationForms::ArmenianSmallLigatureMenEch), 'ﬕ' => Ok(AlphabeticPresentationForms::ArmenianSmallLigatureMenIni), 'ﬖ' => Ok(AlphabeticPresentationForms::ArmenianSmallLigatureVewNow), 'ﬗ' => Ok(AlphabeticPresentationForms::ArmenianSmallLigatureMenXeh), 'יִ' => Ok(AlphabeticPresentationForms::HebrewLetterYodWithHiriq), 'ﬞ' => Ok(AlphabeticPresentationForms::HebrewPointJudeoDashSpanishVarika), 'ײַ' => Ok(AlphabeticPresentationForms::HebrewLigatureYiddishYodYodPatah), 'ﬠ' => Ok(AlphabeticPresentationForms::HebrewLetterAlternativeAyin), 'ﬡ' => Ok(AlphabeticPresentationForms::HebrewLetterWideAlef), 'ﬢ' => Ok(AlphabeticPresentationForms::HebrewLetterWideDalet), 'ﬣ' => Ok(AlphabeticPresentationForms::HebrewLetterWideHe), 'ﬤ' => 
Ok(AlphabeticPresentationForms::HebrewLetterWideKaf), 'ﬥ' => Ok(AlphabeticPresentationForms::HebrewLetterWideLamed), 'ﬦ' => Ok(AlphabeticPresentationForms::HebrewLetterWideFinalMem), 'ﬧ' => Ok(AlphabeticPresentationForms::HebrewLetterWideResh), 'ﬨ' => Ok(AlphabeticPresentationForms::HebrewLetterWideTav), '﬩' => Ok(AlphabeticPresentationForms::HebrewLetterAlternativePlusSign), 'שׁ' => Ok(AlphabeticPresentationForms::HebrewLetterShinWithShinDot), 'שׂ' => Ok(AlphabeticPresentationForms::HebrewLetterShinWithSinDot), 'שּׁ' => Ok(AlphabeticPresentationForms::HebrewLetterShinWithDageshAndShinDot), 'שּׂ' => Ok(AlphabeticPresentationForms::HebrewLetterShinWithDageshAndSinDot), 'אַ' => Ok(AlphabeticPresentationForms::HebrewLetterAlefWithPatah), 'אָ' => Ok(AlphabeticPresentationForms::HebrewLetterAlefWithQamats), 'אּ' => Ok(AlphabeticPresentationForms::HebrewLetterAlefWithMapiq), 'בּ' => Ok(AlphabeticPresentationForms::HebrewLetterBetWithDagesh), 'גּ' => Ok(AlphabeticPresentationForms::HebrewLetterGimelWithDagesh), 'דּ' => Ok(AlphabeticPresentationForms::HebrewLetterDaletWithDagesh), 'הּ' => Ok(AlphabeticPresentationForms::HebrewLetterHeWithMapiq), 'וּ' => Ok(AlphabeticPresentationForms::HebrewLetterVavWithDagesh), 'זּ' => Ok(AlphabeticPresentationForms::HebrewLetterZayinWithDagesh), 'טּ' => Ok(AlphabeticPresentationForms::HebrewLetterTetWithDagesh), 'יּ' => Ok(AlphabeticPresentationForms::HebrewLetterYodWithDagesh), 'ךּ' => Ok(AlphabeticPresentationForms::HebrewLetterFinalKafWithDagesh), 'כּ' => Ok(AlphabeticPresentationForms::HebrewLetterKafWithDagesh), 'לּ' => Ok(AlphabeticPresentationForms::HebrewLetterLamedWithDagesh), 'מּ' => Ok(AlphabeticPresentationForms::HebrewLetterMemWithDagesh), 'נּ' => Ok(AlphabeticPresentationForms::HebrewLetterNunWithDagesh), 'סּ' => Ok(AlphabeticPresentationForms::HebrewLetterSamekhWithDagesh), 'ףּ' => Ok(AlphabeticPresentationForms::HebrewLetterFinalPeWithDagesh), 'פּ' => Ok(AlphabeticPresentationForms::HebrewLetterPeWithDagesh), 'צּ' => 
Ok(AlphabeticPresentationForms::HebrewLetterTsadiWithDagesh), 'קּ' => Ok(AlphabeticPresentationForms::HebrewLetterQofWithDagesh), 'רּ' => Ok(AlphabeticPresentationForms::HebrewLetterReshWithDagesh), 'שּ' => Ok(AlphabeticPresentationForms::HebrewLetterShinWithDagesh), 'תּ' => Ok(AlphabeticPresentationForms::HebrewLetterTavWithDagesh), 'וֹ' => Ok(AlphabeticPresentationForms::HebrewLetterVavWithHolam), 'בֿ' => Ok(AlphabeticPresentationForms::HebrewLetterBetWithRafe), 'כֿ' => Ok(AlphabeticPresentationForms::HebrewLetterKafWithRafe), 'פֿ' => Ok(AlphabeticPresentationForms::HebrewLetterPeWithRafe), _ => Err(()), } } } impl Into<u32> for AlphabeticPresentationForms { fn into(self) -> u32 { let c: char = self.into(); let hex = c .escape_unicode() .to_string() .replace("\\u{", "") .replace("}", ""); u32::from_str_radix(&hex, 16).unwrap() } } impl std::convert::TryFrom<u32> for AlphabeticPresentationForms { type Error = (); fn try_from(u: u32) -> Result<Self, Self::Error> { if let Ok(c) = char::try_from(u) { Self::try_from(c) } else { Err(()) } } } impl Iterator for AlphabeticPresentationForms { type Item = Self; fn next(&mut self) -> Option<Self> { let index: u32 = (*self).into(); use std::convert::TryFrom; Self::try_from(index + 1).ok() } } impl AlphabeticPresentationForms { /// The character with the lowest index in this unicode block pub fn new() -> Self { AlphabeticPresentationForms::LatinSmallLigatureFf } /// The character's name, in sentence case pub fn name(&self) -> String { let s = std::format!("AlphabeticPresentationForms{:#?}", self); string_morph::to_sentence_case(&s) } }
use crate::error::{Error, ErrorType};
use crate::json::{ParseTokens, StackTokens, JSON};
use crate::scanner::Scanner;

/// Validates a `{` (begin-object) token against the parse state accumulated in
/// `json_document`, and updates that state when the token is legal here.
///
/// On success the object frame is recorded (`stack`, `object_member_names`,
/// `last_parsed_token`) and `Ok(())` is returned. On a violation an `Error`
/// spanning the current scanner position is pushed onto
/// `json_document.errors` and `Err(())` is returned.
pub fn validate_begin_object(
    json_document: &mut JSON,
    scanner: &mut Scanner,
) -> Result<(), ()> {
    match &json_document.last_parsed_token {
        Some(last_parsed_token) => match last_parsed_token {
            // Previous token was ','. Legality depends on the enclosing
            // container, i.e. the top of the stack.
            ParseTokens::ValueSeparator => {
                match json_document.stack.last() {
                    Some(token) => match token {
                        StackTokens::BeginObject => {
                            // Invalid use of object as object name.
                            let last_parsed_index = scanner.current().index;
                            let err = Error::new(ErrorType::E143, last_parsed_index, last_parsed_index + 1);
                            json_document.errors.push(err);
                            Err(())
                        }

                        // ',' inside an array: a nested object starts here,
                        // so push a fresh object frame.
                        StackTokens::BeginArray => {
                            json_document.object_has_valid_member = true;
                            json_document.last_parsed_token = Some(ParseTokens::BeginObject);
                            json_document.stack.push(StackTokens::BeginObject);
                            json_document.object_member_names.push(Vec::new());
                            Ok(())
                        }

                        // NOTE(review): any other stack top is accepted
                        // WITHOUT recording an object frame — presumably those
                        // contexts are handled elsewhere; confirm intent.
                        _ => Ok(()),
                    },

                    // A ',' can only have been parsed inside a container, so
                    // an empty stack cannot occur here.
                    None => Ok(()), // unreachable
                }
            }

            // '{' directly after '[' — first element of an array.
            ParseTokens::BeginArray => {
                json_document.object_has_valid_member = true;
                json_document.last_parsed_token = Some(ParseTokens::BeginObject);
                json_document.stack.push(StackTokens::BeginObject);
                json_document.object_member_names.push(Vec::new());
                Ok(())
            }

            // '{' after ':' — the object is a member value. The pending
            // name-separator frame is replaced by this object's frame.
            ParseTokens::NameSeparator => {
                json_document.object_has_valid_member = true;
                json_document.last_parsed_token = Some(ParseTokens::BeginObject);
                json_document.stack.pop();
                json_document.stack.push(StackTokens::BeginObject);
                json_document.object_member_names.push(Vec::new());
                Ok(())
            }

            // Any other preceding token (a completed value, a string, ...)
            // cannot be followed directly by '{'.
            _ => {
                // Illegal begin-object after JSON value.
                let last_parsed_index = scanner.current().index;
                let err = Error::new(ErrorType::E130, last_parsed_index, last_parsed_index + 1);
                json_document.errors.push(err);
                Err(())
            }
        },

        // Nothing parsed yet: this object is the document's root value.
        None => {
            json_document.object_has_valid_member = true;
            json_document.last_parsed_token = Some(ParseTokens::BeginObject);
            json_document.stack.push(StackTokens::BeginObject);
            json_document.object_member_names.push(Vec::new());
            json_document.root_value_parsed = true;
            Ok(())
        }
    }
}
#![doc = "generated by AutoRust 0.1.0"] #![allow(unused_mut)] #![allow(unused_variables)] #![allow(unused_imports)] use super::{models, API_VERSION}; #[non_exhaustive] #[derive(Debug, thiserror :: Error)] #[allow(non_camel_case_types)] pub enum Error { #[error(transparent)] Service_GetProperties(#[from] service::get_properties::Error), #[error(transparent)] Service_SetProperties(#[from] service::set_properties::Error), #[error(transparent)] Service_GetStatistics(#[from] service::get_statistics::Error), #[error(transparent)] Service_ListContainersSegment(#[from] service::list_containers_segment::Error), #[error(transparent)] Service_GetUserDelegationKey(#[from] service::get_user_delegation_key::Error), #[error(transparent)] Service_GetAccountInfo(#[from] service::get_account_info::Error), #[error(transparent)] Service_SubmitBatch(#[from] service::submit_batch::Error), #[error(transparent)] Service_FilterBlobs(#[from] service::filter_blobs::Error), #[error(transparent)] Container_GetProperties(#[from] container::get_properties::Error), #[error(transparent)] Container_Create(#[from] container::create::Error), #[error(transparent)] Container_Delete(#[from] container::delete::Error), #[error(transparent)] Container_SetMetadata(#[from] container::set_metadata::Error), #[error(transparent)] Container_GetAccessPolicy(#[from] container::get_access_policy::Error), #[error(transparent)] Container_SetAccessPolicy(#[from] container::set_access_policy::Error), #[error(transparent)] Container_Restore(#[from] container::restore::Error), #[error(transparent)] Container_Rename(#[from] container::rename::Error), #[error(transparent)] Container_SubmitBatch(#[from] container::submit_batch::Error), #[error(transparent)] Container_AcquireLease(#[from] container::acquire_lease::Error), #[error(transparent)] Container_ReleaseLease(#[from] container::release_lease::Error), #[error(transparent)] Container_RenewLease(#[from] container::renew_lease::Error), #[error(transparent)] 
Container_BreakLease(#[from] container::break_lease::Error), #[error(transparent)] Container_ChangeLease(#[from] container::change_lease::Error), #[error(transparent)] Container_ListBlobFlatSegment(#[from] container::list_blob_flat_segment::Error), #[error(transparent)] Container_ListBlobHierarchySegment(#[from] container::list_blob_hierarchy_segment::Error), #[error(transparent)] Container_GetAccountInfo(#[from] container::get_account_info::Error), #[error(transparent)] Blob_Download(#[from] blob::download::Error), #[error(transparent)] Blob_Delete(#[from] blob::delete::Error), #[error(transparent)] Blob_GetProperties(#[from] blob::get_properties::Error), #[error(transparent)] PageBlob_Create(#[from] page_blob::create::Error), #[error(transparent)] AppendBlob_Create(#[from] append_blob::create::Error), #[error(transparent)] BlockBlob_Upload(#[from] block_blob::upload::Error), #[error(transparent)] BlockBlob_PutBlobFromUrl(#[from] block_blob::put_blob_from_url::Error), #[error(transparent)] Blob_Undelete(#[from] blob::undelete::Error), #[error(transparent)] Blob_SetExpiry(#[from] blob::set_expiry::Error), #[error(transparent)] Blob_SetHttpHeaders(#[from] blob::set_http_headers::Error), #[error(transparent)] Blob_SetImmutabilityPolicy(#[from] blob::set_immutability_policy::Error), #[error(transparent)] Blob_DeleteImmutabilityPolicy(#[from] blob::delete_immutability_policy::Error), #[error(transparent)] Blob_SetLegalHold(#[from] blob::set_legal_hold::Error), #[error(transparent)] Blob_SetMetadata(#[from] blob::set_metadata::Error), #[error(transparent)] Blob_AcquireLease(#[from] blob::acquire_lease::Error), #[error(transparent)] Blob_ReleaseLease(#[from] blob::release_lease::Error), #[error(transparent)] Blob_RenewLease(#[from] blob::renew_lease::Error), #[error(transparent)] Blob_ChangeLease(#[from] blob::change_lease::Error), #[error(transparent)] Blob_BreakLease(#[from] blob::break_lease::Error), #[error(transparent)] Blob_CreateSnapshot(#[from] 
blob::create_snapshot::Error), #[error(transparent)] Blob_StartCopyFromUrl(#[from] blob::start_copy_from_url::Error), #[error(transparent)] Blob_CopyFromUrl(#[from] blob::copy_from_url::Error), #[error(transparent)] Blob_AbortCopyFromUrl(#[from] blob::abort_copy_from_url::Error), #[error(transparent)] Blob_SetTier(#[from] blob::set_tier::Error), #[error(transparent)] Blob_GetAccountInfo(#[from] blob::get_account_info::Error), #[error(transparent)] BlockBlob_StageBlock(#[from] block_blob::stage_block::Error), #[error(transparent)] BlockBlob_StageBlockFromUrl(#[from] block_blob::stage_block_from_url::Error), #[error(transparent)] BlockBlob_GetBlockList(#[from] block_blob::get_block_list::Error), #[error(transparent)] BlockBlob_CommitBlockList(#[from] block_blob::commit_block_list::Error), #[error(transparent)] PageBlob_UploadPages(#[from] page_blob::upload_pages::Error), #[error(transparent)] PageBlob_ClearPages(#[from] page_blob::clear_pages::Error), #[error(transparent)] PageBlob_UploadPagesFromUrl(#[from] page_blob::upload_pages_from_url::Error), #[error(transparent)] PageBlob_GetPageRanges(#[from] page_blob::get_page_ranges::Error), #[error(transparent)] PageBlob_GetPageRangesDiff(#[from] page_blob::get_page_ranges_diff::Error), #[error(transparent)] PageBlob_Resize(#[from] page_blob::resize::Error), #[error(transparent)] PageBlob_UpdateSequenceNumber(#[from] page_blob::update_sequence_number::Error), #[error(transparent)] PageBlob_CopyIncremental(#[from] page_blob::copy_incremental::Error), #[error(transparent)] AppendBlob_AppendBlock(#[from] append_blob::append_block::Error), #[error(transparent)] AppendBlob_AppendBlockFromUrl(#[from] append_blob::append_block_from_url::Error), #[error(transparent)] AppendBlob_Seal(#[from] append_blob::seal::Error), #[error(transparent)] Blob_Query(#[from] blob::query::Error), #[error(transparent)] Blob_GetTags(#[from] blob::get_tags::Error), #[error(transparent)] Blob_SetTags(#[from] blob::set_tags::Error), } pub mod service { 
use super::{models, API_VERSION}; pub async fn get_properties( operation_config: &crate::OperationConfig, restype: &str, comp: &str, timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<models::StorageServiceProperties, get_properties::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/?restype=service&comp=properties", operation_config.base_path(),); let mut url = url::Url::parse(url_str).map_err(get_properties::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_properties::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("restype", restype); url.query_pairs_mut().append_pair("comp", comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_properties::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_properties::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::StorageServiceProperties = serde_json::from_slice(rsp_body).map_err(|source| get_properties::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = 
rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| get_properties::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_properties::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get_properties { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn set_properties( operation_config: &crate::OperationConfig, restype: &str, comp: &str, storage_service_properties: &models::StorageServiceProperties, timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), set_properties::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/?restype=service&comp=properties", operation_config.base_path(),); let mut url = url::Url::parse(url_str).map_err(set_properties::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(set_properties::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } 
url.query_pairs_mut().append_pair("restype", restype); url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(storage_service_properties).map_err(set_properties::Error::SerializeError)?; if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(set_properties::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(set_properties::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::ACCEPTED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| set_properties::Error::DeserializeError(source, rsp_body.clone()))?; Err(set_properties::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod set_properties { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get_statistics( operation_config: 
&crate::OperationConfig, restype: &str, comp: &str, timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<models::StorageServiceStats, get_statistics::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/?restype=service&comp=stats", operation_config.base_path(),); let mut url = url::Url::parse(url_str).map_err(get_statistics::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_statistics::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("restype", restype); url.query_pairs_mut().append_pair("comp", comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_statistics::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_statistics::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::StorageServiceStats = serde_json::from_slice(rsp_body).map_err(|source| get_statistics::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = 
serde_json::from_slice(rsp_body).map_err(|source| get_statistics::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_statistics::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get_statistics { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_containers_segment( operation_config: &crate::OperationConfig, comp: &str, prefix: Option<&str>, marker: Option<&str>, maxresults: Option<i64>, include: &[&str], timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<models::ListContainersSegmentResponse, list_containers_segment::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/?comp=list", operation_config.base_path(),); let mut url = url::Url::parse(url_str).map_err(list_containers_segment::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_containers_segment::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } 
url.query_pairs_mut().append_pair("comp", comp); if let Some(prefix) = prefix { url.query_pairs_mut().append_pair("prefix", prefix); } if let Some(marker) = marker { url.query_pairs_mut().append_pair("marker", marker); } if let Some(maxresults) = maxresults { url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str()); } if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_containers_segment::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_containers_segment::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ListContainersSegmentResponse = serde_json::from_slice(rsp_body) .map_err(|source| list_containers_segment::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| list_containers_segment::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_containers_segment::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list_containers_segment { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute 
request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get_user_delegation_key( operation_config: &crate::OperationConfig, restype: &str, comp: &str, key_info: &models::KeyInfo, timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<models::UserDelegationKey, get_user_delegation_key::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/?restype=service&comp=userdelegationkey", operation_config.base_path(),); let mut url = url::Url::parse(url_str).map_err(get_user_delegation_key::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_user_delegation_key::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("restype", restype); url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(key_info).map_err(get_user_delegation_key::Error::SerializeError)?; if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } req_builder = req_builder.uri(url.as_str()); let req = 
req_builder .body(req_body) .map_err(get_user_delegation_key::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_user_delegation_key::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::UserDelegationKey = serde_json::from_slice(rsp_body) .map_err(|source| get_user_delegation_key::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| get_user_delegation_key::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_user_delegation_key::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get_user_delegation_key { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get_account_info( operation_config: &crate::OperationConfig, restype: &str, comp: &str, x_ms_version: &str, ) -> std::result::Result<(), get_account_info::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/?restype=account&comp=properties", operation_config.base_path(),); let mut url = url::Url::parse(url_str).map_err(get_account_info::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); 
req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_account_info::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("restype", restype); url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("x-ms-version", x_ms_version); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_account_info::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_account_info::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| get_account_info::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_account_info::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get_account_info { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn submit_batch( 
operation_config: &crate::OperationConfig, comp: &str, body: &serde_json::Value, content_length: i64, content_type: &str, timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<serde_json::Value, submit_batch::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/?comp=batch", operation_config.base_path(),); let mut url = url::Url::parse(url_str).map_err(submit_batch::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(submit_batch::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); let req_body = azure_core::to_json(body).map_err(submit_batch::Error::SerializeError)?; req_builder = req_builder.header("Content-Length", content_length); req_builder = req_builder.header("Content-Type", content_type); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(submit_batch::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(submit_batch::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: serde_json::Value = serde_json::from_slice(rsp_body).map_err(|source| submit_batch::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } 
status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| submit_batch::Error::DeserializeError(source, rsp_body.clone()))?; Err(submit_batch::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod submit_batch { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn filter_blobs( operation_config: &crate::OperationConfig, comp: &str, timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, where_: Option<&str>, marker: Option<&str>, maxresults: Option<i64>, ) -> std::result::Result<models::FilterBlobSegment, filter_blobs::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/?comp=blobs", operation_config.base_path(),); let mut url = url::Url::parse(url_str).map_err(filter_blobs::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(filter_blobs::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } 
url.query_pairs_mut().append_pair("comp", comp);
    if let Some(timeout) = timeout {
        url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
    }
    req_builder = req_builder.header("x-ms-version", x_ms_version);
    if let Some(x_ms_client_request_id) = x_ms_client_request_id {
        req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
    }
    // Optional filter and paging parameters are only added when supplied.
    if let Some(where_) = where_ {
        url.query_pairs_mut().append_pair("where", where_);
    }
    if let Some(marker) = marker {
        url.query_pairs_mut().append_pair("marker", marker);
    }
    if let Some(maxresults) = maxresults {
        url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str());
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(filter_blobs::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(filter_blobs::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::FilterBlobSegment = serde_json::from_slice(rsp_body)
                .map_err(|source| filter_blobs::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::StorageError = serde_json::from_slice(rsp_body)
                .map_err(|source| filter_blobs::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(filter_blobs::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for [`filter_blobs`].
pub mod filter_blobs {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::StorageError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
/// Container-scoped operations (`{base}/{container}?restype=container&...`).
pub mod container {
    use super::{models, API_VERSION};
    /// GET container properties; succeeds with `Ok(())` — this generated
    /// wrapper does not read the response body on 200.
    pub async fn get_properties(
        operation_config: &crate::OperationConfig,
        container_name: &str,
        restype: &str,
        timeout: Option<i64>,
        x_ms_lease_id: Option<&str>,
        x_ms_version: &str,
        x_ms_client_request_id: Option<&str>,
    ) -> std::result::Result<(), get_properties::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/{}?restype=container", operation_config.base_path(), container_name);
        let mut url = url::Url::parse(url_str).map_err(get_properties::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get_properties::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("restype", restype);
        if let Some(timeout) = timeout {
            url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
        }
        if let Some(x_ms_lease_id) = x_ms_lease_id {
            req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
        }
        req_builder = req_builder.header("x-ms-version", x_ms_version);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get_properties::Error::BuildRequestError)?;
        let rsp = http_client
.execute_request(req)
        .await
        .map_err(get_properties::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => Ok(()),
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::StorageError = serde_json::from_slice(rsp_body)
                .map_err(|source| get_properties::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_properties::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for [`get_properties`].
pub mod get_properties {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::StorageError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// PUT `{base}/{container}?restype=container` — creates a container;
/// 201 Created maps to `Ok(())`.
pub async fn create(
    operation_config: &crate::OperationConfig,
    container_name: &str,
    restype: &str,
    timeout: Option<i64>,
    x_ms_meta: Option<&str>,
    x_ms_blob_public_access: Option<&str>,
    x_ms_version: &str,
    x_ms_client_request_id: Option<&str>,
    x_ms_default_encryption_scope: Option<&str>,
    x_ms_deny_encryption_scope_override: Option<bool>,
) -> std::result::Result<(), create::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/{}?restype=container", operation_config.base_path(), container_name);
    let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(create::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("restype", restype);
    if let Some(timeout) = timeout {
        url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
    }
    if let Some(x_ms_meta) = x_ms_meta {
        req_builder = req_builder.header("x-ms-meta", x_ms_meta);
    }
    if let Some(x_ms_blob_public_access) = x_ms_blob_public_access {
        req_builder = req_builder.header("x-ms-blob-public-access", x_ms_blob_public_access);
    }
    req_builder = req_builder.header("x-ms-version", x_ms_version);
    if let Some(x_ms_client_request_id) = x_ms_client_request_id {
        req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
    }
    if let Some(x_ms_default_encryption_scope) = x_ms_default_encryption_scope {
        req_builder = req_builder.header("x-ms-default-encryption-scope", x_ms_default_encryption_scope);
    }
    if let Some(x_ms_deny_encryption_scope_override) = x_ms_deny_encryption_scope_override {
        // bool → "true"/"false": header values must be strings.
        req_builder = req_builder.header(
            "x-ms-deny-encryption-scope-override",
            x_ms_deny_encryption_scope_override.to_string(),
        );
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::CREATED => Ok(()),
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::StorageError = serde_json::from_slice(rsp_body)
                .map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(create::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for [`create`].
pub mod create {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::StorageError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// DELETE `{base}/{container}?restype=container` — deletion is asynchronous
/// server-side; 202 Accepted maps to `Ok(())`.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    container_name: &str,
    restype: &str,
    timeout: Option<i64>,
    x_ms_lease_id: Option<&str>,
    if_modified_since: Option<&str>,
    if_unmodified_since: Option<&str>,
    x_ms_version: &str,
    x_ms_client_request_id: Option<&str>,
) -> std::result::Result<(), delete::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/{}?restype=container", operation_config.base_path(), container_name);
    let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(delete::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("restype", restype);
    if let Some(timeout) = timeout {
        url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
    }
    if let Some(x_ms_lease_id) = x_ms_lease_id {
        req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
    }
    if let Some(if_modified_since) = if_modified_since {
        req_builder = req_builder.header("If-Modified-Since", if_modified_since);
    }
    if let Some(if_unmodified_since) = if_unmodified_since {
        req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since);
    }
    req_builder = req_builder.header("x-ms-version", x_ms_version);
    if let Some(x_ms_client_request_id) = x_ms_client_request_id {
        req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::ACCEPTED => Ok(()),
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::StorageError = serde_json::from_slice(rsp_body)
                .map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(delete::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for [`delete`].
pub mod delete {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::StorageError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// PUT `{base}/{container}?restype=container&comp=metadata` — sets container
/// metadata; 200 OK maps to `Ok(())`.
pub async fn set_metadata(
    operation_config: &crate::OperationConfig,
    container_name: &str,
    restype: &str,
    comp: &str,
    timeout: Option<i64>,
    x_ms_lease_id: Option<&str>,
    x_ms_meta: Option<&str>,
    if_modified_since:
Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), set_metadata::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}?restype=container&comp=metadata", operation_config.base_path(), container_name ); let mut url = url::Url::parse(url_str).map_err(set_metadata::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(set_metadata::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("restype", restype); url.query_pairs_mut().append_pair("comp", comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_meta) = x_ms_meta { req_builder = req_builder.header("x-ms-meta", x_ms_meta); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(set_metadata::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(set_metadata::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: 
models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| set_metadata::Error::DeserializeError(source, rsp_body.clone()))?; Err(set_metadata::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod set_metadata { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get_access_policy( operation_config: &crate::OperationConfig, container_name: &str, restype: &str, comp: &str, timeout: Option<i64>, x_ms_lease_id: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<models::SignedIdentifiers, get_access_policy::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}?restype=container&comp=acl", operation_config.base_path(), container_name); let mut url = url::Url::parse(url_str).map_err(get_access_policy::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_access_policy::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } 
url.query_pairs_mut().append_pair("restype", restype);
    url.query_pairs_mut().append_pair("comp", comp);
    if let Some(timeout) = timeout {
        url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
    }
    if let Some(x_ms_lease_id) = x_ms_lease_id {
        req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
    }
    req_builder = req_builder.header("x-ms-version", x_ms_version);
    if let Some(x_ms_client_request_id) = x_ms_client_request_id {
        req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get_access_policy::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_access_policy::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::SignedIdentifiers = serde_json::from_slice(rsp_body)
                .map_err(|source| get_access_policy::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::StorageError = serde_json::from_slice(rsp_body)
                .map_err(|source| get_access_policy::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_access_policy::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for [`get_access_policy`].
pub mod get_access_policy {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::StorageError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// PUT `{base}/{container}?restype=container&comp=acl` — replaces the
/// container access policy; the ACL body (`container_acl`) is optional.
pub async fn set_access_policy(
    operation_config: &crate::OperationConfig,
    container_name: &str,
    restype: &str,
    comp: &str,
    container_acl: Option<&models::SignedIdentifiers>,
    timeout: Option<i64>,
    x_ms_lease_id: Option<&str>,
    x_ms_blob_public_access: Option<&str>,
    if_modified_since: Option<&str>,
    if_unmodified_since: Option<&str>,
    x_ms_version: &str,
    x_ms_client_request_id: Option<&str>,
) -> std::result::Result<(), set_access_policy::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/{}?restype=container&comp=acl", operation_config.base_path(), container_name);
    let mut url = url::Url::parse(url_str).map_err(set_access_policy::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(set_access_policy::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("restype", restype);
    url.query_pairs_mut().append_pair("comp", comp);
    // The JSON content-type header is only attached when an ACL body is sent.
    let req_body = if let Some(container_acl) = container_acl {
        req_builder = req_builder.header("content-type", "application/json");
        azure_core::to_json(container_acl).map_err(set_access_policy::Error::SerializeError)?
} else {
        // No ACL supplied: send an empty body.
        bytes::Bytes::from_static(azure_core::EMPTY_BODY)
    };
    if let Some(timeout) = timeout {
        url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
    }
    if let Some(x_ms_lease_id) = x_ms_lease_id {
        req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
    }
    if let Some(x_ms_blob_public_access) = x_ms_blob_public_access {
        req_builder = req_builder.header("x-ms-blob-public-access", x_ms_blob_public_access);
    }
    if let Some(if_modified_since) = if_modified_since {
        req_builder = req_builder.header("If-Modified-Since", if_modified_since);
    }
    if let Some(if_unmodified_since) = if_unmodified_since {
        req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since);
    }
    req_builder = req_builder.header("x-ms-version", x_ms_version);
    if let Some(x_ms_client_request_id) = x_ms_client_request_id {
        req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
    }
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(set_access_policy::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(set_access_policy::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => Ok(()),
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::StorageError = serde_json::from_slice(rsp_body)
                .map_err(|source| set_access_policy::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(set_access_policy::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for [`set_access_policy`].
pub mod set_access_policy {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::StorageError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// PUT `{base}/{container}?restype=container&comp=undelete` — restores a
/// soft-deleted container; 201 Created maps to `Ok(())`.
pub async fn restore(
    operation_config: &crate::OperationConfig,
    container_name: &str,
    restype: &str,
    comp: &str,
    timeout: Option<i64>,
    x_ms_version: &str,
    x_ms_client_request_id: Option<&str>,
    x_ms_deleted_container_name: Option<&str>,
    x_ms_deleted_container_version: Option<&str>,
) -> std::result::Result<(), restore::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/{}?restype=container&comp=undelete",
        operation_config.base_path(),
        container_name
    );
    let mut url = url::Url::parse(url_str).map_err(restore::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(restore::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("restype", restype);
    url.query_pairs_mut().append_pair("comp", comp);
    if let Some(timeout) = timeout {
        url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
    }
    req_builder = req_builder.header("x-ms-version", x_ms_version);
    if let Some(x_ms_client_request_id) = x_ms_client_request_id {
        req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
    }
    if let Some(x_ms_deleted_container_name) = x_ms_deleted_container_name {
        req_builder = req_builder.header("x-ms-deleted-container-name", x_ms_deleted_container_name);
    }
    if let Some(x_ms_deleted_container_version) = x_ms_deleted_container_version {
        req_builder =
req_builder.header("x-ms-deleted-container-version", x_ms_deleted_container_version);
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(restore::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(restore::Error::ExecuteRequestError)?;
    match rsp.status() {
        // A successful undelete returns 201 Created.
        http::StatusCode::CREATED => Ok(()),
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::StorageError = serde_json::from_slice(rsp_body)
                .map_err(|source| restore::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(restore::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for [`restore`].
pub mod restore {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::StorageError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// PUT `{base}/{container}?restype=container&comp=rename` — renames the
/// container named by `x_ms_source_container_name` to `container_name`.
pub async fn rename(
    operation_config: &crate::OperationConfig,
    container_name: &str,
    restype: &str,
    comp: &str,
    timeout: Option<i64>,
    x_ms_version: &str,
    x_ms_client_request_id: Option<&str>,
    x_ms_source_container_name: &str,
    x_ms_source_lease_id: Option<&str>,
) -> std::result::Result<(), rename::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/{}?restype=container&comp=rename", operation_config.base_path(), container_name);
    let mut url = url::Url::parse(url_str).map_err(rename::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(rename::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("restype", restype);
    url.query_pairs_mut().append_pair("comp", comp);
    if let Some(timeout) = timeout {
        url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
    }
    req_builder = req_builder.header("x-ms-version", x_ms_version);
    if let Some(x_ms_client_request_id) = x_ms_client_request_id {
        req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
    }
    // The source container name header is unconditional: a rename always needs it.
    req_builder = req_builder.header("x-ms-source-container-name", x_ms_source_container_name);
    if let Some(x_ms_source_lease_id) = x_ms_source_lease_id {
        req_builder = req_builder.header("x-ms-source-lease-id", x_ms_source_lease_id);
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(rename::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(rename::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => Ok(()),
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::StorageError = serde_json::from_slice(rsp_body)
                .map_err(|source| rename::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(rename::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for [`rename`].
pub mod rename {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value:
models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn submit_batch( operation_config: &crate::OperationConfig, container_name: &str, restype: &str, comp: &str, body: &serde_json::Value, content_length: i64, content_type: &str, timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<serde_json::Value, submit_batch::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}?restype=container&comp=batch", operation_config.base_path(), container_name); let mut url = url::Url::parse(url_str).map_err(submit_batch::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(submit_batch::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("restype", restype); url.query_pairs_mut().append_pair("comp", comp); let req_body = azure_core::to_json(body).map_err(submit_batch::Error::SerializeError)?; req_builder = req_builder.header("Content-Length", content_length); req_builder = req_builder.header("Content-Type", content_type); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } 
req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(submit_batch::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(submit_batch::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::ACCEPTED => { let rsp_body = rsp.body(); let rsp_value: serde_json::Value = serde_json::from_slice(rsp_body).map_err(|source| submit_batch::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| submit_batch::Error::DeserializeError(source, rsp_body.clone()))?; Err(submit_batch::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod submit_batch { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn acquire_lease( operation_config: &crate::OperationConfig, container_name: &str, comp: &str, restype: &str, x_ms_lease_action: &str, timeout: Option<i64>, x_ms_lease_duration: Option<i64>, x_ms_proposed_lease_id: Option<&str>, if_modified_since: 
Option<&str>, if_unmodified_since: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), acquire_lease::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}?comp=lease&restype=container&acquire", operation_config.base_path(), container_name ); let mut url = url::Url::parse(url_str).map_err(acquire_lease::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(acquire_lease::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); url.query_pairs_mut().append_pair("restype", restype); req_builder = req_builder.header("x-ms-lease-action", x_ms_lease_action); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_lease_duration) = x_ms_lease_duration { req_builder = req_builder.header("x-ms-lease-duration", x_ms_lease_duration); } if let Some(x_ms_proposed_lease_id) = x_ms_proposed_lease_id { req_builder = req_builder.header("x-ms-proposed-lease-id", x_ms_proposed_lease_id); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = 
req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(acquire_lease::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(acquire_lease::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| acquire_lease::Error::DeserializeError(source, rsp_body.clone()))?; Err(acquire_lease::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod acquire_lease { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn release_lease( operation_config: &crate::OperationConfig, container_name: &str, comp: &str, restype: &str, x_ms_lease_action: &str, timeout: Option<i64>, x_ms_lease_id: &str, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), release_lease::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}?comp=lease&restype=container&release", operation_config.base_path(), container_name ); let mut url = url::Url::parse(url_str).map_err(release_lease::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); 
req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(release_lease::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); url.query_pairs_mut().append_pair("restype", restype); req_builder = req_builder.header("x-ms-lease-action", x_ms_lease_action); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(release_lease::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(release_lease::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| release_lease::Error::DeserializeError(source, rsp_body.clone()))?; Err(release_lease::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod release_lease { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status 
code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn renew_lease( operation_config: &crate::OperationConfig, container_name: &str, comp: &str, restype: &str, x_ms_lease_action: &str, timeout: Option<i64>, x_ms_lease_id: &str, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), renew_lease::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}?comp=lease&restype=container&renew", operation_config.base_path(), container_name ); let mut url = url::Url::parse(url_str).map_err(renew_lease::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(renew_lease::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); url.query_pairs_mut().append_pair("restype", restype); req_builder = req_builder.header("x-ms-lease-action", x_ms_lease_action); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = 
req_builder.header("x-ms-lease-id", x_ms_lease_id); if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(renew_lease::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(renew_lease::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| renew_lease::Error::DeserializeError(source, rsp_body.clone()))?; Err(renew_lease::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod renew_lease { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn break_lease( operation_config: &crate::OperationConfig, 
container_name: &str, comp: &str, restype: &str, x_ms_lease_action: &str, timeout: Option<i64>, x_ms_lease_break_period: Option<i64>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), break_lease::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}?comp=lease&restype=container&break", operation_config.base_path(), container_name ); let mut url = url::Url::parse(url_str).map_err(break_lease::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(break_lease::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); url.query_pairs_mut().append_pair("restype", restype); req_builder = req_builder.header("x-ms-lease-action", x_ms_lease_action); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_lease_break_period) = x_ms_lease_break_period { req_builder = req_builder.header("x-ms-lease-break-period", x_ms_lease_break_period); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); 
req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(break_lease::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(break_lease::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::ACCEPTED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| break_lease::Error::DeserializeError(source, rsp_body.clone()))?; Err(break_lease::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod break_lease { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn change_lease( operation_config: &crate::OperationConfig, container_name: &str, comp: &str, restype: &str, x_ms_lease_action: &str, timeout: Option<i64>, x_ms_lease_id: &str, x_ms_proposed_lease_id: &str, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), change_lease::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}?comp=lease&restype=container&change", operation_config.base_path(), container_name ); let mut url = url::Url::parse(url_str).map_err(change_lease::Error::ParseUrlError)?; let mut req_builder = 
http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(change_lease::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); url.query_pairs_mut().append_pair("restype", restype); req_builder = req_builder.header("x-ms-lease-action", x_ms_lease_action); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); req_builder = req_builder.header("x-ms-proposed-lease-id", x_ms_proposed_lease_id); if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(change_lease::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(change_lease::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| change_lease::Error::DeserializeError(source, rsp_body.clone()))?; Err(change_lease::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod change_lease { 
use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_blob_flat_segment( operation_config: &crate::OperationConfig, container_name: &str, restype: &str, comp: &str, prefix: Option<&str>, marker: Option<&str>, maxresults: Option<i64>, include: &[&str], timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<models::ListBlobsFlatSegmentResponse, list_blob_flat_segment::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}?restype=container&comp=list&flat", operation_config.base_path(), container_name ); let mut url = url::Url::parse(url_str).map_err(list_blob_flat_segment::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_blob_flat_segment::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("restype", restype); url.query_pairs_mut().append_pair("comp", comp); if let Some(prefix) = prefix { 
url.query_pairs_mut().append_pair("prefix", prefix); } if let Some(marker) = marker { url.query_pairs_mut().append_pair("marker", marker); } if let Some(maxresults) = maxresults { url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str()); } if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_blob_flat_segment::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_blob_flat_segment::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ListBlobsFlatSegmentResponse = serde_json::from_slice(rsp_body) .map_err(|source| list_blob_flat_segment::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| list_blob_flat_segment::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_blob_flat_segment::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list_blob_flat_segment { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize 
request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_blob_hierarchy_segment( operation_config: &crate::OperationConfig, container_name: &str, restype: &str, comp: &str, prefix: Option<&str>, delimiter: &str, marker: Option<&str>, maxresults: Option<i64>, include: &[&str], timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<models::ListBlobsHierarchySegmentResponse, list_blob_hierarchy_segment::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}?restype=container&comp=list&hierarchy", operation_config.base_path(), container_name ); let mut url = url::Url::parse(url_str).map_err(list_blob_hierarchy_segment::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_blob_hierarchy_segment::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("restype", restype); url.query_pairs_mut().append_pair("comp", comp); if let Some(prefix) = prefix { url.query_pairs_mut().append_pair("prefix", prefix); } url.query_pairs_mut().append_pair("delimiter", delimiter); if let Some(marker) = marker { url.query_pairs_mut().append_pair("marker", marker); } if let Some(maxresults) = maxresults { url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str()); } if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = 
req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_blob_hierarchy_segment::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_blob_hierarchy_segment::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::ListBlobsHierarchySegmentResponse = serde_json::from_slice(rsp_body) .map_err(|source| list_blob_hierarchy_segment::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| list_blob_hierarchy_segment::Error::DeserializeError(source, rsp_body.clone()))?; Err(list_blob_hierarchy_segment::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list_blob_hierarchy_segment { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get_account_info( operation_config: &crate::OperationConfig, container_name: &str, 
restype: &str, comp: &str, x_ms_version: &str, ) -> std::result::Result<(), get_account_info::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}?restype=account&comp=properties", operation_config.base_path(), container_name ); let mut url = url::Url::parse(url_str).map_err(get_account_info::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_account_info::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("restype", restype); url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("x-ms-version", x_ms_version); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_account_info::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_account_info::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| get_account_info::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_account_info::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get_account_info { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] 
// --- Tail of the Error enum for the operation module preceding this chunk
// (generated code: one such enum per operation; its opening lines are above this view).
BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
// Generated operations for the Blob resource. Each operation is a free async fn plus a
// sibling module of the same name holding its Response/Error types.
pub mod blob { use super::{models, API_VERSION};
// download: GET {base_path}/{container_name}/{blob}. Optional arguments map 1:1 onto
// query parameters (snapshot, versionid, timeout) and request headers (x-ms-*, If-*);
// a bearer token is attached only when a token credential is configured.
// NOTE(review): the response body is deserialized as serde_json::Value, which will fail
// for non-JSON blob content — presumably a codegen artifact; verify against real payloads.
pub async fn download( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, snapshot: Option<&str>, versionid: Option<&str>, timeout: Option<i64>, x_ms_range: Option<&str>, x_ms_lease_id: Option<&str>, x_ms_range_get_content_md5: Option<bool>, x_ms_range_get_content_crc64: Option<bool>, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<download::Response, download::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(download::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(download::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } if let Some(snapshot) = snapshot { url.query_pairs_mut().append_pair("snapshot", snapshot); } if let Some(versionid) = versionid { url.query_pairs_mut().append_pair("versionid", versionid); }
// Optional range/lease/encryption/conditional headers: each is set only when provided.
if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_range) = x_ms_range { req_builder = req_builder.header("x-ms-range", x_ms_range); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_range_get_content_md5) = x_ms_range_get_content_md5 { req_builder = req_builder.header("x-ms-range-get-content-md5", x_ms_range_get_content_md5.to_string()); } if let Some(x_ms_range_get_content_crc64) = x_ms_range_get_content_crc64 { req_builder = req_builder.header("x-ms-range-get-content-crc64", x_ms_range_get_content_crc64.to_string()); } if let Some(x_ms_encryption_key) = x_ms_encryption_key { req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm { req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body =
bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(download::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(download::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: serde_json::Value = serde_json::from_slice(rsp_body).map_err(|source| download::Error::DeserializeError(source, rsp_body.clone()))?; Ok(download::Response::Ok200(rsp_value)) } http::StatusCode::PARTIAL_CONTENT => { let rsp_body = rsp.body(); let rsp_value: serde_json::Value = serde_json::from_slice(rsp_body).map_err(|source| download::Error::DeserializeError(source, rsp_body.clone()))?; Ok(download::Response::PartialContent206(rsp_value)) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| download::Error::DeserializeError(source, rsp_body.clone()))?; Err(download::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Response (200 full body / 206 partial) and Error types for `download`.
pub mod download { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(serde_json::Value), PartialContent206(serde_json::Value), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// delete: DELETE {base_path}/{container_name}/{blob}; returns () on 202 Accepted.
// The parameter list continues on the following source line.
pub async fn delete( operation_config:
// Continuation of the `delete` signature opened on the previous line. Optional args map
// onto query parameters (snapshot, versionid, timeout, deletetype) and headers.
&crate::OperationConfig, container_name: &str, blob: &str, snapshot: Option<&str>, versionid: Option<&str>, timeout: Option<i64>, x_ms_lease_id: Option<&str>, x_ms_delete_snapshots: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, deletetype: Option<&str>, ) -> std::result::Result<(), delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } if let Some(snapshot) = snapshot { url.query_pairs_mut().append_pair("snapshot", snapshot); } if let Some(versionid) = versionid { url.query_pairs_mut().append_pair("versionid", versionid); } if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_delete_snapshots) = x_ms_delete_snapshots { req_builder = req_builder.header("x-ms-delete-snapshots", x_ms_delete_snapshots); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match {
// Remaining conditional headers; `deletetype` is appended to the query string after the
// headers are set (order is irrelevant for the final URL). Only 202 is a success.
req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(deletetype) = deletetype { url.query_pairs_mut().append_pair("deletetype", deletetype); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::ACCEPTED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error types for `delete` (no Response enum: success is the unit type).
pub mod delete { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// get_properties: HEAD {base_path}/{container_name}/{blob}; returns () on 200.
// Signature continues on the following source line.
pub async fn get_properties(
// Continuation of the `get_properties` signature opened on the previous line.
// NOTE(review): being a HEAD request, the interesting output is the response headers,
// which this generated fn discards — presumably callers use a richer wrapper; verify.
operation_config: &crate::OperationConfig, container_name: &str, blob: &str, snapshot: Option<&str>, versionid: Option<&str>, timeout: Option<i64>, x_ms_lease_id: Option<&str>, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), get_properties::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(get_properties::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::HEAD); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_properties::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } if let Some(snapshot) = snapshot { url.query_pairs_mut().append_pair("snapshot", snapshot); } if let Some(versionid) = versionid { url.query_pairs_mut().append_pair("versionid", versionid); } if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_encryption_key) = x_ms_encryption_key { req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) =
// Remaining optional headers, then build/send; only 200 is a success.
x_ms_encryption_algorithm { req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_properties::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_properties::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| get_properties::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_properties::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error types for `get_properties`; the enum continues on the next source line.
pub mod get_properties { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")]
// Tail of the `get_properties` Error enum opened on the previous line.
ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// undelete: PUT {base_path}/{container_name}/{blob}?comp=undelete; returns () on 200.
// NOTE(review): the URL literal already contains `?comp=undelete` AND the `comp` argument
// is appended again via query_pairs_mut, producing a duplicate `comp` parameter —
// presumably a codegen artifact accepted by the service; verify.
pub async fn undelete( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), undelete::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=undelete", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(undelete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(undelete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(undelete::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(undelete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body();
// Non-200: parse the body as a StorageError and surface it as DefaultResponse.
let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| undelete::Error::DeserializeError(source, rsp_body.clone()))?; Err(undelete::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error types for `undelete`.
pub mod undelete { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// set_expiry: PUT {base_path}/{container_name}/{blob}?comp=expiry; returns () on 200.
// Same duplicate-`comp` pattern as undelete (literal `?comp=expiry` plus appended `comp`).
pub async fn set_expiry( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, x_ms_expiry_option: &str, x_ms_expiry_time: Option<&str>, ) -> std::result::Result<(), set_expiry::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=expiry", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(set_expiry::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(set_expiry::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp",
comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } req_builder = req_builder.header("x-ms-expiry-option", x_ms_expiry_option); if let Some(x_ms_expiry_time) = x_ms_expiry_time { req_builder = req_builder.header("x-ms-expiry-time", x_ms_expiry_time); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(set_expiry::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(set_expiry::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| set_expiry::Error::DeserializeError(source, rsp_body.clone()))?; Err(set_expiry::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod set_expiry { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn set_http_headers( operation_config: &crate::OperationConfig, 
// Continuation of the `set_http_headers` signature opened on the previous line.
// NOTE(review): the URL literal carries a valueless `SetHTTPHeaders` query token in
// addition to `comp=properties`, and `comp` is appended again below — presumably codegen
// artifacts; verify against the service's accepted query syntax.
container_name: &str, blob: &str, comp: &str, timeout: Option<i64>, x_ms_blob_cache_control: Option<&str>, x_ms_blob_content_type: Option<&str>, x_ms_blob_content_md5: Option<&str>, x_ms_blob_content_encoding: Option<&str>, x_ms_blob_content_language: Option<&str>, x_ms_lease_id: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_blob_content_disposition: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), set_http_headers::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}/{}?comp=properties&SetHTTPHeaders", operation_config.base_path(), container_name, blob ); let mut url = url::Url::parse(url_str).map_err(set_http_headers::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(set_http_headers::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_blob_cache_control) = x_ms_blob_cache_control { req_builder = req_builder.header("x-ms-blob-cache-control", x_ms_blob_cache_control); } if let Some(x_ms_blob_content_type) = x_ms_blob_content_type { req_builder = req_builder.header("x-ms-blob-content-type", x_ms_blob_content_type); } if let Some(x_ms_blob_content_md5) = x_ms_blob_content_md5 { req_builder = req_builder.header("x-ms-blob-content-md5", x_ms_blob_content_md5); } if let Some(x_ms_blob_content_encoding) = x_ms_blob_content_encoding { req_builder
// Remaining optional blob-property and conditional headers, then build/send (200 = ok).
= req_builder.header("x-ms-blob-content-encoding", x_ms_blob_content_encoding); } if let Some(x_ms_blob_content_language) = x_ms_blob_content_language { req_builder = req_builder.header("x-ms-blob-content-language", x_ms_blob_content_language); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } if let Some(x_ms_blob_content_disposition) = x_ms_blob_content_disposition { req_builder = req_builder.header("x-ms-blob-content-disposition", x_ms_blob_content_disposition); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(set_http_headers::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(set_http_headers::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| set_http_headers::Error::DeserializeError(source, rsp_body.clone()))?; Err(set_http_headers::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error types for `set_http_headers`.
pub mod
set_http_headers { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// set_immutability_policy: PUT ...?comp=immutabilityPolicies; returns () on 200.
pub async fn set_immutability_policy( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, if_unmodified_since: Option<&str>, x_ms_immutability_policy_until_date: Option<&str>, x_ms_immutability_policy_mode: Option<&str>, ) -> std::result::Result<(), set_immutability_policy::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}/{}?comp=immutabilityPolicies", operation_config.base_path(), container_name, blob ); let mut url = url::Url::parse(url_str).map_err(set_immutability_policy::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(set_immutability_policy::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout",
// Continuation of `set_immutability_policy`: optional policy headers, build/send (200 = ok).
timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(x_ms_immutability_policy_until_date) = x_ms_immutability_policy_until_date { req_builder = req_builder.header("x-ms-immutability-policy-until-date", x_ms_immutability_policy_until_date); } if let Some(x_ms_immutability_policy_mode) = x_ms_immutability_policy_mode { req_builder = req_builder.header("x-ms-immutability-policy-mode", x_ms_immutability_policy_mode); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(set_immutability_policy::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(set_immutability_policy::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| set_immutability_policy::Error::DeserializeError(source, rsp_body.clone()))?; Err(set_immutability_policy::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error types for `set_immutability_policy`; enum continues on the next source line.
pub mod set_immutability_policy { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// delete_immutability_policy: DELETE ...?comp=immutabilityPolicies; returns () on 200.
pub async fn delete_immutability_policy( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), delete_immutability_policy::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}/{}?comp=immutabilityPolicies", operation_config.base_path(), container_name, blob ); let mut url = url::Url::parse(url_str).map_err(delete_immutability_policy::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete_immutability_policy::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(delete_immutability_policy::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(delete_immutability_policy::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code
// Non-200 branch: parse StorageError and wrap in DefaultResponse.
=> { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| delete_immutability_policy::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete_immutability_policy::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error types for `delete_immutability_policy`.
pub mod delete_immutability_policy { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// set_legal_hold: PUT ...?comp=legalhold; x_ms_legal_hold is a required bool header.
// Body continues on the next source line.
pub async fn set_legal_hold( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, x_ms_legal_hold: bool, ) -> std::result::Result<(), set_legal_hold::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=legalhold", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(set_legal_hold::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(set_legal_hold::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}",
// Continuation of `set_legal_hold`: x-ms-legal-hold sent as "true"/"false"; 200 = ok.
token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } req_builder = req_builder.header("x-ms-legal-hold", x_ms_legal_hold.to_string()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(set_legal_hold::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(set_legal_hold::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| set_legal_hold::Error::DeserializeError(source, rsp_body.clone()))?; Err(set_legal_hold::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error types for `set_legal_hold`.
pub mod set_legal_hold { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// set_metadata: PUT ...?comp=metadata; returns () on 200. Signature continues below.
// NOTE(review): the whole metadata set is passed as ONE "x-ms-meta" header string rather
// than per-key "x-ms-meta-<name>" headers — presumably a codegen simplification; verify.
pub async fn set_metadata( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp:
&str, timeout: Option<i64>, x_ms_meta: Option<&str>, x_ms_lease_id: Option<&str>, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, x_ms_encryption_scope: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), set_metadata::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=metadata", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(set_metadata::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(set_metadata::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_meta) = x_ms_meta { req_builder = req_builder.header("x-ms-meta", x_ms_meta); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_encryption_key) = x_ms_encryption_key { req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm { req_builder = req_builder.header("x-ms-encryption-algorithm",
x_ms_encryption_algorithm); } if let Some(x_ms_encryption_scope) = x_ms_encryption_scope { req_builder = req_builder.header("x-ms-encryption-scope", x_ms_encryption_scope); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(set_metadata::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(set_metadata::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| set_metadata::Error::DeserializeError(source, rsp_body.clone()))?; Err(set_metadata::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod set_metadata { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), 
#[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn acquire_lease( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, x_ms_lease_action: &str, timeout: Option<i64>, x_ms_lease_duration: Option<i64>, x_ms_proposed_lease_id: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), acquire_lease::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=lease&acquire", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(acquire_lease::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(acquire_lease::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("x-ms-lease-action", x_ms_lease_action); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_lease_duration) = x_ms_lease_duration { req_builder = req_builder.header("x-ms-lease-duration", x_ms_lease_duration); } if let Some(x_ms_proposed_lease_id) = x_ms_proposed_lease_id { req_builder = 
req_builder.header("x-ms-proposed-lease-id", x_ms_proposed_lease_id); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(acquire_lease::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(acquire_lease::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| acquire_lease::Error::DeserializeError(source, rsp_body.clone()))?; Err(acquire_lease::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod acquire_lease { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), 
#[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn release_lease( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, x_ms_lease_action: &str, timeout: Option<i64>, x_ms_lease_id: &str, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), release_lease::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=lease&release", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(release_lease::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(release_lease::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("x-ms-lease-action", x_ms_lease_action); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) 
= if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(release_lease::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(release_lease::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| release_lease::Error::DeserializeError(source, rsp_body.clone()))?; Err(release_lease::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod release_lease { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn renew_lease( operation_config: &crate::OperationConfig, container_name: &str, blob: 
&str, comp: &str, x_ms_lease_action: &str, timeout: Option<i64>, x_ms_lease_id: &str, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), renew_lease::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=lease&renew", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(renew_lease::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(renew_lease::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("x-ms-lease-action", x_ms_lease_action); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let 
Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(renew_lease::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(renew_lease::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| renew_lease::Error::DeserializeError(source, rsp_body.clone()))?; Err(renew_lease::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod renew_lease { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn change_lease( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, x_ms_lease_action: &str, timeout: Option<i64>, x_ms_lease_id: &str, x_ms_proposed_lease_id: &str, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), change_lease::Error> { 
let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=lease&change", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(change_lease::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(change_lease::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("x-ms-lease-action", x_ms_lease_action); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); req_builder = req_builder.header("x-ms-proposed-lease-id", x_ms_proposed_lease_id); if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = 
req_builder.body(req_body).map_err(change_lease::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(change_lease::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| change_lease::Error::DeserializeError(source, rsp_body.clone()))?; Err(change_lease::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod change_lease { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn break_lease( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, x_ms_lease_action: &str, timeout: Option<i64>, x_ms_lease_break_period: Option<i64>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), break_lease::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=lease&break", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(break_lease::Error::ParseUrlError)?; let mut req_builder = 
http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(break_lease::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("x-ms-lease-action", x_ms_lease_action); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_lease_break_period) = x_ms_lease_break_period { req_builder = req_builder.header("x-ms-lease-break-period", x_ms_lease_break_period); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(break_lease::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(break_lease::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::ACCEPTED => Ok(()), status_code => { let rsp_body = 
rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| break_lease::Error::DeserializeError(source, rsp_body.clone()))?; Err(break_lease::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod break_lease { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_snapshot( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, timeout: Option<i64>, x_ms_meta: Option<&str>, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, x_ms_encryption_scope: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_lease_id: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), create_snapshot::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=snapshot", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(create_snapshot::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = 
operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_snapshot::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_meta) = x_ms_meta { req_builder = req_builder.header("x-ms-meta", x_ms_meta); } if let Some(x_ms_encryption_key) = x_ms_encryption_key { req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm { req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm); } if let Some(x_ms_encryption_scope) = x_ms_encryption_scope { req_builder = req_builder.header("x-ms-encryption-scope", x_ms_encryption_scope); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { 
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_snapshot::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_snapshot::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| create_snapshot::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_snapshot::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod create_snapshot { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn start_copy_from_url( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, timeout: Option<i64>, x_ms_meta: Option<&str>, x_ms_access_tier: Option<&str>, x_ms_rehydrate_priority: Option<&str>, x_ms_source_if_modified_since: Option<&str>, x_ms_source_if_unmodified_since: Option<&str>, x_ms_source_if_match: Option<&str>, x_ms_source_if_none_match: Option<&str>, x_ms_source_if_tags: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, 
if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_copy_source: &str, x_ms_lease_id: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, x_ms_tags: Option<&str>, x_ms_seal_blob: Option<bool>, x_ms_immutability_policy_until_date: Option<&str>, x_ms_immutability_policy_mode: Option<&str>, x_ms_legal_hold: Option<bool>, ) -> std::result::Result<(), start_copy_from_url::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=copy", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(start_copy_from_url::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(start_copy_from_url::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_meta) = x_ms_meta { req_builder = req_builder.header("x-ms-meta", x_ms_meta); } if let Some(x_ms_access_tier) = x_ms_access_tier { req_builder = req_builder.header("x-ms-access-tier", x_ms_access_tier); } if let Some(x_ms_rehydrate_priority) = x_ms_rehydrate_priority { req_builder = req_builder.header("x-ms-rehydrate-priority", x_ms_rehydrate_priority); } if let Some(x_ms_source_if_modified_since) = x_ms_source_if_modified_since { req_builder = req_builder.header("x-ms-source-if-modified-since", x_ms_source_if_modified_since); } if let Some(x_ms_source_if_unmodified_since) = x_ms_source_if_unmodified_since { req_builder = req_builder.header("x-ms-source-if-unmodified-since", x_ms_source_if_unmodified_since); } if let 
Some(x_ms_source_if_match) = x_ms_source_if_match { req_builder = req_builder.header("x-ms-source-if-match", x_ms_source_if_match); } if let Some(x_ms_source_if_none_match) = x_ms_source_if_none_match { req_builder = req_builder.header("x-ms-source-if-none-match", x_ms_source_if_none_match); } if let Some(x_ms_source_if_tags) = x_ms_source_if_tags { req_builder = req_builder.header("x-ms-source-if-tags", x_ms_source_if_tags); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-copy-source", x_ms_copy_source); if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(x_ms_tags) = x_ms_tags { req_builder = req_builder.header("x-ms-tags", x_ms_tags); } if let Some(x_ms_seal_blob) = x_ms_seal_blob { req_builder = req_builder.header("x-ms-seal-blob", x_ms_seal_blob.to_string()); } if let Some(x_ms_immutability_policy_until_date) = x_ms_immutability_policy_until_date { req_builder = req_builder.header("x-ms-immutability-policy-until-date", x_ms_immutability_policy_until_date); } if let Some(x_ms_immutability_policy_mode) = x_ms_immutability_policy_mode { req_builder = req_builder.header("x-ms-immutability-policy-mode", 
x_ms_immutability_policy_mode); } if let Some(x_ms_legal_hold) = x_ms_legal_hold { req_builder = req_builder.header("x-ms-legal-hold", x_ms_legal_hold.to_string()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(start_copy_from_url::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(start_copy_from_url::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::ACCEPTED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| start_copy_from_url::Error::DeserializeError(source, rsp_body.clone()))?; Err(start_copy_from_url::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod start_copy_from_url { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn copy_from_url( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, x_ms_requires_sync: &str, timeout: Option<i64>, x_ms_meta: Option<&str>, x_ms_access_tier: Option<&str>, x_ms_source_if_modified_since: Option<&str>, x_ms_source_if_unmodified_since: Option<&str>, x_ms_source_if_match: Option<&str>, x_ms_source_if_none_match: Option<&str>, if_modified_since: 
// (continuation of the `copy_from_url` parameter list started above)
// NOTE(review): the hard-coded URL ends in a bare `&sync` query token with no
// value while the sync flag is also sent as the `x-ms-requires-sync` header —
// looks like a generator quirk; verify the service tolerates the bare token.
Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_copy_source: &str, x_ms_lease_id: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, x_ms_source_content_md5: Option<&str>, x_ms_tags: Option<&str>, x_ms_immutability_policy_until_date: Option<&str>, x_ms_immutability_policy_mode: Option<&str>, x_ms_legal_hold: Option<bool>, x_ms_copy_source_authorization: Option<&str>, ) -> std::result::Result<(), copy_from_url::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=copy&sync", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(copy_from_url::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(copy_from_url::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } req_builder = req_builder.header("x-ms-requires-sync", x_ms_requires_sync); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_meta) = x_ms_meta { req_builder = req_builder.header("x-ms-meta", x_ms_meta); } if let Some(x_ms_access_tier) = x_ms_access_tier { req_builder = req_builder.header("x-ms-access-tier", x_ms_access_tier); } if let Some(x_ms_source_if_modified_since) = x_ms_source_if_modified_since { req_builder = req_builder.header("x-ms-source-if-modified-since", x_ms_source_if_modified_since); } if let Some(x_ms_source_if_unmodified_since) = x_ms_source_if_unmodified_since { req_builder = req_builder.header("x-ms-source-if-unmodified-since", x_ms_source_if_unmodified_since); } if let
// Remaining conditional headers; each Option<&str> becomes a header only when Some.
Some(x_ms_source_if_match) = x_ms_source_if_match { req_builder = req_builder.header("x-ms-source-if-match", x_ms_source_if_match); } if let Some(x_ms_source_if_none_match) = x_ms_source_if_none_match { req_builder = req_builder.header("x-ms-source-if-none-match", x_ms_source_if_none_match); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-copy-source", x_ms_copy_source); if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(x_ms_source_content_md5) = x_ms_source_content_md5 { req_builder = req_builder.header("x-ms-source-content-md5", x_ms_source_content_md5); } if let Some(x_ms_tags) = x_ms_tags { req_builder = req_builder.header("x-ms-tags", x_ms_tags); } if let Some(x_ms_immutability_policy_until_date) = x_ms_immutability_policy_until_date { req_builder = req_builder.header("x-ms-immutability-policy-until-date", x_ms_immutability_policy_until_date); } if let Some(x_ms_immutability_policy_mode) = x_ms_immutability_policy_mode { req_builder = req_builder.header("x-ms-immutability-policy-mode", x_ms_immutability_policy_mode); } if let Some(x_ms_legal_hold) = x_ms_legal_hold { req_builder =
// Dispatch: empty body, expect 202 ACCEPTED; anything else decodes to StorageError.
req_builder.header("x-ms-legal-hold", x_ms_legal_hold.to_string()); } if let Some(x_ms_copy_source_authorization) = x_ms_copy_source_authorization { req_builder = req_builder.header("x-ms-copy-source-authorization", x_ms_copy_source_authorization); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(copy_from_url::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(copy_from_url::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::ACCEPTED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| copy_from_url::Error::DeserializeError(source, rsp_body.clone()))?; Err(copy_from_url::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
/// Error surface for `copy_from_url`; same shape as the sibling operations.
pub mod copy_from_url { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
/// "Abort Copy Blob": PUT `{base}/{container}/{blob}?comp=copy&copyid`,
/// expecting 204 NO_CONTENT. `x_ms_copy_action` should be "abort" per the
/// REST contract — TODO confirm; it is passed through verbatim as a header.
pub async fn abort_copy_from_url( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, x_ms_copy_action: &str, copyid: &str, timeout: Option<i64>, x_ms_lease_id: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), abort_copy_from_url::Error> { let http_client =
// NOTE(review): the URL already hard-codes `?comp=copy&copyid` (with a bare,
// valueless `copyid` token) and then `comp` and `copyid` are appended AGAIN as
// proper query pairs below, yielding duplicated keys in the final URL. This is
// a known pattern of the code generator; verify the service accepts it before
// hand-editing, since changing it alters the wire request.
operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=copy&copyid", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(abort_copy_from_url::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(abort_copy_from_url::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("x-ms-copy-action", x_ms_copy_action); url.query_pairs_mut().append_pair("copyid", copyid); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(abort_copy_from_url::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(abort_copy_from_url::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::NO_CONTENT => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| abort_copy_from_url::Error::DeserializeError(source, rsp_body.clone()))?; Err(abort_copy_from_url::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
/// Error surface for `abort_copy_from_url`; same shape as sibling operations.
pub mod abort_copy_from_url { use
super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
/// "Set Blob Tier": PUT `{base}/{container}/{blob}?comp=tier`. Returns a
/// two-variant Response because the service answers 200 (applied) or 202
/// (rehydration pending). NOTE(review): `comp` is also appended as a query
/// pair on top of the hard-coded `?comp=tier` — duplicated key, see above.
pub async fn set_tier( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, snapshot: Option<&str>, versionid: Option<&str>, timeout: Option<i64>, x_ms_access_tier: &str, x_ms_rehydrate_priority: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, x_ms_lease_id: Option<&str>, x_ms_if_tags: Option<&str>, ) -> std::result::Result<set_tier::Response, set_tier::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=tier", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(set_tier::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(set_tier::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(snapshot) = snapshot { url.query_pairs_mut().append_pair("snapshot", snapshot); } if let Some(versionid) = versionid {
// (tail of `set_tier`): remaining query pairs / headers, then dispatch.
// 200 => Ok200 (tier applied), 202 => Accepted202 (rehydrate in progress).
url.query_pairs_mut().append_pair("versionid", versionid); } if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-access-tier", x_ms_access_tier); if let Some(x_ms_rehydrate_priority) = x_ms_rehydrate_priority { req_builder = req_builder.header("x-ms-rehydrate-priority", x_ms_rehydrate_priority); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(set_tier::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(set_tier::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(set_tier::Response::Ok200), http::StatusCode::ACCEPTED => Ok(set_tier::Response::Accepted202), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| set_tier::Error::DeserializeError(source, rsp_body.clone()))?; Err(set_tier::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
/// Response (200 vs 202) and Error types for `set_tier`.
pub mod set_tier { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200, Accepted202, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
/// "Get Account Information": GET
/// `{base}/{container}/{blob}?restype=account&comp=properties`, 200 expected.
/// The interesting payload arrives in response headers, which this generated
/// wrapper discards (returns `()`); callers needing SKU/kind must use another
/// path. NOTE(review): `restype`/`comp` are appended again as query pairs on
/// top of the hard-coded query — duplicated keys, generator quirk.
pub async fn get_account_info( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, restype: &str, comp: &str, x_ms_version: &str, ) -> std::result::Result<(), get_account_info::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}/{}?restype=account&comp=properties", operation_config.base_path(), container_name, blob ); let mut url = url::Url::parse(url_str).map_err(get_account_info::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_account_info::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("restype", restype); url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("x-ms-version", x_ms_version); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_account_info::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_account_info::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| get_account_info::Error::DeserializeError(source,
// (tail of `get_account_info` error mapping)
rsp_body.clone()))?; Err(get_account_info::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
/// Error surface for `get_account_info`; same shape as sibling operations.
pub mod get_account_info { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
/// "Query Blob Contents": POST `{base}/{container}/{blob}?comp=query` with an
/// optional JSON-serialized QueryRequest body. Succeeds with 200 (full result)
/// or 206 (partial content); both deserialize the body as raw JSON.
/// NOTE(review): `comp` is appended again over the hard-coded `?comp=query`.
pub async fn query( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, query_request: Option<&models::QueryRequest>, snapshot: Option<&str>, timeout: Option<i64>, x_ms_lease_id: Option<&str>, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<query::Response, query::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=query", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(query::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await
// Body selection: JSON-encode the QueryRequest when given, else an empty body
// (and then content-type is only set in the Some branch).
.map_err(query::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); let req_body = if let Some(query_request) = query_request { req_builder = req_builder.header("content-type", "application/json"); azure_core::to_json(query_request).map_err(query::Error::SerializeError)? } else { bytes::Bytes::from_static(azure_core::EMPTY_BODY) }; if let Some(snapshot) = snapshot { url.query_pairs_mut().append_pair("snapshot", snapshot); } if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_encryption_key) = x_ms_encryption_key { req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm { req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder =
// Dispatch and status mapping for `query`: 200 / 206 / default-error.
req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(query::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(query::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: serde_json::Value = serde_json::from_slice(rsp_body).map_err(|source| query::Error::DeserializeError(source, rsp_body.clone()))?; Ok(query::Response::Ok200(rsp_value)) } http::StatusCode::PARTIAL_CONTENT => { let rsp_body = rsp.body(); let rsp_value: serde_json::Value = serde_json::from_slice(rsp_body).map_err(|source| query::Error::DeserializeError(source, rsp_body.clone()))?; Ok(query::Response::PartialContent206(rsp_value)) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| query::Error::DeserializeError(source, rsp_body.clone()))?; Err(query::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
/// Response (200 vs 206, each carrying raw JSON) and Error types for `query`.
pub mod query { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(serde_json::Value), PartialContent206(serde_json::Value), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
/// "Get Blob Tags": GET `{base}/{container}/{blob}?comp=tags`, 200 expected,
/// body deserialized into models::BlobTags. Parameters continue next chunk.
pub async fn get_tags( operation_config: &crate::OperationConfig,
// (continuation of the `get_tags` parameter list and body)
container_name: &str, blob: &str, comp: &str, timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, snapshot: Option<&str>, versionid: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_lease_id: Option<&str>, ) -> std::result::Result<models::BlobTags, get_tags::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=tags", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(get_tags::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_tags::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(snapshot) = snapshot { url.query_pairs_mut().append_pair("snapshot", snapshot); } if let Some(versionid) = versionid { url.query_pairs_mut().append_pair("versionid", versionid); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_tags::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await
// 200 => BlobTags; otherwise decode a StorageError and wrap in DefaultResponse.
.map_err(get_tags::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::BlobTags = serde_json::from_slice(rsp_body).map_err(|source| get_tags::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| get_tags::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_tags::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
/// Error surface for `get_tags`; same shape as sibling operations.
pub mod get_tags { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
/// "Set Blob Tags": PUT `{base}/{container}/{blob}?comp=tags` with an optional
/// JSON-serialized BlobTags body; 204 NO_CONTENT on success.
pub async fn set_tags( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, x_ms_version: &str, timeout: Option<i64>, versionid: Option<&str>, content_md5: Option<&str>, x_ms_content_crc64: Option<&str>, x_ms_client_request_id: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_lease_id: Option<&str>, tags: Option<&models::BlobTags>, ) -> std::result::Result<(), set_tags::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=tags", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(set_tags::Error::ParseUrlError)?; let mut req_builder =
// (body of `set_tags`): auth, query pairs, conditional headers, then the
// optional JSON tags payload.
http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(set_tags::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(versionid) = versionid { url.query_pairs_mut().append_pair("versionid", versionid); } if let Some(content_md5) = content_md5 { req_builder = req_builder.header("Content-MD5", content_md5); } if let Some(x_ms_content_crc64) = x_ms_content_crc64 { req_builder = req_builder.header("x-ms-content-crc64", x_ms_content_crc64); } if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } let req_body = if let Some(tags) = tags { req_builder = req_builder.header("content-type", "application/json"); azure_core::to_json(tags).map_err(set_tags::Error::SerializeError)?
// Dispatch: 204 NO_CONTENT on success; otherwise map to DefaultResponse.
} else { bytes::Bytes::from_static(azure_core::EMPTY_BODY) }; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(set_tags::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(set_tags::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::NO_CONTENT => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| set_tags::Error::DeserializeError(source, rsp_body.clone()))?; Err(set_tags::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
/// Error surface for `set_tags`; same shape as sibling operations.
pub mod set_tags { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
/// Generated operations for the Page Blob sub-resource.
pub mod page_blob { use super::{models, API_VERSION};
/// "Put Blob" for page blobs: PUT `{base}/{container}/{blob}?PageBlob`,
/// 201 CREATED expected. NOTE(review): the query string is the bare token
/// `?PageBlob` — the REST contract keys on the `x-ms-blob-type: PageBlob`
/// header, so this looks like a generator artifact; confirm service tolerance.
pub async fn create( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, x_ms_blob_type: &str, timeout: Option<i64>, content_length: i64, x_ms_access_tier: Option<&str>, x_ms_blob_content_type: Option<&str>, x_ms_blob_content_encoding: Option<&str>, x_ms_blob_content_language: Option<&str>, x_ms_blob_content_md5: Option<&str>, x_ms_blob_cache_control: Option<&str>, x_ms_meta: Option<&str>, x_ms_lease_id: Option<&str>, x_ms_blob_content_disposition: Option<&str>, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>,
// (continuation of `page_blob::create` parameters and body)
x_ms_encryption_algorithm: Option<&str>, x_ms_encryption_scope: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_blob_content_length: i64, x_ms_blob_sequence_number: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, x_ms_tags: Option<&str>, x_ms_immutability_policy_until_date: Option<&str>, x_ms_immutability_policy_mode: Option<&str>, x_ms_legal_hold: Option<bool>, ) -> std::result::Result<(), create::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?PageBlob", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } req_builder = req_builder.header("x-ms-blob-type", x_ms_blob_type); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("Content-Length", content_length); if let Some(x_ms_access_tier) = x_ms_access_tier { req_builder = req_builder.header("x-ms-access-tier", x_ms_access_tier); } if let Some(x_ms_blob_content_type) = x_ms_blob_content_type { req_builder = req_builder.header("x-ms-blob-content-type", x_ms_blob_content_type); } if let Some(x_ms_blob_content_encoding) = x_ms_blob_content_encoding { req_builder = req_builder.header("x-ms-blob-content-encoding", x_ms_blob_content_encoding); } if let Some(x_ms_blob_content_language) = x_ms_blob_content_language { req_builder =
// Remaining optional headers for `create`.
req_builder.header("x-ms-blob-content-language", x_ms_blob_content_language); } if let Some(x_ms_blob_content_md5) = x_ms_blob_content_md5 { req_builder = req_builder.header("x-ms-blob-content-md5", x_ms_blob_content_md5); } if let Some(x_ms_blob_cache_control) = x_ms_blob_cache_control { req_builder = req_builder.header("x-ms-blob-cache-control", x_ms_blob_cache_control); } if let Some(x_ms_meta) = x_ms_meta { req_builder = req_builder.header("x-ms-meta", x_ms_meta); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_blob_content_disposition) = x_ms_blob_content_disposition { req_builder = req_builder.header("x-ms-blob-content-disposition", x_ms_blob_content_disposition); } if let Some(x_ms_encryption_key) = x_ms_encryption_key { req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm { req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm); } if let Some(x_ms_encryption_scope) = x_ms_encryption_scope { req_builder = req_builder.header("x-ms-encryption-scope", x_ms_encryption_scope); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder =
// Dispatch: empty body, 201 CREATED on success. NOTE(review): the mod below is
// continued across a wrapped string literal — its #[error] message therefore
// embeds a newline in the original source; the literal must not be re-joined
// here or the on-disk bytes would change.
req_builder.header("x-ms-blob-content-length", x_ms_blob_content_length); if let Some(x_ms_blob_sequence_number) = x_ms_blob_sequence_number { req_builder = req_builder.header("x-ms-blob-sequence-number", x_ms_blob_sequence_number); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(x_ms_tags) = x_ms_tags { req_builder = req_builder.header("x-ms-tags", x_ms_tags); } if let Some(x_ms_immutability_policy_until_date) = x_ms_immutability_policy_until_date { req_builder = req_builder.header("x-ms-immutability-policy-until-date", x_ms_immutability_policy_until_date); } if let Some(x_ms_immutability_policy_mode) = x_ms_immutability_policy_mode { req_builder = req_builder.header("x-ms-immutability-policy-mode", x_ms_immutability_policy_mode); } if let Some(x_ms_legal_hold) = x_ms_legal_hold { req_builder = req_builder.header("x-ms-legal-hold", x_ms_legal_hold.to_string()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?; Err(create::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
/// Error surface for `page_blob::create`; same shape as sibling operations.
pub mod create { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to
build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn upload_pages( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, x_ms_page_write: &str, body: &serde_json::Value, content_length: i64, content_md5: Option<&str>, x_ms_content_crc64: Option<&str>, timeout: Option<i64>, x_ms_range: Option<&str>, x_ms_lease_id: Option<&str>, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, x_ms_encryption_scope: Option<&str>, x_ms_if_sequence_number_le: Option<i64>, x_ms_if_sequence_number_lt: Option<i64>, x_ms_if_sequence_number_eq: Option<i64>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), upload_pages::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=page&update", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(upload_pages::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(upload_pages::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } 
// upload_pages (continued, generated code): attach query parameters and
// request headers for the Put Page ("update") operation.
// NOTE(review): the base URL already ends in "?comp=page&update", so this
// appends a second "comp" query key — presumably tolerated by the service /
// a generator convention; confirm before changing.
url.query_pairs_mut().append_pair("comp", comp);
// Required write-mode header and JSON body content type.
req_builder = req_builder.header("x-ms-page-write", x_ms_page_write);
req_builder = req_builder.header("content-type", "application/json");
// Serialize the page payload; failures map to upload_pages::Error::SerializeError.
let req_body = azure_core::to_json(body).map_err(upload_pages::Error::SerializeError)?;
req_builder = req_builder.header("Content-Length", content_length);
// Optional content-integrity checks supplied by the caller.
if let Some(content_md5) = content_md5 {
    req_builder = req_builder.header("Content-MD5", content_md5);
}
if let Some(x_ms_content_crc64) = x_ms_content_crc64 {
    req_builder = req_builder.header("x-ms-content-crc64", x_ms_content_crc64);
}
// Optional server-side timeout, carried as a query parameter (seconds per the
// x-ms convention — TODO confirm units against the service docs).
if let Some(timeout) = timeout {
    url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
// Optional range and lease headers.
if let Some(x_ms_range) = x_ms_range {
    req_builder = req_builder.header("x-ms-range", x_ms_range);
}
if let Some(x_ms_lease_id) = x_ms_lease_id {
    req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
}
// Optional customer-provided encryption key headers.
if let Some(x_ms_encryption_key) = x_ms_encryption_key {
    req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key);
}
if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 {
    req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256);
}
if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm {
    req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm);
}
if let Some(x_ms_encryption_scope) = x_ms_encryption_scope {
    req_builder = req_builder.header("x-ms-encryption-scope", x_ms_encryption_scope);
}
// Optional conditional headers on the blob's sequence number.
if let Some(x_ms_if_sequence_number_le) = x_ms_if_sequence_number_le {
    req_builder = req_builder.header("x-ms-if-sequence-number-le", x_ms_if_sequence_number_le);
}
if let Some(x_ms_if_sequence_number_lt) = x_ms_if_sequence_number_lt {
    req_builder = req_builder.header("x-ms-if-sequence-number-lt", x_ms_if_sequence_number_lt);
}
// (this statement continues on the next original line)
if let Some(x_ms_if_sequence_number_eq) = x_ms_if_sequence_number_eq {
    req_builder = req_builder.header("x-ms-if-sequence-number-eq",
x_ms_if_sequence_number_eq); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(upload_pages::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(upload_pages::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| upload_pages::Error::DeserializeError(source, rsp_body.clone()))?; Err(upload_pages::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod upload_pages { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize 
response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn clear_pages( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, x_ms_page_write: &str, content_length: i64, timeout: Option<i64>, x_ms_range: Option<&str>, x_ms_lease_id: Option<&str>, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, x_ms_encryption_scope: Option<&str>, x_ms_if_sequence_number_le: Option<i64>, x_ms_if_sequence_number_lt: Option<i64>, x_ms_if_sequence_number_eq: Option<i64>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), clear_pages::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=page&clear", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(clear_pages::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(clear_pages::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("x-ms-page-write", x_ms_page_write); req_builder = req_builder.header("Content-Length", content_length); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_range) = x_ms_range { req_builder = req_builder.header("x-ms-range", 
x_ms_range); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_encryption_key) = x_ms_encryption_key { req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm { req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm); } if let Some(x_ms_encryption_scope) = x_ms_encryption_scope { req_builder = req_builder.header("x-ms-encryption-scope", x_ms_encryption_scope); } if let Some(x_ms_if_sequence_number_le) = x_ms_if_sequence_number_le { req_builder = req_builder.header("x-ms-if-sequence-number-le", x_ms_if_sequence_number_le); } if let Some(x_ms_if_sequence_number_lt) = x_ms_if_sequence_number_lt { req_builder = req_builder.header("x-ms-if-sequence-number-lt", x_ms_if_sequence_number_lt); } if let Some(x_ms_if_sequence_number_eq) = x_ms_if_sequence_number_eq { req_builder = req_builder.header("x-ms-if-sequence-number-eq", x_ms_if_sequence_number_eq); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", 
x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(clear_pages::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(clear_pages::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| clear_pages::Error::DeserializeError(source, rsp_body.clone()))?; Err(clear_pages::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod clear_pages { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn upload_pages_from_url( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, x_ms_page_write: &str, x_ms_copy_source: &str, x_ms_source_range: &str, x_ms_source_content_md5: Option<&str>, x_ms_source_content_crc64: Option<&str>, content_length: i64, timeout: Option<i64>, x_ms_range: &str, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, x_ms_encryption_scope: Option<&str>, x_ms_lease_id: Option<&str>, x_ms_if_sequence_number_le: Option<i64>, 
x_ms_if_sequence_number_lt: Option<i64>, x_ms_if_sequence_number_eq: Option<i64>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_source_if_modified_since: Option<&str>, x_ms_source_if_unmodified_since: Option<&str>, x_ms_source_if_match: Option<&str>, x_ms_source_if_none_match: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, x_ms_copy_source_authorization: Option<&str>, ) -> std::result::Result<(), upload_pages_from_url::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}/{}?comp=page&update&fromUrl", operation_config.base_path(), container_name, blob ); let mut url = url::Url::parse(url_str).map_err(upload_pages_from_url::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(upload_pages_from_url::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("x-ms-page-write", x_ms_page_write); req_builder = req_builder.header("x-ms-copy-source", x_ms_copy_source); req_builder = req_builder.header("x-ms-source-range", x_ms_source_range); if let Some(x_ms_source_content_md5) = x_ms_source_content_md5 { req_builder = req_builder.header("x-ms-source-content-md5", x_ms_source_content_md5); } if let Some(x_ms_source_content_crc64) = x_ms_source_content_crc64 { req_builder = req_builder.header("x-ms-source-content-crc64", x_ms_source_content_crc64); } req_builder = req_builder.header("Content-Length", content_length); if let Some(timeout) = timeout { 
// upload_pages_from_url (continued, generated code): optional query
// parameter plus encryption and conditional request headers.
// The brace below closes the `if let Some(timeout)` opened on the previous
// original line.
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); }
// Destination range header — required (parameter is a non-optional &str).
req_builder = req_builder.header("x-ms-range", x_ms_range);
// Optional customer-provided encryption key headers.
if let Some(x_ms_encryption_key) = x_ms_encryption_key {
    req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key);
}
if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 {
    req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256);
}
if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm {
    req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm);
}
if let Some(x_ms_encryption_scope) = x_ms_encryption_scope {
    req_builder = req_builder.header("x-ms-encryption-scope", x_ms_encryption_scope);
}
if let Some(x_ms_lease_id) = x_ms_lease_id {
    req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
}
// Optional conditional headers on the destination blob's sequence number.
if let Some(x_ms_if_sequence_number_le) = x_ms_if_sequence_number_le {
    req_builder = req_builder.header("x-ms-if-sequence-number-le", x_ms_if_sequence_number_le);
}
if let Some(x_ms_if_sequence_number_lt) = x_ms_if_sequence_number_lt {
    req_builder = req_builder.header("x-ms-if-sequence-number-lt", x_ms_if_sequence_number_lt);
}
if let Some(x_ms_if_sequence_number_eq) = x_ms_if_sequence_number_eq {
    req_builder = req_builder.header("x-ms-if-sequence-number-eq", x_ms_if_sequence_number_eq);
}
// Standard HTTP conditional headers for the destination blob.
if let Some(if_modified_since) = if_modified_since {
    req_builder = req_builder.header("If-Modified-Since", if_modified_since);
}
if let Some(if_unmodified_since) = if_unmodified_since {
    req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since);
}
if let Some(if_match) = if_match {
    req_builder = req_builder.header("If-Match", if_match);
}
if let Some(if_none_match) = if_none_match {
    req_builder = req_builder.header("If-None-Match", if_none_match);
}
if let Some(x_ms_if_tags) = x_ms_if_tags {
    req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags);
}
// (this statement continues on the next original line: source-blob conditions)
if let Some(x_ms_source_if_modified_since) =
x_ms_source_if_modified_since { req_builder = req_builder.header("x-ms-source-if-modified-since", x_ms_source_if_modified_since); } if let Some(x_ms_source_if_unmodified_since) = x_ms_source_if_unmodified_since { req_builder = req_builder.header("x-ms-source-if-unmodified-since", x_ms_source_if_unmodified_since); } if let Some(x_ms_source_if_match) = x_ms_source_if_match { req_builder = req_builder.header("x-ms-source-if-match", x_ms_source_if_match); } if let Some(x_ms_source_if_none_match) = x_ms_source_if_none_match { req_builder = req_builder.header("x-ms-source-if-none-match", x_ms_source_if_none_match); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(x_ms_copy_source_authorization) = x_ms_copy_source_authorization { req_builder = req_builder.header("x-ms-copy-source-authorization", x_ms_copy_source_authorization); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(upload_pages_from_url::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(upload_pages_from_url::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| upload_pages_from_url::Error::DeserializeError(source, rsp_body.clone()))?; Err(upload_pages_from_url::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod upload_pages_from_url { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] 
ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get_page_ranges( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, snapshot: Option<&str>, timeout: Option<i64>, x_ms_range: Option<&str>, x_ms_lease_id: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<models::PageList, get_page_ranges::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=pagelist", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(get_page_ranges::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_page_ranges::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(snapshot) = snapshot { url.query_pairs_mut().append_pair("snapshot", snapshot); } if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_range) = x_ms_range { req_builder = req_builder.header("x-ms-range", 
x_ms_range); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_page_ranges::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_page_ranges::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::PageList = serde_json::from_slice(rsp_body) .map_err(|source| get_page_ranges::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| get_page_ranges::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_page_ranges::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get_page_ranges { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: 
models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get_page_ranges_diff( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, snapshot: Option<&str>, timeout: Option<i64>, prevsnapshot: Option<&str>, x_ms_previous_snapshot_url: Option<&str>, x_ms_range: Option<&str>, x_ms_lease_id: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<models::PageList, get_page_ranges_diff::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=pagelist&diff", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(get_page_ranges_diff::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_page_ranges_diff::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(snapshot) = snapshot { url.query_pairs_mut().append_pair("snapshot", snapshot); } if let Some(timeout) = timeout { 
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(prevsnapshot) = prevsnapshot { url.query_pairs_mut().append_pair("prevsnapshot", prevsnapshot); } if let Some(x_ms_previous_snapshot_url) = x_ms_previous_snapshot_url { req_builder = req_builder.header("x-ms-previous-snapshot-url", x_ms_previous_snapshot_url); } if let Some(x_ms_range) = x_ms_range { req_builder = req_builder.header("x-ms-range", x_ms_range); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_page_ranges_diff::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_page_ranges_diff::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::PageList = serde_json::from_slice(rsp_body) .map_err(|source| get_page_ranges_diff::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: 
models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| get_page_ranges_diff::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_page_ranges_diff::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get_page_ranges_diff { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn resize( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, timeout: Option<i64>, x_ms_lease_id: Option<&str>, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, x_ms_encryption_scope: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_blob_content_length: i64, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), resize::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}/{}?comp=properties&Resize", operation_config.base_path(), container_name, blob ); let mut url = url::Url::parse(url_str).map_err(resize::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = 
operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(resize::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_encryption_key) = x_ms_encryption_key { req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm { req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm); } if let Some(x_ms_encryption_scope) = x_ms_encryption_scope { req_builder = req_builder.header("x-ms-encryption-scope", x_ms_encryption_scope); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-blob-content-length", x_ms_blob_content_length); req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = 
req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(resize::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(resize::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| resize::Error::DeserializeError(source, rsp_body.clone()))?; Err(resize::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod resize { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn update_sequence_number( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, timeout: Option<i64>, x_ms_lease_id: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_sequence_number_action: &str, x_ms_blob_sequence_number: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), update_sequence_number::Error> { let http_client = operation_config.http_client(); 
// update_sequence_number (continued, generated code): build and authorize
// the PUT request, then attach query parameters and headers.
// NOTE(review): the path already carries "?comp=properties&UpdateSequenceNumber"
// and `comp` is appended again below as a query pair — generator artifact;
// confirm the service tolerates the duplicate key before changing.
let url_str = &format!(
    "{}/{}/{}?comp=properties&UpdateSequenceNumber",
    operation_config.base_path(),
    container_name,
    blob
);
let mut url = url::Url::parse(url_str).map_err(update_sequence_number::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Bearer-token authorization is added only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
    let token_response = token_credential
        .get_token(operation_config.token_credential_resource())
        .await
        .map_err(update_sequence_number::Error::GetTokenError)?;
    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("comp", comp);
// Optional server-side timeout as a query parameter.
if let Some(timeout) = timeout {
    url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
// Optional lease and standard HTTP conditional headers.
if let Some(x_ms_lease_id) = x_ms_lease_id {
    req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
}
if let Some(if_modified_since) = if_modified_since {
    req_builder = req_builder.header("If-Modified-Since", if_modified_since);
}
if let Some(if_unmodified_since) = if_unmodified_since {
    req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since);
}
if let Some(if_match) = if_match {
    req_builder = req_builder.header("If-Match", if_match);
}
if let Some(if_none_match) = if_none_match {
    req_builder = req_builder.header("If-None-Match", if_none_match);
}
if let Some(x_ms_if_tags) = x_ms_if_tags {
    req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags);
}
// Required sequence-number action (e.g. "max"/"update"/"increment" per the
// service — TODO confirm) plus the optional explicit sequence number.
req_builder = req_builder.header("x-ms-sequence-number-action", x_ms_sequence_number_action);
if let Some(x_ms_blob_sequence_number) = x_ms_blob_sequence_number {
    req_builder = req_builder.header("x-ms-blob-sequence-number", x_ms_blob_sequence_number);
}
// API version and optional client-supplied correlation id.
req_builder = req_builder.header("x-ms-version", x_ms_version);
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
    req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(update_sequence_number::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(update_sequence_number::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| update_sequence_number::Error::DeserializeError(source, rsp_body.clone()))?; Err(update_sequence_number::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod update_sequence_number { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn copy_incremental( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, timeout: Option<i64>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_copy_source: &str, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), copy_incremental::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=incrementalcopy", 
operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(copy_incremental::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(copy_incremental::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-copy-source", x_ms_copy_source); req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(copy_incremental::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(copy_incremental::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::ACCEPTED => Ok(()), status_code => 
{ let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| copy_incremental::Error::DeserializeError(source, rsp_body.clone()))?; Err(copy_incremental::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod copy_incremental { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod append_blob { use super::{models, API_VERSION}; pub async fn create( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, x_ms_blob_type: &str, timeout: Option<i64>, content_length: i64, x_ms_blob_content_type: Option<&str>, x_ms_blob_content_encoding: Option<&str>, x_ms_blob_content_language: Option<&str>, x_ms_blob_content_md5: Option<&str>, x_ms_blob_cache_control: Option<&str>, x_ms_meta: Option<&str>, x_ms_lease_id: Option<&str>, x_ms_blob_content_disposition: Option<&str>, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, x_ms_encryption_scope: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, x_ms_tags: Option<&str>, x_ms_immutability_policy_until_date: Option<&str>, 
x_ms_immutability_policy_mode: Option<&str>, x_ms_legal_hold: Option<bool>, ) -> std::result::Result<(), create::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?AppendBlob", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } req_builder = req_builder.header("x-ms-blob-type", x_ms_blob_type); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("Content-Length", content_length); if let Some(x_ms_blob_content_type) = x_ms_blob_content_type { req_builder = req_builder.header("x-ms-blob-content-type", x_ms_blob_content_type); } if let Some(x_ms_blob_content_encoding) = x_ms_blob_content_encoding { req_builder = req_builder.header("x-ms-blob-content-encoding", x_ms_blob_content_encoding); } if let Some(x_ms_blob_content_language) = x_ms_blob_content_language { req_builder = req_builder.header("x-ms-blob-content-language", x_ms_blob_content_language); } if let Some(x_ms_blob_content_md5) = x_ms_blob_content_md5 { req_builder = req_builder.header("x-ms-blob-content-md5", x_ms_blob_content_md5); } if let Some(x_ms_blob_cache_control) = x_ms_blob_cache_control { req_builder = req_builder.header("x-ms-blob-cache-control", x_ms_blob_cache_control); } if let Some(x_ms_meta) = x_ms_meta { req_builder = req_builder.header("x-ms-meta", x_ms_meta); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = 
req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_blob_content_disposition) = x_ms_blob_content_disposition { req_builder = req_builder.header("x-ms-blob-content-disposition", x_ms_blob_content_disposition); } if let Some(x_ms_encryption_key) = x_ms_encryption_key { req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm { req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm); } if let Some(x_ms_encryption_scope) = x_ms_encryption_scope { req_builder = req_builder.header("x-ms-encryption-scope", x_ms_encryption_scope); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(x_ms_tags) = x_ms_tags { req_builder = req_builder.header("x-ms-tags", x_ms_tags); } if let Some(x_ms_immutability_policy_until_date) = x_ms_immutability_policy_until_date { req_builder = req_builder.header("x-ms-immutability-policy-until-date", x_ms_immutability_policy_until_date); } if let Some(x_ms_immutability_policy_mode) = 
x_ms_immutability_policy_mode { req_builder = req_builder.header("x-ms-immutability-policy-mode", x_ms_immutability_policy_mode); } if let Some(x_ms_legal_hold) = x_ms_legal_hold { req_builder = req_builder.header("x-ms-legal-hold", x_ms_legal_hold.to_string()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?; Err(create::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod create { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn append_block( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, body: &serde_json::Value, timeout: Option<i64>, content_length: i64, content_md5: Option<&str>, x_ms_content_crc64: Option<&str>, x_ms_lease_id: Option<&str>, x_ms_blob_condition_maxsize: Option<i64>, x_ms_blob_condition_appendpos: Option<i64>, 
x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, x_ms_encryption_scope: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), append_block::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=appendblock", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(append_block::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(append_block::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(body).map_err(append_block::Error::SerializeError)?; if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("Content-Length", content_length); if let Some(content_md5) = content_md5 { req_builder = req_builder.header("Content-MD5", content_md5); } if let Some(x_ms_content_crc64) = x_ms_content_crc64 { req_builder = req_builder.header("x-ms-content-crc64", x_ms_content_crc64); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_blob_condition_maxsize) = x_ms_blob_condition_maxsize { req_builder = req_builder.header("x-ms-blob-condition-maxsize", 
x_ms_blob_condition_maxsize); } if let Some(x_ms_blob_condition_appendpos) = x_ms_blob_condition_appendpos { req_builder = req_builder.header("x-ms-blob-condition-appendpos", x_ms_blob_condition_appendpos); } if let Some(x_ms_encryption_key) = x_ms_encryption_key { req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm { req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm); } if let Some(x_ms_encryption_scope) = x_ms_encryption_scope { req_builder = req_builder.header("x-ms-encryption-scope", x_ms_encryption_scope); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(append_block::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(append_block::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = 
serde_json::from_slice(rsp_body).map_err(|source| append_block::Error::DeserializeError(source, rsp_body.clone()))?; Err(append_block::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod append_block { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn append_block_from_url( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, x_ms_copy_source: &str, x_ms_source_range: Option<&str>, x_ms_source_content_md5: Option<&str>, x_ms_source_content_crc64: Option<&str>, timeout: Option<i64>, content_length: i64, content_md5: Option<&str>, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, x_ms_encryption_scope: Option<&str>, x_ms_lease_id: Option<&str>, x_ms_blob_condition_maxsize: Option<i64>, x_ms_blob_condition_appendpos: Option<i64>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_source_if_modified_since: Option<&str>, x_ms_source_if_unmodified_since: Option<&str>, x_ms_source_if_match: Option<&str>, x_ms_source_if_none_match: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, x_ms_copy_source_authorization: Option<&str>, ) -> 
std::result::Result<(), append_block_from_url::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}/{}?comp=appendblock&fromUrl", operation_config.base_path(), container_name, blob ); let mut url = url::Url::parse(url_str).map_err(append_block_from_url::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(append_block_from_url::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); req_builder = req_builder.header("x-ms-copy-source", x_ms_copy_source); if let Some(x_ms_source_range) = x_ms_source_range { req_builder = req_builder.header("x-ms-source-range", x_ms_source_range); } if let Some(x_ms_source_content_md5) = x_ms_source_content_md5 { req_builder = req_builder.header("x-ms-source-content-md5", x_ms_source_content_md5); } if let Some(x_ms_source_content_crc64) = x_ms_source_content_crc64 { req_builder = req_builder.header("x-ms-source-content-crc64", x_ms_source_content_crc64); } if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("Content-Length", content_length); if let Some(content_md5) = content_md5 { req_builder = req_builder.header("Content-MD5", content_md5); } if let Some(x_ms_encryption_key) = x_ms_encryption_key { req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm { req_builder = 
req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm); } if let Some(x_ms_encryption_scope) = x_ms_encryption_scope { req_builder = req_builder.header("x-ms-encryption-scope", x_ms_encryption_scope); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_blob_condition_maxsize) = x_ms_blob_condition_maxsize { req_builder = req_builder.header("x-ms-blob-condition-maxsize", x_ms_blob_condition_maxsize); } if let Some(x_ms_blob_condition_appendpos) = x_ms_blob_condition_appendpos { req_builder = req_builder.header("x-ms-blob-condition-appendpos", x_ms_blob_condition_appendpos); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } if let Some(x_ms_source_if_modified_since) = x_ms_source_if_modified_since { req_builder = req_builder.header("x-ms-source-if-modified-since", x_ms_source_if_modified_since); } if let Some(x_ms_source_if_unmodified_since) = x_ms_source_if_unmodified_since { req_builder = req_builder.header("x-ms-source-if-unmodified-since", x_ms_source_if_unmodified_since); } if let Some(x_ms_source_if_match) = x_ms_source_if_match { req_builder = req_builder.header("x-ms-source-if-match", x_ms_source_if_match); } if let Some(x_ms_source_if_none_match) = x_ms_source_if_none_match { req_builder = req_builder.header("x-ms-source-if-none-match", x_ms_source_if_none_match); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let 
Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(x_ms_copy_source_authorization) = x_ms_copy_source_authorization { req_builder = req_builder.header("x-ms-copy-source-authorization", x_ms_copy_source_authorization); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(append_block_from_url::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(append_block_from_url::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| append_block_from_url::Error::DeserializeError(source, rsp_body.clone()))?; Err(append_block_from_url::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod append_block_from_url { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn seal( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, timeout: Option<i64>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, x_ms_lease_id: Option<&str>, 
if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_blob_condition_appendpos: Option<i64>, ) -> std::result::Result<(), seal::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=seal", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(seal::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(seal::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_blob_condition_appendpos) = x_ms_blob_condition_appendpos { req_builder = req_builder.header("x-ms-blob-condition-appendpos", 
x_ms_blob_condition_appendpos); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(seal::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(seal::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| seal::Error::DeserializeError(source, rsp_body.clone()))?; Err(seal::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod seal { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod block_blob { use super::{models, API_VERSION}; pub async fn upload( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, x_ms_blob_type: &str, body: &serde_json::Value, timeout: Option<i64>, content_md5: Option<&str>, content_length: i64, x_ms_blob_content_type: Option<&str>, x_ms_blob_content_encoding: Option<&str>, x_ms_blob_content_language: Option<&str>, x_ms_blob_content_md5: Option<&str>, x_ms_blob_cache_control: Option<&str>, x_ms_meta: Option<&str>, x_ms_lease_id: Option<&str>, x_ms_blob_content_disposition: Option<&str>, x_ms_encryption_key: Option<&str>, 
x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, x_ms_encryption_scope: Option<&str>, x_ms_access_tier: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, x_ms_tags: Option<&str>, x_ms_immutability_policy_until_date: Option<&str>, x_ms_immutability_policy_mode: Option<&str>, x_ms_legal_hold: Option<bool>, ) -> std::result::Result<(), upload::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?BlockBlob", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(upload::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(upload::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } req_builder = req_builder.header("x-ms-blob-type", x_ms_blob_type); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(body).map_err(upload::Error::SerializeError)?; if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(content_md5) = content_md5 { req_builder = req_builder.header("Content-MD5", content_md5); } req_builder = req_builder.header("Content-Length", content_length); if let Some(x_ms_blob_content_type) = x_ms_blob_content_type { req_builder = req_builder.header("x-ms-blob-content-type", x_ms_blob_content_type); } if let Some(x_ms_blob_content_encoding) = x_ms_blob_content_encoding { req_builder = 
req_builder.header("x-ms-blob-content-encoding", x_ms_blob_content_encoding); } if let Some(x_ms_blob_content_language) = x_ms_blob_content_language { req_builder = req_builder.header("x-ms-blob-content-language", x_ms_blob_content_language); } if let Some(x_ms_blob_content_md5) = x_ms_blob_content_md5 { req_builder = req_builder.header("x-ms-blob-content-md5", x_ms_blob_content_md5); } if let Some(x_ms_blob_cache_control) = x_ms_blob_cache_control { req_builder = req_builder.header("x-ms-blob-cache-control", x_ms_blob_cache_control); } if let Some(x_ms_meta) = x_ms_meta { req_builder = req_builder.header("x-ms-meta", x_ms_meta); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_blob_content_disposition) = x_ms_blob_content_disposition { req_builder = req_builder.header("x-ms-blob-content-disposition", x_ms_blob_content_disposition); } if let Some(x_ms_encryption_key) = x_ms_encryption_key { req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm { req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm); } if let Some(x_ms_encryption_scope) = x_ms_encryption_scope { req_builder = req_builder.header("x-ms-encryption-scope", x_ms_encryption_scope); } if let Some(x_ms_access_tier) = x_ms_access_tier { req_builder = req_builder.header("x-ms-access-tier", x_ms_access_tier); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = 
req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(x_ms_tags) = x_ms_tags { req_builder = req_builder.header("x-ms-tags", x_ms_tags); } if let Some(x_ms_immutability_policy_until_date) = x_ms_immutability_policy_until_date { req_builder = req_builder.header("x-ms-immutability-policy-until-date", x_ms_immutability_policy_until_date); } if let Some(x_ms_immutability_policy_mode) = x_ms_immutability_policy_mode { req_builder = req_builder.header("x-ms-immutability-policy-mode", x_ms_immutability_policy_mode); } if let Some(x_ms_legal_hold) = x_ms_legal_hold { req_builder = req_builder.header("x-ms-legal-hold", x_ms_legal_hold.to_string()); } req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(upload::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(upload::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| upload::Error::DeserializeError(source, rsp_body.clone()))?; Err(upload::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod upload { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] 
BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } 
/// Put Blob From URL operation: creates a block blob whose contents are read
/// server-side from `x_ms_copy_source`. Sends an empty body (the payload is
/// the copy source) and expects HTTP 201 CREATED; any other status is parsed
/// as `models::StorageError` and returned as `Error::DefaultResponse`.
pub async fn put_blob_from_url( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, x_ms_blob_type: &str, timeout: Option<i64>, content_md5: Option<&str>, content_length: i64, x_ms_blob_content_type: Option<&str>, x_ms_blob_content_encoding: Option<&str>, x_ms_blob_content_language: Option<&str>, x_ms_blob_content_md5: Option<&str>, x_ms_blob_cache_control: Option<&str>, x_ms_meta: Option<&str>, x_ms_lease_id: Option<&str>, x_ms_blob_content_disposition: Option<&str>, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, x_ms_encryption_scope: Option<&str>, x_ms_access_tier: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_source_if_modified_since: Option<&str>, x_ms_source_if_unmodified_since: Option<&str>, x_ms_source_if_match: Option<&str>, x_ms_source_if_none_match: Option<&str>, x_ms_source_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, x_ms_source_content_md5: Option<&str>, x_ms_tags: Option<&str>, x_ms_copy_source: &str, x_ms_copy_source_blob_properties: Option<bool>, x_ms_copy_source_authorization: Option<&str>, ) -> std::result::Result<(), put_blob_from_url::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?BlockBlob&fromUrl", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(put_blob_from_url::Error::ParseUrlError)?; let mut req_builder 
= http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(put_blob_from_url::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } req_builder = req_builder.header("x-ms-blob-type", x_ms_blob_type); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(content_md5) = content_md5 { req_builder = req_builder.header("Content-MD5", content_md5); } req_builder = req_builder.header("Content-Length", content_length); if let Some(x_ms_blob_content_type) = x_ms_blob_content_type { req_builder = req_builder.header("x-ms-blob-content-type", x_ms_blob_content_type); } if let Some(x_ms_blob_content_encoding) = x_ms_blob_content_encoding { req_builder = req_builder.header("x-ms-blob-content-encoding", x_ms_blob_content_encoding); } if let Some(x_ms_blob_content_language) = x_ms_blob_content_language { req_builder = req_builder.header("x-ms-blob-content-language", x_ms_blob_content_language); } if let Some(x_ms_blob_content_md5) = x_ms_blob_content_md5 { req_builder = req_builder.header("x-ms-blob-content-md5", x_ms_blob_content_md5); } if let Some(x_ms_blob_cache_control) = x_ms_blob_cache_control { req_builder = req_builder.header("x-ms-blob-cache-control", x_ms_blob_cache_control); } if let Some(x_ms_meta) = x_ms_meta { req_builder = req_builder.header("x-ms-meta", x_ms_meta); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_blob_content_disposition) = x_ms_blob_content_disposition { req_builder = req_builder.header("x-ms-blob-content-disposition", x_ms_blob_content_disposition); } if let Some(x_ms_encryption_key) = x_ms_encryption_key 
{ req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm { req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm); } if let Some(x_ms_encryption_scope) = x_ms_encryption_scope { req_builder = req_builder.header("x-ms-encryption-scope", x_ms_encryption_scope); } if let Some(x_ms_access_tier) = x_ms_access_tier { req_builder = req_builder.header("x-ms-access-tier", x_ms_access_tier); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } if let Some(x_ms_source_if_modified_since) = x_ms_source_if_modified_since { req_builder = req_builder.header("x-ms-source-if-modified-since", x_ms_source_if_modified_since); } if let Some(x_ms_source_if_unmodified_since) = x_ms_source_if_unmodified_since { req_builder = req_builder.header("x-ms-source-if-unmodified-since", x_ms_source_if_unmodified_since); } if let Some(x_ms_source_if_match) = x_ms_source_if_match { req_builder = req_builder.header("x-ms-source-if-match", x_ms_source_if_match); } if let Some(x_ms_source_if_none_match) = x_ms_source_if_none_match { req_builder = req_builder.header("x-ms-source-if-none-match", x_ms_source_if_none_match); } if let Some(x_ms_source_if_tags) = x_ms_source_if_tags { req_builder = 
req_builder.header("x-ms-source-if-tags", x_ms_source_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(x_ms_source_content_md5) = x_ms_source_content_md5 { req_builder = req_builder.header("x-ms-source-content-md5", x_ms_source_content_md5); } if let Some(x_ms_tags) = x_ms_tags { req_builder = req_builder.header("x-ms-tags", x_ms_tags); } req_builder = req_builder.header("x-ms-copy-source", x_ms_copy_source); if let Some(x_ms_copy_source_blob_properties) = x_ms_copy_source_blob_properties { req_builder = req_builder.header("x-ms-copy-source-blob-properties", x_ms_copy_source_blob_properties.to_string()); } if let Some(x_ms_copy_source_authorization) = x_ms_copy_source_authorization { req_builder = req_builder.header("x-ms-copy-source-authorization", x_ms_copy_source_authorization); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(put_blob_from_url::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(put_blob_from_url::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| put_blob_from_url::Error::DeserializeError(source, rsp_body.clone()))?; Err(put_blob_from_url::Error::DefaultResponse { status_code, value: rsp_value, }) } } } 
// Error type for the `put_blob_from_url` operation.
pub mod put_blob_from_url { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: 
{0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } 
/// Stage Block operation: uploads one block (identified by `blockid`) of a
/// block blob, with the block contents serialized from `body` as JSON.
/// Expects HTTP 201 CREATED.
pub async fn stage_block( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, blockid: &str, content_length: i64, content_md5: Option<&str>, x_ms_content_crc64: Option<&str>, body: &serde_json::Value, timeout: Option<i64>, x_ms_lease_id: Option<&str>, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, x_ms_encryption_scope: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<(), stage_block::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=block", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(stage_block::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(stage_block::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); url.query_pairs_mut().append_pair("blockid", blockid); req_builder = req_builder.header("Content-Length", content_length); if let Some(content_md5) = content_md5 { req_builder = req_builder.header("Content-MD5", content_md5); } if let Some(x_ms_content_crc64) = x_ms_content_crc64 { req_builder = 
req_builder.header("x-ms-content-crc64", x_ms_content_crc64); } req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(body).map_err(stage_block::Error::SerializeError)?; if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_encryption_key) = x_ms_encryption_key { req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm { req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm); } if let Some(x_ms_encryption_scope) = x_ms_encryption_scope { req_builder = req_builder.header("x-ms-encryption-scope", x_ms_encryption_scope); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(stage_block::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(stage_block::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| stage_block::Error::DeserializeError(source, rsp_body.clone()))?; Err(stage_block::Error::DefaultResponse { status_code, value: rsp_value, }) } } } 
// Error type for the `stage_block` operation.
pub mod stage_block { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] 
DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } 
/// Stage Block From URL operation: stages a block whose contents are copied
/// server-side from `x_ms_copy_source` (optionally a sub-range via
/// `x_ms_source_range`). Sends an empty body and expects HTTP 201 CREATED.
pub async fn stage_block_from_url( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, blockid: &str, content_length: i64, x_ms_copy_source: &str, x_ms_source_range: Option<&str>, x_ms_source_content_md5: Option<&str>, x_ms_source_content_crc64: Option<&str>, timeout: Option<i64>, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, x_ms_encryption_scope: Option<&str>, x_ms_lease_id: Option<&str>, x_ms_source_if_modified_since: Option<&str>, x_ms_source_if_unmodified_since: Option<&str>, x_ms_source_if_match: Option<&str>, x_ms_source_if_none_match: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, x_ms_copy_source_authorization: Option<&str>, ) -> std::result::Result<(), stage_block_from_url::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=block&fromURL", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(stage_block_from_url::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await 
.map_err(stage_block_from_url::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); url.query_pairs_mut().append_pair("blockid", blockid); req_builder = req_builder.header("Content-Length", content_length); req_builder = req_builder.header("x-ms-copy-source", x_ms_copy_source); if let Some(x_ms_source_range) = x_ms_source_range { req_builder = req_builder.header("x-ms-source-range", x_ms_source_range); } if let Some(x_ms_source_content_md5) = x_ms_source_content_md5 { req_builder = req_builder.header("x-ms-source-content-md5", x_ms_source_content_md5); } if let Some(x_ms_source_content_crc64) = x_ms_source_content_crc64 { req_builder = req_builder.header("x-ms-source-content-crc64", x_ms_source_content_crc64); } if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_encryption_key) = x_ms_encryption_key { req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm { req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm); } if let Some(x_ms_encryption_scope) = x_ms_encryption_scope { req_builder = req_builder.header("x-ms-encryption-scope", x_ms_encryption_scope); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_source_if_modified_since) = x_ms_source_if_modified_since { req_builder = req_builder.header("x-ms-source-if-modified-since", x_ms_source_if_modified_since); } if let Some(x_ms_source_if_unmodified_since) = x_ms_source_if_unmodified_since { req_builder = 
req_builder.header("x-ms-source-if-unmodified-since", x_ms_source_if_unmodified_since); } if let Some(x_ms_source_if_match) = x_ms_source_if_match { req_builder = req_builder.header("x-ms-source-if-match", x_ms_source_if_match); } if let Some(x_ms_source_if_none_match) = x_ms_source_if_none_match { req_builder = req_builder.header("x-ms-source-if-none-match", x_ms_source_if_none_match); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(x_ms_copy_source_authorization) = x_ms_copy_source_authorization { req_builder = req_builder.header("x-ms-copy-source-authorization", x_ms_copy_source_authorization); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(stage_block_from_url::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(stage_block_from_url::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| stage_block_from_url::Error::DeserializeError(source, rsp_body.clone()))?; Err(stage_block_from_url::Error::DefaultResponse { status_code, value: rsp_value, }) } } } 
// Error type for the `stage_block_from_url` operation.
pub mod stage_block_from_url { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] 
SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } 
/// Get Block List operation: GET that returns the committed and/or
/// uncommitted block list of a blob (selected via `blocklisttype`),
/// deserialized into `models::BlockList` on HTTP 200 OK.
pub async fn get_block_list( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, snapshot: Option<&str>, blocklisttype: &str, timeout: Option<i64>, x_ms_lease_id: Option<&str>, x_ms_if_tags: Option<&str>, x_ms_version: &str, x_ms_client_request_id: Option<&str>, ) -> std::result::Result<models::BlockList, get_block_list::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=blocklist", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(get_block_list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_block_list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(snapshot) = snapshot { url.query_pairs_mut().append_pair("snapshot", snapshot); } url.query_pairs_mut().append_pair("blocklisttype", blocklisttype); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = 
req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_block_list::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_block_list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::BlockList = serde_json::from_slice(rsp_body).map_err(|source| get_block_list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body).map_err(|source| get_block_list::Error::DeserializeError(source, rsp_body.clone()))?; Err(get_block_list::Error::DefaultResponse { status_code, value: rsp_value, }) } } } 
// Error type for the `get_block_list` operation.
pub mod get_block_list { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } 
/// Commit Block List operation: PUT that assembles previously staged blocks
/// (`blocks`, serialized as the JSON request body) into the final blob.
/// Expects HTTP 201 CREATED.
pub async fn commit_block_list( operation_config: &crate::OperationConfig, container_name: &str, blob: &str, comp: &str, timeout: Option<i64>, x_ms_blob_cache_control: Option<&str>, x_ms_blob_content_type: Option<&str>, x_ms_blob_content_encoding: Option<&str>, x_ms_blob_content_language: Option<&str>, x_ms_blob_content_md5: 
// (continuation of the `commit_block_list` parameter list and body)
Option<&str>, content_md5: Option<&str>, x_ms_content_crc64: Option<&str>, x_ms_meta: Option<&str>, x_ms_lease_id: Option<&str>, x_ms_blob_content_disposition: Option<&str>, x_ms_encryption_key: Option<&str>, x_ms_encryption_key_sha256: Option<&str>, x_ms_encryption_algorithm: Option<&str>, x_ms_encryption_scope: Option<&str>, x_ms_access_tier: Option<&str>, if_modified_since: Option<&str>, if_unmodified_since: Option<&str>, if_match: Option<&str>, if_none_match: Option<&str>, x_ms_if_tags: Option<&str>, blocks: &models::BlockLookupList, x_ms_version: &str, x_ms_client_request_id: Option<&str>, x_ms_tags: Option<&str>, x_ms_immutability_policy_until_date: Option<&str>, x_ms_immutability_policy_mode: Option<&str>, x_ms_legal_hold: Option<bool>, ) -> std::result::Result<(), commit_block_list::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/{}/{}?comp=blocklist", operation_config.base_path(), container_name, blob); let mut url = url::Url::parse(url_str).map_err(commit_block_list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(commit_block_list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("comp", comp); if let Some(timeout) = timeout { url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str()); } if let Some(x_ms_blob_cache_control) = x_ms_blob_cache_control { req_builder = req_builder.header("x-ms-blob-cache-control", x_ms_blob_cache_control); } if let Some(x_ms_blob_content_type) = x_ms_blob_content_type { req_builder = req_builder.header("x-ms-blob-content-type", x_ms_blob_content_type); } if let 
Some(x_ms_blob_content_encoding) = x_ms_blob_content_encoding { req_builder = req_builder.header("x-ms-blob-content-encoding", x_ms_blob_content_encoding); } if let Some(x_ms_blob_content_language) = x_ms_blob_content_language { req_builder = req_builder.header("x-ms-blob-content-language", x_ms_blob_content_language); } if let Some(x_ms_blob_content_md5) = x_ms_blob_content_md5 { req_builder = req_builder.header("x-ms-blob-content-md5", x_ms_blob_content_md5); } if let Some(content_md5) = content_md5 { req_builder = req_builder.header("Content-MD5", content_md5); } if let Some(x_ms_content_crc64) = x_ms_content_crc64 { req_builder = req_builder.header("x-ms-content-crc64", x_ms_content_crc64); } if let Some(x_ms_meta) = x_ms_meta { req_builder = req_builder.header("x-ms-meta", x_ms_meta); } if let Some(x_ms_lease_id) = x_ms_lease_id { req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id); } if let Some(x_ms_blob_content_disposition) = x_ms_blob_content_disposition { req_builder = req_builder.header("x-ms-blob-content-disposition", x_ms_blob_content_disposition); } if let Some(x_ms_encryption_key) = x_ms_encryption_key { req_builder = req_builder.header("x-ms-encryption-key", x_ms_encryption_key); } if let Some(x_ms_encryption_key_sha256) = x_ms_encryption_key_sha256 { req_builder = req_builder.header("x-ms-encryption-key-sha256", x_ms_encryption_key_sha256); } if let Some(x_ms_encryption_algorithm) = x_ms_encryption_algorithm { req_builder = req_builder.header("x-ms-encryption-algorithm", x_ms_encryption_algorithm); } if let Some(x_ms_encryption_scope) = x_ms_encryption_scope { req_builder = req_builder.header("x-ms-encryption-scope", x_ms_encryption_scope); } if let Some(x_ms_access_tier) = x_ms_access_tier { req_builder = req_builder.header("x-ms-access-tier", x_ms_access_tier); } if let Some(if_modified_since) = if_modified_since { req_builder = req_builder.header("If-Modified-Since", if_modified_since); } if let Some(if_unmodified_since) = 
if_unmodified_since { req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since); } if let Some(if_match) = if_match { req_builder = req_builder.header("If-Match", if_match); } if let Some(if_none_match) = if_none_match { req_builder = req_builder.header("If-None-Match", if_none_match); } if let Some(x_ms_if_tags) = x_ms_if_tags { req_builder = req_builder.header("x-ms-if-tags", x_ms_if_tags); } req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(blocks).map_err(commit_block_list::Error::SerializeError)?; req_builder = req_builder.header("x-ms-version", x_ms_version); if let Some(x_ms_client_request_id) = x_ms_client_request_id { req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id); } if let Some(x_ms_tags) = x_ms_tags { req_builder = req_builder.header("x-ms-tags", x_ms_tags); } if let Some(x_ms_immutability_policy_until_date) = x_ms_immutability_policy_until_date { req_builder = req_builder.header("x-ms-immutability-policy-until-date", x_ms_immutability_policy_until_date); } if let Some(x_ms_immutability_policy_mode) = x_ms_immutability_policy_mode { req_builder = req_builder.header("x-ms-immutability-policy-mode", x_ms_immutability_policy_mode); } if let Some(x_ms_legal_hold) = x_ms_legal_hold { req_builder = req_builder.header("x-ms-legal-hold", x_ms_legal_hold.to_string()); } req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(commit_block_list::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(commit_block_list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: models::StorageError = serde_json::from_slice(rsp_body) .map_err(|source| commit_block_list::Error::DeserializeError(source, rsp_body.clone()))?; Err(commit_block_list::Error::DefaultResponse { status_code, value: rsp_value, }) } } } 
// Error type for the `commit_block_list` operation.
pub mod commit_block_list { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::StorageError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
use std::sync::Arc; use rosu_v2::prelude::{Score, User}; use twilight_model::channel::Message; use crate::{embeds::RecentListEmbed, BotResult, Context}; use super::{Pages, Pagination}; pub struct RecentListPagination { ctx: Arc<Context>, msg: Message, pages: Pages, user: User, scores: Vec<Score>, } impl RecentListPagination { pub fn new(msg: Message, user: User, scores: Vec<Score>, ctx: Arc<Context>) -> Self { Self { msg, user, pages: Pages::new(10, scores.len()), scores, ctx, } } } #[async_trait] impl Pagination for RecentListPagination { type PageData = RecentListEmbed; fn msg(&self) -> &Message { &self.msg } fn pages(&self) -> Pages { self.pages } fn pages_mut(&mut self) -> &mut Pages { &mut self.pages } fn single_step(&self) -> usize { self.pages.per_page } async fn final_processing(mut self, ctx: &Context) -> BotResult<()> { // Set maps on garbage collection list if unranked for map in self.scores.iter().filter_map(|s| s.map.as_ref()) { ctx.map_garbage_collector(map).execute(ctx); } Ok(()) } async fn build_page(&mut self) -> BotResult<Self::PageData> { let scores = self.scores.iter().skip(self.pages.index).take(10); RecentListEmbed::new( &self.user, scores, &self.ctx, (self.page(), self.pages.total_pages), ) .await } }
use crate::client::mix_traffic::MixMessage;
use crate::client::topology_control::TopologyAccessor;
use crate::client::InputMessage;
use futures::channel::mpsc;
use futures::task::{Context, Poll};
use futures::{Future, Stream, StreamExt};
use log::{error, info, trace, warn};
use sphinx::route::Destination;
use std::pin::Pin;
use std::time::Duration;
use tokio::runtime::Handle;
use tokio::task::JoinHandle;
use tokio::time;
use topology::NymTopology;

/// Drives outbound mix traffic at a Poisson-sampled rate: on every tick it
/// either forwards a queued real message or emits a cover (dummy) message,
/// so an observer sees a constant-rate stream regardless of real activity.
pub(crate) struct OutQueueControl<T: NymTopology> {
    /// Average per-hop delay embedded into each sphinx packet.
    average_packet_delay: Duration,
    /// Average interval between consecutive sends (Poisson mean).
    average_message_sending_delay: Duration,
    /// Timer for the next send; reset after every tick.
    next_delay: time::Delay,
    /// Channel towards the actual mixnet client that puts packets on the wire.
    mix_tx: mpsc::UnboundedSender<MixMessage>,
    /// Incoming real messages queued by the user.
    input_rx: mpsc::UnboundedReceiver<InputMessage>,
    /// Our own sphinx destination (used as the loop-cover target).
    our_info: Destination,
    topology_access: TopologyAccessor<T>,
}

/// What to send on a given tick.
pub(crate) enum StreamMessage {
    /// Loop cover (dummy) traffic.
    Cover,
    /// An actual queued user message.
    Real(InputMessage),
}

impl<T: NymTopology> Stream for OutQueueControl<T> {
    type Item = StreamMessage;

    /// Yields exactly one item per expired delay: a real message if one is
    /// queued, otherwise a cover message. Returns `None` only when the input
    /// channel is closed.
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        // it is not yet time to return a message
        if Pin::new(&mut self.next_delay).poll(cx).is_pending() {
            return Poll::Pending;
        };

        // we know it's time to send a message, so let's prepare delay for the next one
        // Get the `now` by looking at the current `delay` deadline
        // (anchoring on the deadline rather than Instant::now() avoids drift
        // when the task was woken late).
        let now = self.next_delay.deadline();
        let next_poisson_delay = mix_client::poisson::sample(self.average_message_sending_delay);

        // The next interval value is `next_poisson_delay` after the one that just
        // yielded.
        let next = now + next_poisson_delay;
        self.next_delay.reset(next);

        // decide what kind of message to send
        match Pin::new(&mut self.input_rx).poll_next(cx) {
            // in the case our real message channel stream was closed, we should also indicate we are closed
            // (and whoever is using the stream should panic)
            Poll::Ready(None) => Poll::Ready(None),
            // if there's an actual message - return it
            Poll::Ready(Some(real_message)) => Poll::Ready(Some(StreamMessage::Real(real_message))),
            // otherwise construct a dummy one
            Poll::Pending => Poll::Ready(Some(StreamMessage::Cover)),
        }
    }
}

impl<T: 'static + NymTopology> OutQueueControl<T> {
    pub(crate) fn new(
        mix_tx: mpsc::UnboundedSender<MixMessage>,
        input_rx: mpsc::UnboundedReceiver<InputMessage>,
        our_info: Destination,
        topology_access: TopologyAccessor<T>,
        average_packet_delay: Duration,
        average_message_sending_delay: Duration,
    ) -> Self {
        OutQueueControl {
            average_packet_delay,
            average_message_sending_delay,
            // Placeholder zero-delay; the real initial delay is set in
            // `run_out_queue_control` so the clock starts when the stream does.
            next_delay: time::delay_for(Default::default()),
            mix_tx,
            input_rx,
            our_info,
            topology_access,
        }
    }

    /// Encapsulate one tick's message into a sphinx packet and hand it to the
    /// mix sender. Real messages are silently dropped if no valid topology or
    /// encapsulation fails (see TODOs below).
    async fn on_message(&mut self, next_message: StreamMessage) {
        trace!("created new message");

        let route = match self.topology_access.random_route().await {
            None => {
                warn!("No valid topology detected - won't send any real or loop message this time");
                // TODO: this creates a potential problem: we can lose real messages if we were
                // unable to get topology, perhaps we should store them in some buffer?
                return;
            }
            Some(route) => route,
        };

        let next_packet = match next_message {
            // Cover traffic loops back to ourselves.
            StreamMessage::Cover => mix_client::packet::loop_cover_message_route(
                self.our_info.address.clone(),
                self.our_info.identifier,
                route,
                self.average_packet_delay,
            ),
            StreamMessage::Real(real_message) => mix_client::packet::encapsulate_message_route(
                real_message.0,
                real_message.1,
                route,
                self.average_packet_delay,
            ),
        };

        let next_packet = match next_packet {
            Ok(message) => message,
            Err(err) => {
                error!(
                    "Somehow we managed to create an invalid traffic message - {:?}",
                    err
                );
                return;
            }
        };

        // if this one fails, there's no retrying because it means that either:
        // - we run out of memory
        // - the receiver channel is closed
        // in either case there's no recovery and we can only panic
        self.mix_tx
            .unbounded_send(MixMessage::new(next_packet.0, next_packet.1))
            .unwrap();
    }

    /// Main loop: consume the `Stream` impl above until the input channel closes.
    pub(crate) async fn run_out_queue_control(mut self) {
        // we should set initial delay only when we actually start the stream
        self.next_delay = time::delay_for(mix_client::poisson::sample(
            self.average_message_sending_delay,
        ));

        info!("starting out queue controller");
        while let Some(next_message) = self.next().await {
            self.on_message(next_message).await;
        }
    }

    /// Spawn the control loop onto `handle`.
    pub(crate) fn start(self, handle: &Handle) -> JoinHandle<()> {
        handle.spawn(async move { self.run_out_queue_control().await })
    }
}
extern crate regex;

use std::env;
use std::net::IpAddr;
use std::str::FromStr;

/// HTTP method of the incoming CGI request; anything other than GET/POST is `Invalid`.
#[derive(Debug)]
pub enum RequestMethod {
    Invalid,
    Get,
    Post,
}

impl Default for RequestMethod {
    fn default() -> RequestMethod {
        RequestMethod::Invalid
    }
}

/// A CGI request, assembled from environment variables set by the web server.
#[derive(Default, Debug)]
pub struct Request {
    // REMOTE_ADDR as provided by the server.
    pub remote_address: String,
    pub method: RequestMethod,
    // `host=` value from the query string ("" if absent/invalid).
    pub host: String,
    // `ip=` value from the query string ("" if absent/invalid).
    pub ip: String,
    // True when the query string contains the literal "debug=true".
    pub debug: bool,
}

impl Request {
    /// Build a `Request` from the CGI environment (REMOTE_ADDR, REQUEST_METHOD,
    /// QUERY_STRING). Returns `Err` with a static message if any of the three
    /// variables is missing.
    pub fn parse() -> Result<Request, &'static str> {
        /***** Retrieve CGI variables from the environment *****/
        let remote_addr = match env::var("REMOTE_ADDR") {
            Ok(val) => val,
            Err(_err) => return Err("Failed to retrieve REMOTE_ADDR"),
        };

        let request_method = match env::var("REQUEST_METHOD") {
            Ok(val) => match val.as_str() {
                "GET" => RequestMethod::Get,
                "POST" => RequestMethod::Post,
                &_ => RequestMethod::Invalid,
            },
            Err(_err) => return Err("Failed to retrieve REQUEST_METHOD"),
        };

        let query_str = match env::var("QUERY_STRING") {
            Ok(val) => val,
            Err(_err) => return Err("Failed to retrieve QUERY_STRING"),
        };

        Ok(Request {
            remote_address: remote_addr,
            method: request_method,
            host: parse_host_from_query_string(&query_str),
            ip: parse_ip_from_query_string(&query_str),
            debug: query_str.contains("debug=true"),
        })
    }
}

/// Extract the first `host=` value (word characters only); "" when absent.
// NOTE(review): the regex is recompiled on every call — consider compiling it
// once (e.g. lazily in a static) if this is on a hot path.
fn parse_host_from_query_string(query_str: &str) -> String {
    // DNS label has a limit of 63 characters
    let host_re = regex::Regex::new(r"host=(\w{1,63})").unwrap();
    let host_str = host_re.captures(&query_str).map_or("", |caps| {
        caps.get(1).map_or("", |cap| cap.as_str())
    });
    String::from(host_str)
}

/// Extract the first `ip=` value and validate it parses as an `IpAddr`;
/// "" when absent or invalid.
// NOTE(review): same per-call regex compilation as above.
fn parse_ip_from_query_string(query_str: &str) -> String {
    // IP address could be IPv4 or IPv6
    let ip_re = regex::Regex::new(r"ip=([0-9a-fA-F.:]{1,45})").unwrap();
    let ip_str = ip_re.captures(&query_str).map_or("", |caps| {
        caps.get(1).map_or("", |cap| {
            // Try to create an IpAddr object to validate the value
            match IpAddr::from_str(cap.as_str()) {
                Ok(_val) => cap.as_str(),
                Err(_error) => "",
            }
        })
    });
    String::from(ip_str)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn valid_host() {
        assert_eq!("test", parse_host_from_query_string("host=test&ip=192.168.100.1&extraparam=garbage"));
    }

    #[test]
    fn valid_ipv4() {
        assert_eq!("192.168.100.1", parse_ip_from_query_string("host=test&ip=192.168.100.1&extraparam=garbage"));
    }

    #[test]
    fn valid_ipv6() {
        assert_eq!("fc00::1", parse_ip_from_query_string("host=test&ip=fc00::1&extraparam=garbage"));
    }

    #[test]
    fn invalid_host() {
        // `\w` stops at '.', so only the leading label would match — the full
        // dotted name must not be returned.
        assert_ne!("test123.invalid", parse_host_from_query_string("host=test123.invalid"));
    }

    #[test]
    fn invalid_ipv4() {
        assert_eq!("", parse_ip_from_query_string("ip=999.168.100.1"));
    }

    #[test]
    fn invalid_ipv6() {
        assert_eq!("", parse_ip_from_query_string("ip=fc00:1.1.1.1:1"));
    }

    #[test]
    fn no_host() {
        assert_eq!("", parse_host_from_query_string("ip=1.1.1.1"));
    }

    #[test]
    fn no_ip() {
        assert_eq!("", parse_ip_from_query_string("host=test"));
    }
}
use std::fs;

/// Parse one integer per line. Panics on malformed input (acceptable for a
/// puzzle-input file).
fn parse(contents: &str) -> Vec<i64> {
    contents
        .lines()
        .map(|x| x.parse::<i64>().unwrap())
        .collect()
}

/// Returns true if `f` is the sum of two entries of `arr` at *distinct*
/// positions (two equal values at different positions are allowed).
///
/// Bug fix: the original iterated `arr × arr` including `i == j`, so any
/// `f == 2 * x` with a single occurrence of `x` was wrongly accepted. The
/// puzzle (AoC 2020 day 9) requires two different previous numbers.
fn is_ok(f: i64, arr: &[i64]) -> bool {
    arr.iter()
        .enumerate()
        .any(|(i, &a)| arr[i + 1..].iter().any(|&b| a + b == f))
}

/// Part 1: first number that is not the sum of two distinct numbers among the
/// `preamble` values immediately before it. Panics if every number checks out.
fn analyze(preamble: usize, numbers: &[i64]) -> i64 {
    for w in numbers.windows(preamble + 1) {
        // Last element is the candidate; the rest is its preamble window.
        let target = w[preamble];
        if !is_ok(target, &w[..preamble]) {
            return target;
        }
    }
    unreachable!("every number was a valid pair sum");
}

/// Part 2: find a contiguous run (length >= 2) summing to `target` and return
/// min + max of that run. Panics if no such run exists.
fn find_cont_sum(numbers: &[i64], target: i64) -> i64 {
    for window_size in 2..numbers.len() {
        for window in numbers.windows(window_size) {
            if window.iter().sum::<i64>() == target {
                // Windows of size >= 2 are never empty, so unwrap is safe.
                return window.iter().max().unwrap() + window.iter().min().unwrap();
            }
        }
    }
    unreachable!("no contiguous range sums to the target");
}

fn main() {
    let contents = fs::read_to_string("input.txt")
        .expect("error loading file");
    let stream = parse(&contents);
    let result1 = analyze(25, &stream);
    println!("result 1 = {}", &result1);
    let result2 = find_cont_sum(&stream, result1);
    println!("result 2 = {}", &result2);
}
//! Collect ISBNs from across the data sources.
use std::collections::HashMap;
use std::fmt::Debug;
use std::fs::read_to_string;

use serde::Deserialize;
use toml;

use friendly::bytes;

use crate::ids::collector::KeyCollector;
use crate::prelude::Result;
use crate::prelude::*;
use polars::prelude::*;

/// Collect ISBNs from across the data sources.
#[derive(Args, Debug)]
#[command(name = "collect-isbns")]
pub struct CollectISBNs {
    /// Path to the output file (in Parquet format)
    #[arg(short = 'o', long = "output")]
    out_file: PathBuf,

    /// path to the ISBN source definition file (in TOML format)
    #[arg(name = "DEFS")]
    source_file: PathBuf,
}

/// A named source entry in the TOML spec: either a single source table or a
/// list of them (serde picks whichever shape deserializes).
#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum MultiSource {
    Single(ISBNSource),
    Multi(Vec<ISBNSource>),
}

/// One file to pull ISBNs from; `column` defaults to "isbn" when omitted.
#[derive(Deserialize, Debug)]
struct ISBNSource {
    path: String,
    #[serde(default)]
    column: Option<String>,
}

/// The type of ISBN source set specifications.
type SourceSet = HashMap<String, MultiSource>;

impl MultiSource {
    /// Flatten single-or-many into a uniform list of borrowed sources.
    fn to_list(&self) -> Vec<&ISBNSource> {
        match self {
            MultiSource::Single(s) => vec![&s],
            MultiSource::Multi(ss) => ss.iter().map(|s| s).collect(),
        }
    }
}

/// Read a single ISBN source into the accumulator.
/// Format is chosen by extension: `.csv` is read with headers; anything else
/// is treated as Parquet.
fn read_source(kc: &mut KeyCollector, name: &str, src: &ISBNSource) -> Result<()> {
    let mut acc = kc.accum(name);
    let id_col = src.column.as_deref().unwrap_or("isbn");
    info!("reading ISBNs from {} (column {})", src.path, id_col);
    let df = if src.path.ends_with(".csv") {
        LazyCsvReader::new(src.path.to_string())
            .has_header(true)
            .finish()?
    } else {
        LazyFrame::scan_parquet(src.path.to_string(), Default::default())?
    };
    // Only the ISBN column matters; drop nulls before materializing.
    let df = df.select(&[col(id_col)]);
    let df = df.drop_nulls(None);
    let df = df.collect()?;
    let isbns = df.column(id_col)?.utf8()?;
    info!("adding {} ISBNs", isbns.len());
    // `flatten` skips any remaining None entries in the utf8 chunked array.
    acc.add_keys(isbns.into_iter().flatten());
    Ok(())
}

impl Command for CollectISBNs {
    /// Read the TOML spec, accumulate ISBNs from every listed source, and
    /// save the assigned ids to the Parquet output file.
    fn exec(&self) -> Result<()> {
        info!("reading spec from {}", self.source_file.display());
        let spec = read_to_string(&self.source_file)?;
        let spec: SourceSet = toml::de::from_str(&spec)?;

        let mut kc = KeyCollector::new();
        for (name, ms) in spec {
            for source in ms.to_list() {
                read_source(&mut kc, &name, source)?;
            }
        }

        info!("saving {} ISBNs to {}", kc.len(), self.out_file.display());
        let n = kc.save("isbn", "isbn_id", &self.out_file)?;
        info!("wrote {} rows in {}", n, bytes(file_size(&self.out_file)?));

        Ok(())
    }
}
use crate::error::UdpError;
use crate::state::State;
use crate::audio::output::AudioOutputMessage;
use futures_util::{SinkExt, StreamExt};
use log::*;
use mumble_protocol::crypt::ClientCryptState;
use mumble_protocol::voice::VoicePacket;
use mumble_protocol::control::msgs::CryptSetup;
use std::net::{Ipv6Addr, SocketAddr};
use std::sync::Arc;
use std::convert::TryInto;
use tokio::net::UdpSocket;
use tokio::sync::broadcast;
use tokio::sync::{watch, RwLock};
use tokio::time::{timeout, Duration};
use tokio_util::udp::UdpFramed;
use tokio::select;

/// Top-level UDP driver: waits until we have a server, a TCP connection, and
/// crypto material, then runs the voice socket / ping / server-watch tasks
/// until one of them ends, and starts over.
pub async fn handle(state: Arc<RwLock<State>>) -> Result<(), UdpError> {
    let state_lock = state.read().await;
    let mut server = state_lock.server_recv();
    let mut connected = state_lock.connected_recv();
    let mut crypt = state_lock.crypt_recv();
    drop(state_lock);

    loop {
        // wait not being in idle state
        if server.borrow().is_none() {
            server.changed().await.unwrap();
            continue;
        }
        //wait the server connection
        if !*connected.borrow() {
            connected.changed().await.unwrap();
            continue;
        }
        //wait the crypt
        if crypt.borrow().is_none() {
            crypt.changed().await.unwrap();
            continue;
        }
        let udp_framed = connect(&crypt).await?;
        let addr = match server.borrow().as_ref() {
            Some(server) => server.addr.clone(),
            None => continue, //disconnected
        };
        // NOTE(review): when one branch of this select! completes, the other
        // two JoinHandles are dropped, which *detaches* (not cancels) the
        // spawned tasks — they appear to keep running into the next loop
        // iteration. Consider keeping the handles and calling abort().
        select!(
            _ = tokio::spawn(handle_socket(Arc::clone(&state), udp_framed, addr)) => {},
            _ = tokio::spawn(send_pings(Arc::clone(&state))) => {},
            _ = tokio::spawn(check_changed_server(Arc::clone(&state), addr)) => {}
        );
        debug!("Fully disconnected UDP stream");
    }
}

//udp only care if we disconnect or addr changed
/// Completes (and thereby ends the select! in `handle`) once the configured
/// server address changes or the server goes away.
async fn check_changed_server(
    state: Arc<RwLock<State>>,
    addr: SocketAddr,
) {
    let mut server = state.read().await.server_recv();
    while let Ok(()) = server.changed().await {
        match server.borrow().as_ref() {
            //unchanged
            Some(server) if server.addr == addr => {},
            //changed
            _ => break,
        };
    }
}

/// Bind an ephemeral dual-stack UDP socket and wrap it with the current
/// crypto state.
async fn connect(
    crypt_state: &watch::Receiver<Option<Box<CryptSetup>>>,
) -> Result<UdpFramed<ClientCryptState>, UdpError> {
    // Bind UDP socket
    let udp_socket = UdpSocket::bind((Ipv6Addr::from(0u128), 0u16))
        .await?;
    // Wait for initial CryptState
    let crypt_state = new_crypt_state(crypt_state)?;
    debug!("UDP connected");
    // Wrap the raw UDP packets in Mumble's crypto and voice codec (CryptState does both)
    let udp_framed = UdpFramed::new(udp_socket, crypt_state);
    Ok(udp_framed)
}

/// Build a `ClientCryptState` from the latest CryptSetup message, failing if
/// none was received yet. Panics if the server sent wrongly-sized material
/// (protocol violation).
fn new_crypt_state(
    crypt: &watch::Receiver<Option<Box<CryptSetup>>>,
) -> Result<ClientCryptState, UdpError> {
    let key = crypt.borrow().to_owned();
    // disconnected before we received the CryptSetup packet, oh well
    let key = key.ok_or_else(|| UdpError::DisconnectBeforeCryptSetup)?;
    Ok(ClientCryptState::new_from(
        key.get_key()
            .try_into()
            .expect("Server sent private key with incorrect size"),
        key.get_client_nonce()
            .try_into()
            .expect("Server sent client_nonce with incorrect size"),
        key.get_server_nonce()
            .try_into()
            .expect("Server sent server_nonce with incorrect size"),
    ))
}

//TODO break this function in three: send packet, recv packet, crypt
/// Voice socket loop: route incoming audio/pings, send queued outgoing audio,
/// and hot-swap the crypto codec when new CryptSetup arrives. Always returns
/// `Err` with the reason the loop ended, after handing the audio receiver back
/// to shared state.
async fn handle_socket(
    state: Arc<RwLock<State>>,
    mut source: UdpFramed<ClientCryptState>,
    addr: SocketAddr,
) -> Result<(), UdpError> {
    let state_lock = state.read().await;
    let audio_send = state.read().await.get_audio_sink_sender();
    let mut udp_audio = state_lock.get_udp_sink_receiver().await.unwrap();
    let mut crypt = state_lock.crypt_recv();
    drop(state_lock);

    //carefull not to return without giving udp_audio back
    let error = loop {
        select!(
            packet_net = source.next() => {
                let (packet, _src_addr) = match packet_net {
                    Some(Ok(packet)) => packet,
                    Some(Err(err)) => {
                        warn!("Got an invalid UDP packet: {}", err);
                        // To be expected, considering this is the internet, just ignore it
                        continue;
                    },
                    None => {
                        //socket closed
                        break UdpError::GenericError;
                    },
                };
                match packet {
                    VoicePacket::Ping { timestamp } => {
                        // Fan the pong out to whoever is measuring UDP liveness
                        // (see recv_ping / send_pings below).
                        state.read().await.udp_ping_broadcast_send(timestamp);
                    }
                    VoicePacket::Audio {
                        session_id,
                        //target,
                        seq_num, //TODO check packet seq
                        payload,
                        // position_info,
                        ..
                    } => {
                        //TODO verify seq_num
                        audio_send.send(AudioOutputMessage::VoicePacket {
                            user_id: session_id,
                            seq_num,
                            data: payload,
                        }).unwrap();
                    }
                }
            },
            packet = udp_audio.recv() => {
                //safe because there always be a state.udp_audio_send
                let packet = packet.unwrap();
                if let Err(_) = source.send((packet, addr)).await {
                    break UdpError::GenericError;
                }
            },
            _ = crypt.changed() => {
                //update crypt on the fly
                match new_crypt_state(&crypt) {
                    Err(_) => break UdpError::GenericError,
                    Ok(crypt_state) => *source.codec_mut() = crypt_state,
                }
            }
        );
    };
    // Return the receiver so the next connection attempt can reuse it.
    state.read().await.set_udp_sink_receiver(udp_audio).await;
    Err(error)
}

/// Resolves once the pong matching `last_ping` arrives; ignores stale pongs.
/// Panics if the ping broadcast channel is closed (broken invariant).
async fn recv_ping(
    last_ping: u64,
    ping_recv: &mut broadcast::Receiver<u64>,
) {
    loop {
        match ping_recv.recv().await {
            //ping received, return
            Ok(ping) if ping == last_ping => return,
            //wrong response, older ping received? wait for next
            Ok(_) => continue,
            Err(_) => panic!("Udp ping broadcast closed"),
        }
    }
}

/// Keep-alive loop: send a numbered ping, wait up to 1s for the matching pong,
/// and flip the link between UDP (pong seen) and TCP fallback (timeout).
// NOTE(review): there is no sleep between iterations — on a fast link this
// sends a new ping as soon as the previous pong arrives. Confirm whether a
// fixed ping interval was intended.
async fn send_pings(state: Arc<RwLock<State>>) {
    let state_lock = state.read().await;
    let send_packets = state_lock.get_udp_sink_sender();
    let mut ping_recv = state_lock.udp_ping_broadcast_receiver();
    drop(state_lock);

    let mut last_send = 0;
    loop {
        //send the ping
        send_packets.send(VoicePacket::Ping { timestamp: last_send }).unwrap();
        //check if we receive the packet before the timeout
        let received = timeout(
            Duration::from_secs(1), //timeout is 1s
            recv_ping(last_send, &mut ping_recv), //return if correct ping received
        ).await;
        match received {
            Ok(()) => {
                //ping received, using UDP
                state.read().await.set_link_udp(true).await;
            },
            Err(_) => {
                //timeout, use TCP instead
                state.read().await.set_link_udp(false).await;
            }
        }
        //change the ping id to avoid overlapping responses
        last_send = last_send.wrapping_add(1);
    }
}
// Declares the public `codic` submodule (implementation in codic.rs / codic/mod.rs).
pub mod codic;
pub const NAME: &'static str = "Audrey fforbes-Hamilton";
//! # 455. Assign Cookies
//! https://leetcode-cn.com/problems/assign-cookies/
//! Each child `i` has a greed factor `g[i]` (the minimum cookie size that
//! satisfies them) and each cookie `j` has a size `s[j]`. A cookie can be
//! given to at most one child, and child `i` is content iff `s[j] >= g[i]`.
//! Maximize the number of content children.
//!
//! # Approach
//! Sort both lists ascending and walk them greedily with two pointers:
//! always try to satisfy the least greedy remaining child with the smallest
//! cookie that fits. Cookies too small for the current child are discarded —
//! they cannot fit any greedier child either.

pub struct Solution;

impl Solution {
    pub fn find_content_children(mut g: Vec<i32>, mut s: Vec<i32>) -> i32 {
        g.sort();
        s.sort();

        let mut satisfied = 0;
        let mut cookie = 0;

        for appetite in g {
            // Skip cookies that are too small for this (least greedy) child.
            while cookie < s.len() && s[cookie] < appetite {
                cookie += 1;
            }
            // Out of cookies — no further child can be satisfied.
            if cookie == s.len() {
                break;
            }
            // s[cookie] >= appetite: assign it and move on.
            satisfied += 1;
            cookie += 1;
        }

        satisfied
    }
}

#[cfg(test)]
mod tests {
    #[test]
    fn it_works() {
        // Three children with greed 1, 2, 3 and two cookies of size 1:
        // only the child with greed 1 can be satisfied, so the answer is 1.
        assert_eq!(
            super::Solution::find_content_children(vec![1, 2, 3], vec![1, 1]),
            1
        );
    }
}
use spair::prelude::*;

/// Borrowing wrapper that renders a `FetchError` for display.
pub struct Error<'a>(pub &'a spair::FetchError);

impl<'a, C: spair::Component> spair::Render<C> for Error<'a> {
    /// Render the error's Display text inside `<div><span>…</span></div>`.
    fn render(self, nodes: spair::Nodes<C>) {
        nodes.div(|d| {
            d.nodes()
                // Stringify the error once; spair renders it as the span's text node.
                .span(|s| s.nodes().render(&self.0.to_string()).done());
        });
    }
}
use std::{
    net::{IpAddr, Ipv6Addr},
    usize,
};

use serde::{Deserialize, Serialize};

/// Service configuration, deserialized from a YAML file.
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub struct Config {
    /// Address to bind to.
    pub ip: std::net::IpAddr,
    /// Port to listen on.
    pub port: u16,
    /// Maximum accepted length (units per consumer of this field).
    pub max_length: u64,
    /// Database cache capacity (units per consumer of this field).
    pub db_cache_capacity: u64,
}

impl Default for Config {
    fn default() -> Self {
        Config {
            // IPv6 loopback (`::1`) — named constant instead of spelled-out segments.
            ip: IpAddr::V6(Ipv6Addr::LOCALHOST),
            port: 3999,
            max_length: 5_000_000,
            db_cache_capacity: 5_000_000,
        }
    }
}

impl Config {
    /// Load the configuration from `path` (default: `config.yaml`).
    ///
    /// Returns `None` when the file cannot be read (e.g. it does not exist),
    /// so callers can fall back to `Config::default()`.
    ///
    /// # Panics
    /// Panics with a descriptive message if the file exists but is not valid
    /// YAML for this structure (previously a bare `unwrap`).
    pub async fn load(path: Option<&str>) -> Option<Self> {
        let file = tokio::fs::read_to_string(path.unwrap_or("config.yaml")).await;
        match file {
            Ok(contents) => Some(
                serde_yaml::from_str(&contents)
                    .expect("config file exists but could not be parsed as YAML"),
            ),
            Err(_) => None,
        }
    }
}
// Demonstrates shadowing: the loop variable `index` shadows the outer binding
// only inside the loop body. Output: "0 1 2 3 :8".
fn main() {
    let index = 8;
    for index in 0..4 {
        // This is the loop's own `index` (0..=3), not the outer one.
        print!("{} ", index);
    }
    // The outer `index` was never touched by the loop.
    println!(":{}", index);
}
extern crate advent_of_code_2017_day_11;

use advent_of_code_2017_day_11::*;

// Integration tests for AoC 2017 day 11 part 1 (hex-grid walk given as a
// comma-separated step list); expected distances come from the puzzle
// statement's worked examples.

#[test]
fn part_1_example_1() {
    assert_eq!(solve_puzzle_part_1("ne,ne,ne"), "3");
}

#[test]
fn part_1_example_2() {
    // Opposite steps cancel out completely.
    assert_eq!(solve_puzzle_part_1("ne,ne,sw,sw"), "0");
}

#[test]
fn part_1_example_3() {
    assert_eq!(solve_puzzle_part_1("ne,ne,s,s"), "2");
}

#[test]
fn part_1_example_4() {
    assert_eq!(solve_puzzle_part_1("se,sw,se,sw,sw"), "3");
}
//! DMA sink

use imxrt_hal::{
    dma::{Channel, Circular, Peripheral, WriteHalf},
    iomuxc,
    uart::Tx,
};

/// DMA output
type Output<M> = Peripheral<Tx<M>, u8, Circular<u8>>;

/// DMA transfer sink over one of the eight i.MX RT UART transmit halves.
///
/// Every variant wraps the same `Peripheral` type, differing only in which
/// UART instance it drives; each method below just fans the call out to the
/// wrapped peripheral.
pub enum Sink {
    _1(Output<iomuxc::consts::U1>),
    _2(Output<iomuxc::consts::U2>),
    _3(Output<iomuxc::consts::U3>),
    _4(Output<iomuxc::consts::U4>),
    _5(Output<iomuxc::consts::U5>),
    _6(Output<iomuxc::consts::U6>),
    _7(Output<iomuxc::consts::U7>),
    _8(Output<iomuxc::consts::U8>),
}

impl Sink {
    /// Whether the wrapped peripheral's transfer interrupt is asserted.
    pub fn is_transfer_interrupt(&self) -> bool {
        match self {
            Sink::_1(periph) => periph.is_transfer_interrupt(),
            Sink::_2(periph) => periph.is_transfer_interrupt(),
            Sink::_3(periph) => periph.is_transfer_interrupt(),
            Sink::_4(periph) => periph.is_transfer_interrupt(),
            Sink::_5(periph) => periph.is_transfer_interrupt(),
            Sink::_6(periph) => periph.is_transfer_interrupt(),
            Sink::_7(periph) => periph.is_transfer_interrupt(),
            Sink::_8(periph) => periph.is_transfer_interrupt(),
        }
    }

    /// Clear the pending transfer interrupt on the wrapped peripheral.
    pub fn transfer_clear_interrupt(&mut self) {
        match self {
            Sink::_1(periph) => periph.transfer_clear_interrupt(),
            Sink::_2(periph) => periph.transfer_clear_interrupt(),
            Sink::_3(periph) => periph.transfer_clear_interrupt(),
            Sink::_4(periph) => periph.transfer_clear_interrupt(),
            Sink::_5(periph) => periph.transfer_clear_interrupt(),
            Sink::_6(periph) => periph.transfer_clear_interrupt(),
            Sink::_7(periph) => periph.transfer_clear_interrupt(),
            Sink::_8(periph) => periph.transfer_clear_interrupt(),
        }
    }

    /// Whether the current DMA transfer has completed.
    pub fn is_transfer_complete(&self) -> bool {
        match self {
            Sink::_1(periph) => periph.is_transfer_complete(),
            Sink::_2(periph) => periph.is_transfer_complete(),
            Sink::_3(periph) => periph.is_transfer_complete(),
            Sink::_4(periph) => periph.is_transfer_complete(),
            Sink::_5(periph) => periph.is_transfer_complete(),
            Sink::_6(periph) => periph.is_transfer_complete(),
            Sink::_7(periph) => periph.is_transfer_complete(),
            Sink::_8(periph) => periph.is_transfer_complete(),
        }
    }

    /// Complete the transfer, recovering the circular buffer.
    /// (`None` presumably when no buffer is held — confirm against the
    /// imxrt_hal `Peripheral::transfer_complete` docs.)
    pub fn transfer_complete(&mut self) -> Option<Circular<u8>> {
        match self {
            Sink::_1(periph) => periph.transfer_complete(),
            Sink::_2(periph) => periph.transfer_complete(),
            Sink::_3(periph) => periph.transfer_complete(),
            Sink::_4(periph) => periph.transfer_complete(),
            Sink::_5(periph) => periph.transfer_complete(),
            Sink::_6(periph) => periph.transfer_complete(),
            Sink::_7(periph) => periph.transfer_complete(),
            Sink::_8(periph) => periph.transfer_complete(),
        }
    }

    /// Begin a DMA transfer out of `buffer`.
    /// Panics (with the UART number) if the peripheral rejects the transfer.
    pub fn start_transfer(&mut self, buffer: Circular<u8>) {
        match self {
            Sink::_1(periph) => periph
                .start_transfer(buffer)
                .expect("Start transfer UART1 failed"),
            Sink::_2(periph) => periph
                .start_transfer(buffer)
                .expect("Start transfer UART2 failed"),
            Sink::_3(periph) => periph
                .start_transfer(buffer)
                .expect("Start transfer UART3 failed"),
            Sink::_4(periph) => periph
                .start_transfer(buffer)
                .expect("Start transfer UART4 failed"),
            Sink::_5(periph) => periph
                .start_transfer(buffer)
                .expect("Start transfer UART5 failed"),
            Sink::_6(periph) => periph
                .start_transfer(buffer)
                .expect("Start transfer UART6 failed"),
            Sink::_7(periph) => periph
                .start_transfer(buffer)
                .expect("Start transfer UART7 failed"),
            Sink::_8(periph) => periph
                .start_transfer(buffer)
                .expect("Start transfer UART8 failed"),
        }
    }

    /// Writable half of the circular buffer, when available.
    pub fn write_half(&mut self) -> Option<WriteHalf<u8>> {
        match self {
            Sink::_1(periph) => periph.write_half(),
            Sink::_2(periph) => periph.write_half(),
            Sink::_3(periph) => periph.write_half(),
            Sink::_4(periph) => periph.write_half(),
            Sink::_5(periph) => periph.write_half(),
            Sink::_6(periph) => periph.write_half(),
            Sink::_7(periph) => periph.write_half(),
            Sink::_8(periph) => periph.write_half(),
        }
    }
}

/// Pair a UART transmit half with a DMA channel to form a `Sink`.
pub trait IntoSink {
    fn into_sink(self, channel: Channel) -> Sink;
}

impl IntoSink for Tx<iomuxc::consts::U1> {
    fn into_sink(self, channel: Channel) -> Sink {
        Sink::_1(Peripheral::new_transfer(self, channel))
    }
}

impl IntoSink for Tx<iomuxc::consts::U2> {
    fn into_sink(self, channel: Channel) -> Sink {
        Sink::_2(Peripheral::new_transfer(self, channel))
    }
}

impl IntoSink for Tx<iomuxc::consts::U3> {
    fn into_sink(self, channel: Channel) -> Sink {
        Sink::_3(Peripheral::new_transfer(self, channel))
    }
}

impl IntoSink for Tx<iomuxc::consts::U4> {
    fn into_sink(self, channel: Channel) -> Sink {
        Sink::_4(Peripheral::new_transfer(self, channel))
    }
}

impl IntoSink for Tx<iomuxc::consts::U5> {
    fn into_sink(self, channel: Channel) -> Sink {
        Sink::_5(Peripheral::new_transfer(self, channel))
    }
}

impl IntoSink for Tx<iomuxc::consts::U6> {
    fn into_sink(self, channel: Channel) -> Sink {
        Sink::_6(Peripheral::new_transfer(self, channel))
    }
}

impl IntoSink for Tx<iomuxc::consts::U7> {
    fn into_sink(self, channel: Channel) -> Sink {
        Sink::_7(Peripheral::new_transfer(self, channel))
    }
}

impl IntoSink for Tx<iomuxc::consts::U8> {
    fn into_sink(self, channel: Channel) -> Sink {
        Sink::_8(Peripheral::new_transfer(self, channel))
    }
}
use std::env::args;
use std::io;

// One module per Advent-of-Code day.
mod one;
mod two;
mod three;
mod four;
mod five;
mod six;
mod seven;
mod eight;
mod nine;
mod ten;

/// Dispatch on the first CLI argument (e.g. "3b") to the matching puzzle
/// solver, feeding it a locked stdin handle. Silently does nothing when no
/// argument is given.
fn main() {
    let args: Vec<_> = args().collect();
    if args.len() > 1 {
        let stdin = io::stdin();
        // Lock once so solvers can read lines without per-call locking.
        let buf = stdin.lock();
        match args[1].as_ref() {
            "1a" => println!("{}", one::one_a(buf)),
            "1b" => println!("{}", one::one_b(buf)),
            "2a" => println!("{}", two::two_a(buf)),
            "2b" => println!("{}", two::two_b(buf)),
            "3a" => println!("{}", three::three_a(buf)),
            "3b" => println!("{}", three::three_b(buf)),
            "4a" => println!("{}", four::four_a(buf)),
            "4b" => println!("{}", four::four_b(buf)),
            "5a" => println!("{}", five::five_a(buf)),
            "5b" => println!("{}", five::five_b(buf)),
            "6a" => println!("{}", six::six_a(buf)),
            // Extra arguments are puzzle parameters hard-coded at the call site.
            "6b" => println!("{}", six::six_b(buf, 10000)),
            "7a" => println!("{}", seven::seven_a(buf)),
            "7b" => println!("{}", seven::seven_b(buf, 5, 60)),
            "8a" => println!("{}", eight::eight_a(buf)),
            "8b" => println!("{}", eight::eight_b(buf)),
            "9a" => println!("{}", nine::nine_a(buf)),
            "9b" => println!("{}", nine::nine_b(buf)),
            // Day ten prints its result itself.
            "10" => ten::ten(buf),
            _ => println!("idk that"),
        }
    }
}
use std::cmp;

use crate::types::{Block, ColumnType};

/// Iterator that splits a borrowed `Block` into consecutive chunks of at most
/// `size` rows, copying the relevant row range of every column.
pub struct ChunkIterator<'a, K: ColumnType> {
    // Row offset of the next chunk.
    position: usize,
    // Maximum rows per chunk.
    size: usize,
    block: &'a Block<K>,
}

impl<'a, K: ColumnType> Iterator for ChunkIterator<'a, K> {
    type Item = Block;

    fn next(&mut self) -> Option<Block> {
        let m = self.block.row_count();

        // An empty block still yields exactly one (empty) chunk; bumping
        // `position` makes the next call fall into the `>= m` arm below.
        if m == 0 && self.position == 0 {
            self.position += 1;
            return Some(Block::default());
        }

        if self.position >= m {
            return None;
        }

        let mut result = Block::new();
        // Last chunk may be shorter than `size`.
        // NOTE(review): if `size == 0` this yields empty blocks forever
        // (`position` never advances) — consider guarding in `new`.
        let size = cmp::min(self.size, m - self.position);

        for column in self.block.columns().iter() {
            let range = self.position..self.position + size;
            let data = column.slice(range);
            result = result.column(column.name(), data);
        }

        self.position += size;
        Some(result)
    }
}

impl<'a, K: ColumnType> ChunkIterator<'a, K> {
    /// Create a chunker over `block` with at most `size` rows per chunk.
    pub fn new(size: usize, block: &Block<K>) -> ChunkIterator<K> {
        ChunkIterator {
            position: 0,
            size,
            block,
        }
    }
}
// NOTE: register-access boilerplate in svd2rust style (generated from the
// device SVD) — do not hand-edit; regenerate from the SVD instead.
#[doc = "Register `PTPSSIR` reader"]
pub type R = crate::R<PTPSSIR_SPEC>;
#[doc = "Register `PTPSSIR` writer"]
pub type W = crate::W<PTPSSIR_SPEC>;
#[doc = "Field `STSSI` reader - System time subsecond increment"]
pub type STSSI_R = crate::FieldReader;
#[doc = "Field `STSSI` writer - System time subsecond increment"]
pub type STSSI_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
impl R {
    #[doc = "Bits 0:7 - System time subsecond increment"]
    #[inline(always)]
    pub fn stssi(&self) -> STSSI_R {
        // STSSI occupies the low 8 bits of the register.
        STSSI_R::new((self.bits & 0xff) as u8)
    }
}
impl W {
    #[doc = "Bits 0:7 - System time subsecond increment"]
    #[inline(always)]
    #[must_use]
    pub fn stssi(&mut self) -> STSSI_W<PTPSSIR_SPEC, 0> {
        STSSI_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY: caller must uphold the register's field invariants.
        self.bits = bits;
        self
    }
}
#[doc = "Ethernet PTP subsecond increment register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ptpssir::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ptpssir::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct PTPSSIR_SPEC;
impl crate::RegisterSpec for PTPSSIR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ptpssir::R`](R) reader structure"]
impl crate::Readable for PTPSSIR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ptpssir::W`](W) writer structure"]
impl crate::Writable for PTPSSIR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets PTPSSIR to value 0"]
impl crate::Resettable for PTPSSIR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
// Tutorial walk-through of Rust control flow: if/else-if chains, if as an
// expression, loop-with-break-value, while, and for over iterators/ranges.
fn main() {
    // Blocks of code associated with the conditions in if expressions are sometimes called arms,
    // just like the arms in match expressions
    let number = 6;

    // Conditions are checked top to bottom; only the first true arm runs,
    // so 6 prints "divisible by 3" even though it is also divisible by 2.
    if number % 4 == 0 {
        // the condition in if must be a bool
        println!("number is divisible by 4");
    } else if number % 3 == 0 {
        println!("number is divisible by 3");
    } else if number % 2 == 0 {
        println!("number is divisible by 2");
    } else {
        println!("number is not divisible by 4, 3, or 2");
    }

    let condition = true;
    let number = if condition { 5 } else { 6 };
    // if is an expression, we can use it on the right side of a let statement
    // let number = if condition { 5 } else { "six" }; // If the types are mismatched, we'll get an error.
    // variables must have a single type. Rust needs to know at compile time what type the number variable is, definitively

    let mut counter = 0;
    let result = loop {
        counter += 1;
        if counter == 10 {
            break counter * 2;
        }
    };
    // uses of a loop is to retry an operation you know might fail, such as checking whether a thread has completed its job
    // can add the value you want returned after the break expression you use to stop the loop;
    // that value will be returned out of the loop so you can use it

    let mut number = 3;
    while number != 0 {
        // While a condition holds true, the code runs; otherwise, it exits the loop.
        println!("{}!", number);
        number -= 1;
    }

    let a = [10, 20, 30, 40, 50];
    for element in a.iter() {
        // we've now increased the safety of the code and eliminated the chance of bugs that might result from going beyond the end of the array or not going far enough and missing some items.
        println!("the value is: {}", element);
    }

    for number in (1..4).rev() {
        // use Range which is a type provided that generates all numbers in sequence starting from one number
        // and ending before another number and also rev method to reverse the range
        println!("{}!", number);
    }
}
// Public submodule declarations for this crate/module.
pub mod client;
pub mod parse_links;
pub mod parse_media;
pub mod parse_url;
use crate::token::{ TokenKind, Token }; /// Lexical Analyzer #[derive(Debug, Clone)] pub struct Lexier { input: String, position: usize, read_position: usize, ch: char, } impl Lexier { pub fn new(input: String) -> Lexier { let mut lexier: Lexier; lexier = Lexier { input: input, position: 0, read_position: 0, ch: ' ', }; lexier.read_char(); lexier } /// Tokenize input string pub fn next_token(&mut self) -> Token { let token: Token; self.skip(); match self.ch { '=' => { if self.peek_char() == '=' { token = Token {kind:TokenKind::Eq, literal: "==".to_string()}; self.read_char(); } else { token = Token {kind: TokenKind::Assign, literal: self.ch.to_string()}; } }, '+' => token = Token {kind: TokenKind::Plus, literal: self.ch.to_string()}, '-' => token = Token {kind: TokenKind::Minus, literal: self.ch.to_string()}, '!' => { if self.peek_char() == '=' { token = Token {kind: TokenKind::NotEq, literal: "!=".to_string()}; self.read_char(); } else { token = Token {kind: TokenKind::Bang, literal: self.ch.to_string()}; } }, '*' => token = Token {kind: TokenKind::Asterisk, literal: self.ch.to_string()}, '/' => token = Token {kind: TokenKind::Slash, literal: self.ch.to_string()}, '<' => token = Token {kind: TokenKind::Lt, literal: self.ch.to_string()}, '>' => token = Token {kind: TokenKind::Gt, literal: self.ch.to_string()}, '(' => token = Token {kind: TokenKind::Lparen, literal: self.ch.to_string()}, ')' => token = Token {kind: TokenKind::Rparen, literal: self.ch.to_string()}, '{' => token = Token {kind: TokenKind::Lbrace, literal: self.ch.to_string()}, '}' => token = Token {kind: TokenKind::Rbrace, literal: self.ch.to_string()}, ',' => token = Token {kind: TokenKind::Comma, literal: self.ch.to_string()}, ';' => token = Token {kind: TokenKind::Semicolon, literal: self.ch.to_string()}, '\0' => token = Token {kind: TokenKind::Eof, literal: "".to_string()}, 'a'...'z' | 'A' ... 'Z' | '_' => { let ident = self.read_identifier(); return self.lookup_ident(&ident) }, '0' ... 
'9' => return Token {kind: TokenKind::Integer, literal: self.read_integer()}, '"' => token = Token {kind: TokenKind::String, literal: self.read_string()}, '[' => token = Token {kind: TokenKind::Lbracket, literal: self.ch.to_string()}, ']' => token = Token {kind: TokenKind::Rbracket, literal: self.ch.to_string()}, _ => token = Token {kind: TokenKind::Illegal, literal: self.ch.to_string()}, } self.read_char(); token } /// Check whether ident is keywords, and return the suitable token. fn lookup_ident(&mut self, ident: &str) -> Token { match ident { "fn" => Token {kind: TokenKind::Function, literal: ident.to_string()}, "let" => Token {kind: TokenKind::Let, literal: ident.to_string()}, "true" => Token {kind: TokenKind::True, literal: ident.to_string()}, "false" => Token {kind: TokenKind::False, literal: ident.to_string()}, "if" => Token {kind: TokenKind::If, literal: ident.to_string()}, "else" => Token {kind: TokenKind::Else, literal: ident.to_string()}, "return" => Token {kind: TokenKind::Return, literal: ident.to_string()}, _ => Token {kind: TokenKind::Identifier, literal: ident.to_string()} } } /// Increment current position fn read_char(&mut self) { if self.read_position >= self.input.len() { self.ch = '\0'; } else { self.ch = self.input.chars() .skip(self.read_position).next().unwrap() } self.position = self.read_position; self.read_position += 1; } /// Read current character as identifier fn read_identifier(&mut self) -> String { let mut identifier = "".to_string(); while self.ch.is_alphabetic() || self.ch == '_' { identifier.push(self.ch); self.read_char(); } identifier } /// Read current character as integer fn read_integer(&mut self) -> String { let mut integer = "".to_string(); while self.ch.is_digit(10) { integer.push(self.ch); self.read_char(); } integer } /// Read string literal fn read_string(&mut self) -> String { let position = self.position + 1; let mut string = String::new(); self.read_char(); loop { if self.ch == '"' || self.ch == '\0' { break; } 
string.push(self.ch); self.read_char(); } string } /// Read peek character fn peek_char(&mut self) -> char{ if self.read_position >= self.input.len() { return '\0' } self.input.chars().nth(self.read_position).unwrap() } /// Skip meaningless character (e.x. whitespace) fn skip(&mut self) { if self.ch == ' ' || self.ch == '\t' || self.ch == '\n' || self.ch == '\r' { self.read_char(); } } } #[test] fn test_next_token() { let input = "\ foo;\ bar;\ let five = 5;\ let ten = 10;\ \ let add = fn(x, y) {\ x + y;\ };\ let result = add(five, ten);\ !-/*5 ;\ 5 < 10 > 5;\ if( 5 < 10 ) {\ return true;\ } else {\ return false;\ }\ 10 == 10;\ 10 != 9;\ \"foobar\"\ \"foo bar\"\ \"\"\ [1, 2];\ ".to_string(); let tests = [ Token { kind: TokenKind::Identifier, literal: "foo".to_string() }, Token { kind: TokenKind::Semicolon, literal: ";".to_string() }, Token { kind: TokenKind::Identifier, literal: "bar".to_string() }, Token { kind: TokenKind::Semicolon, literal: ";".to_string() }, Token { kind: TokenKind::Let, literal: "let".to_string() }, Token { kind: TokenKind::Identifier, literal: "five".to_string() }, Token { kind: TokenKind::Assign, literal: "=".to_string() }, Token { kind: TokenKind::Integer, literal: "5".to_string() }, Token { kind: TokenKind::Semicolon, literal: ";".to_string() }, Token { kind: TokenKind::Let, literal: "let".to_string() }, Token { kind: TokenKind::Identifier, literal: "ten".to_string() }, Token { kind: TokenKind::Assign, literal: "=".to_string() }, Token { kind: TokenKind::Integer, literal: "10".to_string() }, Token { kind: TokenKind::Semicolon, literal: ";".to_string() }, Token { kind: TokenKind::Let, literal: "let".to_string() }, Token { kind: TokenKind::Identifier, literal: "add".to_string() }, Token { kind: TokenKind::Assign, literal: "=".to_string() }, Token { kind: TokenKind::Function, literal: "fn".to_string() }, Token { kind: TokenKind::Lparen, literal: "(".to_string() }, Token { kind: TokenKind::Identifier, literal: "x".to_string() }, Token { kind: 
TokenKind::Comma, literal: ",".to_string() }, Token { kind: TokenKind::Identifier, literal: "y".to_string() }, Token { kind: TokenKind::Rparen, literal: ")".to_string() }, Token { kind: TokenKind::Lbrace, literal: "{".to_string() }, Token { kind: TokenKind::Identifier, literal: "x".to_string() }, Token { kind: TokenKind::Plus, literal: "+".to_string() }, Token { kind: TokenKind::Identifier, literal: "y".to_string() }, Token { kind: TokenKind::Semicolon, literal: ";".to_string() }, Token { kind: TokenKind::Rbrace, literal: "}".to_string() }, Token { kind: TokenKind::Semicolon, literal: ";".to_string() }, Token { kind: TokenKind::Let, literal: "let".to_string() }, Token { kind: TokenKind::Identifier, literal: "result".to_string() }, Token { kind: TokenKind::Assign, literal: "=".to_string() }, Token { kind: TokenKind::Identifier, literal: "add".to_string() }, Token { kind: TokenKind::Lparen, literal: "(".to_string() }, Token { kind: TokenKind::Identifier, literal: "five".to_string() }, Token { kind: TokenKind::Comma, literal: ",".to_string() }, Token { kind: TokenKind::Identifier, literal: "ten".to_string() }, Token { kind: TokenKind::Rparen, literal: ")".to_string() }, Token { kind: TokenKind::Semicolon, literal: ";".to_string() }, Token { kind: TokenKind::Bang, literal: "!".to_string() }, Token { kind: TokenKind::Minus, literal: "-".to_string() }, Token { kind: TokenKind::Slash, literal: "/".to_string() }, Token { kind: TokenKind::Asterisk, literal: "*".to_string() }, Token { kind: TokenKind::Integer, literal: "5".to_string() }, Token { kind: TokenKind::Semicolon, literal: ";".to_string() }, Token { kind: TokenKind::Integer, literal: "5".to_string() }, Token { kind: TokenKind::Lt, literal: "<".to_string() }, Token { kind: TokenKind::Integer, literal: "10".to_string() }, Token { kind: TokenKind::Gt, literal: ">".to_string() }, Token { kind: TokenKind::Integer, literal: "5".to_string() }, Token { kind: TokenKind::Semicolon, literal: ";".to_string() }, Token { kind: 
TokenKind::If, literal: "if".to_string() }, Token { kind: TokenKind::Lparen, literal: "(".to_string() }, Token { kind: TokenKind::Integer, literal: "5".to_string() }, Token { kind: TokenKind::Lt, literal: "<".to_string() }, Token { kind: TokenKind::Integer, literal: "10".to_string() }, Token { kind: TokenKind::Rparen, literal: ")".to_string() }, Token { kind: TokenKind::Lbrace, literal: "{".to_string() }, Token { kind: TokenKind::Return, literal: "return".to_string() }, Token { kind: TokenKind::True, literal: "true".to_string() }, Token { kind: TokenKind::Semicolon, literal: ";".to_string() }, Token { kind: TokenKind::Rbrace, literal: "}".to_string() }, Token { kind: TokenKind::Else, literal: "else".to_string() }, Token { kind: TokenKind::Lbrace, literal: "{".to_string() }, Token { kind: TokenKind::Return, literal: "return".to_string() }, Token { kind: TokenKind::False, literal: "false".to_string() }, Token { kind: TokenKind::Semicolon, literal: ";".to_string() }, Token { kind: TokenKind::Rbrace, literal: "}".to_string() }, Token { kind: TokenKind::Integer, literal: "10".to_string() }, Token { kind: TokenKind::Eq, literal: "==".to_string() }, Token { kind: TokenKind::Integer, literal: "10".to_string() }, Token { kind: TokenKind::Semicolon, literal: ";".to_string() }, Token { kind: TokenKind::Integer, literal: "10".to_string() }, Token { kind: TokenKind::NotEq, literal: "!=".to_string() }, Token { kind: TokenKind::Integer, literal: "9".to_string() }, Token { kind: TokenKind::Semicolon, literal: ";".to_string() }, Token { kind: TokenKind::String, literal: "foobar".to_string() }, Token { kind: TokenKind::String, literal: "foo bar".to_string() }, Token { kind: TokenKind::String, literal: "".to_string() }, Token { kind: TokenKind::Lbracket, literal: "[".to_string() }, Token { kind: TokenKind::Integer, literal: "1".to_string() }, Token { kind: TokenKind::Comma, literal: ",".to_string() }, Token { kind: TokenKind::Integer, literal: "2".to_string() }, Token { kind: 
TokenKind::Rbracket, literal: "]".to_string() }, Token { kind: TokenKind::Semicolon, literal: ";".to_string() }, Token { kind: TokenKind::Eof, literal: "".to_string() } ]; let mut lexier = Lexier::new(input); let mut token: Token; for test in tests.iter() { token = lexier.next_token(); //println!("input:\t{{ kind: {}, literal: {} }}", token.get_kind_literal(), token.literal); //println!("test:\t{{ kind: {}, literal: {} }}", test.get_kind_literal(), test.literal); if token.get_kind_literal() != test.get_kind_literal() { panic!("token.kind not {}. got={}", test.get_kind_literal(), token.get_kind_literal()); } if token.literal != test.literal { panic!("token.literal not {}. got={}", test.literal, token.literal); } } }
#[macro_use] extern crate serde_derive; #[macro_use] extern crate gumdrop; extern crate cert_machine; extern crate openssl; mod arg_parser; mod config_parser; mod kubernetes_certs; use config_parser::User; use config_parser::Instance; use std::collections::HashMap; use std::io::Write; use std::os::unix::fs::symlink; use std::process::exit; use std::fs::OpenOptions; use std::fs; use kubernetes_certs::gen_cert; use kubernetes_certs::CertType; use kubernetes_certs::gen_main_ca_cert; use cert_machine::Bundle; use kubernetes_certs::gen_ca_cert; use kubernetes_certs::write_bundle_to_file; use arg_parser::{CommandOptions, Command}; use config_parser::Config; use gumdrop::Options; pub struct CA { pub main_ca: Box<Bundle>, pub etcd_ca: Box<Bundle>, pub front_ca: Box<Bundle>, } impl CA { fn read_from_fs(dir: &str) -> CA { let main_ca_dir = format!("{}/CA/root", &dir); let etcd_ca_dir = format!("{}/CA/etcd", &dir); let front_ca_dir = format!("{}/CA/front-proxy", &dir); CA { main_ca: Bundle::read_from_fs(&main_ca_dir, "ca").unwrap(), etcd_ca: Bundle::read_from_fs(&etcd_ca_dir, "ca").unwrap(), front_ca: Bundle::read_from_fs(&front_ca_dir, "ca").unwrap(), } } } fn create_ca(config: &Config) -> Result<CA, &'static str> { println!("Creating CA with name: {}", config.cluster_name); let main_ca = match gen_main_ca_cert(&config) { Ok(bundle) => { let outdir = format!("{}/CA/root", &config.out_dir); let index_filename = format!("{}/index", &outdir); let mut file = OpenOptions::new().write(true) .create_new(true) .open(&index_filename) .unwrap(); let sn: u32 = 0; file.write_all(sn.to_string().as_bytes()).unwrap(); write_bundle_to_file(&bundle, &outdir, "ca", config.overwrite).unwrap(); bundle }, Err(error) => return Err(error), }; println!("Create CA: etcd"); let etcd_ca = match gen_ca_cert("etcd", Some(&main_ca), &config) { Ok(bundle) => { let outdir = format!("{}/CA/etcd", &config.out_dir); let index_filename = format!("{}/index", &outdir); let mut file = 
OpenOptions::new().write(true) .create_new(true) .open(&index_filename) .unwrap(); let sn: u32 = 0; file.write_all(sn.to_string().as_bytes()).unwrap(); write_bundle_to_file(&bundle, &outdir,"ca", config.overwrite).unwrap(); bundle }, Err(error) => return Err(error), }; println!("Create CA: front proxy"); let front_ca = match gen_ca_cert("front-proxy-ca", Some(&main_ca), &config) { Ok(bundle) => { let outdir = format!("{}/CA/front-proxy", &config.out_dir); let index_filename = format!("{}/index", &outdir); let mut file = OpenOptions::new().write(true) .create_new(true) .open(&index_filename) .unwrap(); let sn: u32 = 0; file.write_all(sn.to_string().as_bytes()).unwrap(); write_bundle_to_file(&bundle, &outdir, "ca", config.overwrite).unwrap(); bundle }, Err(error) => return Err(error), }; let root_ca_crt_symlink = format!("{}/master/ca.crt", &config.out_dir); let root_ca_key_symlink = format!("{}/master/ca.key", &config.out_dir); let etcd_ca_crt_symlink = format!("{}/master/etcd-ca.crt", &config.out_dir); let front_ca_crt_symlink = format!("{}/master/front-proxy-ca.crt", &config.out_dir); let front_ca_key_symlink = format!("{}/master/front-proxy-ca.key", &config.out_dir); symlink("../CA/root/certs/ca.crt", &root_ca_crt_symlink).unwrap(); symlink("../CA/root/keys/ca.key", &root_ca_key_symlink).unwrap(); symlink("../CA/etcd/certs/ca.crt", &etcd_ca_crt_symlink).unwrap(); symlink("../CA/front-proxy/certs/ca.crt", &front_ca_crt_symlink).unwrap(); symlink("../CA/front-proxy/keys/ca.key", &front_ca_key_symlink).unwrap(); Ok(CA { main_ca, etcd_ca, front_ca, }) } fn create_symlink(ca_dir: &str, cert_name: &str, dest: &str) { let types = vec![("key", "keys"), ("crt", "certs")]; for postfix in types.iter() { let source_filename = format!("{}/{}/{}.{}", &ca_dir, &postfix.1, &cert_name, &postfix.0); let dest_filename = format!("{}.{}", &dest, &postfix.0); if let Err(_) = symlink(&source_filename, &dest_filename) { match fs::symlink_metadata(&dest_filename) { Ok(ref metadata) => { 
match metadata.file_type().is_symlink() { true => { fs::remove_file(&dest_filename).unwrap(); symlink(&source_filename, &dest_filename).unwrap(); }, false => { eprintln!("Unable to create symlink. \"{}\" exists and not a symlink!", &dest_filename); exit(1); }, } }, Err(err) => { panic!("Enable to create symlink: {}", err); }, } } } } fn main() { let opts = CommandOptions::parse_args_default_or_exit(); let config_filename = opts.config.unwrap_or("config.toml".to_owned()); let mut config = Config::new(&config_filename); if let Some(opts_outdir) = opts.outdir { config.out_dir = opts_outdir.to_owned(); } match opts.command { Some(Command::New(_)) => { kubernetes_certs::create_directory_struct(&config, &config.out_dir).unwrap(); let ca = match create_ca(&config) { Ok(ca) => ca, Err(err) => { panic!("Error when creating certificate authority: {}", err); }, }; for instance in config.worker.iter() { let mut cert_filename = match instance.filename { Some(ref filename) => filename.to_owned(), None => instance.hostname.clone(), }; let ca_symlink = format!("{}/{}/ca.crt", &config.out_dir, &cert_filename); symlink("../CA/root/certs/ca.crt", &ca_symlink).unwrap(); gen_cert(&ca, &config, &CertType::Kubelet(&instance)).unwrap(); gen_cert(&ca, &config, &CertType::KubeletServer(&instance)).unwrap(); } for instance in config.etcd_server.iter() { let mut cert_filename = match instance.filename { Some(ref filename) => filename.to_owned(), None => instance.hostname.clone(), }; let ca_symlink = format!("{}/{}/etcd-ca.crt", &config.out_dir, &cert_filename); symlink("../CA/etcd/certs/ca.crt", &ca_symlink).unwrap(); gen_cert(&ca, &config, &CertType::EtcdServer(&instance)).unwrap(); } if let Some(ref users) = config.user { for user in users { println!("Creating cert for kubernetes user: {}", &user.username); gen_cert(&ca, &config, &CertType::User(&user)).unwrap(); } } if let Some(ref users) = config.etcd_users { for user in users { println!("Creating cert for etcd user: {}", &user); 
gen_cert(&ca, &config, &CertType::EtcdUser(&user)).unwrap(); } } kubernetes_certs::kube_certs(&ca, &config, &config.out_dir); }, Some(Command::InitCa(_)) => { match create_ca(&config) { Ok(ca) => ca, Err(err) => { panic!("Error when creating certificate authority: {}", err); }, }; }, Some(Command::GenCert(options)) => { let ca = CA::read_from_fs(&config.out_dir); match options.kind.as_ref() { "admin" => { gen_cert(&ca, &config, &CertType::Admin).unwrap(); () }, "apiserver" => { gen_cert(&ca, &config, &CertType::ApiServer).unwrap(); () }, "apiserver-client" => { gen_cert(&ca, &config, &CertType::ApiServerClient).unwrap(); () }, "apiserver-etcd-client" => { gen_cert(&ca, &config, &CertType::ApiServerEtcdClient).unwrap(); () }, "controller-manager" => { gen_cert(&ca, &config, &CertType::ControllerManager).unwrap(); () }, "scheduler" => { gen_cert(&ca, &config, &CertType::Scheduler).unwrap(); () }, "front-proxy-client" => { gen_cert(&ca, &config, &CertType::FrontProxy).unwrap(); () }, "proxy" => { gen_cert(&ca, &config, &CertType::Proxy).unwrap(); () }, kind if kind.starts_with("kubelet:") => { let hostname = kind.clone().split_at(8); println!("Gen cert for {} node!", hostname.1); let mut instances: HashMap<&str, &Instance> = HashMap::new(); for instance in config.worker.iter() { instances.insert(&instance.hostname, &instance); } let instance = match instances.get::<str>(&hostname.1) { Some(instance) => instance, None => { eprintln!("No such kubelet hostname found in config file: {}", &hostname.1); exit(1); }, }; let mut cert_filename = match instance.filename { Some(ref filename) => filename.to_owned(), None => instance.hostname.clone(), }; let node_path = format!("{}/{}", &config.out_dir, &cert_filename); fs::create_dir_all(&node_path).unwrap(); gen_cert(&ca, &config, &CertType::Kubelet(&instance)).unwrap(); gen_cert(&ca, &config, &CertType::KubeletServer(&instance)).unwrap(); () }, kind if kind.starts_with("etcd:") => { let hostname = kind.clone().split_at(5); let 
mut instances: HashMap<&str, &Instance> = HashMap::new(); for instance in config.etcd_server.iter() { instances.insert(&instance.hostname, &instance); } let instance = match instances.get::<str>(&hostname.1) { Some(instance) => instance, None => { eprintln!("No such etcd server hostname found in config file: \"{}\"", &hostname.1); exit(1); }, }; println!("Gen cert for \"{}\" etcd node!", hostname.1); gen_cert(&ca, &config, &CertType::EtcdServer(&instance)).unwrap(); () }, kind if kind.starts_with("etcd-user:") => { let username = kind.clone().split_at(10); println!("Gen cert for \"{}\" etcd user!", username.1); gen_cert(&ca, &config, &CertType::EtcdUser(&username.1)).unwrap(); () }, _ => println!("No such certificate kind!"), } }, Some(Command::User(options)) => { print!("Create user cert with name: {}", options.user); let ca = CA::read_from_fs(&config.out_dir); match options.group { Some(ref group) => println!(" and group: {}", group), None => print!("\n"), } let user = User { username: options.user, group: options.group, }; gen_cert(&ca, &config, &CertType::User(&user)).unwrap(); }, None => (), } }
use crate::crypto::vss::Vss;
use crate::net::SignerID;
use crate::signer_node::BidirectionalSharedSecretMap;
use crate::signer_node::SharedSecret;
use crate::signer_node::SharedSecretMap;
use curv::cryptographic_primitives::secret_sharing::feldman_vss::{
    ShamirSecretSharing, VerifiableSS,
};
use tapyrus::{PrivateKey, PublicKey};

pub mod aggregate;
pub mod compute_sig;
pub mod create_block_vss;
pub mod create_key;
pub mod create_node_vss;
pub mod sign;
pub mod traits;

/// Return the 1-based index of `private_key`'s corresponding public key
/// within `public_keys`.
///
/// # Panics
///
/// Panics if the derived public key is not present in `public_keys`.
//
// Takes `&[PublicKey]` instead of `&Vec<PublicKey>` (clippy `ptr_arg`);
// existing `&Vec` call sites keep working through deref coercion.
pub fn index_of(private_key: &PrivateKey, public_keys: &[PublicKey]) -> usize {
    let secp = tapyrus::secp256k1::Secp256k1::new();
    let public_key = PublicKey::from_private_key(&secp, private_key);
    let pos = public_keys
        .iter()
        .position(|pk| pk == &public_key)
        .expect("private_key or public_keys is invalid.");
    // Signer indices are 1-based in the sharing scheme.
    pos + 1
}

/// Build a `SharedSecretMap` keyed by each VSS sender's public key,
/// carrying the positive commitments/secret of that sender's VSS.
pub fn vss_to_shared_secret_map(
    node_vss_vec: &[Vss],
    params: &ShamirSecretSharing,
) -> SharedSecretMap {
    let mut shared_secrets = SharedSecretMap::new();
    for node_vss in node_vss_vec {
        shared_secrets.insert(
            SignerID {
                pubkey: node_vss.sender_public_key,
            },
            SharedSecret {
                vss: VerifiableSS {
                    parameters: params.clone(),
                    commitments: node_vss
                        .positive_commitments
                        .iter()
                        .map(|c| c.to_point())
                        .collect(),
                },
                secret_share: node_vss.positive_secret,
            },
        );
    }
    shared_secrets
}

/// Build a `BidirectionalSharedSecretMap` keyed by each VSS sender's public
/// key; the value pairs the positive-side and negative-side shared secrets
/// of that sender's VSS.
pub fn vss_to_bidirectional_shared_secret_map(
    block_vss_vec: &[Vss],
    params: &ShamirSecretSharing,
) -> BidirectionalSharedSecretMap {
    let mut shared_block_secrets = BidirectionalSharedSecretMap::new();
    for vss in block_vss_vec.iter() {
        shared_block_secrets.insert(
            SignerID {
                pubkey: vss.sender_public_key,
            },
            (
                SharedSecret {
                    secret_share: vss.positive_secret,
                    vss: VerifiableSS {
                        parameters: params.clone(),
                        commitments: vss
                            .positive_commitments
                            .iter()
                            .map(|c| c.to_point())
                            .collect(),
                    },
                },
                SharedSecret {
                    secret_share: vss.negative_secret,
                    vss: VerifiableSS {
                        parameters: params.clone(),
                        commitments: vss
                            .negative_commitments
                            .iter()
                            .map(|c| c.to_point())
                            .collect(),
                    },
                },
            ),
        );
    }
    shared_block_secrets
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::str::FromStr;
    use tapyrus::{PrivateKey, PublicKey};

    #[test]
    fn test_index_of() {
        // WIF private key whose public key is the second entry below.
        let wif = "KwUKaA3KgtRhCsioNWZQfC6Nd6vPNTXwgmqcStewZ3KdonmP3k43";
        let private_key = PrivateKey::from_wif(wif).unwrap();

        let hex_keys = [
            "023092e0fad6f42a2f456f5a891d8ea868393ca4308fef0f29388a1c5687d5860e",
            "03abdc3e2d64fb3e9ceeaf7d0f272c14c36793dfb862018c34a6ac5dfe0c02860e",
            "03d10d42715a8c7e6c93fac9336bcb5b286e827e766594e4d166b4894b805236a7",
        ];
        let public_keys: Vec<PublicKey> = hex_keys
            .iter()
            .map(|key| PublicKey::from_str(key).unwrap())
            .collect();

        // `index_of` is 1-based, so the second key yields 2.
        assert_eq!(index_of(&private_key, &public_keys), 2);
    }
}
use crate::error::{Error, ErrorType};
use crate::json::{general_tokens::*, ParseTokens, StackTokens, JSON};
use crate::scanner::Scanner;

// Given the json_document structure where the iterator index is in the
// start of a Unicode escape sequence values, i.e. X in \uXXXX,
// get the decimal representation of the escape sequence.
//
// On failure, reports E104 (unexpected end of input) or E117 (non-hex
// digit inside the escape). The returned u32 may be half of a surrogate
// pair; the caller decides whether a second \uXXXX must follow.
fn parse_escape_sequence(
  index_start: usize,
  scanner: &mut Scanner,
) -> Result<u32, Error> {
  // The decimal representation of the Unicode escape sequence.
  let mut uffff: u32 = 0;

  // Unicode escape sequence is defined
  // as \uxxxx, where x is ASCII Hex value.
  for _ in 0..4 {
    let next = scanner.next();
    if next.is_none() {
      return Err(Error::new(
        ErrorType::E104,
        index_start,
        scanner.current().index + 1,
      ));
    }

    if !scanner.current().character.is_ascii_hexdigit() {
      // Invalid character found in Unicode escape sequence.
      return Err(Error::new(
        ErrorType::E117,
        index_start,
        scanner.current().index + 1,
      ));
    }

    // Accumulate one hex nibble; `unwrap` is safe because the
    // is_ascii_hexdigit check above guarantees to_digit(16) succeeds.
    let hex = scanner.current().character.to_digit(16).unwrap();
    uffff = uffff * 16 + hex
  }

  // Return the decimal value of the Unicode escape sequence.
  Ok(uffff)
}

// Consume a JSON string starting at the current scanner position (the
// opening quotation mark) and return (start index, decoded value).
// All failures push an Error onto json_document.errors and return Err(()).
fn validate(
  json_document: &mut JSON,
  scanner: &mut Scanner,
) -> Result<(usize, String), ()> {
  // Save the position of the first character.
  // This will help us set a range that will highlight the whole incorrect value
  // in case of an error.
  //
  // Example:
  //
  // ```rust
  // // Invalid `"unterminated` root value in JSON document.
  // let text: &str = "\"unterminated";
  // let errors = jsonprima::validate(&text);
  // println!("{:#?}", errors); // => [("E104", 0, 13)]
  // ```
  let index_start = scanner.current().index;

  // The parsed string value which will be returned by this function.
  let mut string_value: Vec<char> = Vec::new();

  // Parse all characters as string values until quotation mark.
  loop {
    scanner.next().ok_or_else(|| {
      // No more characters to parse.
      let err = Error::new(ErrorType::E104, index_start, scanner.current().index + 1);
      json_document.errors.push(err);
    })?;

    // End of string reached. We have successfully parse the JSON string.
    if scanner.current().character == '"' {
      json_document.last_parsed_token = Some(ParseTokens::String);
      return Ok((index_start, string_value.into_iter().collect::<String>()));
    }

    // In case the character is not closing quotation mark and exists,
    // validate it based on the rules of RFC 8259.
    match scanner.current().character {
      '\\' => {
        // Start of escape character.
        // Read the next character to find out more.
        scanner.next().ok_or_else(|| {
          // No more characters to parse.
          let err = Error::new(ErrorType::E104, index_start, scanner.current().index + 1);
          json_document.errors.push(err);
        })?;

        match scanner.current().character {
          // Valid escape character sequence.
          '/' => {
            string_value.push('/');
            continue;
          }

          '\\' => {
            string_value.push('\\');
            continue;
          }

          '"' => {
            string_value.push('"');
            continue;
          }

          // \b — backspace.
          'b' => {
            string_value.push('\x08');
            continue;
          }

          // \f — form feed.
          'f' => {
            string_value.push('\x0C');
            continue;
          }

          'n' => {
            string_value.push('\n');
            continue;
          }

          'r' => {
            string_value.push('\r');
            continue;
          }

          't' => {
            string_value.push('\t');
            continue;
          }

          // Start of Unicode escape sequence.
          'u' => {
            // Unicode escape sequences can form a surrogate pair.
            // Parse the first escape sequence
            // and if is invalid we assume that is part of
            // a surrogate pair and we parse the next one.
            // If the second escape sequence forms an invalid
            // surrogate pair then we return with an error.
            // If the first escape sequence is valid, then we
            // do not have to parse the second, as it is not a
            // surrogate pair.
            let high_surrogate: u32 = parse_escape_sequence(index_start, scanner)
              .or_else(|err| {
                json_document.errors.push(err);
                Err(())
              })?;

            // Check parsed Unicode value.
            match std::char::from_u32(high_surrogate) {
              // We successfully parsed the Unicode
              // escaped sequence, no surrogate pair.
              Some(val) => {
                string_value.push(val);
                continue;
              }

              None => {
                // We couldn't parse the Unicode escape
                // sequence. This most likely means that
                // is part of a surrogate pair.

                // Start parsing the next Unicode escape sequence,
                // as low surrogate pair. It must literally be "\u".
                scanner.next().ok_or_else(|| {
                  // Invalid Unicode escape sequence in second surrogate pair.
                  let err = Error::new(ErrorType::E119, index_start, scanner.current().index + 1);
                  json_document.errors.push(err);
                })?;

                if scanner.current().character != '\\' {
                  let err = Error::new(ErrorType::E119, index_start, scanner.current().index + 1);
                  json_document.errors.push(err);

                  return Err(());
                }

                scanner.next().ok_or_else(|| {
                  // Invalid Unicode escape sequence in second surrogate pair.
                  let err = Error::new(ErrorType::E119, index_start, scanner.current().index + 1);
                  json_document.errors.push(err);
                })?;

                if scanner.current().character != 'u' {
                  let err = Error::new(ErrorType::E119, index_start, scanner.current().index + 1);
                  json_document.errors.push(err);

                  return Err(());
                }

                let low_surrogate: u32 = parse_escape_sequence(index_start, scanner)
                  .or_else(|err| {
                    json_document.errors.push(err);
                    Err(())
                  })?;

                // Borrowed from https://stackoverflow.com/a/23920015
                // Equivalent to 0x10000 + ((high - 0xD800) << 10)
                // + (low - 0xDC00): 0x35f_dc00 = (0xD800 << 10) + 0xDC00
                // - 0x10000.
                let unicode_value = (high_surrogate << 10) + low_surrogate - 0x35f_dc00;

                match std::char::from_u32(unicode_value) {
                  Some(val) => {
                    string_value.push(val);
                    continue;
                  }

                  None => {
                    // Invalid Unicode character in JSON string.
                    let err = Error::new(
                      ErrorType::E118,
                      index_start,
                      scanner.current().index + 1,
                    );
                    json_document.errors.push(err);

                    return Err(());
                  }
                }
              }
            }
          }

          HORIZONTAL_TAB | NEW_LINE | CARRIAGE_RETURN => {
            // Raw use of control characters in JSON string.
            let err = Error::new(ErrorType::E101, index_start, scanner.current().index + 1);
            json_document.errors.push(err);

            return Err(());
          }

          _ => {
            // Invalid escape character in JSON string.
            let err = Error::new(ErrorType::E116, index_start, scanner.current().index + 1);
            json_document.errors.push(err);

            return Err(());
          }
        }
      }

      HORIZONTAL_TAB | NEW_LINE | CARRIAGE_RETURN => {
        // Raw use of control characters in JSON string.
        let err = Error::new(ErrorType::E101, index_start, scanner.current().index + 1);
        json_document.errors.push(err);

        return Err(());
      }

      // Valid non escape or control character.
      val => {
        string_value.push(val);
        continue;
      }
    }
  }
}

// Validate a string token in its grammatical context: decide, from the
// last parsed token and the document stack, whether a string is legal here
// and whether it is an object member name (which must be unique, E144) or
// a plain value. Delegates the character-level work to `validate`.
pub fn validate_string(
  json_document: &mut JSON,
  scanner: &mut Scanner,
) -> Result<(), ()> {
  match &json_document.last_parsed_token {
    Some(last_parsed_token) => match last_parsed_token {
      ParseTokens::BeginObject
      | ParseTokens::ValueSeparator
      | ParseTokens::NameSeparator
      | ParseTokens::BeginArray => match json_document.stack.last() {
        Some(token) => match token {
          // After a colon: this string is a member value.
          StackTokens::NameSeparator => {
            json_document.stack.pop();
            json_document.object_has_valid_member = true;

            match validate(json_document, scanner) {
              Ok(_) => Ok(()),
              Err(_) => Err(()),
            }
          }

          // Directly inside an object: this string is a member name and
          // must not repeat an earlier name of the same object (E144).
          StackTokens::BeginObject => {
            json_document.object_has_valid_member = false;

            match validate(json_document, scanner) {
              Ok((index_start, val)) => {
                if json_document
                  .object_member_names
                  .last_mut()
                  .unwrap()
                  .contains(&val)
                {
                  let last_parsed_index = scanner.current().index;
                  let err = Error::new(ErrorType::E144, index_start, last_parsed_index + 1);
                  json_document.errors.push(err);

                  Err(())
                } else {
                  json_document
                    .object_member_names
                    .last_mut()
                    .unwrap()
                    .push(val);

                  Ok(())
                }
              }

              Err(_) => Err(()),
            }
          }

          // Inside an array: plain string element.
          StackTokens::BeginArray => match validate(json_document, scanner) {
            Ok(_) => Ok(()),
            Err(_) => Err(()),
          },
        },

        // Empty stack: top-level string after a structural token.
        None => match validate(json_document, scanner) {
          Ok(_) => Ok(()),
          Err(_) => Err(()),
        },
      },

      // Illegal string after structural token. Expected comma or colon.
      _ => {
        let last_parsed_index = scanner.current().index;
        let err = Error::new(ErrorType::E114, last_parsed_index, last_parsed_index + 1);
        json_document.errors.push(err);

        Err(())
      }
    },

    // Nothing parsed yet: the string is the document's root value.
    None => {
      json_document.root_value_parsed = true;

      match validate(json_document, scanner) {
        Ok(_) => Ok(()),
        Err(_) => Err(()),
      }
    }
  }
}
use std::error; use std::fmt; use std::io; fn read_and_validate(b: &mut io::BufRead) -> Result<PositiveNonzeroInterger, ???> { let mut line = String::new(); b.read_line(&mut line); let num: i64 = line.trim().parse(); let answer = PositiveNonzeroInterger::new(num); answer } fn test_with_str(s: &str) -> Result<PositiveNonzeroInterger, Box<error: Error>> { let mut b = io::BufReader::new(s.as_bytes()); read_and_validate(&mut b) } #[test] fn test_success() { let x = test_with_str("42/n"); assert_eq!(PositiveNonzeroInterger(42),x.unwrap()); } #[test] fn test_not_num() { let x = test_with_str("eleven billion/n"); assert_eq(x.is_err()); }
use crate::prelude::*; use crate::util; use std::cell::RefCell; use std::rc::Rc; use std::sync::{Arc, Mutex}; #[derive(Default, Clone)] pub struct Subject<O, S> { pub(crate) observers: O, pub(crate) subscription: S, } pub type LocalObserver<P> = Rc<RefCell<Vec<P>>>; pub type LocalSubject<'a, Item, Err> = Subject<LocalObserver<Box<dyn Publisher<Item, Err> + 'a>>, LocalSubscription>; type SharedPublishers<Item, Err> = Arc<Mutex<Vec<Box<dyn Publisher<Item, Err> + Send + Sync>>>>; pub type SharedSubject<Item, Err> = Subject<SharedPublishers<Item, Err>, SharedSubscription>; impl<'a, Item, Err> LocalSubject<'a, Item, Err> { pub fn local() -> Self { LocalSubject::local_new() } } impl<P> Subject<LocalObserver<P>, LocalSubscription> { pub fn local_new() -> Self { Subject { observers: Rc::new(RefCell::new(vec![])), subscription: LocalSubscription::default(), } } } impl<Item, Err> SharedSubject<Item, Err> { pub fn shared() -> Self { Subject { observers: Arc::new(Mutex::new(vec![])), subscription: SharedSubscription::default(), } } } impl<Item, Err> IntoShared for SharedSubject<Item, Err> where Item: 'static, Err: 'static, { type Shared = Self; #[inline(always)] fn to_shared(self) -> Self::Shared { self } } impl<'a, Item, Err> IntoShared for LocalSubject<'a, Item, Err> where Item: 'static, Err: 'static, { type Shared = Subject<SharedPublishers<Item, Err>, SharedSubscription>; fn to_shared(self) -> Self::Shared { let Self { observers, subscription, } = self; let observers = util::unwrap_rc_ref_cell( observers, "Cannot convert a `LocalSubscription` to `SharedSubscription` \ when it referenced by other.", ); let observers = if observers.is_empty() { Arc::new(Mutex::new(vec![])) } else { panic!( "Cannot convert a `LocalSubscription` to `SharedSubscription` \ when it subscribed." 
) }; let subscription = subscription.to_shared(); Subject { observers, subscription, } } } macro local_subject_raw_subscribe_impl($o: ident,$u: ident) { type Unsub = $u; fn raw_subscribe(mut self, subscriber: Subscriber<$o, $u>) -> Self::Unsub { let subscription = subscriber.subscription.clone(); self.subscription.add(subscription.clone()); self.observers.borrow_mut().push(Box::new(subscriber)); subscription } } impl<'a, Item, Err, O, U> RawSubscribable<Subscriber<O, U>> for LocalSubject<'a, Item, Err> where O: Observer<Item, Err> + 'a, U: SubscriptionLike + Clone + 'static, { local_subject_raw_subscribe_impl!(O, U); } impl<'a, Item, Err, O, U> RawSubscribable<Subscriber<O, U>> for Subject< LocalObserver<Box<dyn for<'r> Publisher<&'r mut Item, Err> + 'a>>, LocalSubscription, > where O: for<'r> Observer<&'r mut Item, Err> + 'a, U: SubscriptionLike + Clone + 'static, { local_subject_raw_subscribe_impl!(O, U); } impl<'a, Item, Err, O, U> RawSubscribable<Subscriber<O, U>> for Subject< LocalObserver<Box<dyn for<'r> Publisher<Item, &'r mut Err> + 'a>>, LocalSubscription, > where O: for<'r> Observer<Item, &'r mut Err> + 'a, U: SubscriptionLike + Clone + 'static, { local_subject_raw_subscribe_impl!(O, U); } impl<'a, Item, Err, O, U> RawSubscribable<Subscriber<O, U>> for Subject< LocalObserver<Box<dyn for<'r> Publisher<&'r mut Item, &'r mut Err> + 'a>>, LocalSubscription, > where O: for<'r> Observer<&'r mut Item, &'r mut Err> + 'a, U: SubscriptionLike + Clone + 'static, { local_subject_raw_subscribe_impl!(O, U); } impl<Item, Err, O, S> RawSubscribable<Subscriber<O, S>> for SharedSubject<Item, Err> where S: IntoShared, O: IntoShared, O::Shared: Observer<Item, Err>, S::Shared: SubscriptionLike + Clone + 'static, { type Unsub = S::Shared; fn raw_subscribe(mut self, subscriber: Subscriber<O, S>) -> Self::Unsub { let subscriber = subscriber.to_shared(); let subscription = subscriber.subscription.clone(); self.subscription.add(subscription.clone()); 
self.observers.lock().unwrap().push(Box::new(subscriber)); subscription } } impl<O, S> SubscriptionLike for Subject<O, S> where S: SubscriptionLike, { #[inline] fn unsubscribe(&mut self) { self.subscription.unsubscribe() } #[inline] fn is_closed(&self) -> bool { self.subscription.is_closed() } #[inline] fn inner_addr(&self) -> *const () { self.subscription.inner_addr() } } impl<O, S> Fork for Subject<O, S> where Self: Clone, { type Output = Self; fn fork(&self) -> Self::Output { self.clone() } } /// By default, `Subject` can only emit item which implemented `Copy`, /// that because subject is a multicast stream, need pass item to multi /// subscriber. /// But `Copy` bound is not perfect to subject. Like `&mut T` should be emitted /// by `Subject`, because `Subject` just immediately pass it to subscribers and /// never alias it, but `&mut T` is not `Copy`. /// So we introduced `SubjectCopy`, it's only use to support `Item` be passed /// to many subscriber in `Subject`. The drawback is `SubjectCopy` is not auto /// implement for a type when the type is implemented `Copy`. Just use macro /// `impl_subject_copy_for_copy!` to impl `SubjectCopy` for your type after /// implemented `Copy`. /// /// # Example /// ``` /// # use rxrust::prelude::*; /// # use rxrust::subject::impl_subject_copy_for_copy; /// /// #[derive(Clone)] /// struct CustomType; /// /// impl Copy for CustomType {} /// impl_subject_copy_for_copy!(CustomType); /// /// // can pass `&mut CustomType` now. /// let mut ct = CustomType; /// /// let mut subject = Subject::local(); /// subject.next(&mut ct); /// subject.error(()); /// ``` pub trait SubjectCopy { fn copy(&self) -> Self; } /// todo: maybe we can support SubjectCopy like below. 
/// /// impl<T: Copy> SubjectCopy for T { /// #[inline] /// fn copy(&self) -> Self { self.clone() } /// } /// /// impl<T> SubjectCopy for &mut T { /// #[inline] /// fn copy(&self) -> Self { unsafe { std::mem::transmute_copy(self) } } /// } /// /// but code like these can't compile, so we'll implement `SubjectCopy` for /// primitive types. impl<T> SubjectCopy for &mut T { #[inline] fn copy(&self) -> Self { unsafe { std::mem::transmute_copy(self) } } } pub macro impl_subject_copy_for_copy($t: ty) { impl SubjectCopy for $t { #[inline] fn copy(&self) -> Self { *self } } } mod subject_copy_impls { use super::{impl_subject_copy_for_copy, SubjectCopy}; macro impl_subject_copys($($t:ty )*) { $(impl_subject_copy_for_copy!($t);)* } impl_subject_copys! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 bool char } impl_subject_copy_for_copy!(!); impl_subject_copy_for_copy!(()); impl<T: ?Sized> SubjectCopy for *const T { #[inline] fn copy(&self) -> Self { *self } } impl<T: ?Sized> SubjectCopy for *mut T { #[inline] fn copy(&self) -> Self { *self } } impl<T: ?Sized> SubjectCopy for &T { #[inline] fn copy(&self) -> Self { *self } } } impl<Item, Err, T> Observer<Item, Err> for Vec<T> where Item: SubjectCopy, Err: SubjectCopy, T: Publisher<Item, Err>, { fn next(&mut self, value: Item) { self.drain_filter(|subscriber| { subscriber.next(value.copy()); subscriber.is_closed() }); } fn error(&mut self, err: Err) { self.iter_mut().for_each(|subscriber| { subscriber.error(err.copy()); }); self.clear(); } fn complete(&mut self) { self.iter_mut().for_each(|subscriber| { subscriber.complete(); }); self.clear(); } } impl<Item, Err, S, O> Observer<Item, Err> for Subject<O, S> where O: Observer<Item, Err>, S: SubscriptionLike, { fn next(&mut self, value: Item) { if !self.subscription.is_closed() { self.observers.next(value) } } fn error(&mut self, err: Err) { if !self.subscription.is_closed() { self.observers.error(err); self.subscription.unsubscribe(); }; } fn complete(&mut self) { 
if !self.subscription.is_closed() { self.observers.complete(); self.subscription.unsubscribe(); } } } #[cfg(test)] mod test { use crate::prelude::*; use crate::subject::LocalObserver; #[test] fn emit_ref() { // emit ref let mut subject: LocalSubject<'_, _, ()> = Subject::local(); subject.next(&1); // emit mut ref type MutRefObserver<Item> = Box<dyn for<'r> Publisher<&'r mut Item, ()>>; let mut subject: Subject<LocalObserver<MutRefObserver<i32>>, _> = Subject::local_new(); subject .clone() .subscribe((|_: &mut _| {}) as for<'r> fn(&'r mut i32)); subject.next(&mut 1); } #[test] fn base_data_flow() { let mut i = 0; { let mut broadcast = Subject::local(); broadcast.fork().subscribe(|v| i = v * 2); broadcast.next(1); } assert_eq!(i, 2); } #[test] #[should_panic] fn error() { let mut broadcast = Subject::local(); broadcast .fork() .subscribe_err(|_: i32| {}, |e: _| panic!(e)); broadcast.next(1); broadcast.error(&"should panic!"); } #[test] fn unsubscribe() { let mut i = 0; { let mut subject = Subject::local(); subject.fork().subscribe(|v| i = v).unsubscribe(); subject.next(100); } assert_eq!(i, 0); } #[test] fn fork_and_shared() { let subject = Subject::shared(); subject .fork() .to_shared() .fork() .to_shared() .subscribe(|_: &()| {}); } #[test] fn empty_local_subject_can_convert_to_shared() { use crate::{ops::ObserveOn, scheduler::Schedulers}; use std::sync::{Arc, Mutex}; let value = Arc::new(Mutex::new(0)); let c_v = value.clone(); let mut subject = Subject::local().to_shared(); subject.fork().observe_on(Schedulers::NewThread).subscribe( move |v: i32| { *value.lock().unwrap() = v; }, ); subject.next(100); std::thread::sleep(std::time::Duration::from_millis(1)); assert_eq!(*c_v.lock().unwrap(), 100); } }
use std::cell::{self, RefCell};
use std::collections::HashMap;
use std::fmt::{self, Debug};
use std::io::{self, ErrorKind};
use std::net::SocketAddr;
use std::rc::Rc;
use std::time::{Duration, Instant};

use futures::{Async, Future, Poll, Stream};
use mio::{Ready, SetReadiness};
use tokio_core::reactor::Timeout;

use skcp::SharedKcp;

/// Single-threaded registry mapping peer addresses to their KCP sessions.
/// Cloning is cheap: clones share the same `Rc<RefCell<HashMap>>`.
#[derive(Clone)]
pub struct KcpSessionUpdater {
    sessions: Rc<RefCell<HashMap<SocketAddr, SharedKcpSession>>>,
}

impl Debug for KcpSessionUpdater {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Borrow the map only for the duration of the write.
        let hmap = self.sessions.borrow();
        write!(f, "KcpSessionUpdater {{ sessions: {:?} }}", &*hmap)
    }
}

impl KcpSessionUpdater {
    /// Creates an empty registry.
    pub fn new() -> KcpSessionUpdater {
        KcpSessionUpdater { sessions: Rc::new(RefCell::new(HashMap::new())) }
    }
}

impl KcpSessionUpdater {
    /// Mutable borrow of the underlying session map.
    /// Panics (RefCell) if the map is already borrowed — callers must not
    /// hold a previous borrow across these calls.
    fn sessions_mut(&mut self) -> cell::RefMut<HashMap<SocketAddr, SharedKcpSession>> {
        self.sessions.borrow_mut()
    }

    /// Feeds an inbound packet into the session registered for `addr`.
    /// Returns `Ok(false)` when no session exists for that address,
    /// `Ok(true)` after the packet was accepted by the session.
    pub fn input_by_addr(&mut self, addr: &SocketAddr, buf: &mut [u8]) -> io::Result<bool> {
        match self.sessions_mut().get_mut(addr) {
            None => Ok(false),
            Some(session) => {
                session.input(buf)?;
                Ok(true)
            }
        }
    }

    /// Unregisters the session for `addr`, if any.
    pub fn remove_by_addr(&mut self, addr: &SocketAddr) {
        let mut ses = self.sessions_mut();
        ses.remove(addr);
    }

    /// Registers (or replaces) the session for `addr`.
    pub fn insert_by_addr(&mut self, addr: SocketAddr, s: SharedKcpSession) {
        let mut ses = self.sessions_mut();
        ses.insert(addr, s);
    }
}

/// Per-peer KCP session state.
pub struct KcpSession {
    kcp: SharedKcp,                      // shared KCP control block
    timer: Timeout,                      // reactor timer driving periodic updates
    last_update: Rc<RefCell<Instant>>,   // last time input was seen; shared with readers
    readiness: SetReadiness,             // mio handle used to signal "readable"
    owner: Option<KcpSessionUpdater>,    // registry to deregister from on expiry
    addr: SocketAddr,                    // peer address (registry key)
    close_flag: Rc<RefCell<bool>>,       // externally-set close request
    expire_dur: Duration,                // idle duration after which the session expires
}

impl KcpSession {
    /// Records `t` as the most recent activity timestamp.
    fn set_last_update(&mut self, t: Instant) {
        let mut u = self.last_update.borrow_mut();
        *u = t;
    }

    /// Signals readiness so the paired stream wakes up as readable.
    fn set_readable(&mut self) -> io::Result<()> {
        self.readiness.set_readiness(Ready::readable())
    }
}

impl KcpSession {
    /// Removes this session from its owning registry, if it has one.
    fn try_remove_self(&mut self) {
        if let Some(ref mut u) = self.owner {
            u.remove_by_addr(&self.addr);
        }
    }
}

/// Cheaply-cloneable handle to a `KcpSession` (`Rc<RefCell<..>>`).
#[derive(Clone)]
pub struct SharedKcpSession {
    inner: Rc<RefCell<KcpSession>>,
}
impl Debug for SharedKcpSession {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Deliberately opaque: avoids borrowing the inner RefCell during Debug.
        write!(f, "KcpSession {{ .. }}")
    }
}

impl SharedKcpSession {
    /// Builds a session, registers it with `owner` (when present), and runs an
    /// initial `update_kcp` so the timer is armed before first poll.
    pub fn new(kcp: SharedKcp,
               timer: Timeout,
               last_update: Rc<RefCell<Instant>>,
               readiness: SetReadiness,
               addr: SocketAddr,
               mut owner: Option<KcpSessionUpdater>,
               close_flag: Rc<RefCell<bool>>,
               expire_dur: Duration)
               -> io::Result<SharedKcpSession> {
        let inner = KcpSession {
            kcp: kcp,
            timer: timer,
            last_update: last_update,
            readiness: readiness,
            owner: owner.clone(),
            addr: addr,
            close_flag: close_flag,
            expire_dur: expire_dur,
        };

        let mut session = SharedKcpSession { inner: Rc::new(RefCell::new(inner)) };

        // Register the shared handle so inbound packets can be routed to it.
        if let Some(ref mut sm) = owner {
            sm.insert_by_addr(addr, session.clone());
        }

        // Prime the KCP state machine and arm the update timer.
        session.update_kcp()?;
        Ok(session)
    }

    /// Immutable borrow of the inner session (RefCell-checked).
    fn borrow<'a>(&'a self) -> cell::Ref<'a, KcpSession> {
        self.inner.borrow()
    }

    /// Mutable borrow of the inner session (RefCell-checked).
    fn borrow_mut<'a>(&'a mut self) -> cell::RefMut<'a, KcpSession> {
        self.inner.borrow_mut()
    }

    /// Polls the reactor timer that paces KCP updates.
    fn poll_timer(&mut self) -> Poll<(), io::Error> {
        let mut inner = self.borrow_mut();
        inner.timer.poll()
    }

    /// Time elapsed since the last recorded activity.
    fn elapsed(&self) -> Duration {
        let inner = self.borrow();
        let last_update = inner.last_update.borrow();
        last_update.elapsed()
    }

    /// Feeds one inbound packet into KCP, refreshes the activity timestamp,
    /// and marks the session readable.
    pub fn input(&mut self, buf: &mut [u8]) -> io::Result<()> {
        let mut inner = self.borrow_mut();
        inner.set_last_update(Instant::now());
        {
            // Scope the kcp borrow so `set_readable` below can re-borrow inner.
            let mut kcp = inner.kcp.borrow_mut();
            kcp.input(buf)?;
        }
        inner.set_readable()
    }

    /// True once the session has been idle for at least `expire_dur`.
    #[inline]
    fn is_expired(&self) -> bool {
        let dur = {
            // Copy the duration out before calling `elapsed`, which borrows again.
            let inner = self.borrow();
            inner.expire_dur
        };
        self.elapsed() >= dur
    }

    /// Drives the KCP state machine: runs `update`, re-arms the timer for the
    /// next `check` deadline, and signals readable if data can be peeked.
    fn update_kcp(&mut self) -> io::Result<()> {
        let mut inner = self.borrow_mut();
        // NOTE(review): `::current()` appears to be a crate-root clock helper
        // supplying KCP's millisecond timestamp — confirm against the crate root.
        let curr = ::current();
        let now = Instant::now();
        let next_dur = {
            let mut kcp = inner.kcp.borrow_mut();
            kcp.update(curr)?;
            // `check` returns the milliseconds until the next required update.
            Duration::from_millis(kcp.check(curr) as u64)
        };
        let update = now + next_dur;
        inner.timer.reset(update);

        let readable_size = {
            let kcp = inner.kcp.borrow();
            match kcp.peeksize() {
                Ok(n) => n,
                // WouldBlock here just means "no complete message yet".
                Err(ref err) if err.kind() == ErrorKind::WouldBlock => 0,
                Err(err) => return Err(err),
            }
        };

        if readable_size > 0 {
            inner.set_readable()?;
        }

        Ok(())
    }

    /// Flushes remaining data, marks the KCP block expired, wakes any reader,
    /// and deregisters this session from its owner.
    fn expire_kcp(&mut self) -> io::Result<()> {
        let mut inner = self.borrow_mut();
        {
            let mut kcp = inner.kcp.borrow_mut();
            kcp.flush()?;
            kcp.expired();
        }
        inner.set_readable()?;
        inner.try_remove_self();
        Ok(())
    }

    /// True when an external party has requested this session to close.
    fn is_closed(&self) -> bool {
        let inner = self.borrow();
        let cf = inner.close_flag.borrow();
        *cf
    }
}

/// Timer-driven maintenance stream (futures 0.1): yields `Some(())` per tick
/// while the session is alive, and terminates with `None` once the session
/// has expired, or has been closed and fully flushed.
impl Stream for SharedKcpSession {
    type Item = ();
    type Error = io::Error;

    fn poll(&mut self) -> Poll<Option<()>, io::Error> {
        // Wait for the update timer; `try_ready!` returns NotReady early.
        let _ = try_ready!(self.poll_timer());

        if !self.is_expired() {
            self.update_kcp()?;
        } else {
            trace!("[SESS] KcpSession {} expired", self.borrow().addr);
            self.expire_kcp()?;
            // End of stream: the session is gone.
            return Ok(Async::Ready(None));
        }

        if self.is_closed() {
            let inner = self.borrow();
            let waitsnd = {
                let kcp = inner.kcp.borrow();
                kcp.waitsnd()
            };

            // Only finish once every pending segment has been sent.
            if waitsnd == 0 {
                trace!("[SESS] KcpSession {} closed", inner.addr);
                return Ok(Async::Ready(None));
            }
        }

        Ok(Async::Ready(Some(())))
    }
}
use math::*; use num::*; use std::ops::*; use std::cmp::*; use std::fmt::*; pub enum MatrixResult<T> { Done(Matrix<T>), Error, } /// Some common functions for result checking. impl<T> MatrixResult<T> { /// Unwraps the value inside of a MatrixResult. If no value exists, the function panics. pub fn unwrap(self) -> Matrix<T> { match self { MatrixResult::Done(mat) => mat, MatrixResult::Error => panic!() } } /// Checks if a value exists. pub fn is_some(&self) -> bool { if let &MatrixResult::Done(_) = self { true } else { false } } /// Checks if an error occured. pub fn is_none(&self) -> bool { !self.is_some() } } /// println! for MatrixResult impl<T> Display for MatrixResult<T> where T: Display { #[inline] fn fmt(&self, f: &mut Formatter) -> Result { match self { &MatrixResult::Done(ref m) => write!(f, "{}", m), _ => { write!(f, "Error"); Ok(()) }, } } } /// Partial equality for results of matrix operations. Returns true only if both matrices are error matrices or they are indeed equal. impl<T: Copy + PartialEq + Default + Sub + Signed + PartialOrd + Float> PartialEq for MatrixResult<T> { #[inline] fn eq(&self, rhs: &Self) -> bool { match (self, rhs) { (&MatrixResult::Done(ref m1), &MatrixResult::Done(ref m2)) => { m1 == m2 }, (&MatrixResult::Error, &MatrixResult::Error) => true, _ => false } } } /// MatrixResult * &Matrix = MatrixResult impl<'a, T: Copy + Default + AddAssign + Mul<Output=T>> Mul<&'a Matrix<T>> for MatrixResult<T>{ type Output = MatrixResult<T>; #[inline] fn mul(self, rhs: &'a Matrix<T>) -> Self::Output { match self { MatrixResult::Done(mat) => &mat * rhs, _ => MatrixResult::Error, } } } /// MatrixResult * alpha = MatrixResult impl<T: Copy + Default + Mul<Output=T>> Mul<T> for MatrixResult<T>{ type Output = MatrixResult<T>; #[inline] fn mul(self, rhs: T) -> Self::Output { match self { MatrixResult::Done(mat) => MatrixResult::Done(&mat * rhs), _ => MatrixResult::Error, } } } /// &MatrixResult * &MatrixResult = MatrixResult impl<'a, 'b, T: Copy + Default 
+ AddAssign + Mul<Output=T>> Mul<&'a MatrixResult<T>> for &'b MatrixResult<T>{ type Output = MatrixResult<T>; #[inline] fn mul(self, rhs: &'a MatrixResult<T>) -> Self::Output { match (self, rhs) { (&MatrixResult::Done(ref m1), &MatrixResult::Done(ref m2)) => m1 * m2, _ => MatrixResult::Error, } } } /// MatrixResult * &MatrixResult = MatrixResult impl<'a, T: Copy + Default + AddAssign + Mul<Output=T>> Mul<&'a MatrixResult<T>> for MatrixResult<T>{ type Output = MatrixResult<T>; #[inline] fn mul(self, rhs: &'a MatrixResult<T>) -> Self::Output { match (self, rhs) { (MatrixResult::Done(ref m1), &MatrixResult::Done(ref m2)) => m1 * m2, _ => MatrixResult::Error, } } }
{ "id": "4c78decb-7c29-4951-af2e-6a99ea5d7f79", "$type": "NestingPartResource", "Part": { "$type": "ResourcePart", "IntField": "1231", "StringField": "asdads" } }
use std::collections::HashSet; use std::result; use async_trait::async_trait; use regex::{Error, Regex}; use serde::{Deserialize, Serialize}; use serde_with::with_prefix; use tracing::instrument; use crate::config::DataServerConfig; use crate::storage::local::LocalStorage; #[cfg(feature = "s3-storage")] use crate::storage::s3::S3Storage; use crate::storage::{ResolvedId, Storage, TaggedStorageTypes}; use crate::types::Format::{Bam, Bcf, Cram, Vcf}; use crate::types::{Class, Fields, Format, Interval, Query, Response, Result, TaggedTypeAll, Tags}; /// A trait which matches the query id, replacing the match in the substitution text. pub trait IdResolver { /// Resolve the id, returning the substituted string if there is a match. fn resolve_id(&self, query: &Query) -> Option<ResolvedId>; } /// A trait for determining the response from `Storage`. #[async_trait] pub trait ResolveResponse { /// Convert from `LocalStorage`. async fn from_local(local_storage: &LocalStorage, query: &Query) -> Result<Response>; /// Convert from `S3Storage`. #[cfg(feature = "s3-storage")] async fn from_s3_storage(s3_storage: &S3Storage, query: &Query) -> Result<Response>; } /// A trait which uses storage to resolve requests into responses. #[async_trait] pub trait StorageResolver { /// Resolve a request into a response. async fn resolve_request<T: ResolveResponse>( &self, query: &mut Query, ) -> Option<Result<Response>>; } /// Determines whether the query matches for use with the storage. pub trait QueryAllowed { /// Does this query match. fn query_allowed(&self, query: &Query) -> bool; } /// A regex storage is a storage that matches ids using Regex. #[derive(Serialize, Debug, Clone, Deserialize)] #[serde(default)] pub struct Resolver { #[serde(with = "serde_regex")] regex: Regex, // Todo: should match guard be allowed as variables inside the substitution string? 
substitution_string: String, storage: Storage, allow_guard: AllowGuard, } /// A type which holds a resolved storage and an resolved id. #[derive(Debug)] pub struct ResolvedStorage<T> { resolved_storage: T, resolved_id: ResolvedId, } impl<T> ResolvedStorage<T> { /// Create a new resolved storage. pub fn new(resolved_storage: T, resolved_id: ResolvedId) -> Self { Self { resolved_storage, resolved_id, } } /// Get the resolved storage. pub fn resolved_storage(&self) -> &T { &self.resolved_storage } /// Get the resolved id. pub fn resolved_id(&self) -> &ResolvedId { &self.resolved_id } } impl ResolvedId {} with_prefix!(allow_interval_prefix "allow_interval_"); /// A query guard represents query parameters that can be allowed to storage for a given query. #[derive(Serialize, Clone, Debug, Deserialize, PartialEq, Eq)] #[serde(default)] pub struct AllowGuard { allow_reference_names: ReferenceNames, allow_fields: Fields, allow_tags: Tags, allow_formats: Vec<Format>, allow_classes: Vec<Class>, #[serde(flatten, with = "allow_interval_prefix")] allow_interval: Interval, } /// Reference names that can be matched. #[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] #[serde(untagged)] pub enum ReferenceNames { Tagged(TaggedTypeAll), List(HashSet<String>), } impl AllowGuard { /// Create a new allow guard. pub fn new( allow_reference_names: ReferenceNames, allow_fields: Fields, allow_tags: Tags, allow_formats: Vec<Format>, allow_classes: Vec<Class>, allow_interval: Interval, ) -> Self { Self { allow_reference_names, allow_fields, allow_tags, allow_formats, allow_classes, allow_interval, } } /// Get allow formats. pub fn allow_formats(&self) -> &[Format] { &self.allow_formats } /// Get allow classes. pub fn allow_classes(&self) -> &[Class] { &self.allow_classes } /// Get allow interval. pub fn allow_interval(&self) -> Interval { self.allow_interval } /// Get allow reference names. 
pub fn allow_reference_names(&self) -> &ReferenceNames { &self.allow_reference_names } /// Get allow fields. pub fn allow_fields(&self) -> &Fields { &self.allow_fields } /// Get allow tags. pub fn allow_tags(&self) -> &Tags { &self.allow_tags } } impl Default for AllowGuard { fn default() -> Self { Self { allow_formats: vec![Bam, Cram, Vcf, Bcf], allow_classes: vec![Class::Body, Class::Header], allow_interval: Default::default(), allow_reference_names: ReferenceNames::Tagged(TaggedTypeAll::All), allow_fields: Fields::Tagged(TaggedTypeAll::All), allow_tags: Tags::Tagged(TaggedTypeAll::All), } } } impl QueryAllowed for ReferenceNames { fn query_allowed(&self, query: &Query) -> bool { match (self, &query.reference_name()) { (ReferenceNames::Tagged(TaggedTypeAll::All), _) => true, (ReferenceNames::List(reference_names), Some(reference_name)) => { reference_names.contains(*reference_name) } (ReferenceNames::List(_), None) => false, } } } impl QueryAllowed for Fields { fn query_allowed(&self, query: &Query) -> bool { match (self, &query.fields()) { (Fields::Tagged(TaggedTypeAll::All), _) => true, (Fields::List(self_fields), Fields::List(query_fields)) => { self_fields.is_subset(query_fields) } (Fields::List(_), Fields::Tagged(TaggedTypeAll::All)) => false, } } } impl QueryAllowed for Tags { fn query_allowed(&self, query: &Query) -> bool { match (self, &query.tags()) { (Tags::Tagged(TaggedTypeAll::All), _) => true, (Tags::List(self_tags), Tags::List(query_tags)) => self_tags.is_subset(query_tags), (Tags::List(_), Tags::Tagged(TaggedTypeAll::All)) => false, } } } impl QueryAllowed for AllowGuard { fn query_allowed(&self, query: &Query) -> bool { self.allow_formats.contains(&query.format()) && self.allow_classes.contains(&query.class()) && self .allow_interval .contains(query.interval().start().unwrap_or(u32::MIN)) && self .allow_interval .contains(query.interval().end().unwrap_or(u32::MAX)) && self.allow_reference_names.query_allowed(query) && 
self.allow_fields.query_allowed(query) && self.allow_tags.query_allowed(query) } } impl Default for Resolver { fn default() -> Self { Self::new( Storage::default(), "(data)/(.*)", "$2", AllowGuard::default(), ) .expect("expected valid storage") } } impl Resolver { /// Create a new regex storage. pub fn new( storage: Storage, regex: &str, replacement_string: &str, allow_guard: AllowGuard, ) -> result::Result<Self, Error> { Ok(Self { regex: Regex::new(regex)?, substitution_string: replacement_string.to_string(), storage, allow_guard, }) } /// Set the local resolvers from the data server config. pub fn resolvers_from_data_server_config(&mut self, config: &DataServerConfig) { if let Storage::Tagged(TaggedStorageTypes::Local) = self.storage() { if let Some(local_storage) = config.into() { self.storage = Storage::Local { local_storage }; } } } /// Get the regex. pub fn regex(&self) -> &Regex { &self.regex } /// Get the substitution string. pub fn substitution_string(&self) -> &str { &self.substitution_string } /// Get the query guard. pub fn allow_guard(&self) -> &AllowGuard { &self.allow_guard } /// Get the storage backend. pub fn storage(&self) -> &Storage { &self.storage } /// Get allow formats. pub fn allow_formats(&self) -> &[Format] { self.allow_guard.allow_formats() } /// Get allow classes. pub fn allow_classes(&self) -> &[Class] { self.allow_guard.allow_classes() } /// Get allow interval. pub fn allow_interval(&self) -> Interval { self.allow_guard.allow_interval } /// Get allow reference names. pub fn allow_reference_names(&self) -> &ReferenceNames { &self.allow_guard.allow_reference_names } /// Get allow fields. pub fn allow_fields(&self) -> &Fields { &self.allow_guard.allow_fields } /// Get allow tags. 
pub fn allow_tags(&self) -> &Tags { &self.allow_guard.allow_tags } } impl IdResolver for Resolver { #[instrument(level = "trace", skip(self), ret)] fn resolve_id(&self, query: &Query) -> Option<ResolvedId> { if self.regex.is_match(query.id()) && self.allow_guard.query_allowed(query) { Some(ResolvedId::new( self .regex .replace(query.id(), &self.substitution_string) .to_string(), )) } else { None } } } #[async_trait] impl StorageResolver for Resolver { async fn resolve_request<T: ResolveResponse>( &self, query: &mut Query, ) -> Option<Result<Response>> { let resolved_id = self.resolve_id(query)?; let _matched_id = query.id().to_string(); query.set_id(resolved_id.into_inner()); if let Some(response) = self.storage().resolve_local_storage::<T>(query).await { return Some(response); } #[cfg(feature = "s3-storage")] if let Some(response) = self .storage() .resolve_s3_storage::<T>(self.regex(), &_matched_id, query) .await { return Some(response); } None } } impl IdResolver for &[Resolver] { #[instrument(level = "trace", skip(self), ret)] fn resolve_id(&self, query: &Query) -> Option<ResolvedId> { self.iter().find_map(|resolver| resolver.resolve_id(query)) } } #[async_trait] impl StorageResolver for &[Resolver] { async fn resolve_request<T: ResolveResponse>( &self, query: &mut Query, ) -> Option<Result<Response>> { for resolver in self.iter() { if let Some(resolved_storage) = resolver.resolve_request::<T>(query).await { return Some(resolved_storage); } } None } } #[cfg(test)] mod tests { use http::uri::Authority; use crate::config::tests::{test_config_from_env, test_config_from_file}; #[cfg(feature = "s3-storage")] use crate::storage::s3::S3Storage; use crate::types::Scheme::Http; use crate::types::Url; use super::*; struct TestResolveResponse; #[async_trait] impl ResolveResponse for TestResolveResponse { async fn from_local(local_storage: &LocalStorage, _: &Query) -> Result<Response> { Ok(Response::new( Bam, vec![Url::new(local_storage.authority().to_string())], )) } 
#[cfg(feature = "s3-storage")] async fn from_s3_storage(s3_storage: &S3Storage, _: &Query) -> Result<Response> { Ok(Response::new(Bam, vec![Url::new(s3_storage.bucket())])) } } #[tokio::test] async fn resolver_resolve_local_request() { let local_storage = LocalStorage::new( Http, Authority::from_static("127.0.0.1:8080"), "data".to_string(), "/data".to_string(), ); let resolver = Resolver::new( Storage::Local { local_storage }, "id", "$0-test", AllowGuard::default(), ) .unwrap(); assert_eq!( resolver .resolve_request::<TestResolveResponse>(&mut Query::new("id", Bam)) .await .unwrap() .unwrap(), Response::new(Bam, vec![Url::new("127.0.0.1:8080")]) ); } #[cfg(feature = "s3-storage")] #[tokio::test] async fn resolver_resolve_s3_request() { let resolver = Resolver::new( Storage::Tagged(TaggedStorageTypes::S3), "(id)-1", "$1-test", AllowGuard::default(), ) .unwrap(); assert_eq!( resolver .resolve_request::<TestResolveResponse>(&mut Query::new("id-1", Bam)) .await .unwrap() .unwrap(), Response::new(Bam, vec![Url::new("id")]) ); } #[test] fn resolver_resolve_id() { let resolver = Resolver::new(Storage::default(), "id", "$0-test", AllowGuard::default()).unwrap(); assert_eq!( resolver .resolve_id(&Query::new("id", Bam)) .unwrap() .into_inner(), "id-test" ); } #[test] fn resolver_array_resolve_id() { let resolver = vec![ Resolver::new( Storage::default(), "^(id-1)(.*)$", "$1-test-1", AllowGuard::default(), ) .unwrap(), Resolver::new( Storage::default(), "^(id-2)(.*)$", "$1-test-2", AllowGuard::default(), ) .unwrap(), ]; assert_eq!( resolver .as_slice() .resolve_id(&Query::new("id-1", Bam)) .unwrap() .into_inner(), "id-1-test-1" ); assert_eq!( resolver .as_slice() .resolve_id(&Query::new("id-2", Bam)) .unwrap() .into_inner(), "id-2-test-2" ); } #[test] fn config_resolvers_file() { test_config_from_file( r#" [[resolvers]] regex = "regex" "#, |config| { assert_eq!( config.resolvers().first().unwrap().regex().as_str(), "regex" ); }, ); } #[test] fn config_resolvers_guard_file() { 
test_config_from_file( r#" [[resolvers]] regex = "regex" [resolvers.allow_guard] allow_formats = ["BAM"] "#, |config| { assert_eq!( config.resolvers().first().unwrap().allow_formats(), &vec![Bam] ); }, ); } #[test] fn config_resolvers_env() { test_config_from_env(vec![("HTSGET_RESOLVERS", "[{regex=regex}]")], |config| { assert_eq!( config.resolvers().first().unwrap().regex().as_str(), "regex" ); }); } #[cfg(feature = "s3-storage")] #[test] fn config_resolvers_all_options_env() { test_config_from_env( vec![( "HTSGET_RESOLVERS", "[{ regex=regex, substitution_string=substitution_string, \ storage={ bucket=bucket }, \ allow_guard={ allow_reference_names=[chr1], allow_fields=[QNAME], allow_tags=[RG], \ allow_formats=[BAM], allow_classes=[body], allow_interval_start=100, \ allow_interval_end=1000 } }]", )], |config| { let storage = Storage::S3 { s3_storage: S3Storage::new("bucket".to_string(), None), }; let allow_guard = AllowGuard::new( ReferenceNames::List(HashSet::from_iter(vec!["chr1".to_string()])), Fields::List(HashSet::from_iter(vec!["QNAME".to_string()])), Tags::List(HashSet::from_iter(vec!["RG".to_string()])), vec![Bam], vec![Class::Body], Interval::new(Some(100), Some(1000)), ); let resolver = config.resolvers().first().unwrap(); assert_eq!(resolver.regex().to_string(), "regex"); assert_eq!(resolver.substitution_string(), "substitution_string"); assert_eq!(resolver.storage(), &storage); assert_eq!(resolver.allow_guard(), &allow_guard); }, ); } }
/////////////////////////////////////////////////////////////////////////////// // // Copyright 2018-2019 Airalab <research@aira.life> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // /////////////////////////////////////////////////////////////////////////////// //! The Robonomics runtime. This can be compiled with `#[no_std]`, ready for Wasm. #![cfg_attr(not(feature = "std"), no_std)] // `construct_runtime!` does a lot of recursion and requires us to increase the limit to 256. #![recursion_limit="256"] pub mod robonomics; #[cfg(feature = "std")] use serde_derive::{Serialize, Deserialize}; #[cfg(feature = "std")] use primitives::bytes; use rstd::prelude::*; use support::construct_runtime; use parity_codec::{Encode, Decode}; use primitives::OpaqueMetadata; use runtime_primitives::{ ApplyResult, AnySignature, generic, create_runtime_str }; use runtime_primitives::transaction_validity::TransactionValidity; use runtime_primitives::traits::{ self, Verify, BlakeTwo256, Block as BlockT, AuthorityIdFor, DigestFor, NumberFor, StaticLookup, CurrencyToVoteHandler }; use grandpa::fg_primitives::{self, ScheduledChange}; use client::{ block_builder::api::{CheckInherentsResult, InherentData, self as block_builder_api}, runtime_api, impl_runtime_apis }; use version::RuntimeVersion; #[cfg(feature = "std")] use version::NativeVersion; // A few exports that help ease life for downstream crates. 
// Re-exports of pallet call types and primitives needed by the client / CLI side.
#[cfg(any(feature = "std", test))]
pub use balances::Call as BalancesCall;
pub use consensus::Call as ConsensusCall;
pub use timestamp::Call as TimestampCall;
pub use robonomics::Call as RobonomicsCall;
pub use runtime_primitives::{Permill, Perbill};
pub use support::StorageValue;
pub use timestamp::BlockPeriod;
pub use staking::StakerStatus;
pub use system::EventRecord;

/// Alias to 512-bit hash when used in the context of a signature on the chain.
pub type Signature = AnySignature;

/// Some way of identifying an account on the chain. We intentionally make it equivalent
/// to the public key of our transaction signing scheme.
pub type AccountId = <Signature as Verify>::Signer;

/// Alias to 512-bit hash when used in the context of a session signature on the chain.
pub type AuthoritySignature = primitives::ed25519::Signature;

/// The Ed25519 public key of a session that belongs to an authority of the chain. This is
/// exactly equivalent to what substrate calls an "authority".
pub type AuthorityId = <AuthoritySignature as Verify>::Signer;

/// The type for looking up accounts. We don't expect more than 4 billion of them, but you
/// never know...
pub type AccountIndex = u32;

/// Balance of an account.
pub type Balance = u128;

/// A hash of some data used by the chain.
pub type Hash = primitives::H256;

/// Index of a block number in the chain.
pub type BlockNumber = u64;

/// Index of an account's extrinsic in the chain.
pub type Nonce = u64;

/// Opaque types. These are used by the CLI to instantiate machinery that don't need to know
/// the specifics of the runtime. They can then be made to be agnostic over specific formats
/// of data like extrinsics, allowing for them to continue syncing the network through upgrades
/// to even the core datastructures.
pub mod opaque {
    use super::*;

    /// Opaque, encoded, unchecked extrinsic. The client treats it as a raw byte blob.
    #[derive(PartialEq, Eq, Clone, Default, Encode, Decode)]
    #[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
    pub struct UncheckedExtrinsic(#[cfg_attr(feature = "std", serde(with="bytes"))] pub Vec<u8>);

    impl traits::Extrinsic for UncheckedExtrinsic {
        // Signature status is unknowable without decoding, hence `None`.
        fn is_signed(&self) -> Option<bool> {
            None
        }
    }

    /// Opaque block header type.
    pub type Header = generic::Header<BlockNumber, BlakeTwo256,
        generic::DigestItem<Hash, AuthorityId, AuthoritySignature>>;
    /// Opaque block type.
    pub type Block = generic::Block<Header, UncheckedExtrinsic>;
    /// Opaque block identifier type.
    pub type BlockId = generic::BlockId<Block>;
    /// Opaque session key type.
    pub type SessionKey = AuthorityId;
}

/// This runtime version. `spec_version` must be bumped on any logic change that
/// alters state transition; `impl_version` for implementation-only changes.
pub const VERSION: RuntimeVersion = RuntimeVersion {
    spec_name: create_runtime_str!("robonomics"),
    impl_name: create_runtime_str!("robonomics-node"),
    authoring_version: 1,
    spec_version: 22,
    impl_version: 23,
    apis: RUNTIME_API_VERSIONS,
};

/// The version information used to identify this runtime when compiled natively.
#[cfg(feature = "std")]
pub fn native_version() -> NativeVersion {
    NativeVersion {
        runtime_version: VERSION,
        can_author_with: Default::default(),
    }
}

impl system::Trait for Runtime {
    /// The identifier used to distinguish between accounts.
    type AccountId = AccountId;
    /// The lookup mechanism to get account ID from whatever is passed in dispatchers.
    type Lookup = Indices;
    /// The index type for storing how many extrinsics an account has signed.
    type Index = Nonce;
    /// The index type for blocks.
    type BlockNumber = BlockNumber;
    /// The type for hashing blocks and tries.
    type Hash = Hash;
    /// The hashing algorithm used.
    type Hashing = BlakeTwo256;
    /// The header digest type.
    type Digest = generic::Digest<Log>;
    /// The header type.
    type Header = generic::Header<BlockNumber, BlakeTwo256, Log>;
    /// The ubiquitous event type.
    type Event = Event;
    /// The ubiquitous log type.
    type Log = Log;
    /// The ubiquitous origin type.
    type Origin = Origin;
}

impl timestamp::Trait for Runtime {
    /// A timestamp: seconds since the unix epoch.
    type Moment = u64;
    /// Aura is notified whenever the timestamp is set (slot bookkeeping).
    type OnTimestampSet = Aura;
}

impl aura::Trait for Runtime {
    /// Offline authorities are slashed via the staking module.
    type HandleReport = aura::StakingSlasher<Runtime>;
}

impl consensus::Trait for Runtime {
    /// The identifier we use to refer to authorities.
    type SessionKey = AuthorityId;
    // The aura module handles offline-reports internally
    // rather than using an explicit report system.
    type InherentOfflineReport = ();
    /// The ubiquitous log type.
    type Log = Log;
}

impl indices::Trait for Runtime {
    /// The type for recording indexing into the account enumeration. If this ever overflows,
    /// there will be problems!
    type AccountIndex = AccountIndex;
    /// Use the standard means of resolving an index hint from an id.
    type ResolveHint = indices::SimpleResolveHint<Self::AccountId, Self::AccountIndex>;
    /// Determine whether an account is dead.
    type IsDeadAccount = Balances;
    /// The ubiquitous event type.
    type Event = Event;
}

impl balances::Trait for Runtime {
    /// The type for recording an account's balance.
    type Balance = Balance;
    /// What to do if an account's free balance gets zeroed.
    type OnFreeBalanceZero = (Staking, Session);
    /// What to do if a new account is created.
    type OnNewAccount = Indices;
    /// The ubiquitous event type.
    type Event = Event;
    // Fees/dust are simply dropped (unit handlers).
    type TransactionPayment = ();
    type DustRemoval = ();
    type TransferPayment = ();
}

impl session::Trait for Runtime {
    type ConvertAccountIdToSessionKey = ();
    /// Session rotation notifies staking and keeps grandpa's authority set in sync.
    type OnSessionChange = (Staking, grandpa::SyncedAuthorities<Runtime>);
    type Event = Event;
}

impl staking::Trait for Runtime {
    type Currency = balances::Module<Self>;
    type CurrencyToVote = CurrencyToVoteHandler;
    type OnRewardMinted = ();
    type Event = Event;
    // Slashed/reward remainders are burned (unit handlers).
    type Slash = ();
    type Reward = ();
}

impl grandpa::Trait for Runtime {
    type SessionKey = AuthorityId;
    type Log = Log;
    type Event = Event;
}

impl finality_tracker::Trait for Runtime {
    /// If finality stalls, force grandpa to resync its authority set.
    type OnFinalizationStalled = grandpa::SyncedAuthorities<Runtime>;
}

impl sudo::Trait for Runtime {
    type Proposal = Call;
    type Event = Event;
}

impl robonomics::Trait for Runtime {
    /// Type for recording indexing into the liability enumeration.
    type LiabilityIndex = u64;
    /// Native token as processing currency.
    type Currency = balances::Module<Self>;
    /// The ubiquitous event type.
    type Event = Event;
}

// Assembles all configured pallets into the `Runtime` type and generates the
// outer `Call`, `Event`, `Log`, `Origin` etc. enums.
construct_runtime!(
    pub enum Runtime with Log(InternalLog: DigestItem<Hash, AuthorityId, AuthoritySignature>) where
        Block = Block,
        NodeBlock = opaque::Block,
        UncheckedExtrinsic = UncheckedExtrinsic
    {
        System: system::{default, Log(ChangesTrieRoot)},
        Aura: aura::{Module, Inherent(Timestamp)},
        Timestamp: timestamp::{Module, Call, Storage, Config<T>, Inherent},
        Consensus: consensus::{Module, Call, Storage, Config<T>, Log(AuthoritiesChange), Inherent},
        Indices: indices,
        Balances: balances,
        Session: session,
        Staking: staking::{default, OfflineWorker},
        FinalityTracker: finality_tracker::{Module, Call, Inherent},
        Grandpa: grandpa::{Module, Call, Storage, Config<T>, Log(), Event<T>},
        Sudo: sudo,
        Robonomics: robonomics::{Module, Call, Storage, Event<T>},
    }
);

/// The type used as a helper for interpreting the sender of transactions.
type Context = system::ChainContext<Runtime>;
/// The address format for describing accounts.
type Address = <Indices as StaticLookup>::Source;
/// Block header type as expected by this runtime.
pub type Header = generic::Header<BlockNumber, BlakeTwo256, Log>;
/// Block type as expected by this runtime.
pub type Block = generic::Block<Header, UncheckedExtrinsic>;
/// BlockId type as expected by this runtime.
pub type BlockId = generic::BlockId<Block>;
/// Unchecked extrinsic type as expected by this runtime.
pub type UncheckedExtrinsic = generic::UncheckedMortalCompactExtrinsic<Address, Nonce, Call, Signature>;
/// Extrinsic type that has already been checked.
pub type CheckedExtrinsic = generic::CheckedExtrinsic<AccountId, Nonce, Call>;
/// Executive: handles dispatch to the various modules.
pub type Executive = executive::Executive<Runtime, Block, Context, Balances, AllModules>;

// Implement our runtime API endpoints. This is just a bunch of proxying.
impl_runtime_apis! {
    impl runtime_api::Core<Block> for Runtime {
        fn version() -> RuntimeVersion {
            VERSION
        }

        fn execute_block(block: Block) {
            Executive::execute_block(block)
        }

        fn initialize_block(header: &<Block as BlockT>::Header) {
            Executive::initialize_block(header)
        }

        fn authorities() -> Vec<AuthorityIdFor<Block>> {
            // Retained only for API compatibility; see `AuthoritiesApi` below.
            panic!("Deprecated, please use `AuthoritiesApi`.")
        }
    }

    impl runtime_api::Metadata<Block> for Runtime {
        fn metadata() -> OpaqueMetadata {
            Runtime::metadata().into()
        }
    }

    impl block_builder_api::BlockBuilder<Block> for Runtime {
        fn apply_extrinsic(extrinsic: <Block as BlockT>::Extrinsic) -> ApplyResult {
            Executive::apply_extrinsic(extrinsic)
        }

        fn finalize_block() -> <Block as BlockT>::Header {
            Executive::finalize_block()
        }

        fn inherent_extrinsics(data: InherentData) -> Vec<<Block as BlockT>::Extrinsic> {
            data.create_extrinsics()
        }

        fn check_inherents(block: Block, data: InherentData) -> CheckInherentsResult {
            data.check_extrinsics(&block)
        }

        fn random_seed() -> <Block as BlockT>::Hash {
            System::random_seed()
        }
    }

    impl runtime_api::TaggedTransactionQueue<Block> for Runtime {
        fn validate_transaction(tx: <Block as BlockT>::Extrinsic) -> TransactionValidity {
            Executive::validate_transaction(tx)
        }
    }

    impl offchain_primitives::OffchainWorkerApi<Block> for Runtime {
        fn offchain_worker(number: NumberFor<Block>) {
            Executive::offchain_worker(number)
        }
    }

    impl fg_primitives::GrandpaApi<Block> for Runtime {
        // Scan the digest for a grandpa log announcing a scheduled authority change.
        fn grandpa_pending_change(digest: &DigestFor<Block>)
            -> Option<ScheduledChange<NumberFor<Block>>>
        {
            for log in digest.logs.iter().filter_map(|l| match l {
                Log(InternalLog::grandpa(grandpa_signal)) => Some(grandpa_signal),
                _=> None
            }) {
                if let Some(change) = Grandpa::scrape_digest_change(log) {
                    return Some(change);
                }
            }
            None
        }

        // Same scan, but for forced (unconditional) authority changes.
        fn grandpa_forced_change(digest: &DigestFor<Block>)
            -> Option<(NumberFor<Block>, ScheduledChange<NumberFor<Block>>)>
        {
            for log in digest.logs.iter().filter_map(|l| match l {
                Log(InternalLog::grandpa(grandpa_signal)) => Some(grandpa_signal),
                _ => None
            }) {
                if let Some(change) = Grandpa::scrape_digest_forced_change(log) {
                    return Some(change);
                }
            }
            None
        }

        fn grandpa_authorities() -> Vec<(AuthorityId, u64)> {
            Grandpa::grandpa_authorities()
        }
    }

    impl consensus_aura::AuraApi<Block> for Runtime {
        fn slot_duration() -> u64 {
            Aura::slot_duration()
        }
    }

    impl consensus_authorities::AuthoritiesApi<Block> for Runtime {
        fn authorities() -> Vec<AuthorityIdFor<Block>> {
            Consensus::authorities()
        }
    }
}
mod layout;

use std::io;
use std::time::Duration;

pub use std::thread::JoinHandle;

use super::{Backend, Key, StdinReader, Terminal};
use crate::util::future::abortable::AbortHandle;

pub use crate::modules::{
    beatport::tui::Widget as BeatportWidget,
    bandcamp::tui::Widget as BandcampWidget,
    slider::tui::Widget as SliderWidget,
    zippy::tui::Widget as ZippyWidget,
    music2k::tui::Widget as Music2kWidget,
};

/// Top-level TUI window: owns the terminal, the stdin reader, an abort handle
/// for background work, and one widget per source module. Runs its own render
/// loop on a dedicated thread (see [`Window::run`]).
pub struct Window {
    // Cancels background tasks when the user quits.
    pub aborter: AbortHandle,
    pub terminal: Terminal<Backend>,
    pub input: StdinReader,
    // Widgets:
    pub beatport_widget: BeatportWidget,
    pub bandcamp_widget: BandcampWidget,
    pub slider_widget: SliderWidget,
    pub zippy_widget: ZippyWidget,
    pub music2k_widget: Music2kWidget,
}

impl Window {
    /// Spawn the UI thread and return its join handle. Consumes `self`; the
    /// window lives on the spawned thread from here on.
    pub fn run(self) -> JoinHandle<io::Result<()>> {
        std::thread::Builder
            ::new()
            .name(
                "slizzy ui".to_owned()
            )
            .spawn(
                move || self.run_loop()
            )
            .expect("failed to spawn thread")
    }

    /// Let every widget advance its internal state before the next draw.
    fn update_widgets(&mut self) {
        self.beatport_widget.update();
        self.bandcamp_widget.update();
        self.slider_widget.update();
        self.zippy_widget.update();
        self.music2k_widget.update();
    }

    /// Render one frame: lay out the screen and draw each widget's renderer
    /// into its assigned region.
    fn draw(&mut self) -> io::Result<()> {
        // Renderers are taken up-front so the draw closure doesn't borrow `self`.
        let beatport_widget = self.beatport_widget.renderer();
        let bandcamp_widget = self.bandcamp_widget.renderer();
        let slider_widget = self.slider_widget.renderer();
        let zippy_widget = self.zippy_widget.renderer();
        let music2k_widget = self.music2k_widget.renderer();

        self.terminal.draw(
            |mut frame| {
                let root_layout = layout::Root::new(frame.size());
                let metasources_layout = layout::MetaSources::new(root_layout.metasources);
                let tracksources_layout = layout::TrackSources::new(root_layout.tracksources);

                { // Meta sources row.
                    frame.render_widget(beatport_widget, metasources_layout.beatport);
                    frame.render_widget(bandcamp_widget, metasources_layout.bandcamp);
                }

                { // Track sources row.
                    frame.render_widget(slider_widget, tracksources_layout.slider);
                    frame.render_widget(zippy_widget, tracksources_layout.zippy);
                    frame.render_widget(music2k_widget, tracksources_layout.music2k);
                }
            }
        )
    }

    /// Main UI loop: poll input, update widgets, draw, sleep; repeat until the
    /// user quits. Cursor is hidden for the duration of the loop.
    fn run_loop(mut self) -> io::Result<()> {
        self.terminal.hide_cursor()?;

        let mut quit = false;

        while !quit {
            for action in self.input.read_action() {
                match action? {
                    Action::Quit => {
                        // Tell background tasks to stop, then leave the loop.
                        self.aborter.abort();
                        quit = true;
                    },
                    Action::Key(key) => {
                        log::debug!("user input: {:?}", key);
                        // TODO: forward to active widget
                    },
                }
            };

            self.update_widgets();
            self.draw()?;

            // ~3 refreshes per second.
            std::thread::sleep(
                Duration::from_millis(300)
            );
        }

        self.terminal.show_cursor()?;

        Ok(())
    }
}

impl std::fmt::Debug for Window {
    // NOTE(review): the struct name printed is "Params", and only two of the
    // five widgets are listed — looks like a copy/paste; confirm intent.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.debug_struct("Params")
            .field("beatport_widget", &self.beatport_widget)
            .field("slider_widget", &self.slider_widget)
            .finish()
    }
}

/// High-level user action decoded from a key press.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum Action {
    Quit,
    Key(Key),
}

impl From<Key> for Action {
    // 'q' quits; every other key is passed through as-is.
    fn from(key: Key) -> Self {
        match key {
            Key::Char('q') => Action::Quit,
            other => Action::Key(other),
        }
    }
}
#[derive(Debug)] pub enum Error { NetworkError(reqwest::Error), MalformedResponse(serde_json::error::Error), } impl From<reqwest::Error> for Error { fn from(error: reqwest::Error) -> Self { Error::NetworkError(error) } } impl From<serde_json::error::Error> for Error { fn from(error: serde_json::error::Error) -> Self { Error::MalformedResponse(error) } }
use sudo_test::{Command, Env, User};

use crate::{Result, PASSWORD, USERNAME};

/// Checks that /etc/security/limits.d rules are applied for the *target* user
/// of `su -c`, not for the invoking user.
#[test]
fn etc_security_limits_rules_apply_according_to_the_target_user() -> Result<()> {
    let target_user = "ghost";
    let original = "2048";
    let expected = "1024";
    // limits.conf syntax: one "<user> <type> <item> <value>" rule per line.
    let limits = format!(
        "{USERNAME} hard locks {original}
{target_user} hard locks {expected}"
    );
    let env = Env("")
        .file("/etc/security/limits.d/50-test.conf", limits)
        .user(USERNAME)
        .user(User(target_user).password(PASSWORD).shell("/bin/bash"))
        .build()?;

    // this appears to ignore the `limits` rules, perhaps because of docker
    // in any case, the assertion below and the rule above should be enough to check that the
    // *target* user's, and not the invoking user's, limits apply when su is involved
    // let normal_limit = Command::new("bash")
    //     .args(["-c", "ulimit -x"])
    //     .as_user(USERNAME)
    //     .output(&env)?
    //     .stdout()?;

    // assert_eq!(original, normal_limit);

    // check that limits apply even when root is the invoking user
    let users = ["root", USERNAME];
    for invoking_user in users {
        // `ulimit -x` reports the max-locks limit inside the target's shell.
        let su_limit = Command::new("su")
            .args(["-c", "ulimit -x", target_user])
            .stdin(PASSWORD)
            .as_user(invoking_user)
            .output(&env)?
            .stdout()?;
        assert_eq!(expected, su_limit);
    }

    Ok(())
}
use std::io;

/// Reads a line from stdin and reports whether the trimmed input is a palindrome.
fn main() {
    let mut input = String::new();
    println!("Input a char sequence to be evaluated...");
    match io::stdin().read_line(&mut input) {
        Ok(_) => {
            let trimmed_input = input.trim().to_string();
            let result = is_palindrome(trimmed_input);
            println!("Is palindrome? {}", result);
        }
        Err(error) => println!("Error: {}", error),
    }
}

/// Returns `true` when `input` reads the same forwards and backwards.
///
/// Works on Unicode scalar values (`char`s), so multi-byte UTF-8 input is
/// handled correctly. Sequences of length 0 or 1 are reported as `false`,
/// preserving this program's original convention.
fn is_palindrome(input: String) -> bool {
    // BUG FIX: the original measured `input.len()` (a BYTE count) and used it
    // to index a Vec of chars, which panics on any multi-byte UTF-8 input
    // (e.g. "ééé"). Count chars instead, after collecting them once.
    let chars: Vec<char> = input.chars().collect();
    let length = chars.len();
    if length <= 1 {
        return false;
    }
    // Two-pointer scan from both ends toward the middle.
    let mut i = 0;
    let mut j = length - 1;
    while i < j {
        if chars[i] != chars[j] {
            return false;
        }
        i += 1;
        j -= 1;
    }
    true
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn valid_palindrome() {
        assert_eq!(is_palindrome(String::from("ana")), true);
    }

    #[test]
    fn invalid_palindrome() {
        assert_eq!(is_palindrome(String::from("abcd")), false);
    }

    #[test]
    fn multibyte_palindrome() {
        // Would have panicked before the byte/char length fix.
        assert_eq!(is_palindrome(String::from("ééé")), true);
    }
}
use std::convert::TryInto;
use syn::parse::{Parse, ParseStream};
use syn::{Error, Result, Type};
use syn::{LitInt, Token};

use crate::glsl::{Glsl, GlslLine};
use crate::yasl_ident::YaslIdent;
use crate::yasl_type::YaslType;

/// Direction of a GLSL layout variable: shader input or output.
#[derive(Debug)]
enum LayoutKind {
    Input,
    Output,
}

impl From<&LayoutKind> for Glsl {
    // Maps the kind to its GLSL storage qualifier keyword.
    fn from(kind: &LayoutKind) -> Glsl {
        Glsl::Expr(
            match kind {
                LayoutKind::Input => "in",
                LayoutKind::Output => "out",
            }
            .to_string(),
        )
    }
}

/// A parsed `layout<kind, pos> ident: ty;` item, emitted as a GLSL
/// `layout(location=pos) in|out ty ident;` line.
#[derive(Debug)]
pub struct YaslItemLayout {
    kind: LayoutKind,
    // GLSL `location` index.
    pos: usize,
    ident: YaslIdent,
    ty: YaslType,
}

impl YaslItemLayout {}

impl From<&YaslItemLayout> for Glsl {
    fn from(item: &YaslItemLayout) -> Glsl {
        Glsl::Line(GlslLine {
            // Span points at the identifier so errors land on the user's name.
            span: Some(item.ident.span()),
            ends_with_semi: true,
            glsl_string: format!(
                "layout(location={}) {} {} {}",
                item.pos.to_string(),
                Glsl::from(&item.kind),
                Glsl::from(&item.ty),
                Glsl::from(&item.ident),
            ),
        })
    }
}

// Custom keywords recognized by the parser below.
syn::custom_keyword!(layout);
syn::custom_keyword!(input);
syn::custom_keyword!(output);

impl Parse for YaslItemLayout {
    /// Parses `layout<input|output, N> ident: Type;`.
    fn parse(ps: ParseStream) -> Result<Self> {
        let let_token = ps.parse::<layout>()?;
        let _ = ps.parse::<Token![<]>()?;
        // First generic argument: the direction keyword.
        let kind = if ps.peek(input) {
            ps.parse::<input>()?;
            LayoutKind::Input
        } else if ps.peek(output) {
            ps.parse::<output>()?;
            LayoutKind::Output
        } else {
            return Err(Error::new(let_token.span, "Expected input or output kind"));
        };
        let _ = ps.parse::<Token![,]>()?;
        // Second generic argument: the location index (base-10 literal).
        let pos = ps.parse::<LitInt>()?;
        let pos = pos.base10_parse()?;
        let _ = ps.parse::<Token![>]>()?;

        let ident: syn::Ident = ps.parse()?;
        let _ = ps.parse::<Token![:]>()?;
        let ty: Type = ps.parse()?;
        let _ = ps.parse::<Token![;]>()?;

        Ok(Self {
            kind,
            pos,
            ident: ident.into(),
            // Type validation happens in the YaslType conversion.
            ty: ty.try_into()?,
        })
    }
}
/// Access right a user can hold on the database.
#[derive(Debug, Clone, PartialEq)]
pub enum DbPrivilege {
    CanRead,
    CanWrite,
}

/// A database user account: credentials plus the set of granted privileges.
///
/// `Default` yields empty credentials and no privileges — identical to the
/// previous hand-written impl (`String::default()` / `Vec::new()`), which the
/// derive now replaces.
#[derive(Debug, Clone, Default)]
pub struct User {
    pub username: String,
    pub password: String,
    pub privileges: Vec<DbPrivilege>,
}

impl User {
    /// Builds a `User` from borrowed credentials, taking ownership of `privileges`.
    pub fn new(username: &str, password: &str, privileges: Vec<DbPrivilege>) -> Self {
        Self {
            username: username.to_string(),
            password: password.to_string(),
            privileges,
        }
    }
}
use super::*;
use log::debug;
use opencv::{core::Mat as OpencvMat, core::Point as OpencvPoint, prelude::MatTrait};
use std::marker::PhantomData;

pub mod convert_color;
pub mod draw;
pub mod filter;

/// Thin typed wrapper over an OpenCV matrix. The `ColorSpace` marker is purely
/// compile-time (zero-sized) and tags which color space the pixel data is in;
/// row/column counts are cached at construction.
#[derive(Debug)]
pub struct Mat<ColorSpace> {
    inner: OpencvMat,
    n_rows: i32,
    n_cols: i32,
    _type: PhantomData<ColorSpace>,
}

impl<ColorSpace> Mat<ColorSpace> {
    /// Allocates a rows x cols matrix of `cv_type`, filled with `background`.
    pub fn new<C: ToOpencvScalar>(rows: i32, cols: i32, cv_type: &CVType, background: C) -> Result<Mat<ColorSpace>> {
        Ok(Mat::pack(OpencvMat::new_rows_cols_with_default(
            rows,
            cols,
            cv_type.unpack(),
            background.to_opencv_scalar(),
        )?))
    }

    /// Wraps a raw OpenCV matrix, caching its current dimensions.
    pub fn pack(inner: opencv::core::Mat) -> Mat<ColorSpace> {
        let n_rows = inner.rows();
        let n_cols = inner.cols();
        Mat {
            inner,
            n_rows,
            n_cols,
            _type: PhantomData,
        }
    }

    /// Borrows the underlying OpenCV matrix.
    pub fn unpack(&self) -> &opencv::core::Mat {
        &self.inner
    }

    /// Returns a view restricted to `roi` (shares data with `self`).
    pub fn roi(&self, roi: Rect) -> Result<Mat<ColorSpace>> {
        Ok(Mat::pack(opencv::core::Mat::roi(&self.inner, roi.unpack())?))
    }

    /// Rectifies the quadrilateral given by `points` (assumed ordered
    /// top-left, top-right, bottom-right, bottom-left — TODO confirm against
    /// callers) into an axis-aligned image via a homography warp.
    pub fn lens(&self, points: &[Point; 4]) -> Result<Mat<ColorSpace>> {
        use opencv::{calib3d::find_homography, imgproc::warp_perspective, types::VectorOfPoint};
        // Output size derived from the quad's top edge and right edge.
        let w = points[1].x - points[0].x;
        let h = points[2].y - points[1].y;
        let mut dst_corners = VectorOfPoint::with_capacity(4);
        dst_corners.push(OpencvPoint::new(0, 0));
        dst_corners.push(OpencvPoint::new(w, 0));
        dst_corners.push(OpencvPoint::new(w, h));
        dst_corners.push(OpencvPoint::new(0, h));
        let roi_corners_mat = OpencvMat::from_exact_iter(points.iter().map(|p| p.unpack()))?;
        let dst_corners_mat = OpencvMat::from_exact_iter(dst_corners.iter())?;
        let hom = find_homography(&roi_corners_mat, &dst_corners_mat, &mut OpencvMat::default()?, 0, 3.)?;
        let mut warped = OpencvMat::default()?;
        let size = opencv::core::Size::new(w, h);
        // https://docs.rs/opencv/0.30.1/opencv/imgproc/fn.warp_perspective.html
        warp_perspective(
            &self.inner,
            &mut warped,
            &hom,
            size,
            opencv::imgproc::INTER_LINEAR,
            opencv::core::BORDER_CONSTANT,
            opencv::core::Scalar::default(),
        )?;
        Ok(Mat::pack(warped))
    }

    /// Row count cached at `pack` time.
    pub fn n_rows(&self) -> i32 {
        self.n_rows
    }

    /// Column count cached at `pack` time.
    pub fn n_cols(&self) -> i32 {
        self.n_cols
    }

    pub fn is_empty(&self) -> Result<bool> {
        Ok(self.inner.empty()?)
    }

    /// Reads the pixel at `p` and converts it BGR-vec -> RGB -> HSV.
    /// NOTE(review): assumes the matrix holds 3-channel 8-bit data (Vec3b) —
    /// confirm for each ColorSpace this is called with.
    pub fn at(&self, p: &Point) -> Result<HSV> {
        let vec3b = self.inner.at_2d::<opencv::core::Vec3b>(p.y, p.x)?;
        let hsv = HSV::from(RGB::from(*vec3b));
        debug!("color at point: {} is: {}", p, hsv);
        Ok(hsv)
    }

    /// Averages HSV channels over a `size` x `size` window anchored near `p`.
    /// The window is centered on `p` when it fits, otherwise anchored at `p`;
    /// no clamping is done at the right/bottom edges (can error out of bounds).
    pub fn at_avg(&self, p: &Point, size: u8) -> Result<HSV> {
        let size = size as i32;
        let offset = size / 2;
        let (x, y) = if p.x - offset > 0 && p.y - offset > 0 {
            (p.x - offset, p.y - offset)
        } else {
            (p.x, p.y)
        };
        // Channel-wise sums; widened to i32 to avoid u8 overflow.
        let (mut h, mut s, mut v) = (0, 0, 0);
        for x in x..(x + size) {
            for y in y..(y + size) {
                let hsv = self.at(&Point::new(x, y))?;
                h += hsv.h as i32;
                s += hsv.s as i32;
                v += hsv.v as i32;
            }
        }
        let n = size * size;
        HSV::new((h / n) as u8, (s / n) as u8, (v / n) as u8)
    }

    /// Copies this matrix's pixels into `target` (OpenCV deep copy).
    pub fn copy_to(&self, target: &mut Mat<ColorSpace>) -> Result<()> {
        self.unpack().copy_to(&mut target.inner)?;
        Ok(())
    }
}

impl<ColorSpace> Clone for Mat<ColorSpace> {
    // Deep-clones the pixel buffer; panics if the OpenCV clone fails.
    fn clone(&self) -> Self {
        Self {
            inner: OpencvMat::clone(&self.inner).unwrap(),
            n_rows: self.n_rows,
            n_cols: self.n_cols,
            _type: self._type.clone(),
        }
    }
}

// pub trait FindCenter {
//     fn find_center(&self) -> Option<Point>;
// }

// impl FindCenter for Mat<Gray> where {
//     fn find_center(&self) -> Option<Point> {
//         let moments = moments(&self.inner, true);
//         if moments.m00 > 0.0 {
//             let center = Point {
//                 x: (moments.m10 / moments.m00) as i32,
//                 y: (moments.m01 / moments.m00) as i32,
//             };
//             Some(center)
//         } else {
//             None
//         }
//     }
// }

/// Thresholding by HSV range, producing a binary (grayscale) mask.
pub trait InRange {
    fn in_range(&self, range: &HSVRange) -> Mat<Gray>;
}

impl InRange for Mat<HSV> {
    fn in_range(&self, range: &HSVRange) -> Mat<Gray> {
        let mut masked = OpencvMat::default().unwrap();
        opencv::core::in_range(
            &self.inner,
            &range.min().to_opencv_scalar(),
            &range.max().to_opencv_scalar(),
            &mut masked,
        )
        .expect("in_range error");
        Mat::pack(masked)
    }
}

/// Contour extraction from a binary mask.
pub trait FindContours {
    fn find_contours(&mut self) -> Contours;
}

impl FindContours for Mat<Gray> {
    // NOTE: takes &mut because OpenCV's find_contours may modify the input image.
    fn find_contours(&mut self) -> Contours {
        let mut contours = opencv::types::VectorOfVectorOfPoint::new();
        opencv::imgproc::find_contours(
            &mut self.inner,
            &mut contours,
            opencv::imgproc::RETR_CCOMP,
            opencv::imgproc::CHAIN_APPROX_SIMPLE,
            OpencvPoint::new(0, 0),
        )
        .expect("unable to find_countours");
        Contours::pack(contours)
    }
}
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under both the MIT license found in the * LICENSE-MIT file in the root directory of this source tree and the Apache * License, Version 2.0 found in the LICENSE-APACHE file in the root directory * of this source tree. */ use bytes::{BufMut, Bytes, BytesMut}; /// Wrapper for using Bytes in futures::TryStreamExt::try_collect which requires /// a trait bound T: Default + Extend<Self\::Ok>. With this wrapper we get to /// try_collect a stream of Bytes such as produced by Hyper clients. /// /// More explicitly, if `resp` is a `hyper::Response<hyper::Body>` then we write: /// ```ignore /// resp.into_body().try_collect::<BytesCollect>().into() /// ``` /// to get back Bytes. #[derive(Default)] pub struct BytesCollect { buffer: BytesMut, } impl BytesCollect { /// Create default instance of BytesCollect pub fn new() -> Self { Self::default() } } impl Extend<Bytes> for BytesCollect { fn extend<I>(&mut self, iter: I) where I: IntoIterator<Item = Bytes>, { for bytes in iter { self.buffer.put(bytes); } } } impl From<BytesCollect> for Bytes { fn from(collect: BytesCollect) -> Self { collect.buffer.freeze() } }
use std::path::Path;
use nes::{LoadError, Emulator, StandardInput};
use sdl2::pixels::Color;
use sdl2::event::Event;
use sdl2::rect::Rect;
use sdl2::keyboard::Keycode;
use sdl2::audio::{AudioQueue, AudioSpecDesired};
use std::time::Duration;

/// SDL2 front-end for the NES emulator: window/canvas rendering, keyboard
/// input mapping, audio queueing, and a single save-state slot.
pub struct GuiObject {
    emulator: Emulator,
    // One in-memory save-state slot (E = save, Q = load).
    save_slot: Option<Vec<u8>>,
}

impl GuiObject {
    pub fn new() -> Self {
        GuiObject {
            emulator: Emulator::new(),
            save_slot: None,
        }
    }

    /// Loads a ROM image from disk into the emulator.
    pub fn load_rom_from_file(&mut self, path: &Path) -> Result<(), LoadError> {
        self.emulator.load_rom_from_file(path)
    }

    /// Runs the main loop: emulate one frame, blit the framebuffer, handle
    /// events/input, queue audio, and pace to ~60 fps. Blocks until quit.
    pub fn run(&mut self) {
        let mut frame_counter = 0usize;
        let mut frame_skipped = 0usize;
        use std::time::Instant;

        let sdl_context = sdl2::init().unwrap();
        let video_subsystem = sdl_context.video().unwrap();
        let audio_subsystem = sdl_context.audio().unwrap();

        // Integer pixel scale factor for the 256x240 NES framebuffer.
        let magnifaction = 3u32;
        let window = video_subsystem.window("rust-sdl2 demo", 256 * magnifaction, 240 * magnifaction)
            .position_centered()
            .build()
            .unwrap();

        let mut canvas = window.into_canvas().build().unwrap();

        canvas.set_draw_color(Color::RGB(0, 0, 0));
        canvas.clear();
        canvas.present();

        self.emulator.reset();

        // Mono 44.1 kHz f32 samples pushed into a queue the driver drains.
        let desired_spec = AudioSpecDesired {
            freq: Some(44100),
            channels: Some(1),
            samples: None,
        };
        let audio_device: AudioQueue<f32> = audio_subsystem.open_queue(None, &desired_spec).unwrap();
        audio_device.resume();

        let mut event_pump = sdl_context.event_pump().unwrap();
        'running: loop {
            let start = Instant::now();
            // let start2 = Instant::now();
            self.emulator.run_for_one_frame();
            frame_counter += 1;
            // println!("time cost: {:?} ms", start2.elapsed().as_millis());

            // Blit: one filled rect per NES pixel, scaled by `magnifaction`.
            let frame_buffer = self.emulator.get_framebuffer();
            for (i, rgb) in frame_buffer.iter().enumerate() {
                let i = i as i32;
                let x = i % 256;
                let y = i / 256;
                canvas.set_draw_color(Color::RGB(rgb.r, rgb.g, rgb.b));
                canvas.fill_rect(Rect::new(x * magnifaction as i32, y * magnifaction as i32, magnifaction, magnifaction)).unwrap();
            }

            for event in event_pump.poll_iter() {
                match event {
                    // Drag-and-drop a ROM file onto the window to load it.
                    Event::DropFile { timestamp, window_id, filename } => {
                        let path = Path::new(&filename);
                        self.emulator.load_rom_from_file(&path).unwrap();
                        self.emulator.reset();
                    }
                    // E: capture a save state into the single slot.
                    Event::KeyDown { keycode: Some(Keycode::E), repeat: false, .. } => {
                        self.save_slot = Option::Some(self.emulator.save_state());
                    },
                    // Q: restore the save state, if one exists.
                    Event::KeyDown { keycode: Some(Keycode::Q), repeat: false, .. } => {
                        if let Some(v) = &self.save_slot {
                            self.emulator.load_state(&v)
                        }
                    },
                    Event::Quit {..} => {
                        break 'running
                    },
                    _ => {}
                }
            }

            // Controller 1 mapping: WASD = d-pad, J/K = B/A,
            // Enter = Start, Space = Select. Sampled per frame.
            let keyboard_state = sdl2::keyboard::KeyboardState::new(&event_pump);
            if keyboard_state.is_scancode_pressed(sdl2::keyboard::Scancode::Return) {
                self.emulator.set_input_1(StandardInput::START, true)
            }
            if keyboard_state.is_scancode_pressed(sdl2::keyboard::Scancode::Space) {
                self.emulator.set_input_1(StandardInput::SELECT, true)
            }
            if keyboard_state.is_scancode_pressed(sdl2::keyboard::Scancode::W) {
                self.emulator.set_input_1(StandardInput::UP, true)
            }
            if keyboard_state.is_scancode_pressed(sdl2::keyboard::Scancode::S) {
                self.emulator.set_input_1(StandardInput::DOWN, true)
            }
            if keyboard_state.is_scancode_pressed(sdl2::keyboard::Scancode::A) {
                self.emulator.set_input_1(StandardInput::LEFT, true)
            }
            if keyboard_state.is_scancode_pressed(sdl2::keyboard::Scancode::D) {
                self.emulator.set_input_1(StandardInput::RIGHT, true)
            }
            if keyboard_state.is_scancode_pressed(sdl2::keyboard::Scancode::J) {
                self.emulator.set_input_1(StandardInput::B, true)
            }
            if keyboard_state.is_scancode_pressed(sdl2::keyboard::Scancode::K) {
                self.emulator.set_input_1(StandardInput::A, true)
            }

            audio_device.queue_audio(self.emulator.get_sample().as_slice()).unwrap();
            self.emulator.clear_sample();
            // if frame_counter % 60 == 0 {
            //     println!("{}", frame_skipped);
            //     frame_skipped = 0;
            // }
            // If the audio queue is running low, skip presenting every other
            // frame so emulation catches up with audio.
            if audio_device.size() < 44100 / 2 && frame_counter & 1 == 0 {
                frame_skipped += 1;
                continue;
            }
            canvas.present();

            // Pace the loop to ~60 Hz by sleeping the remainder of the frame.
            let t = start.elapsed().as_nanos();
            let wait = if (1_000_000_000u128 / 60) > t {
                ((1_000_000_000u128 / 60) - t) as u32
            } else {
                0
            };
            ::std::thread::sleep(Duration::new(0, wait));
        }
    }
}
// Machine-generated FFI bindings for TensorFlow Lite types, included from the
// build output directory (presumably produced by bindgen in the build script —
// confirm in build.rs). Lints are relaxed because the generated code is not
// hand-maintained.
#![allow(dead_code)]
#![cfg_attr(feature = "cargo-clippy", allow(clippy))]

// Flatten the generated `root::tflite` namespace into this module.
pub(crate) use self::root::tflite::*;
pub(crate) use self::root::*;

include!(concat!(env!("OUT_DIR"), "/tflite_types.rs"));
//! Renderer use crate::scan::ScanlineU8; use crate::base::RenderingBase; use crate::color::Rgba8; use crate::POLY_SUBPIXEL_SCALE; use crate::POLY_SUBPIXEL_MASK; use crate::POLY_SUBPIXEL_SHIFT; use crate::POLY_MR_SUBPIXEL_SHIFT; use crate::MAX_HALF_WIDTH; use crate::line_interp::LineParameters; use crate::line_interp::line_mr; use crate::clip::Rectangle; use crate::raster::len_i64_xy; use crate::clip::{INSIDE, TOP,BOTTOM,LEFT,RIGHT}; use crate::pixfmt::Pixfmt; use crate::raster::RasterizerScanline; use crate::Rgb8; use crate::Transform; use crate::Source; use crate::VertexSource; use crate::Render; use crate::Color; use crate::DrawOutline; use crate::Pixel; use crate::outline::Subpixel; pub(crate) const LINE_MAX_LENGTH : i64 = 1 << (POLY_SUBPIXEL_SHIFT + 10); /// Aliased Renderer #[derive(Debug)] pub struct RenderingScanlineBinSolid<'a,T> where T: 'a { pub base: &'a mut RenderingBase<T>, pub color: Rgba8, } /// Anti-Aliased Renderer #[derive(Debug)] pub struct RenderingScanlineAASolid<'a,T> where T: 'a { base: &'a mut RenderingBase<T>, color: Rgba8, } #[derive(Debug)] pub struct RenderingScanlineAA<'a,T> { base: &'a mut RenderingBase<T>, span: SpanGradient, } #[derive(Debug)] pub struct SpanGradient { d1: i64, d2: i64, gradient: GradientX, color: Vec<Rgb8>, trans: Transform, } #[derive(Debug)] pub struct GradientX {} impl GradientX { pub fn calculate(&self, x: i64, _: i64, _: i64) -> i64 { x } } #[derive(Debug)] struct Interpolator { li_x: Option<LineInterpolator>, li_y: Option<LineInterpolator>, trans: Transform, } impl Interpolator { #[inline] pub fn subpixel_shift(&self) -> i64 { 8 } #[inline] pub fn subpixel_scale(&self) -> i64 { 1 << self.subpixel_shift() } pub fn new(trans: Transform) -> Self { Self { trans, li_x: None, li_y: None } } pub fn begin(&mut self, x: f64, y: f64, len: usize) { let tx = x; let ty = y; let (tx,ty) = self.trans.transform(tx,ty); let x1 = (tx * self.subpixel_scale() as f64).round() as i64; let y1 = (ty * self.subpixel_scale() as 
f64).round() as i64; let tx = x + len as f64; let ty = y; let (tx,ty) = self.trans.transform(tx,ty); let x2 = (tx * self.subpixel_scale() as f64).round() as i64; let y2 = (ty * self.subpixel_scale() as f64).round() as i64; self.li_x = Some(LineInterpolator::new(x1, x2, len as i64)); self.li_y = Some(LineInterpolator::new(y1, y2, len as i64)); } pub fn inc(&mut self) { if let Some(ref mut li) = self.li_x { (li).inc(); } if let Some(ref mut li) = self.li_y { (li).inc(); } } pub fn coordinates(&self) -> (i64, i64) { if let (Some(x),Some(y)) = (self.li_x.as_ref(), self.li_y.as_ref()) { (x.y, y.y) } else { panic!("Interpolator not Initialized"); } } } impl SpanGradient { #[inline] pub fn subpixel_shift(&self) -> i64 { 4 } #[inline] pub fn subpixel_scale(&self) -> i64 { 1 << self.subpixel_shift() } pub fn new(trans: Transform, gradient: GradientX, color: &[Rgb8], d1: f64, d2: f64) -> Self { let mut s = Self { d1: 0, d2: 1, color: color.to_vec(), gradient, trans }; s.d1(d1); s.d2(d2); s } pub fn d1(&mut self, d1: f64) { self.d1 = (d1 * self.subpixel_scale() as f64).round() as i64; } pub fn d2(&mut self, d2: f64) { self.d2 = (d2 * self.subpixel_scale() as f64).round() as i64; } pub fn prepare(&mut self) { } pub fn generate(&self, x: i64, y: i64, len: usize) -> Vec<Rgb8> { let mut interp = Interpolator::new(self.trans); let downscale_shift = interp.subpixel_shift() - self.subpixel_shift(); let mut dd = self.d2 - self.d1; if dd < 1 { dd = 1; } let ncolors = self.color.len() as i64; let mut span = vec![Rgb8::white() ; len]; interp.begin(x as f64 + 0.5, y as f64 + 0.5, len); for i in 0 .. len { let (x,y) = interp.coordinates(); let d = self.gradient.calculate(x >> downscale_shift, y >> downscale_shift, self.d2); let mut d = ((d-self.d1) * ncolors) / dd; if d < 0 { d = 0; } if d >= ncolors { d = ncolors - 1; } span[i] = self.color[d as usize]; interp.inc(); } span } } /// Render a single Scanline (y-row) without Anti-Aliasing (Binary?) 
/// Render a single scanline (y-row) without anti-aliasing (binary coverage).
///
/// Every span is drawn as a fully-covered horizontal line; per-pixel cover
/// values are ignored. `span.len` may be negative (AGG convention for a
/// solid run — presumably; confirm against the rasterizer), hence `abs()`.
fn render_scanline_bin_solid<T,C: Color>(sl: &ScanlineU8, ren: &mut RenderingBase<T>, color: C)
    where T: Pixel
{
    let cover_full = 255;
    for span in &sl.spans {
        // End x is inclusive: x + |len| - 1.
        ren.blend_hline(span.x, sl.y, span.x - 1 + span.len.abs(), color, cover_full);
    }
}
/// Render a single scanline (y-row) with anti-aliasing.
///
/// Positive `span.len`: blend using the per-pixel cover array.
/// Negative `span.len`: a solid run — blend a horizontal line of length
/// `|len|` using the single cover value `covers[0]`.
fn render_scanline_aa_solid<T,C: Color>(sl: &ScanlineU8, ren: &mut RenderingBase<T>, color: C)
    where T: Pixel
{
    let y = sl.y;
    for span in & sl.spans {
        let x = span.x;
        if span.len > 0 {
            ren.blend_solid_hspan(x, y, span.len, color, &span.covers);
        } else {
            // len < 0: inclusive end x is x + |len| - 1.
            ren.blend_hline(x, y, x-span.len-1, color, span.covers[0]);
        }
    }
}
/// Render a single scanline (y-row) with anti-aliasing, colors supplied by a
/// span generator (gradient) instead of a single solid color.
fn render_scanline_aa<T>(sl: &ScanlineU8, ren: &mut RenderingBase<T>, span_gen: &SpanGradient)
    where T: Pixel
{
    let y = sl.y;
    for span in &sl.spans {
        let x = span.x;
        let mut len = span.len;
        let covers = &span.covers;
        if len < 0 {
            len = -len;
        }
        let colors = span_gen.generate(x, y, len as usize);
        // For a negative (solid) span pass an empty cover slice and let the
        // single cover value covers[0] apply to the whole run.
        ren.blend_color_hspan(x, y, len, &colors,
                              if span.len < 0 { &[] } else { &covers },
                              covers[0]);
    }
}
Render for RenderingScanlineAA<'_,T> where T: Pixel { /// Render a single Scanline Row fn render(&mut self, data: &RenderData) { render_scanline_aa(&data.sl, &mut self.base, &self.span); } /// Set the current Color fn color<C: Color>(&mut self, _color: C) { unimplemented!("oops"); } } impl<'a,T> RenderingScanlineBinSolid<'a,T> where T: Pixel { /// Create a new Renderer from a Rendering Base pub fn with_base(base: &'a mut RenderingBase<T>) -> Self { let color = Rgba8::black(); Self { base, color } } pub fn as_bytes(&self) -> &[u8] { self.base.as_bytes() } pub fn to_file<P: AsRef<std::path::Path>>(&self, filename: P) -> Result<(), std::io::Error> { self.base.to_file(filename) } } impl<'a,T> RenderingScanlineAA<'a,T> where T: Pixel { pub fn new(base: &'a mut RenderingBase<T>, span: SpanGradient) -> Self { Self { base, span } } } impl<'a,T> RenderingScanlineAASolid<'a,T> where T: Pixel { /// Create a new Renderer from a Rendering Base pub fn with_base(base: &'a mut RenderingBase<T>) -> Self { let color = Rgba8::black(); Self { base, color } } pub fn as_bytes(&self) -> &[u8] { self.base.as_bytes() } pub fn to_file<P: AsRef<std::path::Path>>(&self, filename: P) -> Result<(), std::io::Error> { self.base.to_file(filename) } } /* pub trait Scale<T> { fn upscale(v: f64) -> T; fn downscale(v: i64) -> T; }*/ /// Render rasterized data to an image using a single color, Binary pub fn render_scanlines_bin_solid<C,T>(ras: &mut RasterizerScanline, ren: &mut RenderingBase<T>, color: C) where C: Color, T: Pixel { let mut sl = ScanlineU8::new(); if ras.rewind_scanlines() { sl.reset(ras.min_x(), ras.max_x()); while ras.sweep_scanline(&mut sl) { render_scanline_bin_solid(&sl, ren, color); } } } /// Render rasterized data to an image using a single color, Anti-aliased pub fn render_scanlines_aa_solid<C,T>(ras: &mut RasterizerScanline, ren: &mut RenderingBase<T>, color: C) where C: Color, T: Pixel { let mut sl = ScanlineU8::new(); if ras.rewind_scanlines() { sl.reset(ras.min_x(), 
/// Render rasterized data to an image using the renderer's current color.
///
/// Rewinds the rasterizer, sizes the scanline to the rasterized x-extent,
/// then sweeps and renders each scanline in turn.
pub fn render_scanlines<REN>(ras: &mut RasterizerScanline, ren: &mut REN)
    where REN: Render
{
    let mut data = RenderData::new();
    if ras.rewind_scanlines() {
        data.sl.reset( ras.min_x(), ras.max_x() );
        // NOTE(review): Render::prepare is presumably a default no-op hook —
        // the Render impls in this file do not define it; confirm the trait.
        ren.prepare();
        while ras.sweep_scanline(&mut data.sl) {
            ren.render(&data);
        }
    }
}
/// Rasterize and render each path with its matching color.
///
/// `paths` and `colors` are parallel slices; each path is rasterized from a
/// fresh rasterizer state and rendered via `render_scanlines`.
pub fn render_all_paths<REN,VS,C>(ras: &mut RasterizerScanline,
                                  ren: &mut REN,
                                  paths: &[VS],
                                  colors: &[C])
    where C: Color, REN: Render, VS: VertexSource
{
    debug_assert!(paths.len() == colors.len());
    for (path, color) in paths.iter().zip(colors.iter()) {
        ras.reset();
        ras.add_path(path);
        ren.color(*color);
        render_scanlines(ras, ren);
    }
}
/// Line interpolator using a digital differential analyzer (DDA).
///
/// Steps a value from `y1` to `y2` in `count` equal increments, carrying the
/// rounding error in an explicit error term so the endpoints land exactly.
///
/// See <https://en.wikipedia.org/wiki/Digital_differential_analyzer_(graphics_algorithm)>;
/// this is the equivalent of `dda2_line_interpolator` in the original AGG.
#[derive(Debug)]
pub(crate) struct LineInterpolator {
    /// Number of segments (clamped to at least 1).
    count: i64,
    /// Whole-step increment per segment: (y2 - y1) / count.
    left: i64,
    /// Remainder of the division: (y2 - y1) % count.
    rem: i64,
    /// Running error term that decides when to carry an extra unit.
    xmod: i64,
    /// Current interpolated value.
    pub y: i64,
}

impl LineInterpolator {
    /// Create a forward-adjusted interpolator from `y1` to `y2` over `count`
    /// steps. Inputs are expected in subpixel coordinates.
    ///
    /// When the error term starts non-positive, `xmod`, `rem` and `left` are
    /// shifted by one segment so truncation rounds consistently; the final
    /// `xmod -= count` seeds the forward error term.
    pub fn new(y1: i64, y2: i64, count: i64) -> Self {
        let segments = std::cmp::max(1, count);
        let delta = y2 - y1;
        let mut left = delta / segments;
        let mut rem = delta % segments;
        let mut xmod = rem;
        if xmod <= 0 {
            // Note: the adjustment deliberately uses the raw `count`
            // argument, matching the original AGG dda2 constructor.
            xmod += count;
            rem += count;
            left -= 1;
        }
        xmod -= count;
        Self { y: y1, left, rem, xmod, count: segments }
    }

    /// Shift the error term forward by one full cycle.
    pub fn adjust_forward(&mut self) {
        self.xmod -= self.count;
    }

    /// Create a forward-adjusted interpolator (alias for [`Self::new`]).
    pub fn new_foward_adjusted(y1: i64, y2: i64, count: i64) -> Self {
        Self::new(y1, y2, count)
    }

    /// Create a back-adjusted interpolator assuming the first point is 0.
    ///
    /// Unlike [`Self::new`], the error term is left as the (possibly
    /// shifted) remainder — there is no trailing `xmod -= count`.
    pub fn new_back_adjusted_2(y: i64, count: i64) -> Self {
        let segments = std::cmp::max(1, count);
        let mut left = y / segments;
        let mut rem = y % segments;
        let mut xmod = rem;
        if xmod <= 0 {
            xmod += count;
            rem += count;
            left -= 1;
        }
        Self { y: 0, left, rem, xmod, count: segments }
    }

    /// Advance one step: add the whole increment, accumulate the remainder,
    /// and carry one extra unit when the error term goes positive.
    pub fn inc(&mut self) {
        self.y += self.left;
        self.xmod += self.rem;
        if self.xmod > 0 {
            self.y += 1;
            self.xmod -= self.count;
        }
    }

    /// Step one segment backwards (mirror of [`Self::inc`]).
    pub fn dec(&mut self) {
        if self.xmod <= self.rem {
            self.xmod += self.count;
            self.y -= 1;
        }
        self.xmod -= self.rem;
        self.y -= self.left;
    }

    /// Current error term.
    pub fn xmod(&self) -> i64 { self.xmod }
    /// Number of segments.
    pub fn count(&self) -> i64 { self.count }
    /// Whole-step increment per segment.
    pub fn left(&self) -> i64 { self.left }
    /// Division remainder.
    pub fn rem(&self) -> i64 { self.rem }
}
/// Clip a line segment to a rectangle.
///
/// # Arguments
/// - x1 - Starting x point of line
/// - y1 - Starting y point of line
/// - x2 - Ending x point of line
/// - y2 - Ending y point of line
/// - clip_box - Rectangle to clip line to
///
/// # Return
/// - Clipped line segment (x1,y1,x2,y2) and a flag describing the clip
/// - flag
///   - 0 - Not clipped, line is fully within the clip box
///   - 4 - Line fully outside of the clip box
///   - 1 - First point clipped
///   - 2 - Second point clipped
///   - 3 - Both points clipped
pub(crate) fn clip_line_segment(x1: i64, y1: i64, x2: i64, y2: i64,
                                clip_box: Rectangle<i64>) -> (i64, i64, i64, i64, u8) {
    let f1 = clip_box.clip_flags(x1, y1);
    let f2 = clip_box.clip_flags(x2, y2);
    let mut ret = 0;
    // Both endpoints inside: nothing to do.
    if f1 == INSIDE && f2 == INSIDE {
        return (x1, y1, x2, y2, 0);
    }
    let x_side = LEFT | RIGHT;
    let y_side = TOP | BOTTOM;
    // Trivial rejection: both endpoints on the same outside side.
    if f1 & x_side != 0 && f1 & x_side == f2 & x_side {
        return (x1, y1, x2, y2, 4); // Outside
    }
    if f1 & y_side != 0 && f1 & y_side == f2 & y_side {
        return (x1, y1, x2, y2, 4); // Outside
    }
    let (mut x1, mut y1) = (x1, y1);
    let (mut x2, mut y2) = (x2, y2);
    // Move the first endpoint onto the clip boundary if it is outside.
    if f1 != 0 {
        if let Some((x, y)) = clip_move_point(x1, y1, x2, y2, clip_box, x1, y1, f1) {
            x1 = x;
            y1 = y;
        } else {
            return (x1, y1, x2, y2, 4);
        }
        // Degenerate after clipping: treat as fully outside.
        if x1 == x2 && y1 == y2 {
            return (x1, y1, x2, y2, 4);
        }
        ret |= 1;
    }
    // Move the second endpoint onto the clip boundary if it is outside.
    if f2 != 0 {
        if let Some((x, y)) = clip_move_point(x1, y1, x2, y2, clip_box, x2, y2, f2) {
            x2 = x;
            y2 = y;
        } else {
            return (x1, y1, x2, y2, 4);
        }
        if x1 == x2 && y1 == y2 {
            return (x1, y1, x2, y2, 4);
        }
        ret |= 2;
    }
    (x1, y1, x2, y2, ret)
}
= y; } else { return (x1,y1,x2,y2,4); } if x1 == x2 && y1 == y2 { return (x1,y1,x2,y2,4); } ret |= 2; } (x1,y1,x2,y2,ret) } fn clip_move_point(x1: i64, y1: i64, x2: i64, y2: i64, clip_box: Rectangle<i64>, x: i64, y: i64, flags: u8) -> Option<(i64,i64)>{ let (mut x, mut y) = (x,y); if flags & (LEFT | RIGHT) != 0 { if x1 == x2 { return None; } else { let x = if flags & LEFT != 0 { clip_box.x1() } else { clip_box.x2() }; y = ((x - x1) as f64 * (y2-y1) as f64 / (x2-x1) as f64 + y1 as f64) as i64; } } let flags = clip_box.clip_flags(x,y); if flags & (TOP | BOTTOM) != 0 { if y1 == y2 { return None; } else { let y = if flags & BOTTOM != 0 { clip_box.y1() } else { clip_box.y2() }; x = ((y - y1) as f64 * (x2-x1) as f64 / (y2-y1) as f64 + x1 as f64) as i64; } } Some((x,y)) } #[derive(Debug)] pub struct RendererOutlineImg<'a,T> { ren: &'a mut RenderingBase<T>, pattern: LineImagePatternPow2, start: i64, scale_x: f64, clip_box: Option<Rectangle<i64>>, } impl<T> DrawOutline for RendererOutlineImg<'_, T> where T: Pixel { fn accurate_join_only(&self) -> bool{ true } fn color<C: Color>(&mut self, _color: C) { unimplemented!("no color for outline img"); } fn line0(&mut self, _lp: &LineParameters) { } fn line1(&mut self, _lp: &LineParameters, _sx: i64, _sy: i64) { } fn line2(&mut self, _lp: &LineParameters, _ex: i64, _ey: i64) { } fn line3(&mut self, lp: &LineParameters, sx: i64, sy: i64, ex: i64, ey: i64) { if let Some(clip_box) = self.clip_box { let x1 = lp.x1; let y1 = lp.y1; let x2 = lp.x2; let y2 = lp.y2; let (x1,y1,x2,y2,flags) = clip_line_segment(x1, y1, x2, y2, clip_box); let start = self.start; let (mut sx, mut sy, mut ex, mut ey) = (sx,sy,ex,ey); if (flags & 4) == 0 { if flags != 0 { let lp2 = LineParameters::new(x1, y1, x2, y2, len_i64_xy(x1, y1, x2, y2)); if flags & 1 != 0 { self.start += (len_i64_xy(lp.x1, lp.y1, x1, y1) as f64 / self.scale_x as f64).round() as i64; sx = x1 + (y2 - y1); sy = y1 - (x2 - x1); } else { while (sx - lp.x1).abs() + (sy - lp.y1).abs() > lp2.len 
{ sx = (lp.x1 + sx) >> 1; sy = (lp.y1 + sy) >> 1; } } if flags & 2 != 0{ ex = x2 + (y2 - y1); ey = y2 - (x2 - x1); } else { while (ex - lp.x2).abs() + (ey - lp.y2).abs() > lp2.len { ex = (lp.x2 + ex) >> 1; ey = (lp.y2 + ey) >> 1; } } self.line3_no_clip(&lp2, sx, sy, ex, ey); } else { self.line3_no_clip(lp, sx, sy, ex, ey); } } self.start = start + (lp.len as f64 / self.scale_x as f64).round() as i64; } else { self.line3_no_clip(lp, sx, sy, ex, ey); } } fn semidot<F>(&mut self, _cmp: F, _xc1: i64, _yc1: i64, _xc2: i64, _yc2: i64) where F: Fn(i64) -> bool { } fn pie(&mut self, _xc: i64, _y: i64, _x1: i64, _y1: i64, _x2: i64, _y2: i64) { } } impl<'a,T> RendererOutlineImg<'a,T> where T: Pixel { pub fn with_base_and_pattern(ren: &'a mut RenderingBase<T>, pattern: LineImagePatternPow2) -> Self { Self { ren, pattern, start: 0, scale_x: 1.0, clip_box: None } } pub fn scale_x(&mut self, scale_x: f64) { self.scale_x = scale_x; } pub fn start_x(&mut self, s: f64) { self.start = (s * POLY_SUBPIXEL_SCALE as f64).round() as i64; } fn subpixel_width(&self) -> i64 { self.pattern.line_width() } fn pattern_width(&self) -> i64 { self.pattern.pattern_width() } // fn width(&self) -> f64 { // self.subpixel_width() as f64 / POLY_SUBPIXEL_SCALE as f64 // } fn pixel(&mut self, x: i64, y: i64) -> Rgba8 { self.pattern.pixel(x, y) } fn blend_color_hspan(&mut self, x: i64, y: i64, len: i64, colors: &[Rgba8]) { self.ren.blend_color_hspan(x, y, len, colors, &[], 255); } fn blend_color_vspan(&mut self, x: i64, y: i64, len: i64, colors: &[Rgba8]) { self.ren.blend_color_vspan(x, y, len, colors, &[], 255); } fn line3_no_clip(&mut self, lp: &LineParameters, sx: i64, sy: i64, ex: i64, ey: i64) { if lp.len > LINE_MAX_LENGTH { let (lp1, lp2) = lp.divide(); let mx = lp1.x2 + (lp1.y2 - lp1.y1); let my = lp1.y2 - (lp1.x2 - lp1.x1); self.line3_no_clip(&lp1, (lp.x1 + sx) >> 1, (lp.y1 + sy) >> 1, mx, my); self.line3_no_clip(&lp2, mx, my, (lp.x2 + ex) >> 1, (lp.y2 + ey) >> 1); return; } let (sx, sy) = 
lp.fix_degenerate_bisectrix_start(sx, sy); let (ex, ey) = lp.fix_degenerate_bisectrix_end(ex, ey); let mut li = lp.interp_image(sx, sy, ex, ey, self.subpixel_width(), self.start, self.pattern_width(), self.scale_x); if li.vertical() { while li.step_ver(self) {} } else { while li.step_hor(self) {} } self.start += (lp.len as f64/ self.scale_x).round() as i64; } } #[derive(Debug)] pub struct LineImagePattern { pix: Pixfmt<Rgba8>, filter: PatternFilterBilinear, dilation: u64, dilation_hr: i64, //data: Vec<u8>, width: u64, height: u64, width_hr: i64, half_height_hr: i64, offset_y_hr: i64, } impl LineImagePattern { pub fn new(filter: PatternFilterBilinear) -> Self { let dilation = filter.dilation() + 1; let dilation_hr = (dilation as i64) << POLY_SUBPIXEL_SHIFT; Self { filter, dilation, dilation_hr, width: 0, height: 0, width_hr: 0, half_height_hr: 0, offset_y_hr: 0, pix: Pixfmt::new(1,1) } } pub fn create<T>(&mut self, src: &T) where T: Source + Pixel { self.height = src.height() as u64; self.width = src.width() as u64; self.width_hr = src.width() as i64 * POLY_SUBPIXEL_SCALE; self.half_height_hr = src.height() as i64 * POLY_SUBPIXEL_SCALE/2; self.offset_y_hr = self.dilation_hr + self.half_height_hr - POLY_SUBPIXEL_SCALE/2; self.half_height_hr += POLY_SUBPIXEL_SCALE/2; self.pix = Pixfmt::<Rgba8>::new((self.width + self.dilation * 2) as usize, (self.height + self.dilation * 2) as usize); for y in 0 .. self.height as usize { let x1 = self.dilation as usize; let y1 = y + self.dilation as usize; for x in 0 .. self.width as usize { self.pix.set((x1+x,y1), src.get((x,y))); } } //const color_type* s1; //const color_type* s2; let none = Rgba8::new(0,0,0,0); let dill = self.dilation as usize; for y in 0 .. 
dill { //s1 = self.buf.row_ptr(self.height + self.dilation - 1) + self.dilation; //s2 = self.buf.row_ptr(self.dilation) + self.dilation; //let d1 = self.buf.row_ptr(self.dilation + self.height + y) + self.dilation; //let d2 = self.buf.row_ptr(self.dilation - y - 1) + self.dilation; let (x1,y1) = (dill, dill + y + self.height as usize); let (x2,y2) = (dill, dill - y - 1); for x in 0 .. self.width as usize{ //*d1++ = color_type(*s1++, 0); //*d2++ = color_type(*s2++, 0); //*d1++ = color_type::no_color(); //*d2++ = color_type::no_color(); self.pix.set((x1+x,y1), none); self.pix.set((x2+x,y2), none); } } let h = self.height + self.dilation * 2; for y in 0 .. h as usize { let sx1 = self.dilation as usize; let sx2 = (self.dilation + self.width) as usize; let dx1 = sx2; let dx2 = sx1; //s1 = self.buf.row_ptr(y) + self.dilation; //s2 = self.buf.row_ptr(y) + self.dilation + self.width; //d1 = self.buf.row_ptr(y) + self.dilation + self.width; //d2 = self.buf.row_ptr(y) + self.dilation; for x in 0 .. 
self.dilation as usize { //*d1++ = *s1++; //*--d2 = *--s2; self.pix.set((dx1 + x,y), self.pix.get((sx1 + x,y))); self.pix.set((dx2 - x - 1,y), self.pix.get((sx2 - x - 1,y))); } } } pub fn pattern_width(&self) -> i64 { self.width_hr } pub fn line_width(&self) -> i64 { self.half_height_hr } pub fn width(&self) -> u64 { self.height } } #[derive(Debug)] pub struct LineImagePatternPow2 { base: LineImagePattern, mask: u64 } impl LineImagePatternPow2 { pub fn new(filter: PatternFilterBilinear) -> Self { let base = LineImagePattern::new( filter ); Self { base, mask: POLY_SUBPIXEL_MASK as u64} } pub fn create<T>(&mut self, src: &T) where T: Source + Pixel { self.base.create(src); self.mask = 1; while self.mask < self.base.width { self.mask <<= 1; self.mask |= 1; } self.mask <<= POLY_SUBPIXEL_SHIFT - 1; self.mask |= POLY_SUBPIXEL_MASK as u64 ; self.base.width_hr = (self.mask + 1) as i64; } pub fn pattern_width(&self) -> i64 { self.base.width_hr } pub fn line_width(&self) -> i64 { self.base.half_height_hr } pub fn width(&self) -> u64 { self.base.height } pub fn pixel(&self, x: i64, y: i64) -> Rgba8 { self.base.filter.pixel_high_res(&self.base.pix, (x & self.mask as i64) + self.base.dilation_hr, y + self.base.offset_y_hr) } } #[derive(Debug,Default)] pub struct PatternFilterBilinear(); impl PatternFilterBilinear { pub fn new() -> Self { Self{ } } pub fn dilation(&self) -> u64 { 1 } pub fn pixel_low_res(&self, pix: &Pixfmt<Rgba8>, x: i64, y: i64) -> Rgba8 { pix.get((x as usize, y as usize)) } pub fn pixel_high_res(&self, pix: &Pixfmt<Rgba8>, x: i64, y: i64) -> Rgba8 { let (mut red, mut green, mut blue, mut alpha) = (0i64, 0i64, 0i64, 0i64); let x_lr = (x as usize) >> POLY_SUBPIXEL_SHIFT; let y_lr = (y as usize) >> POLY_SUBPIXEL_SHIFT; let x = x & POLY_SUBPIXEL_MASK; let y = y & POLY_SUBPIXEL_MASK; let ptr = pix.get((x_lr,y_lr)); let weight = (POLY_SUBPIXEL_SCALE - x) * (POLY_SUBPIXEL_SCALE - y); red += weight * i64::from(ptr.r); green += weight * i64::from(ptr.g); blue += 
weight * i64::from(ptr.b); alpha += weight * i64::from(ptr.a); let ptr = pix.get((x_lr + 1,y_lr)); let weight = x * (POLY_SUBPIXEL_SCALE - y); red += weight * i64::from(ptr.r); green += weight * i64::from(ptr.g); blue += weight * i64::from(ptr.b); alpha += weight * i64::from(ptr.a); let ptr = pix.get((x_lr,y_lr+1)); let weight = (POLY_SUBPIXEL_SCALE - x) * y; red += weight * i64::from(ptr.r); green += weight * i64::from(ptr.g); blue += weight * i64::from(ptr.b); alpha += weight * i64::from(ptr.a); let ptr = pix.get((x_lr+1,y_lr+1)); let weight = x * y; red += weight * i64::from(ptr.r); green += weight * i64::from(ptr.g); blue += weight * i64::from(ptr.b); alpha += weight * i64::from(ptr.a); let red = (red >> (POLY_SUBPIXEL_SHIFT * 2)) as u8; let green = (green >> (POLY_SUBPIXEL_SHIFT * 2)) as u8; let blue = (blue >> (POLY_SUBPIXEL_SHIFT * 2)) as u8; let alpha = (alpha >> (POLY_SUBPIXEL_SHIFT * 2)) as u8; Rgba8::new(red,green,blue,alpha) } } #[derive(Debug)] pub struct LineInterpolatorImage { lp: LineParameters, li: LineInterpolator, di: DistanceInterpolator4, //pub plen: i64, x: i64, y: i64, old_x: i64, old_y: i64, count: i64, width: i64, max_extent: i64, start: i64, step: i64, //pub dist_pos: [i64; MAX_HALF_WIDTH + 1], dist_pos: Vec<i64>, //pub colors: [Rgba8; MAX_HALF_WIDTH * 2 + 4], colors: Vec<Rgba8>, } impl LineInterpolatorImage { pub fn new(lp: LineParameters, sx: i64, sy: i64, ex: i64, ey: i64, subpixel_width: i64, pattern_start: i64, pattern_width: i64, scale_x: f64) -> Self { let n = if lp.vertical { (lp.y2-lp.y1).abs() } else { (lp.x2-lp.x1).abs() + 1 }; let y1 = if lp.vertical { (lp.x2-lp.x1) << POLY_SUBPIXEL_SHIFT } else { (lp.y2-lp.y1) << POLY_SUBPIXEL_SHIFT }; let mut m_li = LineInterpolator::new_back_adjusted_2(y1, n); let mut x = lp.x1 >> POLY_SUBPIXEL_SHIFT; let mut y = lp.y1 >> POLY_SUBPIXEL_SHIFT; let mut old_x = x; let mut old_y = y; let count = if lp.vertical { ((lp.y2 >> POLY_SUBPIXEL_SHIFT) - y).abs() } else { ((lp.x2 >> POLY_SUBPIXEL_SHIFT) 
- x).abs() }; let width = subpixel_width; let max_extent = (width + POLY_SUBPIXEL_SCALE) >> POLY_SUBPIXEL_SHIFT; let mut step = 0; let start = pattern_start + (max_extent + 2) * pattern_width; let mut dist_pos = vec![0i64; MAX_HALF_WIDTH + 1]; let colors = vec![Rgba8::black(); MAX_HALF_WIDTH * 2 + 4]; let mut di = DistanceInterpolator4::new(lp.x1, lp.y1, lp.x2, lp.y2, sx, sy, ex, ey, lp.len, scale_x, lp.x1 & ! POLY_SUBPIXEL_MASK, lp.y1 & ! POLY_SUBPIXEL_MASK); let dd = if lp.vertical { lp.dy << POLY_SUBPIXEL_SHIFT } else { lp.dx << POLY_SUBPIXEL_SHIFT }; let mut li = LineInterpolator::new(0, dd, lp.len); let stop = width + POLY_SUBPIXEL_SCALE * 2; for i in 0 .. MAX_HALF_WIDTH { dist_pos[i] = li.y; if dist_pos[i] >= stop { break; } li.inc(); } dist_pos[MAX_HALF_WIDTH] = 0x7FFF_0000; let mut npix = 1; if lp.vertical { loop { m_li.dec(); y -= lp.inc; x = (lp.x1 + m_li.y) >> POLY_SUBPIXEL_SHIFT; if lp.inc > 0 { di.dec_y_by(x - old_x); } else { di.inc_y_by(x - old_x); } old_x = x; let mut dist1_start = di.dist_start; let mut dist2_start = di.dist_start; let mut dx = 0; if dist1_start < 0 { npix += 1; } loop { dist1_start += di.dy_start; dist2_start -= di.dy_start; if dist1_start < 0 { npix += 1; } if dist2_start < 0 { npix += 1; } dx += 1; if dist_pos[dx] > width { break; } } if npix == 0 { break; } npix = 0; step -= 1; if step < -max_extent { break; } } } else { loop { m_li.dec(); x -= lp.inc; y = (lp.y1 + m_li.y) >> POLY_SUBPIXEL_SHIFT; if lp.inc > 0 { di.dec_x_by(y - old_y); } else { di.inc_x_by(y - old_y); } old_y = y; let mut dist1_start = di.dist_start; let mut dist2_start = di.dist_start; let mut dy = 0; if dist1_start < 0 { npix += 1; } loop { dist1_start -= di.dx_start; dist2_start += di.dx_start; if dist1_start < 0 { npix += 1; } if dist2_start < 0 { npix += 1; } dy += 1; if dist_pos[dy] > width { break; } } if npix == 0 { break; } npix = 0; step -= 1; if step < -max_extent { break; } } } m_li.adjust_forward(); step -= max_extent; Self { lp, x, y, old_x, 
old_y, count, width, max_extent, step, dist_pos, colors, di, start, li: m_li, } } fn vertical(&self) -> bool { self.lp.vertical } fn step_ver<T>(&mut self, ren: &mut RendererOutlineImg<T>) -> bool where T: Pixel { self.li.inc(); self.y += self.lp.inc; self.x = (self.lp.x1 + self.li.y) >> POLY_SUBPIXEL_SHIFT; if self.lp.inc > 0 { self.di.inc_y_by(self.x - self.old_x); } else { self.di.dec_y_by(self.x - self.old_x); } self.old_x = self.x; let mut s1 = self.di.dist / self.lp.len; let s2 = -s1; if self.lp.inc > 0 { s1 = -s1; } let mut dist_start = self.di.dist_start; let mut dist_pict = self.di.dist_pict + self.start; let mut dist_end = self.di.dist_end; let mut p0 = MAX_HALF_WIDTH + 2; let mut p1 = p0; let mut npix = 0; self.colors[p1].clear(); if dist_end > 0 { if dist_start <= 0 { self.colors[p1] = ren.pixel(dist_pict, s2); } npix += 1; } p1 += 1; let mut dx = 1; let mut dist = self.dist_pos[dx]; while dist - s1 <= self.width { dist_start += self.di.dy_start; dist_pict += self.di.dy_pict; dist_end += self.di.dy_end; self.colors[p1].clear(); if dist_end > 0 && dist_start <= 0 { if self.lp.inc > 0 { dist = -dist; } self.colors[p1] = ren.pixel(dist_pict, s2 + dist); npix += 1; } p1 += 1; dx += 1; dist = self.dist_pos[dx]; } dx = 1; dist_start = self.di.dist_start; dist_pict = self.di.dist_pict + self.start; dist_end = self.di.dist_end; dist = self.dist_pos[dx]; while dist + s1 <= self.width { dist_start -= self.di.dy_start; dist_pict -= self.di.dy_pict; dist_end -= self.di.dy_end; p0 -= 1; self.colors[p0].clear(); if dist_end > 0 && dist_start <= 0 { if self.lp.inc > 0 { dist = -dist; } self.colors[p0] = ren.pixel(dist_pict, s2 - dist); npix += 1; } dx += 1; dist = self.dist_pos[dx]; } ren.blend_color_hspan(self.x - dx as i64 + 1, self.y, (p1 - p0) as i64, &self.colors[p0..p1]); self.step += 1; npix != 0 && self.step < self.count } fn step_hor<T>(&mut self, ren: &mut RendererOutlineImg<T>) -> bool where T: Pixel { self.li.inc(); self.x += self.lp.inc; self.y = 
(self.lp.y1 + self.li.y) >> POLY_SUBPIXEL_SHIFT; if self.lp.inc > 0 { self.di.inc_x_by(self.y - self.old_y); } else { self.di.dec_x_by(self.y - self.old_y); } self.old_y = self.y; let mut s1 = self.di.dist / self.lp.len; let s2 = -s1; if self.lp.inc < 0 { s1 = -s1; } let mut dist_start = self.di.dist_start; let mut dist_pict = self.di.dist_pict + self.start; let mut dist_end = self.di.dist_end; let mut p0 = MAX_HALF_WIDTH + 2; let mut p1 = p0; let mut npix = 0; self.colors[p1].clear(); if dist_end > 0 { if dist_start <= 0 { self.colors[p1] = ren.pixel(dist_pict, s2); } npix += 1; } p1 += 1; let mut dy = 1; let mut dist = self.dist_pos[dy]; while dist - s1 <= self.width { dist_start -= self.di.dx_start; dist_pict -= self.di.dx_pict; dist_end -= self.di.dx_end; self.colors[p1].clear(); if dist_end > 0 && dist_start <= 0 { if self.lp.inc > 0 { dist = -dist; } self.colors[p1] = ren.pixel(dist_pict, s2 - dist); npix += 1; } p1 += 1; dy += 1; dist = self.dist_pos[dy]; } dy = 1; dist_start = self.di.dist_start; dist_pict = self.di.dist_pict + self.start; dist_end = self.di.dist_end; dist = self.dist_pos[dy]; while dist + s1 <= self.width { dist_start += self.di.dx_start; dist_pict += self.di.dx_pict; dist_end += self.di.dx_end; p0 -= 1; self.colors[p0].clear(); if dist_end > 0 && dist_start <= 0 { if self.lp.inc > 0 { dist = -dist; } self.colors[p0] = ren.pixel(dist_pict, s2 + dist); npix += 1; } dy += 1; dist = self.dist_pos[dy]; } ren.blend_color_vspan(self.x, self.y - dy as i64 + 1, (p1 - p0) as i64, &self.colors[p0..p1]); self.step += 1; npix != 0 && self.step < self.count } } #[derive(Debug)] struct DistanceInterpolator4 { dx: i64, dy: i64, dx_start: i64, dy_start: i64, dx_pict: i64, dy_pict: i64, dx_end: i64, dy_end: i64, dist: i64, dist_start: i64, dist_pict: i64, dist_end: i64, len: i64, } impl DistanceInterpolator4 { pub fn new(x1: i64, y1: i64, x2: i64, y2: i64, sx: i64, sy: i64, ex: i64, ey: i64, len: i64, scale: f64, x: i64, y: i64) -> Self { let dx = x2 - x1; 
let dy = y2 - y1; let dx_start = line_mr(sx) - line_mr(x1); let dy_start = line_mr(sy) - line_mr(y1); let dx_end = line_mr(ex) - line_mr(x2); let dy_end = line_mr(ey) - line_mr(y2); let dist = ((x + POLY_SUBPIXEL_SCALE/2 - x2) as f64 * dy as f64 - (y + POLY_SUBPIXEL_SCALE/2 - y2) as f64 * dx as f64).round() as i64; let dist_start = (line_mr(x + POLY_SUBPIXEL_SCALE/2) - line_mr(sx)) * dy_start - (line_mr(y + POLY_SUBPIXEL_SCALE/2) - line_mr(sy)) * dx_start; let dist_end = (line_mr(x + POLY_SUBPIXEL_SCALE/2) - line_mr(ex)) * dy_end - (line_mr(y + POLY_SUBPIXEL_SCALE/2) - line_mr(ey)) * dx_end; let len = (len as f64 / scale).round() as i64; let d = len as f64 * scale; let tdx = (((x2 - x1) << POLY_SUBPIXEL_SHIFT) as f64 / d).round() as i64; let tdy = (((y2 - y1) << POLY_SUBPIXEL_SHIFT) as f64 / d).round() as i64; let dx_pict = -tdy; let dy_pict = tdx; let dist_pict = ((x + POLY_SUBPIXEL_SCALE/2 - (x1 - tdy)) * dy_pict - (y + POLY_SUBPIXEL_SCALE/2 - (y1 + tdx)) * dx_pict) >> POLY_SUBPIXEL_SHIFT; let dx = dx << POLY_SUBPIXEL_SHIFT; let dy = dy << POLY_SUBPIXEL_SHIFT; let dx_start = dx_start << POLY_MR_SUBPIXEL_SHIFT; let dy_start = dy_start << POLY_MR_SUBPIXEL_SHIFT; let dx_end = dx_end << POLY_MR_SUBPIXEL_SHIFT; let dy_end = dy_end << POLY_MR_SUBPIXEL_SHIFT; Self { dx, dy, dx_start, dx_end, dy_start, dy_end, dx_pict, dy_pict, dist, dist_pict, dist_start, dist_end, len } } // pub fn inc_x(&mut self) { // self.dist += self.dy; // self.dist_start += self.dy_start; // self.dist_pict += self.dy_pict; // self.dist_end += self.dy_end; // } // pub fn dec_x(&mut self) { // self.dist -= self.dy; // self.dist_start -= self.dy_start; // self.dist_pict -= self.dy_pict; // self.dist_end -= self.dy_end; // } // pub fn inc_y(&mut self) { // self.dist -= self.dx; // self.dist_start -= self.dx_start; // self.dist_pict -= self.dx_pict; // self.dist_end -= self.dx_end; // } // pub fn dec_y(&mut self) { // self.dist += self.dx; // self.dist_start += self.dx_start; // self.dist_pict += 
self.dx_pict; // self.dist_end += self.dx_end; // } pub fn inc_x_by(&mut self, dy: i64) { self.dist += self.dy; self.dist_start += self.dy_start; self.dist_pict += self.dy_pict; self.dist_end += self.dy_end; if dy > 0 { self.dist -= self.dx; self.dist_start -= self.dx_start; self.dist_pict -= self.dx_pict; self.dist_end -= self.dx_end; } if dy < 0 { self.dist += self.dx; self.dist_start += self.dx_start; self.dist_pict += self.dx_pict; self.dist_end += self.dx_end; } } pub fn dec_x_by(&mut self, dy: i64) { self.dist -= self.dy; self.dist_start -= self.dy_start; self.dist_pict -= self.dy_pict; self.dist_end -= self.dy_end; if dy > 0 { self.dist -= self.dx; self.dist_start -= self.dx_start; self.dist_pict -= self.dx_pict; self.dist_end -= self.dx_end; } if dy < 0 { self.dist += self.dx; self.dist_start += self.dx_start; self.dist_pict += self.dx_pict; self.dist_end += self.dx_end; } } pub fn inc_y_by(&mut self, dx: i64) { self.dist -= self.dx; self.dist_start -= self.dx_start; self.dist_pict -= self.dx_pict; self.dist_end -= self.dx_end; if dx > 0 { self.dist += self.dy; self.dist_start += self.dy_start; self.dist_pict += self.dy_pict; self.dist_end += self.dy_end; } if dx < 0 { self.dist -= self.dy; self.dist_start -= self.dy_start; self.dist_pict -= self.dy_pict; self.dist_end -= self.dy_end; } } pub fn dec_y_by(&mut self, dx: i64) { self.dist += self.dx; self.dist_start += self.dx_start; self.dist_pict += self.dx_pict; self.dist_end += self.dx_end; if dx > 0 { self.dist += self.dy; self.dist_start += self.dy_start; self.dist_pict += self.dy_pict; self.dist_end += self.dy_end; } if dx < 0 { self.dist -= self.dy; self.dist_start -= self.dy_start; self.dist_pict -= self.dy_pict; self.dist_end -= self.dy_end; } } } #[cfg(test)] mod tests { use super::LineInterpolator; #[test] fn line_interpolator() { let mut lp = LineInterpolator::new(0<<8,10<<8,10<<8); for i in 0..=10 { assert_eq!(lp.y, i); lp.inc(); } let mut lp = LineInterpolator::new(0,100,2); for &i in 
[0,50,100,150].iter() { assert_eq!(lp.y, i); lp.inc(); } let mut lp = LineInterpolator::new(0,10,3); let y0 = vec![0,3,6,10]; let left0 = vec![3,3,3,3]; let xmod0 = vec![-2, -1, 0, -2]; let rem0 = vec![1,1,1,1]; let mut left = vec![]; let mut xmod = vec![]; let mut rem = vec![]; let mut y = vec![]; for _ in 0..4 { left.push( lp.left() ); y.push( lp.y ); xmod.push( lp.xmod() ); rem.push( lp.rem() ); lp.inc(); } assert_eq!(left0, left); assert_eq!(xmod0, xmod); assert_eq!(rem0, rem); assert_eq!(y0, y); let mut lp = LineInterpolator::new(0,10,4); let y0 = vec![0,2,5,7,10]; let left0 = vec![2,2,2,2,2]; let xmod0 = vec![-2, 0, -2, 0, -2]; let rem0 = vec![2,2,2,2,2]; let mut left = vec![]; let mut xmod = vec![]; let mut rem = vec![]; let mut y = vec![]; for _ in 0..5 { left.push( lp.left() ); y.push( lp.y ); xmod.push( lp.xmod() ); rem.push( lp.rem() ); lp.inc(); } assert_eq!(left0, left); assert_eq!(xmod0, xmod); assert_eq!(rem0, rem); assert_eq!(y0, y); } }
// Copyright (c) 2016, <daggerbot@gmail.com> // This software is available under the terms of the zlib license. // See COPYING.md for more information. use winapi; use pixel_format::PixelFormatBridge; use util; /// Windows implementation for `PixelFormat`. #[derive(Clone)] pub struct PixelFormatProvider { pfd: Option<winapi::PIXELFORMATDESCRIPTOR>, } impl PixelFormatProvider { pub fn pfd (&self) -> Option<winapi::PIXELFORMATDESCRIPTOR> { self.pfd } } impl Default for PixelFormatProvider { fn default () -> PixelFormatProvider { PixelFormatProvider { pfd: None } } } impl From<winapi::PIXELFORMATDESCRIPTOR> for PixelFormatProvider { fn from (pfd: winapi::PIXELFORMATDESCRIPTOR) -> PixelFormatProvider { PixelFormatProvider { pfd: Some(pfd) } } } impl PartialEq for PixelFormatProvider { fn eq (&self, rhs: &PixelFormatProvider) -> bool { match (self.pfd, rhs.pfd) { (Some(ref lpfd), Some(ref rpfd)) => { unsafe { util::memeq(lpfd, rpfd) } }, (None, None) => true, _ => false, } } } impl PixelFormatBridge for PixelFormatProvider { fn supports_view (&self) -> bool { self.supports_window() } fn supports_window (&self) -> bool { match self.pfd { Some(ref pfd) => pfd.dwFlags & winapi::PFD_DRAW_TO_WINDOW != 0, None => true, } } }
pub fn read_clock_counter() -> u64 { unsafe { x86::time::rdtscp() } }
use super::{chunk_header::*, chunk_type::*, *}; use crate::param::{param_header::*, param_type::*, *}; use bytes::{Bytes, BytesMut}; use std::fmt; ///chunkHeartbeat represents an SCTP Chunk of type HEARTBEAT /// ///An endpoint should send this chunk to its peer endpoint to probe the ///reachability of a particular destination transport address defined in ///the present association. /// ///The parameter field contains the Heartbeat Information, which is a ///variable-length opaque data structure understood only by the sender. /// /// /// 0 1 2 3 /// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 ///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ///| Type = 4 | Chunk Flags | Heartbeat Length | ///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ///| | ///| Heartbeat Information TLV (Variable-Length) | ///| | ///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ /// ///Defined as a variable-length parameter using the format described ///in Section 3.2.1, i.e.: /// ///Variable Parameters Status Type Value ///------------------------------------------------------------- ///heartbeat Info Mandatory 1 #[derive(Default, Debug)] pub(crate) struct ChunkHeartbeat { pub(crate) params: Vec<Box<dyn Param + Send + Sync>>, } /// makes ChunkHeartbeat printable impl fmt::Display for ChunkHeartbeat { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.header()) } } impl Chunk for ChunkHeartbeat { fn header(&self) -> ChunkHeader { ChunkHeader { typ: CT_HEARTBEAT, flags: 0, value_length: self.value_length() as u16, } } fn unmarshal(raw: &Bytes) -> Result<Self, Error> { let header = ChunkHeader::unmarshal(raw)?; if header.typ != CT_HEARTBEAT { return Err(Error::ErrChunkTypeNotHeartbeat); } if raw.len() <= CHUNK_HEADER_SIZE { return Err(Error::ErrHeartbeatNotLongEnoughInfo); } let p = build_param(&raw.slice(CHUNK_HEADER_SIZE..CHUNK_HEADER_SIZE + header.value_length()))?; if p.header().typ != 
ParamType::HeartbeatInfo { return Err(Error::ErrHeartbeatParam); } let params = vec![p]; Ok(ChunkHeartbeat { params }) } fn marshal_to(&self, buf: &mut BytesMut) -> Result<usize, Error> { self.header().marshal_to(buf)?; for p in &self.params { buf.extend(p.marshal()?); } Ok(buf.len()) } fn check(&self) -> Result<(), Error> { Ok(()) } fn value_length(&self) -> usize { self.params.iter().fold(0, |length, p| { length + PARAM_HEADER_LENGTH + p.value_length() }) } fn as_any(&self) -> &(dyn Any + Send + Sync) { self } }
/// Variables are immutable by default.
fn immutable_test() {
    let x = 5;
    // `println!` is a macro, not a function.
    println!("The value of x is: {}", x);
    // Reassigning an immutable binding is a compile error:
    // x = 4;
    // println!("The new value of x is {}", x);
}

/// `mut` makes a binding reassignable.
fn mutable_test() {
    let mut x = 5;
    println!("The value of x is: {}", x);
    x = 6;
    println!("The new value of x is: {}", x);
}

/// Shadowing: each `let` introduces a new binding with the same name.
fn shadowing() {
    let x = 5;
    let x = x + 1;
    let x = x * 2;
    println!("The shadow value of x is: {}", x);
}

// Scalar types
// ------------
// Integers: i8/u8, i16/u16, ..., i128/u128, plus isize/usize
// (pointer-sized: 32 or 64 bits depending on the architecture).

/// Floating-point literals default to `f64`.
fn float_point() {
    let _x = 2.0; // f64 by default
    let _y: f32 = 3.0;
}

/// Addition, subtraction, multiplication, division, and remainder.
fn number_operations() {
    let _sum = 5 + 10;
    let _difference = 95.5 - 4.3;
    let _product = 4 * 30;
    let _quotient = 56.7 / 32.2;
    let _remainder = 43 % 5;
}

/// The two boolean values.
fn boolean_type() {
    let _t = true;
    let _f: bool = false;
}

/// `char` holds a Unicode scalar value
/// (U+0000..=U+D7FF and U+E000..=U+10FFFF).
fn char_type() {
    let _c = 'z';
}

/// Tuples: fixed length, heterogeneous element types.
fn tuple_types() {
    let triple: (i64, f64, u8) = (500, 6.4, 1);
    // Destructuring pulls the elements into separate bindings.
    let (_first, _second, _third) = triple;
    // Elements can also be reached by index.
    println!("The value of x in tuple: {}", triple.0);
}

/// Arrays: fixed length, one element type, cannot grow or shrink.
fn array_type() {
    // [type; length]
    let values: [i32; 5] = [1, 2, 3, 4, 5];
    let _first = values[0];
}

/// Functions: the final expression (no trailing semicolon) is the return
/// value; adding a `;` would turn it into a statement returning `()`.
fn plus_one(x: i32) -> i32 {
    x + 1
}

/// `if` is an expression and its condition must already be a `bool`.
fn if_example(condition: bool) {
    let x = 3;
    // Rust never auto-converts non-boolean types to a boolean.
    if x != 2 {
        println!("x is not equal to 2");
    }
    // Both arms must produce the same type, known at compile time.
    let _n = if condition { 5 } else { 6 };
}

/// `loop` can break with a value.
fn loop_ex() {
    let mut counter = 0;
    let result = loop {
        counter += 1;
        if counter == 10 {
            break counter * 2;
        }
    };
    assert_eq!(result, 20);
}

/// `for` over iterators, including a reversed range.
fn for_ex() {
    let numbers = [10, 11, 12, 12];
    for value in numbers.iter() {
        println!("the value in array is {}", value);
    }
    // (1..4).rev() yields 3, 2, 1.
    for number in (1..4).rev() {
        println!("{}!", number);
    }
    println!("LIFTOFF");
}

fn main() {
    immutable_test();
    mutable_test();
    shadowing();
    float_point();
    number_operations();
    boolean_type();
    char_type();
    tuple_types();
    array_type();
    plus_one(3);
    if_example(true);
    loop_ex();
    for_ex();
}
#[macro_use] extern crate clap; mod lib; mod cli; fn main() { let matches = cli::build_cli().get_matches(); let opt_name = matches.value_of("name"); let message = lib::build_message(opt_name); println!("{}", message); }
#![allow(dead_code)] use image; use glium::texture::Texture2d; use glium::Display; use std::collections::HashMap; use image::{ImageBuffer, Rgb, Rgba}; pub fn load(path: String, disp: &Display) -> Texture2d{ use std::path::Path; use glium::texture::RawImage2d; let img = image::open(Path::new(&path)).unwrap().to_rgba(); let dis = img.dimensions(); let glium_raw_tex = RawImage2d::from_raw_rgba_reversed(&img.into_raw(), dis); let tex = Texture2d::new(disp, glium_raw_tex).unwrap(); tex } pub fn into_texture(img: &ImageBuffer<Rgb<u8>, Vec<u8>>, disp: &Display) -> Texture2d{ use glium::texture::RawImage2d; let dis = img.dimensions(); let glium_raw_tex = RawImage2d::from_raw_rgb_reversed(&img.clone().into_raw(), dis); let tex = Texture2d::new(disp, glium_raw_tex).unwrap(); tex } pub fn into_texture_rgba(img: &ImageBuffer<Rgba<u8>, Vec<u8>>, disp: &Display) -> Texture2d{ use glium::texture::RawImage2d; let dis = img.dimensions(); let glium_raw_tex = RawImage2d::from_raw_rgba_reversed(&img.clone().into_raw(), dis); let tex = Texture2d::new(disp, glium_raw_tex).unwrap(); tex } pub fn gen_buffer(disp: &Display) -> Option<HashMap<String, Texture2d>>{ use std::fs; use std::path::Path; let dir = Path::new("./assets/textures/"); if dir.exists(){ let paths = fs::read_dir(dir).unwrap(); let mut textures: HashMap<String, Texture2d> = HashMap::with_capacity(1024); for path in paths { let path = path.unwrap().path(); if path.is_file() { let name = path.display().to_string(); if path.extension().unwrap() == "png"{ print!("Loading texture {} ... ", path.display()); let tex = load(path.display().to_string(), disp); textures.insert(name, tex); println!("Done!"); } } } Some(textures) } else{ None } }
pub fn run() { //Print to console println!("Hola desde el archivo print.rs"); // Basic Formatting println!("{} es de {}","Moises", "Ensenada"); //Positional Arguments println!("{0} es de {1} y a {0} le gusta {2}", "Moises", "Ensenada", "programar" ); // Named Arguments println!("{name} le gusta {activity}", name = "Moises", activity = "programar"); // Placeholder traits println!("Binario: {:b} Hex: {:x} Octal: {:o}", 10, 10, 10); // Placeholder for debug trait println!("{:?}", (12, true, "hello")); // Basic math println!("10 + 10 = {}", 10 + 10); }
use crate::client::Client; use serde::{Deserialize, Serialize}; use serde_json::Value; use crate::Error; use futures::compat::Future01CompatExt; use serde::de::DeserializeOwned; /// A request to retrieve a document from a CouchDB database. /// /// The request is lazy- it doesn't do a thing until you call its '[send](GetRequest::send)' /// method. /// /// see [CouchDB API docs](https://docs.couchdb.org/en/stable/api/document/common.html#get--db-docid) /// for details. pub struct GetRequest { id: String, client: Client, query: GetRequestQuery, } impl GetRequest { pub(crate) fn new(client: &Client, id: impl Into<String>) -> Self { GetRequest { id: id.into(), client: client.into(), query: GetRequestQuery::default(), } } /// Includes attachments bodies in response. /// /// Default is false. pub fn attachments(mut self, value: bool) -> Self { self.query.attachments = value; self } /// Includes attachment encoding information in response. /// /// Default is false. pub fn attachment_encoding_info(mut self, value: bool) -> Self { self.query.att_encoding_info = value; self } /// Includes only the attachments since the specified revisions. /// Doesn’t include attachments for specified revisions /// /// Default is false. pub fn attachments_since(mut self, revisions: impl Into<Vec<String>>) -> Self { self.query.atts_since = revisions.into(); self } /// Includes information about conflicts in document. /// /// Default is false. pub fn conflicts(mut self, value: bool) -> Self { self.query.conflicts = value; self } /// Includes information about deleted conflict revisions. /// /// Default is false pub fn deleted_conflicts(mut self, value: bool) -> Self { self.query.deleted_conflicts = value; self } /// Forces retrieving latest 'leaf' revision, no matter which revision /// was requested. /// /// Default is false. pub fn latest(mut self, value: bool) -> Self { self.query.latest = value; self } /// Includes last 'update sequence' for this document. 
/// /// The update sequence is specific to this node (in the case /// of a cluster of CouchDB nodes). /// /// Default is false. pub fn local_sequence(mut self, value: bool) -> Self { self.query.local_seq = value; self } /// This is the same as setting all of 'conflicts', /// 'deleted_conflicts', and 'revisions_info' to true. /// /// Default is false. pub fn meta(mut self, value: bool) -> Self { self.query.meta = value; self } /// retrieve documents of specified leaf revisions. pub fn open_revisions(mut self, revisions: impl Into<Vec<String>>) -> Self { self.query.open_revs = Some(OpenRevs::Revisions(revisions.into())); self } /// retrieve documents of all leaf revisions. /// /// Default is false. pub fn all_open_revisions(mut self, value: bool) -> Self { if value { self.query.open_revs = Some(OpenRevs::All("all")); } else { self.query.open_revs = None } self } /// retrieve document of specified revision. pub fn revision(mut self, revision: impl Into<Option<String>>) -> Self { self.query.rev = revision.into(); self } /// Retrieve list of known document revisions. /// /// Default is false. pub fn revisions(mut self, value: bool) -> Self { self.query.revs = value; self } /// included detailed information for all know document revisions. /// /// Default is false. pub fn revisions_info(mut self, value: bool) -> Self { self.query.revs_info = value; self } /// Send the request. /// /// This will consume the 'get' request and return a [GetResponse](GetResponse). /// The response is generic, so occasionally you might need type annotations. pub async fn send<T: DeserializeOwned>(self) -> Result<GetResponse<T>, Error> { let response = self .client .join(&self.id)? .get() .query(&self.query) .send() .compat() .await? 
.json() .compat() .await?; Ok(response) } } #[derive(Serialize, Clone)] pub struct GetRequestQuery { attachments: bool, att_encoding_info: bool, #[serde(skip_serializing_if = "Vec::is_empty")] atts_since: Vec<String>, conflicts: bool, deleted_conflicts: bool, latest: bool, local_seq: bool, meta: bool, #[serde(skip_serializing_if = "Option::is_none")] open_revs: Option<OpenRevs>, #[serde(skip_serializing_if = "Option::is_none")] rev: Option<String>, revs: bool, revs_info: bool, } #[derive(Serialize, Clone, Debug)] #[serde(untagged)] enum OpenRevs { Revisions(Vec<String>), All(&'static str), } impl Default for GetRequestQuery { fn default() -> Self { GetRequestQuery { attachments: false, att_encoding_info: false, atts_since: Vec::default(), conflicts: false, deleted_conflicts: false, latest: false, local_seq: false, meta: false, open_revs: None, rev: None, revs: false, revs_info: false, } } } #[derive(Debug, Deserialize)] pub struct GetResponseMeta { pub _id: String, pub _rev: String, #[serde(skip_serializing_if = "Option::is_none")] pub _deleted: Option<bool>, #[serde(skip_serializing_if = "Option::is_none")] pub _attachments: Option<Value>, #[serde(skip_serializing_if = "Option::is_none")] pub _conflicts: Option<Vec<Value>>, #[serde(skip_serializing_if = "Option::is_none")] pub _deleted_conflicts: Option<Vec<Value>>, #[serde(skip_serializing_if = "Option::is_none")] pub _local_seq: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] pub _revs_info: Option<Vec<Value>>, #[serde(skip_serializing_if = "Option::is_none")] pub _revisions: Option<Value>, } /// A response from a GetRequest. /// /// The response is generic over a type parameter T. You can use this /// to strongly type the response. Alternatively you can use the /// default generic parameter of serde_json::Value, which can represent /// any valid response from the database. /// /// The GetResponse implements Deref with respect to the returned document. 
/// You can also consume the response and retrieve the document with the [into_inner](GetResponse::into_inner) /// method. #[derive(Debug, Deserialize)] pub struct GetResponse<T = Value> { #[serde(flatten)] document: Option<T>, #[serde(flatten)] meta_data: GetResponseMeta, } impl<T> GetResponse<T> { /// Return metadata about the response. /// /// Which metadata is available will depend on the options set /// in the request. pub fn meta_data(&self) -> &GetResponseMeta { &self.meta_data } /// Consume the response and return the contained document pub fn into_inner(self) -> Option<T> { self.document } }
use std::time::Instant; use indicatif::{ProgressBar, ProgressStyle}; use minifier; use std::{ fs::File, io::{Read, Write}, path::Path, }; use walkdir::WalkDir; use flate2::write::GzEncoder; use flate2::Compression; enum SourceType { Css, Js, Json, } use std::{fs, io, path::PathBuf}; fn dir_size(path: impl Into<PathBuf>) -> io::Result<u64> { fn dir_size(mut dir: fs::ReadDir) -> io::Result<u64> { dir.try_fold(0, |acc, file| { let file = file?; let size = match file.metadata()? { data if data.is_dir() => dir_size(fs::read_dir(file.path())?)?, data => data.len(), }; Ok(acc + size) }) } dir_size(fs::read_dir(path.into())?) } fn minifiable<P: AsRef<Path>>(path: P) -> Option<SourceType> { let ext = path.as_ref().extension()?; if ext == "css" { Some(SourceType::Css) } else if ext == "js" { Some(SourceType::Js) } else if ext == "json" { Some(SourceType::Json) } else { Some(SourceType::Js) } } fn minify_file<P: AsRef<Path>>( path: P, src_ty: SourceType, buf: &mut String, ) -> std::io::Result<u64> { // Read buf.clear(); File::open(&path)?.read_to_string(buf)?; let old_size = buf.len() as u64; // Minify let minified = match src_ty { SourceType::Css => minifier::css::minify(&buf).unwrap(), SourceType::Json => minifier::json::minify(&buf), SourceType::Js => minifier::js::minify(&buf), }; let new_size = minified.len() as u64; // Don't bother writing if the minification didn't help. if new_size >= old_size { return Ok(0); } // Write File::create(&path)?.write_all(minified.as_bytes())?; Ok(old_size - new_size) } fn main() { let start = Instant::now(); let files = WalkDir::new("node_modules"); let to_minify: Vec<_> = files .into_iter() // Skip filesystem errors rather than panicking .filter_map(Result::ok) // Only look at files, not dirs or symlinks .filter(|entry| entry.file_type().is_file()) // If something's minifiable, determine its type. If not, skip it. 
.filter_map(|entry| { let src_ty = minifiable(entry.file_name())?; Some((entry, src_ty)) }) .collect(); // // Use this if it turns out the scanning phase takes a while. // let mut to_minify = Vec::new(); // let pb = ProgressBar::new(0); // pb.set_message("Scanning..."); // for entry in files.into_iter().filter_map(...).filter(...) { // if let Some(src_ty) = minifiable(entry.file_name()) { // to_minify.push((entry, src_ty)); // pb.inc_len(1); // } // } let pb = ProgressBar::new(to_minify.len() as u64); pb.set_style( ProgressStyle::default_bar() .template("[{elapsed_precise}] {bar:40.cyan/blue} {pos:>7}/{len:7} {msg}") .progress_chars("=> "), ); let mut files_minified: u64 = 0; let mut space_saved = 0; let mut buf = String::new(); for (entry, src_ty) in to_minify { let path = entry.path(); // if you want to say which thing you're minifying... // this does involve allocation due to indicatif's API // so if each file goes by fast enough, maybe don't do this let name = path.to_string_lossy().into_owned(); pb.set_message(name); match minify_file(path, src_ty, &mut buf) { Ok(0) => {} Ok(n) => { files_minified += 1; space_saved += n; } Err(_e) => { // Communicate the error? } } // Either way, we're done with this file pb.inc(1); } pb.finish_and_clear(); let msg = format!( "Minified {} files, saving {} bytes in {} seconds", files_minified, space_saved, start.elapsed().as_secs_f32() ); let old_size = dir_size("node_modules").unwrap(); println!("Minified Size: {}", old_size); let tar_gz = File::create("node_modules.tgz").unwrap(); let enc = GzEncoder::new(&tar_gz, Compression::default()); let mut tar = tar::Builder::new(enc); tar.append_dir_all("node_modules", ".").unwrap(); println!( "Minified + Compressed Size: {} bytes", tar_gz.metadata().unwrap().len() ); println!("{}", msg); }
mod commitfs; pub use commitfs::*;