text stringlengths 8 4.13M |
|---|
// Copyright 2022 Sebastian Ramacher
// SPDX-License-Identifier: MIT
use ascon_aead::{
aead::{generic_array::typenum::Unsigned, Aead, AeadInPlace, KeyInit},
Ascon128, Ascon128a, Ascon80pq,
};
use criterion::{
black_box, criterion_group, criterion_main, Bencher, BenchmarkId, Criterion, Throughput,
};
use rand::{rngs::StdRng, RngCore, SeedableRng};
const KB: usize = 1024;
/// Benchmarks one-shot `Aead::encrypt` for cipher `A` on a random message of
/// `size` bytes, using a freshly random key and nonce.
fn bench_for_size<A: KeyInit + Aead>(b: &mut Bencher, rng: &mut dyn RngCore, size: usize) {
    let mut key_bytes = vec![0u8; A::KeySize::USIZE];
    rng.fill_bytes(&mut key_bytes);
    let mut nonce_bytes = vec![0u8; A::NonceSize::USIZE];
    rng.fill_bytes(&mut nonce_bytes);
    let mut message = vec![0u8; size];
    rng.fill_bytes(&mut message);

    let cipher = A::new(key_bytes.as_slice().into());
    let nonce = nonce_bytes.as_slice().into();
    // Only the encryption itself is measured; setup happens outside `iter`.
    b.iter(|| black_box(cipher.encrypt(nonce, message.as_slice())));
}
/// Benchmarks `AeadInPlace::encrypt_in_place` for cipher `A` on `size` bytes.
///
/// `encrypt_in_place` on a `Vec<u8>` buffer *appends* the authentication tag,
/// so the buffer must be truncated back to `size` before every iteration.
/// The original code pre-sized the buffer to `size + 16` and never truncated,
/// which (a) encrypted `size + 16` bytes while reporting `size` bytes of
/// throughput and (b) let the buffer grow by a tag length on every iteration.
fn bench_for_size_inplace<A: KeyInit + AeadInPlace>(
    b: &mut Bencher,
    rng: &mut dyn RngCore,
    size: usize,
) {
    let mut key = vec![0u8; A::KeySize::USIZE];
    rng.fill_bytes(key.as_mut_slice());
    let mut nonce = vec![0u8; A::NonceSize::USIZE];
    rng.fill_bytes(nonce.as_mut_slice());
    // Reserve room for the tag up front so appending it never reallocates,
    // but keep the plaintext length at exactly `size`.
    let mut buffer = Vec::with_capacity(size + A::TagSize::USIZE);
    buffer.resize(size, 0);
    rng.fill_bytes(buffer.as_mut_slice());
    let cipher = A::new(key.as_slice().into());
    let nonce = nonce.as_slice().into();
    b.iter(|| {
        // Drop the tag appended by the previous iteration so every iteration
        // encrypts exactly `size` bytes.
        buffer.truncate(size);
        black_box(cipher.encrypt_in_place(nonce, b"", &mut buffer))
    });
}
/// Registers the one-shot encryption benchmark for `A` over message sizes
/// from 1 KiB to 64 KiB, reporting byte throughput.
fn criterion_benchmark<A: KeyInit + Aead>(c: &mut Criterion, name: &str) {
    let mut rng = StdRng::from_entropy();
    let mut group = c.benchmark_group(name);
    for &size in &[KB, 2 * KB, 4 * KB, 8 * KB, 16 * KB, 32 * KB, 64 * KB] {
        group.throughput(Throughput::Bytes(size as u64));
        group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
            bench_for_size::<A>(b, &mut rng, size)
        });
    }
    group.finish();
}
/// Registers the in-place encryption benchmark for `A` over message sizes
/// from 1 KiB to 64 KiB, reporting byte throughput.
fn criterion_benchmark_inplace<A: KeyInit + AeadInPlace>(c: &mut Criterion, name: &str) {
    let mut rng = StdRng::from_entropy();
    let mut group = c.benchmark_group(name);
    for &size in &[KB, 2 * KB, 4 * KB, 8 * KB, 16 * KB, 32 * KB, 64 * KB] {
        group.throughput(Throughput::Bytes(size as u64));
        group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {
            bench_for_size_inplace::<A>(b, &mut rng, size)
        });
    }
    group.finish();
}
// Thin entry points registered with `criterion_group!` below — one per
// cipher/mode combination so results are grouped by algorithm name.
fn criterion_bench_ascon128(c: &mut Criterion) {
    criterion_benchmark::<Ascon128>(c, "Ascon-128");
}
fn criterion_bench_ascon128a(c: &mut Criterion) {
    criterion_benchmark::<Ascon128a>(c, "Ascon-128a");
}
fn criterion_bench_ascon80pq(c: &mut Criterion) {
    criterion_benchmark::<Ascon80pq>(c, "Ascon-80pq");
}
fn criterion_bench_ascon128_inplace(c: &mut Criterion) {
    criterion_benchmark_inplace::<Ascon128>(c, "Ascon-128 (inplace)");
}
fn criterion_bench_ascon128a_inplace(c: &mut Criterion) {
    criterion_benchmark_inplace::<Ascon128a>(c, "Ascon-128a (inplace)");
}
fn criterion_bench_ascon80pq_inplace(c: &mut Criterion) {
    criterion_benchmark_inplace::<Ascon80pq>(c, "Ascon-80pq (inplace)");
}
// One Criterion group per cipher variant (one-shot + in-place benches each);
// `criterion_main!` generates the benchmark binary's `main`.
criterion_group!(
    bench_ascon128,
    criterion_bench_ascon128,
    criterion_bench_ascon128_inplace,
);
criterion_group!(
    bench_ascon128a,
    criterion_bench_ascon128a,
    criterion_bench_ascon128a_inplace
);
criterion_group!(
    bench_ascon80pq,
    criterion_bench_ascon80pq,
    criterion_bench_ascon80pq_inplace
);
criterion_main!(bench_ascon128, bench_ascon128a, bench_ascon80pq);
|
// Copyright (C) 2021 Subspace Labs, Inc.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Pallet transporter used to move funds between chains.
#![cfg_attr(not(feature = "std"), no_std)]
#![forbid(unsafe_code)]
#![warn(rust_2018_idioms, missing_debug_implementations)]
use codec::{Decode, Encode};
use domain_runtime_primitives::{MultiAccountId, TryConvertBack};
use frame_support::traits::Currency;
pub use pallet::*;
use scale_info::TypeInfo;
use sp_messenger::messages::ChainId;
#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;
#[cfg(feature = "runtime-benchmarks")]
mod benchmarking;
pub mod weights;
/// Location that either sends or receives transfers between chains.
// SCALE codec derive: field order is part of the wire/storage encoding.
#[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo)]
pub struct Location {
    /// Unique identity of chain.
    pub chain_id: ChainId,
    /// Unique account on chain.
    pub account_id: MultiAccountId,
}
/// Transfer of funds from one chain to another.
// This struct's SCALE encoding is the payload of the cross-chain endpoint
// message (see `transfer` and `EndpointHandler::message` in the pallet mod).
#[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo)]
pub struct Transfer<Balance> {
    /// Amount being transferred between entities.
    pub amount: Balance,
    /// Sender location of the transfer.
    pub sender: Location,
    /// Receiver location of the transfer.
    pub receiver: Location,
}
/// Balance type used by the pallet.
pub(crate) type BalanceOf<T> =
    <<T as Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::Balance;
/// Message id type produced by the configured messenger `Sender`.
type MessageIdOf<T> = <<T as Config>::Sender as sp_messenger::endpoint::Sender<
    <T as frame_system::Config>::AccountId,
>>::MessageId;
#[frame_support::pallet]
mod pallet {
    use crate::weights::WeightInfo;
    use crate::{BalanceOf, Location, MessageIdOf, MultiAccountId, Transfer, TryConvertBack};
    use codec::{Decode, Encode};
    use frame_support::pallet_prelude::*;
    use frame_support::traits::{Currency, ExistenceRequirement, WithdrawReasons};
    use frame_support::weights::Weight;
    use frame_system::pallet_prelude::*;
    use sp_messenger::endpoint::{
        Endpoint, EndpointHandler as EndpointHandlerT, EndpointId, EndpointRequest,
        EndpointResponse, Sender,
    };
    use sp_messenger::messages::ChainId;
    use sp_runtime::traits::Convert;
    use sp_std::vec;

    #[pallet::config]
    pub trait Config: frame_system::Config {
        /// Event type for this pallet.
        type RuntimeEvent: From<Event<Self>> + IsType<<Self as frame_system::Config>::RuntimeEvent>;
        /// Gets the chain_id of the current execution environment.
        type SelfChainId: Get<ChainId>;
        /// Gets the endpoint_id of the this pallet in a given execution environment.
        type SelfEndpointId: Get<EndpointId>;
        /// Currency used by this pallet.
        type Currency: Currency<Self::AccountId>;
        /// Sender used to transfer funds.
        type Sender: Sender<Self::AccountId>;
        /// MultiAccountID <> T::AccountId converter.
        type AccountIdConverter: TryConvertBack<Self::AccountId, MultiAccountId>;
        /// Weight information for extrinsics in this pallet.
        type WeightInfo: WeightInfo;
    }

    /// Pallet transporter to move funds between chains.
    #[pallet::pallet]
    #[pallet::without_storage_info]
    pub struct Pallet<T>(_);

    /// All the outgoing transfers on this execution environment.
    ///
    /// Keyed by destination chain id and the message id returned by the
    /// messenger `Sender`; an entry is inserted by `transfer` and removed by
    /// `message_response` when the destination chain replies.
    #[pallet::storage]
    #[pallet::getter(fn outgoing_transfers)]
    pub(super) type OutgoingTransfers<T: Config> = StorageDoubleMap<
        _,
        Identity,
        ChainId,
        Identity,
        MessageIdOf<T>,
        Transfer<BalanceOf<T>>,
        OptionQuery,
    >;

    /// Events emitted by pallet-transporter.
    #[pallet::event]
    #[pallet::generate_deposit(pub (super) fn deposit_event)]
    pub enum Event<T: Config> {
        /// Emits when there is a new outgoing transfer.
        OutgoingTransferInitiated {
            /// Destination chain the transfer is bound to.
            chain_id: ChainId,
            /// Id of the transfer.
            message_id: MessageIdOf<T>,
        },
        /// Emits when a given outgoing transfer was failed on dst_chain.
        OutgoingTransferFailed {
            /// Destination chain the transfer is bound to.
            chain_id: ChainId,
            /// Id of the transfer.
            message_id: MessageIdOf<T>,
            /// Error from dst_chain endpoint.
            err: DispatchError,
        },
        /// Emits when a given outgoing transfer was successful.
        OutgoingTransferSuccessful {
            /// Destination chain the transfer is bound to.
            chain_id: ChainId,
            /// Id of the transfer.
            message_id: MessageIdOf<T>,
        },
        /// Emits when a given incoming transfer was successfully processed.
        IncomingTransferSuccessful {
            /// Source chain the transfer is coming from.
            chain_id: ChainId,
            /// Id of the transfer.
            message_id: MessageIdOf<T>,
        },
    }

    /// Errors emitted by pallet-transporter.
    #[pallet::error]
    pub enum Error<T> {
        /// Emits when the account has low balance to make a transfer.
        LowBalance,
        /// Failed to decode transfer payload.
        InvalidPayload,
        /// Emits when the request for a response received is missing.
        MissingTransferRequest,
        /// Emits when the request doesn't match the expected one.
        InvalidTransferRequest,
        /// Emits when the incoming message is not bound to this chain.
        UnexpectedMessage,
        /// Emits when the account id type is invalid.
        InvalidAccountId,
    }

    #[pallet::call]
    impl<T: Config> Pallet<T> {
        /// Initiates transfer of funds from account on src_chain to account on dst_chain.
        /// Funds are burned on src_chain first and are minted on dst_chain using Messenger.
        #[pallet::call_index(0)]
        // `Pays::No`: the caller is not charged a transaction fee for this call.
        #[pallet::weight((T::WeightInfo::transfer(), Pays::No))]
        pub fn transfer(
            origin: OriginFor<T>,
            dst_location: Location,
            amount: BalanceOf<T>,
        ) -> DispatchResult {
            let sender = ensure_signed(origin)?;
            // burn transfer amount; `AllowDeath` lets the account be reaped
            // if its balance drops below the existential deposit
            T::Currency::withdraw(
                &sender,
                amount,
                WithdrawReasons::TRANSFER,
                ExistenceRequirement::AllowDeath,
            )
            .map_err(|_| Error::<T>::LowBalance)?;
            // initiate transfer
            let dst_chain_id = dst_location.chain_id;
            let transfer = Transfer {
                amount,
                sender: Location {
                    chain_id: T::SelfChainId::get(),
                    account_id: T::AccountIdConverter::convert(sender.clone()),
                },
                receiver: dst_location,
            };
            // send message (the SCALE-encoded `Transfer` is the payload)
            let message_id = T::Sender::send_message(
                &sender,
                dst_chain_id,
                EndpointRequest {
                    src_endpoint: Endpoint::Id(T::SelfEndpointId::get()),
                    // destination endpoint must be transporter with same id
                    dst_endpoint: Endpoint::Id(T::SelfEndpointId::get()),
                    payload: transfer.encode(),
                },
            )?;
            // remember the pending transfer so the eventual response can be
            // matched (and the burn reverted on failure)
            OutgoingTransfers::<T>::insert(dst_chain_id, message_id, transfer);
            Self::deposit_event(Event::<T>::OutgoingTransferInitiated {
                chain_id: dst_chain_id,
                message_id,
            });
            Ok(())
        }
    }

    /// Endpoint handler implementation for pallet transporter.
    #[derive(Debug)]
    pub struct EndpointHandler<T>(pub PhantomData<T>);

    impl<T: Config> EndpointHandlerT<MessageIdOf<T>> for EndpointHandler<T> {
        // Handles an incoming transfer message from another chain: validates
        // it, mints the funds to the receiver, and emits an event.
        fn message(
            &self,
            src_chain_id: ChainId,
            message_id: MessageIdOf<T>,
            req: EndpointRequest,
        ) -> EndpointResponse {
            // ensure message is not from the self
            ensure!(
                T::SelfChainId::get() != src_chain_id,
                Error::<T>::InvalidTransferRequest
            );
            // check the endpoint id
            ensure!(
                req.dst_endpoint == Endpoint::Id(T::SelfEndpointId::get()),
                Error::<T>::UnexpectedMessage
            );
            // decode payload and process message
            let req = match Transfer::decode(&mut req.payload.as_slice()) {
                Ok(req) => req,
                Err(_) => return Err(Error::<T>::InvalidPayload.into()),
            };
            // mint the funds to dst_account (counterpart of the burn on the
            // source chain)
            let account_id = T::AccountIdConverter::try_convert_back(req.receiver.account_id)
                .ok_or(Error::<T>::InvalidAccountId)?;
            T::Currency::deposit_creating(&account_id, req.amount);
            frame_system::Pallet::<T>::deposit_event(Into::<<T as Config>::RuntimeEvent>::into(
                Event::<T>::IncomingTransferSuccessful {
                    chain_id: src_chain_id,
                    message_id,
                },
            ));
            Ok(vec![])
        }

        fn message_weight(&self) -> Weight {
            T::WeightInfo::message()
        }

        // Handles the destination chain's response to an outgoing transfer:
        // on success just emits an event; on failure re-mints the burned funds.
        fn message_response(
            &self,
            dst_chain_id: ChainId,
            message_id: MessageIdOf<T>,
            req: EndpointRequest,
            resp: EndpointResponse,
        ) -> DispatchResult {
            // ensure request is valid; `take` removes the pending entry, so a
            // duplicate response fails with `MissingTransferRequest`
            let transfer = OutgoingTransfers::<T>::take(dst_chain_id, message_id)
                .ok_or(Error::<T>::MissingTransferRequest)?;
            ensure!(
                req.payload == transfer.encode(),
                Error::<T>::InvalidTransferRequest
            );
            // process response
            match resp {
                Ok(_) => {
                    // transfer is successful
                    frame_system::Pallet::<T>::deposit_event(
                        Into::<<T as Config>::RuntimeEvent>::into(
                            Event::<T>::OutgoingTransferSuccessful {
                                chain_id: dst_chain_id,
                                message_id,
                            },
                        ),
                    );
                }
                Err(err) => {
                    // transfer failed
                    // revert burned funds back to the original sender
                    let account_id =
                        T::AccountIdConverter::try_convert_back(transfer.sender.account_id)
                            .ok_or(Error::<T>::InvalidAccountId)?;
                    T::Currency::deposit_creating(&account_id, transfer.amount);
                    frame_system::Pallet::<T>::deposit_event(
                        Into::<<T as Config>::RuntimeEvent>::into(
                            Event::<T>::OutgoingTransferFailed {
                                chain_id: dst_chain_id,
                                message_id,
                                err,
                            },
                        ),
                    );
                }
            }
            Ok(())
        }

        fn message_response_weight(&self) -> Weight {
            T::WeightInfo::message_response()
        }
    }
}
|
use nu_engine::{eval_block, eval_expression, CallExt};
use nu_protocol::ast::Call;
use nu_protocol::engine::{CaptureBlock, Command, EngineState, Stack};
use nu_protocol::{
Category, Example, IntoInterruptiblePipelineData, ListStream, PipelineData, Signature, Span,
SyntaxShape, Value,
};
/// The `for` loop command (a parser keyword — see `is_parser_keyword`).
#[derive(Clone)]
pub struct For;
impl Command for For {
    fn name(&self) -> &str {
        "for"
    }

    fn usage(&self) -> &str {
        "Loop over a range"
    }

    // Declared shape: `for <var> in <range> { <block> }` with an optional
    // `--numbered` switch that wraps each element in an index/item record.
    fn signature(&self) -> nu_protocol::Signature {
        Signature::build("for")
            .required(
                "var_name",
                SyntaxShape::VarWithOptType,
                "name of the looping variable",
            )
            .required(
                "range",
                SyntaxShape::Keyword(b"in".to_vec(), Box::new(SyntaxShape::Any)),
                "range of the loop",
            )
            .required(
                "block",
                SyntaxShape::Block(Some(vec![])),
                "the block to run",
            )
            .switch(
                "numbered",
                "returned a numbered item ($it.index and $it.item)",
                Some('n'),
            )
            .creates_scope()
            .category(Category::Core)
    }

    fn extra_usage(&self) -> &str {
        r#"This command is a parser keyword. For details, check:
https://www.nushell.sh/book/thinking_in_nu.html"#
    }

    fn is_parser_keyword(&self) -> bool {
        true
    }

    // Evaluates the loop: once per element for lists and ranges (lazily, as a
    // stream), or a single block evaluation for any other value.
    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        _input: PipelineData,
    ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
        let head = call.head;
        // Positional 0/1 were validated by the parser, hence the `expect`s.
        let var_id = call
            .positional_nth(0)
            .expect("checked through parser")
            .as_var()
            .expect("internal error: missing variable");
        let keyword_expr = call
            .positional_nth(1)
            .expect("checked through parser")
            .as_keyword()
            .expect("internal error: missing keyword");
        // Evaluate the `in <range>` expression to the value iterated over.
        let values = eval_expression(engine_state, stack, keyword_expr)?;
        let capture_block: CaptureBlock = call.req(engine_state, stack, 2)?;
        let numbered = call.has_flag("numbered");
        let ctrlc = engine_state.ctrlc.clone();
        // Clone state owned by the `move` closures below, which may outlive
        // this call frame because the result is a lazy stream.
        let engine_state = engine_state.clone();
        let block = engine_state.get_block(capture_block.block_id).clone();
        let mut stack = stack.captures_to_stack(&capture_block.captures);
        // Snapshot the environment so each iteration starts from the same env.
        let orig_env_vars = stack.env_vars.clone();
        let orig_env_hidden = stack.env_hidden.clone();
        let redirect_stdout = call.redirect_stdout;
        let redirect_stderr = call.redirect_stderr;
        match values {
            Value::List { vals, .. } => {
                // Stream lazily; ctrl-c can interrupt between elements.
                Ok(ListStream::from_stream(vals.into_iter(), ctrlc.clone())
                    .enumerate()
                    .map(move |(idx, x)| {
                        // Reset env mutations from the previous iteration.
                        stack.with_env(&orig_env_vars, &orig_env_hidden);
                        stack.add_var(
                            var_id,
                            if numbered {
                                // `--numbered`: bind a {index, item} record.
                                Value::Record {
                                    cols: vec!["index".into(), "item".into()],
                                    vals: vec![
                                        Value::Int {
                                            val: idx as i64,
                                            span: head,
                                        },
                                        x,
                                    ],
                                    span: head,
                                }
                            } else {
                                x
                            },
                        );
                        //let block = engine_state.get_block(block_id);
                        match eval_block(
                            &engine_state,
                            &mut stack,
                            &block,
                            PipelineData::new(head),
                            redirect_stdout,
                            redirect_stderr,
                        ) {
                            Ok(pipeline_data) => pipeline_data.into_value(head),
                            Err(error) => Value::Error { error },
                        }
                    })
                    // Iterations that produce nothing are dropped from the
                    // output stream.
                    .filter(|x| !x.is_nothing())
                    .into_pipeline_data(ctrlc))
            }
            Value::Range { val, .. } => Ok(val
                .into_range_iter(ctrlc.clone())?
                .enumerate()
                .map(move |(idx, x)| {
                    // Same per-iteration logic as the list case above.
                    stack.with_env(&orig_env_vars, &orig_env_hidden);
                    stack.add_var(
                        var_id,
                        if numbered {
                            Value::Record {
                                cols: vec!["index".into(), "item".into()],
                                vals: vec![
                                    Value::Int {
                                        val: idx as i64,
                                        span: head,
                                    },
                                    x,
                                ],
                                span: head,
                            }
                        } else {
                            x
                        },
                    );
                    //let block = engine_state.get_block(block_id);
                    match eval_block(
                        &engine_state,
                        &mut stack,
                        &block,
                        PipelineData::new(head),
                        redirect_stdout,
                        redirect_stderr,
                    ) {
                        Ok(pipeline_data) => pipeline_data.into_value(head),
                        Err(error) => Value::Error { error },
                    }
                })
                .filter(|x| !x.is_nothing())
                .into_pipeline_data(ctrlc)),
            // Any non-iterable value: bind it once and evaluate the block once.
            x => {
                stack.add_var(var_id, x);
                eval_block(
                    &engine_state,
                    &mut stack,
                    &block,
                    PipelineData::new(head),
                    redirect_stdout,
                    redirect_stderr,
                )
            }
        }
    }

    fn examples(&self) -> Vec<Example> {
        let span = Span::test_data();
        vec![
            Example {
                description: "Echo the square of each integer",
                example: "for x in [1 2 3] { $x * $x }",
                result: Some(Value::List {
                    vals: vec![
                        Value::Int { val: 1, span },
                        Value::Int { val: 4, span },
                        Value::Int { val: 9, span },
                    ],
                    span,
                }),
            },
            Example {
                description: "Work with elements of a range",
                example: "for $x in 1..3 { $x }",
                result: Some(Value::List {
                    vals: vec![
                        Value::Int { val: 1, span },
                        Value::Int { val: 2, span },
                        Value::Int { val: 3, span },
                    ],
                    span,
                }),
            },
            Example {
                description: "Number each item and echo a message",
                example: "for $it in ['bob' 'fred'] --numbered { $\"($it.index) is ($it.item)\" }",
                result: Some(Value::List {
                    vals: vec![
                        Value::String {
                            val: "0 is bob".into(),
                            span,
                        },
                        Value::String {
                            val: "1 is fred".into(),
                            span,
                        },
                    ],
                    span,
                }),
            },
        ]
    }
}
#[cfg(test)]
mod test {
    use super::*;

    /// Runs the examples declared in `examples()` through the shared
    /// example-testing harness.
    #[test]
    fn test_examples() {
        use crate::test_examples;
        test_examples(For {})
    }
}
|
use crate::ui::components::image_button::ImageButtonProps;
use raui_core::prelude::*;
// RAUI component: an image box whose tint reflects the current button state.
widget_component! {
    pub button_state_image(key, props) {
        // Interaction state (trigger/context/selected) supplied by the
        // enclosing button via props.
        let ButtonProps {
            selected,
            trigger,
            context,
            ..
        } = props.read_cloned_or_default();
        // Image id and horizontal alignment for this particular button.
        let ImageButtonProps {
            id,
            horizontal_alignment,
        } = props.read_cloned_or_default();
        let image_props = ImageBoxProps {
            material: ImageBoxMaterial::Image(ImageBoxImage {
                id,
                // Tint priority: trigger (green) > context (red) >
                // selected (slightly transparent white) > untinted default.
                tint: if trigger {
                    Color { r: 0.0, g: 1.0, b: 0.0, a: 1.0 }
                } else if context {
                    Color { r: 1.0, g: 0.0, b: 0.0, a: 1.0 }
                } else if selected {
                    Color { r: 1.0, g: 1.0, b: 1.0, a: 0.85 }
                } else {
                    Color::default()
                },
                ..Default::default()
            }),
            // Keep the image's aspect ratio, vertically centered.
            content_keep_aspect_ratio: Some(ImageBoxAspectRatio {
                horizontal_alignment,
                vertical_alignment: 0.5
            }),
            ..Default::default()
        };
        widget! {
            (#{key} image_box: {image_props})
        }
    }
}
|
use nix;
use std::{cmp, fmt, os, ptr};
// Fixed-capacity ring buffer backed by a `Vec` whose length always equals its
// capacity; slots outside the readable window are logically uninitialized
// (see the unsafe bookkeeping in `new`/`read`/`write`/`drop`).
//
// NOTE(review): the derived `Clone` clones every slot of `buf`, including
// logically-uninitialized ones — confirm this is never invoked for a `T`
// whose `clone` reads the value (UB on uninitialized memory).
#[derive(Clone)]
pub struct CircularBuffer<T> {
    // Write cursor; indexed modulo capacity. Not wrapped eagerly — both
    // cursors are reduced together when `tail` runs past the end.
    head: usize,
    // Read cursor; kept below capacity by the wrap adjustment in `read`.
    tail: usize,
    // Backing storage; `len() == capacity` at all times.
    buf: Vec<T>,
    // Lifetime count of elements read (diagnostics/Debug only).
    read: usize,
    // Lifetime count of elements written (diagnostics/Debug only).
    written: usize,
}
impl<T> CircularBuffer<T> {
    /// Creates a buffer that can hold up to `cap` elements.
    ///
    /// NOTE(review): `set_len(cap)` over a `Vec::with_capacity` allocation
    /// exposes uninitialized memory as live `T`s, which is undefined behavior
    /// under current Rust rules; a `MaybeUninit<T>`-based layout would be
    /// sound. Flagged, not changed, as it would ripple through every method.
    pub fn new(cap: usize) -> Self {
        let mut buf = Vec::with_capacity(cap);
        unsafe { buf.set_len(cap) };
        Self {
            head: 0,
            tail: 0,
            buf,
            read: 0,
            written: 0,
        }
    }

    /// Capacity in elements (the backing `Vec`'s length is kept equal to it).
    #[inline(always)]
    pub fn capacity(&self) -> usize {
        self.buf.len()
    }

    /// Number of elements currently stored (`head - tail`; `tail <= head`).
    #[inline(always)]
    pub fn read_available(&self) -> usize {
        self.head - self.tail
    }

    /// Number of free slots remaining.
    #[inline(always)]
    pub fn write_available(&self) -> usize {
        self.capacity() - self.read_available()
    }

    /// Returns a closure that pops one element, or `None` when empty.
    ///
    /// The pop only happens when the closure is *called*; dropping the
    /// closure unused leaves the buffer untouched.
    #[must_use]
    #[inline(always)]
    pub fn read<'a>(&'a mut self) -> Option<impl FnOnce() -> T + 'a> {
        if self.read_available() > 0 {
            Some(move || {
                let off = self.tail;
                // Move the element out bitwise; the slot becomes logically
                // uninitialized and must not be dropped in place.
                let ret = unsafe { ptr::read(self.buf.get_unchecked(off)) };
                self.tail += 1;
                // Re-normalize both cursors once tail runs off the end, so
                // `tail` stays a valid index (`head - tail` is unchanged;
                // `head >= capacity` holds here because `head >= tail`).
                if self.tail >= self.capacity() {
                    self.head -= self.capacity();
                    self.tail -= self.capacity();
                }
                self.read += 1;
                ret
            })
        } else {
            None
        }
    }

    /// Returns a closure that pushes one element, or `None` when full.
    #[must_use]
    #[inline(always)]
    pub fn write<'a>(&'a mut self) -> Option<impl FnOnce(T) + 'a> {
        if self.write_available() > 0 {
            Some(move |t| {
                // `head` is only reduced when `tail` wraps, so index modulo
                // capacity here.
                let off = self.head % self.capacity();
                // `ptr::write` skips dropping the stale slot contents (which
                // are uninitialized or already moved out).
                unsafe { ptr::write(self.buf.get_unchecked_mut(off), t) };
                self.head += 1;
                self.written += 1;
            })
        } else {
            None
        }
    }
}
impl<T> Drop for CircularBuffer<T> {
    /// Drops every remaining element, then zeroes the `Vec` length so that
    /// `Vec`'s own drop does not touch uninitialized / moved-out slots.
    fn drop(&mut self) {
        // The closure returned by `read` must actually be *called*: the
        // original `let _ = self.read();` merely dropped the `FnOnce`, which
        // performs no `ptr::read` and never advances `tail`, so the loop never
        // terminated and no element destructor ran.
        while self.read_available() > 0 {
            if let Some(consume) = self.read() {
                // Calling pops the element; the returned `T` is dropped here.
                consume();
            }
        }
        // SAFETY: every initialized element was consumed above; remaining
        // slots are logically uninitialized and must not be dropped by `Vec`.
        unsafe { self.buf.set_len(0) };
    }
}
impl CircularBuffer<u8> {
    /// Drains buffered bytes into socket `fd` (non-blocking) until the socket
    /// would block or the buffer is empty; returns the number of bytes sent.
    pub fn read_to_fd(&mut self, fd: os::unix::io::RawFd) -> Result<usize, nix::Error> {
        let mut written = 0;
        loop {
            if self.read_available() > 0 {
                // The readable window may wrap: segment A is [tail..a_end) at
                // the end of the buffer, segment B is the wrapped remainder
                // starting at index 0.
                let a_start = self.tail;
                let a_end = cmp::min(self.capacity(), a_start + self.read_available());
                let b_start = 0;
                let b_end = self.read_available() - (a_end - a_start);
                match nix::sys::socket::sendmsg(
                    fd,
                    // Send both segments in one call; use a single iovec when
                    // nothing wrapped.
                    &[
                        nix::sys::uio::IoVec::from_slice(&self.buf[a_start..a_end]),
                        nix::sys::uio::IoVec::from_slice(&self.buf[b_start..b_end]),
                    ][0..if b_start != b_end { 2 } else { 1 }],
                    &[],
                    nix::sys::socket::MsgFlags::empty(),
                    None,
                ) {
                    // Socket buffer full: report what was sent so far.
                    Err(nix::Error::Sys(nix::errno::Errno::EAGAIN)) => return Ok(written),
                    Ok(n) => {
                        // Advance the read cursor by the bytes accepted,
                        // re-normalizing both cursors on wrap (same scheme as
                        // `read`).
                        self.tail += n;
                        if self.tail >= self.capacity() {
                            self.head -= self.capacity();
                            self.tail -= self.capacity();
                        }
                        self.read += n;
                        written += n;
                    }
                    Err(err) => return Err(err),
                }
            } else {
                // Buffer drained: surface any pending socket error, otherwise
                // report success. NOTE(review): `unwrap` on getsockopt will
                // panic if `fd` is invalid — confirm that is acceptable here.
                match nix::sys::socket::getsockopt(fd, nix::sys::socket::sockopt::SocketError)
                    .unwrap()
                {
                    0 => return Ok(written),
                    err => return Err(nix::Error::Sys(nix::errno::Errno::from_i32(err))),
                }
            }
        }
    }

    /// Fills the buffer from socket `fd` (non-blocking) until the socket
    /// would block, EOF, or the buffer is full; returns `(bytes_read, eof)`.
    pub fn write_from_fd(&mut self, fd: os::unix::io::RawFd) -> Result<(usize, bool), nix::Error> {
        let mut read = 0;
        loop {
            if self.write_available() > 0 {
                // The writable window may wrap: segment A runs to the end of
                // the buffer, segment B is the wrapped part at the front.
                let a_start = self.head % self.capacity();
                let a_end = cmp::min(self.capacity(), a_start + self.write_available());
                let b_start = 0;
                let b_end = self.write_available() - (a_end - a_start);
                // Split so two disjoint &mut slices can be handed to recvmsg;
                // `a` starts at buffer index `b_end`, hence the shifted
                // indices below.
                let (b, a) = self.buf.split_at_mut(b_end);
                match nix::sys::socket::recvmsg(
                    fd,
                    &[
                        nix::sys::uio::IoVec::from_mut_slice(
                            &mut a[a_start - b_end..a_end - b_end],
                        ),
                        nix::sys::uio::IoVec::from_mut_slice(b),
                    ][0..if b_start != b_end { 2 } else { 1 }],
                    None,
                    nix::sys::socket::MsgFlags::empty(),
                )
                .map(|x| x.bytes)
                {
                    // Nothing more to read right now.
                    Err(nix::Error::Sys(nix::errno::Errno::EAGAIN)) => return Ok((read, false)),
                    // Zero-byte read on a stream socket means peer closed.
                    Ok(0) => return Ok((read, true)),
                    Ok(n) => {
                        self.head += n;
                        self.written += n;
                        read += n;
                    }
                    Err(err) => return Err(err),
                }
            } else {
                // Buffer full: peek one byte (without consuming) purely to
                // distinguish "more data pending" from EOF.
                match nix::sys::socket::recvmsg(
                    fd,
                    &[nix::sys::uio::IoVec::from_mut_slice(&mut [0])],
                    None,
                    nix::sys::socket::MsgFlags::MSG_PEEK,
                )
                .map(|x| x.bytes)
                {
                    // Either would-block or a byte is waiting: not EOF, but no
                    // room to store it — let the caller drain first.
                    Err(nix::Error::Sys(nix::errno::Errno::EAGAIN)) | Ok(1) => {
                        return Ok((read, false))
                    }
                    Ok(0) => return Ok((read, true)),
                    Err(err) => return Err(err),
                    // A 1-byte peek can only yield 0 or 1 bytes.
                    Ok(_) => unreachable!(),
                }
            }
        }
    }
}
impl<T> fmt::Debug for CircularBuffer<T>
where
    for<'a> &'a T: fmt::Debug,
{
    // Renders lifetime counters plus a "used/capacity" summary and the
    // current readable contents.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("CircularBuffer")
            .field("written", &self.written)
            .field("read", &self.read)
            .field(
                "contents",
                &format!(
                    "{}/{} \"{:?}\"",
                    self.read_available(),
                    self.capacity(),
                    (0..self.read_available())
                        // SAFETY: `(tail + i) % capacity` stays inside the
                        // readable window, which holds initialized elements,
                        // and `buf.len() == capacity`.
                        .map(|i| unsafe {
                            self.buf.get_unchecked((self.tail + i) % self.capacity())
                        })
                        .collect::<Vec<_>>()
                ),
            )
            .finish()
    }
}
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use uvll;
use raw::{Request, Allocated, Loop};
use UvResult;
/// Owned wrapper around a raw libuv `uv_getaddrinfo_t` request handle.
/// (Pre-1.0 Rust source; kept as-is.)
pub struct GetAddrInfo {
    handle: *mut uvll::uv_getaddrinfo_t,
}
impl GetAddrInfo {
    /// Starts an asynchronous getaddrinfo lookup on `uv_loop`; `cb` is
    /// invoked by libuv on completion.
    ///
    /// `node`/`service` are converted to C strings, with `None` becoming the
    /// null pointer libuv expects. (Pre-1.0 Rust idioms: `to_c_str`, `try!`.)
    pub fn send(&mut self,
                uv_loop: &Loop,
                node: Option<&str>,
                service: Option<&str>,
                cb: uvll::uv_getaddrinfo_cb) -> UvResult<()> {
        // The shadowed CString bindings stay alive until the end of this
        // function, keeping the raw pointers below valid for the call.
        let node = node.map(|s| s.to_c_str());
        let service = service.map(|s| s.to_c_str());
        let node = node.as_ref().map(|c| c.as_ptr()).unwrap_or(0 as *const _);
        let service = service.as_ref().map(|c| c.as_ptr()).unwrap_or(0 as *const _);
        unsafe {
            // Last argument is the `hints` pointer (null = no hints).
            try!(call!(uvll::uv_getaddrinfo(uv_loop.raw(),
                                            self.handle,
                                            cb,
                                            node,
                                            service,
                                            0 as *const _)));
        }
        Ok(())
    }
}
impl Allocated for uvll::uv_getaddrinfo_t {
    /// Byte size libuv requires for this request type, queried at runtime.
    fn size(_self: Option<uvll::uv_getaddrinfo_t>) -> uint {
        unsafe { uvll::uv_req_size(uvll::UV_GETADDRINFO) as uint }
    }
}
impl Request<uvll::uv_getaddrinfo_t> for GetAddrInfo {
    // Raw-handle round-tripping used by the generic request machinery.
    fn raw(&self) -> *mut uvll::uv_getaddrinfo_t { self.handle }
    fn from_raw(t: *mut uvll::uv_getaddrinfo_t) -> GetAddrInfo {
        GetAddrInfo { handle: t }
    }
}
|
pub trait StutteringIterator: Iterator {
    /// Creates an iterator that repeats each item of the underlying iterator,
    /// yielding every item `count + 1` times in a row: `stutter(0)` is a
    /// pass-through, `stutter(1)` yields each item twice, and so on
    /// (see the tests below).
    #[inline]
    fn stutter(self, count: usize) -> StutteringIter<Self>
    where
        Self: Sized,
        Self::Item: Copy,
    {
        StutteringIter::new(self, count)
    }
}

/// Blanket impl: every `Iterator` gains `stutter`.
impl<T: ?Sized> StutteringIterator for T where T: Iterator {}
/// Adaptor returned by [`StutteringIterator::stutter`]; yields each item of
/// the wrapped iterator `count + 1` times in a row.
pub struct StutteringIter<I: Iterator> {
    // Wrapped iterator.
    iter: I,
    // Item currently being repeated (`None` before the first pull and once
    // `iter` is exhausted).
    curr: Option<I::Item>,
    // How many times `curr` has been handed out in the current run.
    nr: usize,
    // Number of extra repetitions per item.
    count: usize,
}

impl<I> StutteringIter<I>
where
    I: Iterator,
{
    /// Wraps `iter` so each of its items is yielded `count + 1` times.
    pub fn new(iter: I, count: usize) -> Self {
        // `nr` starts saturated at `count`, forcing the very first `next`
        // call to pull a fresh item from the underlying iterator.
        StutteringIter {
            curr: None,
            nr: count,
            count,
            iter,
        }
    }
}

impl<I> Iterator for StutteringIter<I>
where
    I: Iterator,
    I::Item: Copy,
{
    type Item = <I as Iterator>::Item;

    #[inline]
    fn next(&mut self) -> Option<I::Item> {
        if self.nr < self.count {
            // Still within the current item's repetition run.
            self.nr += 1;
        } else {
            // Run complete: pull the next item and restart the counter.
            self.nr = 0;
            self.curr = self.iter.next();
        }
        self.curr
    }
}
#[cfg(test)]
mod tests {
    use super::StutteringIterator;

    // `stutter(0)` is a pass-through: each item appears once.
    #[test]
    fn test_0() {
        let a = vec![0, 1, 2];
        let mut iter = a.iter().stutter(0);
        assert_eq!(iter.next(), Some(&0));
        assert_eq!(iter.next(), Some(&1));
        assert_eq!(iter.next(), Some(&2));
    }

    // `stutter(1)` yields each item twice.
    #[test]
    fn test_1() {
        let a = vec![0, 1, 2];
        let mut iter = a.iter().stutter(1);
        assert_eq!(iter.next(), Some(&0));
        assert_eq!(iter.next(), Some(&0));
        assert_eq!(iter.next(), Some(&1));
        assert_eq!(iter.next(), Some(&1));
        assert_eq!(iter.next(), Some(&2));
        assert_eq!(iter.next(), Some(&2));
        assert_eq!(iter.next(), None);
    }

    // `stutter(2)` yields each item three times.
    #[test]
    fn test_2() {
        let a = vec![0, 1, 2];
        let mut iter = a.iter().stutter(2);
        assert_eq!(iter.next(), Some(&0));
        assert_eq!(iter.next(), Some(&0));
        assert_eq!(iter.next(), Some(&0));
        assert_eq!(iter.next(), Some(&1));
        assert_eq!(iter.next(), Some(&1));
        assert_eq!(iter.next(), Some(&1));
        assert_eq!(iter.next(), Some(&2));
        assert_eq!(iter.next(), Some(&2));
        assert_eq!(iter.next(), Some(&2));
        assert_eq!(iter.next(), None);
    }
}
|
// src/token/mod.rs
// Re-export the `token` submodule's public items at this module's root.
mod token;
pub use token::*;
|
///
/// Blitz Explorer
///
/// Request on the tcp server
///
/// Copyright 2019 Luis Fernando Batels <luisfbatels@gmail.com>
///
use std::net::{TcpStream, SocketAddr};
use std::io::{BufReader, BufRead, Write, BufWriter, copy};
use catalog::catalog::Catalog;
/// Stateless namespace for handling a single client connection.
pub struct Request {
}
impl Request {
    // Handle the client connection: read one command line, dispatch it
    // (`/search/` or `/download/`), and stream the response over the socket.
    pub fn handle(conn: TcpStream, catalog: &mut Catalog) {
        let pa = conn.peer_addr();
        if pa.is_err() {
            error!("Error on get the remote addr {:?}", conn);
            return;
        }
        let client = pa.unwrap();
        info!("Handling {}...", client);
        // The protocol is line-oriented: the first line is the whole command.
        let mut command = String::new();
        let mut buf_reader = BufReader::new(&conn);
        if buf_reader.read_line(&mut command).is_err() {
            error!("Error on read the command from the client {}", client);
            return;
        }
        let mut command_ok = false;
        if command.starts_with("/search/") {
            // "/search/<term>": send one "tar:path" line for every cataloged
            // file whose name contains <term>.
            let mut search = command.replacen("/search/", "", 1);
            search = search.trim().to_string();
            if !search.is_empty() {
                for tar in catalog.get_catalogs() {
                    for file in catalog.get_catalog(&tar) {
                        if file.file_name.contains(search.as_str()) {
                            Request::response(&conn, client, format!("{}:{}\n", tar.file_name.clone(), file.full_path.clone()));
                        }
                    }
                }
                command_ok = true;
            }
        } else if command.starts_with("/download/") && command.contains(":") {
            // "/download/<tar>:<path>": extract <path> from <tar> and stream
            // its bytes back to the client.
            let mut download = command.replacen("/download/", "", 1);
            download = download.trim().to_string();
            if !download.is_empty() {
                // NOTE(review): `split(":")` also splits on any ':' inside the
                // file path; only the first segment after the tar name is used
                // — confirm paths can never contain ':'.
                let mut download_slices = download.split(":");
                let tar_file = download_slices.next();
                if tar_file.is_none() {
                    error!("Tar file not setted: {}", download);
                    return;
                }
                let name_file = download_slices.next();
                if name_file.is_none() {
                    error!("Name of file not setted: {}", download);
                    return;
                }
                for tar in catalog.get_catalogs() {
                    if tar.file_name == tar_file.unwrap() {
                        for file in catalog.get_catalog(&tar) {
                            if file.full_path == name_file.unwrap() {
                                if let Some(extracted) = catalog.extract_file(&tar, &file) {
                                    // Stream the extracted content straight
                                    // into the socket.
                                    if copy(&mut BufReader::new(extracted), &mut BufWriter::new(&conn)).is_err() {
                                        error!("Error on write on buffer: {}", download);
                                        return;
                                    }
                                } else {
                                    error!("Error on extract: {}", download);
                                    return;
                                }
                                break;
                            }
                        }
                        break;
                    }
                }
                command_ok = true;
            }
        }
        if !command_ok {
            warn!("Invalid command {}", client);
            Request::response(&conn, client, "Invalid command\n".to_string());
            return;
        }
        info!("Handling {}...OK", client);
    }

    // Create and flush the response to the client; returns false when the
    // write failed (the failure is logged, not propagated).
    fn response(mut conn: &TcpStream, client: SocketAddr, text: String) -> bool {
        if conn.write_all(text.as_bytes()).is_err() {
            error!("Error on send '{}' message to client {}", text, client);
            return false;
        }
        return true;
    }
}
|
// Auto-generated (svd2rust-style) read-only accessors for the LL_DBG_7 debug
// register: two 7-bit FIFO pointer fields packed into bits 0:6 and 7:13.
#[doc = "Reader of register LL_DBG_7"]
pub type R = crate::R<u32, super::LL_DBG_7>;
#[doc = "Reader of field `ADV_RX_WR_PTR`"]
pub type ADV_RX_WR_PTR_R = crate::R<u8, u8>;
#[doc = "Reader of field `ADV_RX_RD_PTR`"]
pub type ADV_RX_RD_PTR_R = crate::R<u8, u8>;
impl R {
    #[doc = "Bits 0:6 - Advertiser Receive FIFO write pointer"]
    #[inline(always)]
    pub fn adv_rx_wr_ptr(&self) -> ADV_RX_WR_PTR_R {
        // Mask the low 7 bits.
        ADV_RX_WR_PTR_R::new((self.bits & 0x7f) as u8)
    }
    #[doc = "Bits 7:13 - Advertiser Receive FIFO read pointer"]
    #[inline(always)]
    pub fn adv_rx_rd_ptr(&self) -> ADV_RX_RD_PTR_R {
        // Shift out the write pointer, then mask 7 bits.
        ADV_RX_RD_PTR_R::new(((self.bits >> 7) & 0x7f) as u8)
    }
}
|
use std::fmt;
use std::time::Instant;
/// Fixed-size payload used to exercise the logger's formatting path.
struct Struct {
    data: [u8; 32],
}

impl fmt::Display for Struct {
    /// Renders the payload via its array `Debug` representation.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        let rendered = format!("{:?}", self.data);
        formatter.write_str(&rendered)
    }
}
fn main() {
// --------------------------------
println!("loge");
::std::env::set_var("RUST_LOG", "trace");
::std::env::set_var("LOGE_FORMAT", "target");
loge::init();
// --------------------------------
// $> Set-Item -Path Env:RUST_LOG -Value "trace"
// println!("env_logger");
// env_logger::init();
// $> Set-Item -Path Env:RUST_LOG
// --------------------------------
let mut structs = Vec::new();
for i in 0..100 {
structs.push(Struct { data: [i as u8; 32] });
}
{
// With format
let start = Instant::now();
for s in &structs {
log::info!("{}", format!("{}", s));
}
eprintln!("with format: {:?}", start.elapsed());
}
{
// Plain logger
let start = Instant::now();
for s in &structs {
log::info!("{}", s);
}
eprintln!("plain: {:?}", start.elapsed());
}
}
|
use ckb_core::transaction::Transaction as CoreTransaction;
use ckb_network::NetworkController;
use ckb_shared::shared::Shared;
use ckb_store::ChainStore;
use jsonrpc_core::{Error, Result};
use jsonrpc_derive::rpc;
use jsonrpc_types::Transaction;
use numext_fixed_hash::H256;
use std::convert::TryInto;
#[rpc]
pub trait IntegrationTestRpc {
    // curl -d '{"id": 2, "jsonrpc": "2.0", "method":"add_node","params": ["QmUsZHPbjjzU627UZFt4k8j6ycEcNvXRnVGxCPKqwbAfQS", "/ip4/192.168.2.100/tcp/30002"]}' -H 'content-type:application/json' 'http://localhost:8114'
    /// Dials the given peer (test-only helper).
    #[rpc(name = "add_node")]
    fn add_node(&self, peer_id: String, address: String) -> Result<()>;

    /// Pushes a transaction straight into the tx pool, returning its hash
    /// (test-only helper).
    #[rpc(name = "enqueue_test_transaction")]
    fn enqueue_test_transaction(&self, _tx: Transaction) -> Result<H256>;
}
/// Implementation backing the integration-test RPC endpoints.
pub(crate) struct IntegrationTestRpcImpl<CS> {
    // Handle for instructing the P2P layer (e.g. dialing peers).
    pub network_controller: NetworkController,
    // Shared chain state, including the transaction pool.
    pub shared: Shared<CS>,
}
impl<CS: ChainStore + 'static> IntegrationTestRpc for IntegrationTestRpcImpl<CS> {
    fn add_node(&self, peer_id: String, address: String) -> Result<()> {
        // Test-only endpoint: malformed inputs panic via `expect` rather than
        // returning an RPC error.
        self.network_controller.add_node(
            &peer_id.parse().expect("invalid peer_id"),
            address.parse().expect("invalid address"),
        );
        Ok(())
    }

    fn enqueue_test_transaction(&self, tx: Transaction) -> Result<H256> {
        // Convert the JSON-RPC transaction to the core type; undecodable
        // payloads become a JSON-RPC parse error.
        let tx: CoreTransaction = tx.try_into().map_err(|_| Error::parse_error())?;
        let mut chain_state = self.shared.chain_state().lock();
        let tx_hash = tx.hash();
        // NOTE(review): the first argument appears to mean "cycles unknown" —
        // confirm against the tx-pool `enqueue_tx` API.
        chain_state.mut_tx_pool().enqueue_tx(None, tx);
        Ok(tx_hash)
    }
}
|
// Returns the market price of a given coin pair
// TODO: Remove this market prices simulations
// TODO: Dynamically retrieve market price for wallet coins from broker API
/// Looks up the (hard-coded, simulated) market price for a trading pair.
///
/// Returns `0.0` for unknown pairs. Takes `&str` instead of the former
/// `&String`; existing call sites passing `&my_string` still compile via
/// deref coercion.
pub fn get_pair_price(pair: &str) -> f64 {
    // A `match` replaces the mutable-sentinel if/else chain; the compiler
    // guarantees exactly one arm is chosen.
    match pair {
        "BTC/USDT" => 55818.12,
        "SOL/USDT" => 148.05,
        "TRX/USDT" => 0.09453,
        "SHIB/USDT" => 0.00002872,
        "BTT/USDT" => 0.003648,
        "MBOX/USDT" => 4.80,
        _ => 0.0,
    }
}
|
#![deny(missing_docs)]
//! Hello world
mod utilities;
#[allow(unused_imports)]
use crate::utilities::*;
mod components;
#[allow(unused_imports)]
use crate::components::*;
mod backbone;
use backbone::simulator::SimulatorRunState;
mod systems;
use crate::systems::*;
use amethyst::{
core::transform::TransformBundle,
input::{InputBundle, StringBindings},
prelude::*,
renderer::{
plugins::{RenderFlat2D, RenderToWindow},
types::DefaultBackend,
RenderingBundle,
},
utils::{application_root_dir, auto_fov::AutoFovSystem},
};
/// Builds and runs the Amethyst application: wires rendering, input,
/// transform, and the simulator's custom systems, then starts the game loop.
fn main() -> amethyst::Result<()> {
    amethyst::start_logger(Default::default());
    let app_root = application_root_dir()?;
    // display config
    let display_config_path = app_root.join("config").join("display.ron");
    // assets config
    let assets_dir = app_root.join("assets");
    // keyboard bindings
    let binding_path = app_root.join("config").join("bindings.ron");
    let input_bundle =
        InputBundle::<StringBindings>::new().with_bindings_from_file(binding_path)?;
    let game_data = GameDataBuilder::default()
        .with_bundle(
            RenderingBundle::<DefaultBackend>::new()
                // The RenderToWindow plugin provides all the scaffolding for opening a window and drawing on it
                .with_plugin(
                    RenderToWindow::from_config_path(display_config_path)
                        .unwrap()
                        // Clear to opaque black each frame.
                        .with_clear([0.0, 0.0, 0.0, 1.0]),
                )
                // RenderFlat2D plugin is used to render entities with a `SpriteRender` component.
                .with_plugin(RenderFlat2D::default()),
            // plugin for rendering GUI elements, like the score
            // .with_plugin(RenderUi::default()),
        )?
        // Add the input bundle which handles keyboard / mouse input
        .with_bundle(input_bundle)?
        // Add the transform bundle which handles tracking entity positions
        .with_bundle(TransformBundle::new())?
        // Custom systems; the string dependencies order them after the
        // systems they rely on (e.g. zoom/pan run after input).
        .with(AutoFovSystem::new(), "fov_system", &[])
        .with(ZoomingSystem, "zooming_system", &["input_system"])
        .with(PanningSystem, "panning_system", &["input_system"])
        .with(MovementSystem, "movement_system", &[])
        .with(PositionSystem, "position_system", &["movement_system"]);
    // let mut world = World::new();
    let mut game = Application::new(assets_dir, SimulatorRunState::default(), game_data)?;
    game.run();
    Ok(())
}
|
use crate::text::Message;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
/// Message sent by client to server.
///
/// Lifetime `'a` borrows user-supplied text (names, message text);
/// `'b` borrows the wrapped [`Message`] payload itself.
#[derive(Deserialize, Serialize, Clone, Debug, Eq, PartialEq)]
pub enum ClientMessage<'a, 'b> {
    /// Subscribe to a group's updates.
    JoinGroup { gid: usize },
    /// Unsubscribe from a group's messages.
    LeaveGroup { gid: usize },
    /// Join a group as a user.
    JoinUser { gid: usize, name: Cow<'a, str> },
    /// Leave a group as a user.
    LeaveUser { gid: usize, uid: usize },
    /// Change the name of a user.
    RenameUser {
        gid: usize,
        uid: usize,
        name: Cow<'a, str>,
    },
    /// Send a message as a user.
    SendMessage {
        gid: usize,
        uid: usize,
        message: Cow<'b, Message<'a>>,
    },
}
|
use super::*;
///////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////
/// Full user record as persisted to storage, including the password hash.
/// Never send this to a client — use [`UserFull::only_roles`] to strip
/// the secret first.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct UserFull {
    name: String,
    email: String,
    /// bcrypt hash of the password (never the plaintext).
    #[serde(rename = "hashedPassword")]
    hashed_password: String,
    #[serde(rename = "isAdmin")]
    is_admin: bool,
    #[serde(rename = "canEdit")]
    can_edit: bool,
}
impl UserFull {
pub fn only_roles(&self) -> UserWithRoles {
UserWithRoles {
name: self.name.clone(),
email: self.email.clone(),
is_admin: self.is_admin,
can_edit: self.can_edit,
}
}
}
/// Reasons a request could not be associated with a logged-in user.
#[derive(Debug)]
pub enum LoginError {
    /// The request carried no `session_id` cookie at all.
    NoSessionIdCookieOnRequest,
    /// The cookie was present but was not a parseable UUID.
    BadSessionIdCookie,
    /// The UUID did not match any live session on the server.
    NoExistingSessionForUser,
}
/// Client-safe view of a user: identity plus role flags, no password
/// hash. Also acts as a Rocket request guard via its `FromRequest` impl.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct UserWithRoles {
    pub name: String,
    pub email: String,
    // NOTE(review): private while the sibling fields are `pub` — looks
    // inconsistent; confirm whether module-only access is intentional.
    #[serde(rename = "isAdmin")]
    is_admin: bool,
    #[serde(rename = "canEdit")]
    pub can_edit: bool,
}
#[rocket::async_trait]
impl<'r> FromRequest<'r> for UserWithRoles {
    type Error = LoginError;
    /// Request guard: resolve the private `session_id` cookie to a live
    /// session, yielding the associated user, or a 401 with a `LoginError`.
    async fn from_request(
        request: &'r Request<'_>,
    ) -> request::Outcome<UserWithRoles, Self::Error> {
        // Missing cookie: the client never logged in (or cleared it).
        let session_id_cookie = match request.cookies().get_private("session_id") {
            Some(v) => v,
            None => return get_login_failure(LoginError::NoSessionIdCookieOnRequest),
        };
        let session_id_str = session_id_cookie.value();
        // Session ids are UUIDs; anything else is a tampered/corrupt cookie.
        let session_id = match uuid::Uuid::parse_str(session_id_str) {
            Ok(v) => v,
            Err(_) => return get_login_failure(LoginError::BadSessionIdCookie),
        };
        // Managed state is registered at startup, hence the `expect`s.
        let tsm = request
            .rocket()
            .state::<users_and_sessions::TimSessionsMap>()
            .expect("Should be able to get state")
        let sessions_map = tsm.sessions_map.lock().expect("Should unlock");
        let user = match sessions_map.retrieve_user_from_session_id(&session_id) {
            Some(v) => v,
            None => return get_login_failure(LoginError::NoExistingSessionForUser),
        };
        request::Outcome::Success(user.clone())
    }
}
/// Build the 401 Unauthorized guard outcome carrying the given login error.
fn get_login_failure(le: LoginError) -> request::Outcome<UserWithRoles, LoginError> {
    let status = rocket::http::Status::Unauthorized;
    request::Outcome::Failure((status, le))
}
///////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////
/// Rocket-managed wrapper around the session table; the `Arc<Mutex<_>>`
/// lets concurrent request handlers share and mutate the single map.
#[derive(Clone)]
pub struct TimSessionsMap {
    pub sessions_map: Arc<Mutex<SessionsMap>>,
}
impl TimSessionsMap {
    /// Create a fresh, empty session store, ready to hand to Rocket as
    /// managed state.
    pub fn new_instance() -> TimSessionsMap {
        let empty = SessionsMap {
            sessions: HashMap::new(),
        };
        TimSessionsMap {
            sessions_map: Arc::new(Mutex::new(empty)),
        }
    }
}
/// Maps live session ids to the logged-in user they belong to.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct SessionsMap {
    pub sessions: HashMap<uuid::Uuid, UserWithRoles>,
}
impl SessionsMap {
    /// Create a session for `user` and return its id.
    ///
    /// Loops on the (astronomically unlikely) chance of a v4 UUID
    /// collision so that an existing session is never overwritten.
    pub fn add_session_for_user(&mut self, user: UserWithRoles) -> uuid::Uuid {
        let mut session_id = uuid::Uuid::new_v4();
        while self.sessions.contains_key(&session_id) {
            session_id = uuid::Uuid::new_v4();
        }
        let _old_value = self.sessions.insert(session_id, user);
        session_id
    }
    /// Propagate updated role flags to every live session of `email`
    /// (a user may be logged in from several clients at once).
    pub fn change_user_roles(&mut self, email: String, is_admin: bool, can_edit: bool) {
        // Session ids are irrelevant here; iterate the users directly.
        for user in self.sessions.values_mut() {
            if user.email == email {
                user.is_admin = is_admin;
                user.can_edit = can_edit;
            }
        }
    }
    /// Look up the user owning `session_id`, if that session is live.
    pub fn retrieve_user_from_session_id(&self, session_id: &uuid::Uuid) -> Option<&UserWithRoles> {
        self.sessions.get(session_id)
    }
    /// Drop a session; `Some(())` if it existed, `None` otherwise.
    pub fn remove_session(&mut self, session_id: &uuid::Uuid) -> Option<()> {
        // `session_id` is already a reference — no extra `&` needed.
        self.sessions.remove(session_id).map(|_| ())
    }
}
/// JSON body for `/login` and `/reset_password`: plaintext credentials
/// (relies on the transport being HTTPS).
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct LoginRequest {
    pub email: String,
    pub password: String,
}
/// JSON body for `/new_user`: credentials plus initial role flags.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct NewUserRequest {
    pub email: String,
    pub password: String,
    pub name: String,
    #[serde(rename = "isAdmin")]
    is_admin: bool,
    #[serde(rename = "canEdit")]
    pub can_edit: bool,
}
/// JSON body for `/delete_user`; email is the unique user key.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DeleteUserRequest {
    pub email: String,
}
/// JSON response for a successful `/login`: the client-safe user view.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct LoginReturn {
    pub user: UserWithRoles,
}
///////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////
/// Admin-only listing of every user (password hashes stripped).
/// Non-admins receive an empty list rather than an error.
#[get("/all_users")]
fn all_users(user: UserWithRoles) -> Json<Vec<UserWithRoles>> {
    if !user.is_admin {
        return Json(Vec::new());
    }
    // A storage failure degrades to an empty listing instead of a 500.
    let users = get_users().unwrap_or_default();
    let users_with_roles: Vec<UserWithRoles> = users.iter().map(|x| x.only_roles()).collect();
    Json(users_with_roles)
}
/// Set a new password for the account named in `lr`. Admins may reset
/// anyone; ordinary users only their own. `None` (404) on any
/// authorization, hashing or storage failure.
#[post("/reset_password", format = "application/json", data = "<lr>")]
fn reset_password(user: UserWithRoles, lr: Json<LoginRequest>) -> Option<()> {
    let email = lr.email.clone();
    // Only admins may reset someone else's password.
    if !user.is_admin && user.email != email {
        return None;
    }
    let new_password = lr.password.clone();
    // Store a bcrypt hash, never the plaintext.
    let hashed_password = hash(new_password, DEFAULT_COST).ok()?;
    let mut users = get_users()?;
    let user = users.iter_mut().find(|x| x.email == email)?;
    user.hashed_password = hashed_password;
    save_new_users(users)?;
    Some(())
}
/// Admin-only: create a user, rejecting duplicate email addresses.
#[post("/new_user", format = "application/json", data = "<nur>")]
fn new_user(user: UserWithRoles, nur: Json<NewUserRequest>) -> Option<()> {
    if !user.is_admin {
        return None;
    }
    let new_user = nur.into_inner();
    let hashed_password = hash(new_user.password.clone(), DEFAULT_COST).ok()?;
    let mut users = get_users()?;
    // Email doubles as the unique user key; refuse duplicates.
    if users.iter().any(|x| x.email == new_user.email) {
        return None;
    }
    let new_user_full = UserFull {
        name: new_user.name,
        email: new_user.email,
        hashed_password,
        is_admin: new_user.is_admin,
        can_edit: new_user.can_edit,
    };
    users.push(new_user_full);
    save_new_users(users)?;
    Some(())
}
/// Update a user's name and role flags (email is the immutable lookup
/// key). Live sessions are updated too so new roles apply without
/// re-login.
///
/// NOTE(review): the guard only checks `is_admin || same email` — as
/// written, a NON-admin editing their own record can submit
/// `is_admin: true` and have it saved. Confirm whether self-edits should
/// be restricted to non-role fields; this looks like privilege escalation.
#[post("/update_user", format = "application/json", data = "<uwr>")]
fn update_user(
    user: UserWithRoles,
    uwr: Json<UserWithRoles>,
    tsm: &State<TimSessionsMap>,
) -> Option<()> {
    let new_user = uwr.into_inner();
    if !user.is_admin && user.email != new_user.email {
        return None;
    }
    let mut users = get_users()?;
    let user = users.iter_mut().find(|x| x.email == new_user.email)?;
    user.name = new_user.name;
    user.is_admin = new_user.is_admin;
    user.can_edit = new_user.can_edit;
    save_new_users(users)?;
    // Update session map with new roles
    let mut sessions_map = tsm.sessions_map.lock().ok()?;
    sessions_map.change_user_roles(new_user.email, new_user.is_admin, new_user.can_edit);
    Some(())
}
#[post("/delete_user", format = "application/json", data = "<dur>")]
fn delete_user(user: UserWithRoles, dur: Json<DeleteUserRequest>) -> Option<()> {
if !user.is_admin {
return None;
}
let user_to_delete = dur.into_inner();
let mut users = get_users()?;
users.retain(|x| x.email != user_to_delete.email);
save_new_users(users)?;
Some(())
}
// #[post(
// "/update_users",
// format = "application/json",
// data = "<new_users_json>"
// )]
// fn update_users(_user: UserWithRoles, new_users_json: Json<Vec<UserWithRoles>>) -> Json<u8> {
// let new_users = new_users_json.into_inner();
// save_new_users(new_users);
// Json(1)
// }
/// Return the currently logged-in user. The request guard performs the
/// session lookup; unauthenticated requests never reach this body.
#[get("/user")]
fn user(user: UserWithRoles) -> Json<UserWithRoles> {
    Json(user)
}
#[post("/login", format = "application/json", data = "<lr>")]
fn login(
jar: &CookieJar<'_>,
tsm: &State<TimSessionsMap>,
lr: Json<LoginRequest>,
) -> Option<Json<LoginReturn>> {
let email = lr.email.clone();
let password_attempt = lr.password.clone();
let users = get_users()?;
let user = users.iter().find(|x| x.email == email)?;
let password_is_valid = verify(password_attempt, &user.hashed_password).ok()?;
if !password_is_valid {
return None;
}
let user_for_session = user.only_roles();
let user_for_client = user.only_roles();
let mut sessions_map = tsm.sessions_map.lock().ok()?;
let session_id = sessions_map.add_session_for_user(user_for_session);
let mut c = Cookie::new("session_id", session_id.to_string());
//
// Change this if you want it to work on HTTP as well as HTTPS
c.set_secure(true);
//
// Change this to ::None if using an API model
c.set_same_site(SameSite::Strict);
//
jar.add_private(c);
Some(Json(LoginReturn {
user: user_for_client,
}))
}
/// Tear down the current session: drop it from the server-side map and
/// clear the client's `session_id` cookie.
#[get("/logout")]
fn logout(jar: &CookieJar<'_>, tsm: &State<TimSessionsMap>) -> Option<()> {
    let session_id_cookie = jar.get_private("session_id")?;
    let session_id_str = session_id_cookie.value();
    let session_id = uuid::Uuid::parse_str(session_id_str).ok()?;
    // `.ok()?` instead of `.unwrap()`: a poisoned lock becomes a 404
    // instead of panicking the worker, matching every other handler here.
    let mut sessions_map = tsm.sessions_map.lock().ok()?;
    sessions_map.remove_session(&session_id);
    jar.remove_private(Cookie::named("session_id"));
    Some(())
}
/// All user/session routes, ready to be mounted on a Rocket instance.
pub fn user_routes() -> Vec<rocket::Route> {
    routes![
        all_users,
        reset_password,
        new_user,
        update_user,
        delete_user,
        user,
        login,
        logout
    ]
}
|
use archiver::config;
use archiver::ctx::Ctx;
/// Smoke-test entry point: load `archiver.toml`, build a context, and
/// fire a test notification through the configured notifier.
fn main() {
    archiver::cli::run(|| {
        // `cfg?` surfaces config-file errors through the cli wrapper.
        let cfg = config::Config::from_file("archiver.toml");
        let ctx = Ctx::create(cfg?)?;
        ctx.notify("Test notification!")?;
        Ok(())
    })
}
|
//------------------------------------------------------------------------------
// from+git_me@luketitley.com
//------------------------------------------------------------------------------
mod args;
mod tasks;
use args::*;
//------------------------------------------------------------------------------
/// CLI entry point: parse arguments with `argh` and dispatch to the
/// matching task implementation.
fn main() {
    let tasks: Tasks = argh::from_env();
    match tasks.task {
        // Feature
        Task::Feature(Feature { status }) => match status {
            Status::Start(Start { name }) => tasks::feature::start(&name),
            Status::Review(Review { reviewer }) => {
                tasks::feature::review(&reviewer)
            }
            Status::Finish(Finish {}) => tasks::feature::finish(),
            Status::Rebase(Rebase {}) => tasks::feature::rebase(),
            Status::Enter(Enter {}) => tasks::feature::enter(),
            Status::Exit(Exit {}) => tasks::feature::exit(),
            Status::Switch(Switch {}) => tasks::feature::switch(),
            Status::List(List {}) => tasks::feature::list(),
        },
        // Hotfix/Release/Setup currently only print placeholders.
        Task::Hotfix(Hotfix {
            status: Status::Start(Start { name }),
        }) => {
            println!("start hotfix/{}", name);
        }
        Task::Release(Release {
            branch: Branch::Master(Master {}),
        }) => {
            println!("Release patch");
        }
        Task::Release(Release {
            branch: Branch::Develop(Develop {}),
        }) => {
            println!("Release minor");
        }
        Task::Setup(Setup { server, token }) => {
            println!("setup {} {}", server, token);
        }
        // NOTE(review): other combinations (e.g. non-start hotfix states)
        // are silently ignored — confirm that is intended.
        _ => (),
    }
}
|
use server::location::Location;
use vector::Vector2;
/// Read-only view of a player entity shared between server components.
pub trait Player {
    /// Display name of the player.
    fn name(&self) -> String;
    fn uuid(&self) -> String; // TODO: Are we using UUIDs here? Need to check proto
    // TODO: Verify that f32 is right here
    /// Current position in the world.
    fn location(&self) -> Location;
    /// Current facing as a two-component rotation vector.
    fn rotation(&self) -> Vector2<f32>;
}
|
use byteorder::{LittleEndian, WriteBytesExt};
use failure::Error;
use crate::{
chunks::TOKEN_XML_TAG_END,
model::{owned::OwnedBuf, TagEnd},
};
/// Owned buffer representing an XML tag-end chunk; `id` is the tag id
/// serialized into the chunk body.
#[derive(Debug, Copy, Clone)]
pub struct XmlTagEndBuf {
    id: u32,
}

impl XmlTagEndBuf {
    /// Build a tag-end buffer for the tag with the given id.
    pub fn new(id: u32) -> Self {
        XmlTagEndBuf { id }
    }
}
impl OwnedBuf for XmlTagEndBuf {
    /// Chunk-type token identifying a tag-end chunk.
    fn get_token(&self) -> u16 {
        TOKEN_XML_TAG_END
    }
    /// Body: a sentinel word then the tag id, both little-endian.
    fn get_body_data(&self) -> Result<Vec<u8>, Error> {
        let mut out = Vec::new();
        // ?? (meaning unknown; always written as 0xFFFF_FFFF here)
        out.write_u32::<LittleEndian>(0xFFFF_FFFF)?;
        // Id
        out.write_u32::<LittleEndian>(self.id)?;
        Ok(out)
    }
    /// Header: the write count then a 0xFFFF_FFFF marker, little-endian.
    fn get_header(&self) -> Result<Vec<u8>, Error> {
        let mut out = Vec::new();
        // Amount of writes
        out.write_u32::<LittleEndian>(3)?;
        // ??
        out.write_u32::<LittleEndian>(0xFFFF_FFFF)?;
        Ok(out)
    }
}
impl TagEnd for XmlTagEndBuf {
    /// The tag id; infallible here, but the trait allows failure.
    fn get_id(&self) -> Result<u32, Error> {
        Ok(self.id)
    }
}
#[cfg(test)]
mod tests {
    use super::{OwnedBuf, XmlTagEndBuf};
    use crate::{chunks::XmlTagEndWrapper, raw_chunks, test::compare_chunks};
    /// Serializing a zero-id tag end yields the expected raw byte layout.
    #[test]
    fn it_can_generate_an_empty_chunk() {
        let tag_end = XmlTagEndBuf::new(0);
        let out = tag_end.to_vec().unwrap();
        let expected = [
            3, 1, 16, 0, 24, 0, 0, 0, 3, 0, 0, 0, 255, 255, 255, 255, 255, 255, 255, 255, 0, 0, 0,
            0,
        ];
        assert_eq!(expected, out.as_slice());
    }
    /// Round-trip: raw chunk -> wrapper -> owned buffer -> raw chunk.
    #[test]
    fn identity() {
        let raw = raw_chunks::EXAMPLE_TAG_END;
        let wrapper = XmlTagEndWrapper::new(&raw);
        let owned = wrapper.to_buffer().unwrap();
        let new_raw = owned.to_vec().unwrap();
        compare_chunks(&raw, &new_raw);
    }
}
|
extern crate crypto2;
use crypto2::aeadcipher::Chacha20Poly1305;
/// Demo: ChaCha20-Poly1305 AEAD encryption of a fixed plaintext,
/// printing plaintext, ciphertext and authentication tag.
fn main() {
    // 32-byte key (values match the RFC 8439 example key — TODO confirm).
    let key = [
        0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e,
        0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d,
        0x1e, 0x1f,
    ];
    // 12-byte nonce.
    let nonce = [
        0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x4a, 0x00, 0x00, 0x00, 0x00,
    ];
    let aad = [1u8; Chacha20Poly1305::BLOCK_LEN];
    let plaintext = [1u8; 64];
    let plen = plaintext.len();
    let cipher = Chacha20Poly1305::new(&key);
    // In-place API: the buffer holds plaintext, then ciphertext + tag,
    // so it must be extended by TAG_LEN before encrypting.
    let mut ciphertext_and_tag = plaintext.to_vec();
    ciphertext_and_tag.resize(plen + Chacha20Poly1305::TAG_LEN, 0);
    cipher.encrypt_slice(&nonce, &aad, &mut ciphertext_and_tag[..]);
    println!("plaintext : {:?}", &plaintext[..]);
    println!("ciphertext: {:?}", &ciphertext_and_tag[..plen]);
    println!("tag: {:?}", &ciphertext_and_tag[plen..]);
}
|
//! Example of asynchronous, concurrently running reads/writes on a socket.
use futures::Future;
use std::net::SocketAddr;
use tokio::net::TcpListener;
mod server {
    use futures::{Future, Stream};
    use tokio::{
        io::{copy, AsyncRead},
        net::TcpListener,
    };
    /// A bare-bones echo server.
    pub fn run(listener: TcpListener) -> impl Future<Item = (), Error = ()> {
        listener
            .incoming()
            .map_err(|err| eprintln!("[server] I/O error while accepting connections: {}", err))
            // `for_each` takes a closure returning a future with `Item = ()` and the same
            // error type as the stream — which is exactly why we map the `io::Error` to
            // `()` above, so we can return a future with `Error = ()`. Strictly speaking
            // it expects an `IntoFuture`, not a `Future`, but that's a detail :) and it is
            // precisely what `tokio::spawn` returns.
            .for_each(|connection| {
                // `AsyncRead::split()` splits the stream into two halves (provided the
                // stream also implements `AsyncWrite`).
                let (reader, writer) = connection.split();
                // Simply copy everything back (asynchronously).
                let echoing = copy(reader, writer)
                    .map(|(bytes_copied, _reader, _writer)| {
                        println!("[server] Copied {} bytes", bytes_copied);
                    })
                    .map_err(|err| {
                        eprintln!(
                            "[server] I/O error while echoing data back to the client: {}",
                            err
                        )
                    });
                // Run the future in its own task, i.e. each incoming connection gets a
                // separate task. Without this, the next connection would only be handled
                // once the current one is fully processed (and likely closed).
                tokio::spawn(echoing)
            })
    }
}
mod client {
    use futures::{
        future::{loop_fn, Loop},
        stream::unfold,
        sync::oneshot::{channel, Receiver, Sender},
        Future, Stream,
    };
    use rand::{rngs::SmallRng, Rng, SeedableRng};
    use std::{
        net::SocketAddr,
        time::{Duration, Instant},
    };
    use tokio::{
        io::{read, write_all, AsyncRead, AsyncWrite},
        net::TcpStream,
        timer::Delay,
    };
    /// Read everything the server has to say, until either the connection is closed
    /// (`bytes_read` == 0) or the shutdown signal arrives.
    ///
    /// Note: if the client closes the connection before the server has finished
    /// sending (more than likely, especially on localhost), the server will get a
    /// "Connection reset by peer" error on its next send — so don't be surprised
    /// to see that error at the very end of a run.
    fn read_all<T: AsyncRead>(
        input: T,
        when_to_end: Receiver<()>,
    ) -> impl Future<Item = (), Error = ()> {
        // We could avoid pre-allocating the buffer and threading it back and forth,
        // but this is a very useful technique that avoids an extra allocation on
        // every read.
        let buf = vec![0; 200];
        // See `futures::future::loop_fn`.
        let reading = loop_fn((input, buf), |(input, buf)| {
            // `tokio::io::read` reads as much as it can in one go, but never more
            // than the buffer size — obvious, really, since it takes the buffer as
            // `AsMut<[u8]>` and therefore cannot affect its size.
            read(input, buf)
                .map(move |(input, buf, bytes_read)| {
                    println!("[client] Received {} bytes", bytes_read);
                    if bytes_read == 0 {
                        Loop::Break(bytes_read)
                    } else {
                        Loop::Continue((input, buf))
                    }
                })
                .map_err(move |err| eprintln!("[client] I/O error while reading data: {}", err))
        })
        .map(|bytes| println!("[client] Read {} bytes in total", bytes));
        reading
            .select2(when_to_end.map(|_| println!("[client] Stop reading")))
            .map(|_| ())
            .map_err(|_| ())
    }
    /// Generates an endless stream that resolves at random intervals
    /// (0 to 30 microseconds — note `Duration::from_micros` below; the
    /// original comment said milliseconds, which does not match the code).
    fn random_instants() -> impl Stream<Item = (), Error = ()> {
        let rng = SmallRng::seed_from_u64(0);
        // See `futures::stream::unfold`.
        unfold(rng, |mut rng| {
            let duration = Duration::from_micros(rng.gen_range(0, 30));
            let when = Instant::now() + duration;
            // We return `((), rng)` because `unfold` expects the future's value to
            // be a pair of the item the generated stream yields and the state that
            // is passed back to this closure on the next iteration.
            Some(Delay::new(when).map(move |()| ((), rng)))
        })
        .map_err(|_| ())
    }
    /// Sends a randomly-sized vector at random intervals.
    fn write_randomly<T: AsyncWrite>(
        output: T,
        sender: Sender<()>,
    ) -> impl Future<Item = (), Error = ()> {
        let rng = SmallRng::seed_from_u64(0);
        let buf = Vec::new();
        random_instants()
            // Ten times is enough.
            .take(10)
            // See `futures::stream::fold`.
            .fold(
                (rng, buf, output),
                move |(mut rng, mut buf, output), _instant| {
                    buf.resize(rng.gen_range(1, 400), 0);
                    write_all(output, buf)
                        .map_err(|err| eprintln!("[client] I/O error while sending data: {}", err))
                        .map(|(output, buf)| {
                            println!("[client] Sent {} bytes", buf.len());
                            (rng, buf, output)
                        })
                },
            )
            .map(|(_rng, _buf, _output)| {})
            .then(move |_| {
                // When done, send a message over the channel saying it's time to wrap up.
                sender.send(()).map(|_| ()).map_err(|_| ())
            })
    }
    pub fn run(addr: &SocketAddr) -> impl Future<Item = (), Error = ()> {
        TcpStream::connect(addr)
            .map_err(|err| eprintln!("[client] Can't connect: {}", err))
            .and_then(|socket| {
                // Split the socket into receiving (`AsyncRead`) and sending
                // (`AsyncWrite`) halves.
                let (reader, writer) = socket.split();
                // The channel is used to stop reading from the server once we have
                // sent everything we intended to.
                let (sender, receiver) = channel();
                // Start reading data from the server in a separate task ...
                tokio::spawn(read_all(reader, receiver));
                // ... and write in the current one, so this task finishes right
                // after we are done sending data.
                write_randomly(writer, sender)
            })
    }
}
/// Entry point: bind an ephemeral port, run the echo server and the
/// client against it, and stop when either side finishes.
fn main() {
    // Ask for port 0 so the operating system assigns a free port itself.
    let addr: SocketAddr = ([127, 0, 0, 1], 0).into();
    let listener = TcpListener::bind(&addr).unwrap();
    let addr = listener.local_addr().unwrap();
    // By now the port should be non-zero.
    assert_ne!(0, addr.port());
    let srv = server::run(listener);
    let client = client::run(&addr);
    tokio::run(
        // This combined future completes when either `srv` or `client` completes.
        srv.select(client)
            .map(|((), _next_ready)| ())
            .map_err(|((), _next_ready)| ()),
    );
}
|
use super::EMPTY;
/// Bucket array with bi-directional pointers.
pub struct Bucket {
    /// `bounds[c]..bounds[c + 1]` is the range of bucket `c` in the
    /// output array (one bucket per byte value, cumulative counts).
    bounds: [u32; 257],
    /// Per-bucket forward (head) insertion pointers for l-type entries.
    ps: [u32; 256],
    /// Per-bucket backward (tail) insertion pointers for s-type entries.
    qs: [u32; 256],
}
impl Bucket {
    /// Construct the bucket array: count byte occurrences in `s`, then
    /// prefix-sum the counts into bucket boundaries and reset both
    /// pointer sets to their initial positions.
    #[inline]
    pub fn compute(s: &[u8]) -> Box<Self> {
        let mut bkt = Box::new(Bucket {
            bounds: [0; 257],
            ps: [0; 256],
            qs: [0; 256],
        });
        // Start at 1, so bucket ranges begin at index 1 (index 0 is
        // presumably reserved for the sentinel — TODO confirm at callers).
        let mut sum = 1;
        s.iter().for_each(|&ch| bkt.bounds[ch as usize] += 1);
        // Exclusive prefix sum: bounds[c] becomes the start of bucket c.
        bkt.bounds.iter_mut().for_each(|acc| {
            let n = *acc;
            *acc = sum;
            sum += n;
        });
        bkt.reset_l_ptrs();
        bkt.reset_s_ptrs();
        bkt
    }
    /// Insert l-type characters in corresponding bucket head.
    #[inline]
    pub fn insert_head(&mut self, s: &[u8], sa: &mut [u32], i: usize) {
        let c = s[i] as usize;
        // Write at the head pointer, then advance it forward.
        sa[self.ps[c] as usize] = i as u32;
        self.ps[c] += 1;
    }
    /// Insert s-type characters in corresponding bucket tail.
    #[inline]
    pub fn insert_tail(&mut self, s: &[u8], sa: &mut [u32], i: usize) {
        let c = s[i] as usize;
        // Retreat the tail pointer first, then write at it.
        self.qs[c] -= 1;
        sa[self.qs[c] as usize] = i as u32;
    }
    /// Clear content of all the bucket tails and reset all the s-pointers.
    #[inline]
    pub fn clear_tails(&mut self, sa: &mut [u32]) {
        for c in 0..=255 {
            // The s-pointer..tail range is exactly the part of bucket `c`
            // that was filled by `insert_tail`.
            let t = self.get_tail_ptr(c);
            let q = self.get_s_ptr(c);
            sa[q..t].iter_mut().for_each(|i| *i = EMPTY);
        }
        self.reset_s_ptrs();
    }
    /// Get the bucket tail.
    #[inline]
    pub fn get_tail_ptr(&self, c: u8) -> usize {
        self.bounds[c as usize + 1] as usize
    }
    /// Get the l-pointer.
    #[inline]
    pub fn get_l_ptr(&self, c: u8) -> usize {
        self.ps[c as usize] as usize
    }
    /// Get the s-pointer.
    #[inline]
    pub fn get_s_ptr(&self, c: u8) -> usize {
        self.qs[c as usize] as usize
    }
    /// Reset all the l-pointers to their bucket starts.
    #[inline]
    pub fn reset_l_ptrs(&mut self) {
        // `copy_from_slice` only reads its source — a shared borrow
        // suffices (the original passed `&mut` unnecessarily).
        self.ps.copy_from_slice(&self.bounds[..256]);
    }
    /// Reset all the s-pointers to their bucket ends.
    #[inline]
    pub fn reset_s_ptrs(&mut self) {
        self.qs.copy_from_slice(&self.bounds[1..257]);
    }
}
|
extern crate serde;
extern crate serde_derive;
extern crate serde_json;
// https://www.goldmansachs.com/insights/insights-articles.json
/// Top-level shape of the insights-articles JSON feed (see URL above).
#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Root {
    pub items: Vec<GSItem>,
    pub articles: Vec<GSArticle>,
}
impl crate::HasRecs for Root {
    /// One record per article; `items` are not flattened here.
    fn to_recs(&self) -> Vec<Vec<String>> {
        self.articles.iter().map(|x| x.to_rec()).collect()
    }
}
/// A feed item: a titled node that may carry featured articles.
#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GSItem {
    pub description: String,
    pub title: String,
    pub node_id: i64,
    pub url: String,
    pub featured_articles: Option<Vec<GSArticle>>,
}
/// A single article entry from the feed, with optional media flags,
/// topics and series metadata.
#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GSArticle {
    pub has_video: bool,
    pub date: Option<String>,
    pub has_audio: bool,
    pub topics: Option<Vec<GSTopic>>,
    pub image_url: Option<String>,
    pub description: String,
    pub title: String,
    pub node_id: i64,
    pub url: String,
    pub series: Option<GSTopic>,
}
impl GSArticle {
    /// Flatten this article into a record of strings, in column order:
    /// node id, date (empty string when absent), title, description,
    /// has-video flag, has-audio flag.
    pub fn to_rec(&self) -> Vec<String> {
        vec![
            self.node_id.to_string(),
            // Missing dates become an empty column rather than being skipped.
            self.date.clone().unwrap_or_default(),
            self.title.clone(),
            self.description.clone(),
            self.has_video.to_string(),
            self.has_audio.to_string(),
        ]
        // NOTE(review): `series`/`topics` are not exported (a call appending
        // series data was previously commented out here) — confirm before
        // relying on the record shape.
    }
}
/// A topic/series reference attached to articles.
#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GSTopic {
    pub title: String,
    pub node_id: i64,
    pub url: String,
}
impl GSTopic {
    /// Flatten this topic into a record: title, node id, URL.
    pub fn to_rec(&self) -> Vec<String> {
        let mut rec = Vec::with_capacity(3);
        rec.push(self.title.clone());
        rec.push(self.node_id.to_string());
        rec.push(self.url.clone());
        rec
    }
}
|
use std::convert::TryInto;
use std::io;
use std::mem;
use std::net::{SocketAddr, SocketAddrV4, SocketAddrV6, TcpListener, TcpStream};
use std::os::unix::io::{AsRawFd, FromRawFd};
use std::time::Duration;
use nix::sys::time::{TimeVal, TimeValLike};
/// Extension trait adding a timeout-aware accept to `TcpListener`.
pub trait TcpListenerExt {
    /// Like `TcpListener::accept`, but fails with `TimedOut` after
    /// `timeout`; `None` waits indefinitely.
    fn accept_timeout(&self, timeout: Option<Duration>) -> io::Result<(TcpStream, SocketAddr)>;
}
impl TcpListenerExt for TcpListener {
    /// accept(2) with timeout
    ///
    /// * `timeout`
    /// Timeout for _accept_. If the value is `None`, wait connection indefinitely.
    fn accept_timeout(&self, timeout: Option<Duration>) -> io::Result<(TcpStream, SocketAddr)> {
        use nix::sys::select::*;
        let fd = self.as_raw_fd();
        // `None` maps to a `None` timeval, i.e. select(2) blocks forever.
        let mut tm = timeout.map(dur_to_timeval::<TimeVal>).transpose()?;
        let mut fds = FdSet::new();
        fds.insert(fd);
        // Wait for the listener to become readable (= connection pending).
        let r = select(None, &mut fds, None, None, &mut tm)
            .map_err(|err| io::Error::from_raw_os_error(err.as_errno().unwrap() as i32))?;
        // select(2) returns 0 when the timeout elapsed with no readiness.
        if r == 0 {
            return Err(io::Error::new(io::ErrorKind::TimedOut, "select accept"));
        }
        assert!(r == 1);
        assert!(fds.contains(fd));
        // Storage large enough for any address family; `len` is an in/out
        // parameter for accept(2).
        let mut storage: libc::sockaddr_storage = unsafe { mem::zeroed() };
        let mut len = mem::size_of_val(&storage) as libc::socklen_t;
        unsafe {
            // SAFETY: `fd` is a valid listening socket; `storage`/`len`
            // point to writable memory of the advertised size.
            let accepted =
                libc::accept(fd, &mut storage as *mut _ as *mut libc::sockaddr, &mut len);
            if accepted < 0 {
                return Err(io::Error::last_os_error());
            }
            let addr = sockaddr_to_addr(&storage, len as usize)?;
            // `accepted` is a fresh fd owned by nobody else, so handing it
            // to `from_raw_fd` transfers ownership soundly.
            Ok((TcpStream::from_raw_fd(accepted), addr))
        }
    }
}
/// Convert Duration to timeval in microseconds
fn dur_to_timeval<T: TimeValLike>(dur: Duration) -> io::Result<T> {
    // A duration whose microsecond count does not fit the target integer
    // type cannot be represented as a timeval; report invalid input.
    match dur.as_micros().try_into() {
        Ok(micros) => Ok(T::microseconds(micros)),
        Err(_) => Err(io::Error::new(
            io::ErrorKind::InvalidInput,
            format!("timeout convert error: {:?}", dur),
        )),
    }
}
/// Convert sockaddr_storage to SocketAddr
///
/// * `storage`
/// The target value contains an address.
/// The actual type of the value depends on its address family.
/// * `len`
/// The size of `storage` in bytes.
/// This should be larger than or equal to the size of the *actual* type of `storage`.
fn sockaddr_to_addr(storage: &libc::sockaddr_storage, len: usize) -> io::Result<SocketAddr> {
    match storage.ss_family as libc::c_int {
        libc::AF_INET => {
            // Guard against reading past the caller-supplied length.
            assert!(len as usize >= mem::size_of::<libc::sockaddr_in>());
            let addr = unsafe { *(storage as *const _ as *const libc::sockaddr_in) };
            // `sin_addr.s_addr` and `sin_port` are in network byte order
            // (big-endian); `SocketAddrV4::new` and `Ipv4Addr::from(u32)`
            // expect host-order values, so convert with `from_be` (this was
            // previously missing, swapping port/IP bytes on little-endian).
            Ok(SocketAddrV4::new(
                u32::from_be(addr.sin_addr.s_addr).into(),
                u16::from_be(addr.sin_port),
            )
            .into())
        }
        libc::AF_INET6 => {
            assert!(len as usize >= mem::size_of::<libc::sockaddr_in6>());
            let addr = unsafe { *(storage as *const _ as *const libc::sockaddr_in6) };
            // `s6_addr` is a plain byte array, so it needs no swapping, but
            // the port is big-endian just like the IPv4 case.
            Ok(SocketAddrV6::new(
                addr.sin6_addr.s6_addr.into(),
                u16::from_be(addr.sin6_port),
                addr.sin6_flowinfo,
                addr.sin6_scope_id,
            )
            .into())
        }
        af_family => Err(io::Error::new(
            io::ErrorKind::InvalidInput,
            format!("invalid argument: {}", af_family),
        )),
    }
}
|
use anyhow::{anyhow, Result};
use chrono::{DateTime, Utc};
use uuid::Uuid;
use crate::note::DbNote;
/// Create (idempotently) all tables, indexes, triggers and views the
/// note store needs. Safe to run on every startup: everything uses
/// `if not exists` / `insert or ignore`.
pub fn init_schema(db: &sqlite::Connection) -> Result<()> {
    db.execute("
        -- This table is a dictionary of note statuses.
        create table if not exists note_status(
            id integer primary key,
            label text
        );
        insert or ignore into note_status (id, label) values
            (1, 'active'), -- is ready for review
            (2, 'retired'); -- was updated by a newer version or deleted
        -- This table holds our notes with some metadata.
        -- Notes are read-only. Hash of tags+data is used as a primary key.
        -- When updating a note, a new version is created and the original one
        -- is marked as 'retired'.
        -- UUID is used to link versions together and track history of updates.
        create table if not exists notes(
            hash text primary key,
            uuid text not null,
            ctime text not null,
            mtime text,
            tags text not null,
            data json not null,
            status integer not null references note_status(id) default 1
        );
        create index if not exists notes_uuid_ix
            on notes(uuid);
        create trigger if not exists retire_updated_notes
            before insert on notes
        begin
            update notes
            set status = 2,
                mtime = strftime('%Y-%m-%dT%H:%M:%SZ', 'now')
            where true
                and hash <> new.hash
                and uuid = new.uuid
                and status = 1;
        end;
        create trigger if not exists add_fresh_notes_to_queue
            after insert on notes
        begin
            insert into queue
                (note_id, next_review)
            values
                (new.uuid, strftime('%Y-%m-%dT%H:%M:%SZ', 'now'))
            on conflict (note_id) do nothing;
        end;
        create view if not exists current_notes as
            select * from notes
            where status = 1;
        -- This table holds history of reviews.
        -- Each row references some note and contains a review outcome.
        create table if not exists review(
            id integer primary key,
            note_id text not null references notes(uuid),
            ctime text not null,
            result text not null, -- FIXME: dictionary of possible results?
            decision json not null -- free form details of the decision
        );
        -- Queue is used to select notes that are due for review.
        create table if not exists queue(
            note_id text not null unique references notes(uuid),
            next_review text not null
        );
        create index if not exists queue_next_review_ix
            on queue(next_review);
    ").map_err(|e| anyhow!(e))
}
/// Prepared-statement wrapper for inserting a single note row.
struct InsertNoteQuery<'l>(sqlite::Statement<'l>);
impl<'l> InsertNoteQuery<'l> {
    /// Prepare the insert once so it can be reused for a whole batch.
    /// `on conflict (hash) do nothing` is what makes inserts idempotent.
    fn init(db: &'l sqlite::Connection) -> Result<Self> {
        db.prepare("
            insert into notes
            (hash, uuid, ctime, tags, data)
            values
            (?, ?, ?, ?, ?)
            on conflict (hash) do nothing
        ").map_err(|e| anyhow!(e)).map(Self)
    }
    /// Bind one note's fields and run the statement to completion.
    fn exec(&mut self, n: &DbNote) -> Result<()> {
        // Reset so the prepared statement can be re-run for each note.
        self.0.reset()?;
        let (hash, json) = n.hash_and_json();
        self.0.bind(1, hash.as_str())?;
        self.0.bind(2, n.uuid.to_string().as_str())?;
        self.0.bind(3, n.ctime.to_rfc3339().as_str())?;
        self.0.bind(4, n.tags.as_str())?;
        self.0.bind(5, json.as_str())?;
        // Step until done; an insert is not expected to produce rows.
        while let sqlite::State::Row = self.0.next()? { }
        Ok(())
    }
}
/// Insert a batch of notes, stopping at the first error.
///
/// Idempotent: loading the same file again changes nothing, because
/// hash(tags, note_data) is the primary key and duplicate hashes are
/// ignored — so it is fine to fix the offending note and re-load the
/// whole file.
pub fn insert_notes(
    db: &sqlite::Connection,
    notes: &[DbNote]
) -> Result<()> {
    let mut query = InsertNoteQuery::init(db)?;
    notes.iter().try_for_each(|note| query.exec(note))
}
/// Pick up to 10 random notes that are due for review (queue time has
/// passed, note still active), optionally restricted to notes whose tag
/// string contains every given tag.
pub fn select_notes_for_review(
    db: &sqlite::Connection,
    tags: &[String]
) -> Result<Vec<DbNote>> {
    // FIXME: switch to rusqlite library and use regexp here
    // " and tags regexp ('\\b' || ? || '\\b')"
    // see example at https://docs.rs/rusqlite/latest/rusqlite/functions/
    // One `like` clause per tag; `%tag%` also matches substrings of longer
    // tags — hence the word-boundary FIXME above.
    let tag_filter = vec![" and tags like ('%' || ? || '%')"; tags.len()].join("");
    let mut q = db.prepare(
        format!("
            select
              n.uuid, n.ctime, n.tags, n.data
            from queue q, notes n
            where true
              and q.next_review <= strftime('%Y-%m-%dT%H:%M:%SZ', 'now')
              and q.note_id = n.uuid
              and n.status = 1
              {tag_filter}
            order by random()
            limit 10")
    )?;
    // SQLite bind parameters are 1-based.
    for (i, tag) in tags.iter().enumerate() {
        q.bind(i+1, tag.as_str())?;
    }
    let mut res = Vec::new();
    while let sqlite::State::Row = q.next()? {
        res.push(db_note_from_row(&q)?);
    }
    Ok(res)
}
/// Stream all active (status = 1) notes, oldest first.
pub fn active_notes(
    db: &sqlite::Connection
) -> Result<DbNotes> {
    let q = db.prepare("
        select
            uuid, ctime, tags, data
        from notes
        where status = 1
        order by ctime asc
    ")?;
    Ok(DbNotes(q))
}
// Assumes that q starts like "select uuid, ctime, tags, data ..".
// Column reads are positional (0..=3), so the select order matters.
fn db_note_from_row(q: &sqlite::Statement) -> Result<DbNote> {
    Ok(DbNote {
        uuid:
            Uuid::parse_str(q.read::<String>(0)?.as_str())?,
        ctime:
            // Stored as RFC 3339 text; normalize to UTC on the way out.
            DateTime::parse_from_rfc3339(q.read::<String>(1)?.as_str())?
                .with_timezone(&Utc),
        tags:
            q.read::<String>(2)?,
        data:
            serde_json::from_str(q.read::<String>(3)?.as_str())?,
    })
}
/// Lazy iterator over note rows produced by [`active_notes`].
pub struct DbNotes<'a>(sqlite::Statement<'a>);
impl<'a> Iterator for DbNotes<'a> {
    type Item = DbNote;
    fn next(&mut self) -> Option<DbNote> {
        // NOTE(review): both SQL errors and row-parse errors are silently
        // mapped to end-of-iteration here — confirm that is acceptable.
        match self.0.next() {
            Ok(sqlite::State::Row) => db_note_from_row(&self.0).ok(),
            _ => None
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::note::*;
    use chrono::{Local, Utc};
    use uuid::Uuid;

    #[test]
    fn can_init_schema() -> Result<()> {
        let db = sqlite::open(":memory:")?;
        init_schema(&db)
    }

    #[test]
    fn can_add_single_note() -> Result<()> {
        let db = sqlite::open(":memory:")?;
        init_schema(&db)?;
        let note = text_note("hello\nworld", "hello!");
        insert_notes(&db, &[note.clone()])?;
        let mut iter = active_notes(&db)?;
        assert_eq!(Some(note), iter.next());
        assert_eq!(None, iter.next());
        Ok(())
    }

    #[test]
    fn insert_skips_duplicates() -> Result<()> {
        let db = sqlite::open(":memory:")?;
        init_schema(&db)?;
        let note1 = text_note("hello\nworld", "hello!");
        let note2 = text_note("bye\nworld", "bye!");
        let notes = vec![note1.clone(), note2.clone(), note1.clone()];
        insert_notes(&db, &notes)?;
        let notes = vec![note2.clone(), note2.clone()];
        insert_notes(&db, &notes)?;
        let mut n1 = 0;
        let mut n2 = 0;
        for n in active_notes(&db)? {
            if n == note1 { n1 += 1 }
            else if n == note2 { n2 += 1 }
        }
        assert_eq!(1, n1);
        assert_eq!(1, n2);
        Ok(())
    }

    #[test]
    fn can_update_note() -> Result<()> {
        let db = sqlite::open(":memory:")?;
        init_schema(&db)?;
        let note1 = text_note("hello\nworld", "hello!");
        // Same uuid, different payload: the second insert must win.
        let note2 = DbNote {
            uuid: note1.uuid,
            ..text_note("hello\nworld", "bye!")
        };
        let notes = vec![note1, note2.clone()];
        insert_notes(&db, &notes)?;
        let mut iter = active_notes(&db)?;
        assert_eq!(Some(note2), iter.next());
        assert_eq!(None, iter.next());
        Ok(())
    }

    #[test]
    fn insert_adds_to_queue() -> Result<()> {
        let db = sqlite::open(":memory:")?;
        init_schema(&db)?;
        let note1 = text_note("hello\nworld", "hello!");
        let note2 = text_note("bye\nworld", "bye!");
        let notes = vec![note1.clone(), note2.clone()];
        insert_notes(&db, &notes)?;
        let mut n1 = 0;
        let mut n2 = 0;
        // Fix: select_notes_for_review takes a tag filter as its second
        // argument; pass the empty filter to select everything.
        for n in select_notes_for_review(&db, &[])?.iter() {
            if *n == note1 { n1 += 1 }
            else if *n == note2 { n2 += 1 }
        }
        assert_eq!(1, n1);
        assert_eq!(1, n2);
        Ok(())
    }

    // helper function for tests
    fn text_note(tags: &str, text: &str) -> DbNote {
        DbNote {
            uuid: Uuid::new_v4(),
            ctime: Local::now().with_timezone(&Utc),
            tags: tags.to_string(),
            data: NoteData::Text(text.to_string())
        }
    }
}
|
use nom::character::is_digit;
use crate::parse::parse_u64;
use crate::attributes::SdpOptionalAttribute;
use std::fmt;
use nom::{
IResult,
character::complete::char,
combinator::map_res,
bytes::complete::{take_while, tag}
};
/// The start and stop values of an SDP "t=" (timing) field.
#[derive(Debug, PartialEq, Clone)]
pub struct SdpTiming(pub u64, pub u64);

impl fmt::Display for SdpTiming {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let SdpTiming(start, stop) = self;
        write!(f, "{} {}", start, stop)
    }
}

impl SdpTiming {
    /// Build a timing value from its start and end times.
    pub fn new(start: u64, end: u64) -> SdpTiming {
        Self(start, end)
    }
}
/// Parse a complete SDP timing line: `t=<start> <stop>\r\n`.
pub fn parse_time_line(input: &[u8]) -> IResult<&[u8], SdpOptionalAttribute> {
    let (rest, _) = tag("t=")(input)?;
    let (rest, timing) = parse_timing(rest)?;
    let (rest, _) = tag("\r\n")(rest)?;
    Ok((rest, SdpOptionalAttribute::Timing(timing)))
}
/// Parse `<start> <stop>`: two base-10 u64 fields separated by a single space.
pub fn parse_timing(input: &[u8]) -> IResult<&[u8], SdpTiming> {
    let (rest, start) = map_res(take_while(is_digit), parse_u64)(input)?;
    let (rest, _) = char(' ')(rest)?;
    let (rest, stop) = map_res(take_while(is_digit), parse_u64)(rest)?;
    Ok((rest, SdpTiming(start, stop)))
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
/// Aggregate error type for this generated client: one transparent variant per
/// operation, each wrapping that operation's own `Error` enum via `#[from]`.
/// Generated by AutoRust — variant names mirror the `OperationGroup_Operation`
/// naming of the REST API, hence `non_camel_case_types`.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    Managers_List(#[from] managers::list::Error),
    #[error(transparent)]
    AvailableProviderOperations_List(#[from] available_provider_operations::list::Error),
    #[error(transparent)]
    Managers_ListByResourceGroup(#[from] managers::list_by_resource_group::Error),
    #[error(transparent)]
    Managers_Get(#[from] managers::get::Error),
    #[error(transparent)]
    Managers_CreateOrUpdate(#[from] managers::create_or_update::Error),
    #[error(transparent)]
    Managers_Update(#[from] managers::update::Error),
    #[error(transparent)]
    Managers_Delete(#[from] managers::delete::Error),
    #[error(transparent)]
    AccessControlRecords_ListByManager(#[from] access_control_records::list_by_manager::Error),
    #[error(transparent)]
    AccessControlRecords_Get(#[from] access_control_records::get::Error),
    #[error(transparent)]
    AccessControlRecords_CreateOrUpdate(#[from] access_control_records::create_or_update::Error),
    #[error(transparent)]
    AccessControlRecords_Delete(#[from] access_control_records::delete::Error),
    #[error(transparent)]
    Alerts_ListByManager(#[from] alerts::list_by_manager::Error),
    #[error(transparent)]
    Backups_ListByManager(#[from] backups::list_by_manager::Error),
    #[error(transparent)]
    Managers_UploadRegistrationCertificate(#[from] managers::upload_registration_certificate::Error),
    #[error(transparent)]
    Alerts_Clear(#[from] alerts::clear::Error),
    #[error(transparent)]
    Devices_ListByManager(#[from] devices::list_by_manager::Error),
    #[error(transparent)]
    Devices_Get(#[from] devices::get::Error),
    #[error(transparent)]
    Devices_Patch(#[from] devices::patch::Error),
    #[error(transparent)]
    Devices_Delete(#[from] devices::delete::Error),
    #[error(transparent)]
    Devices_GetAlertSettings(#[from] devices::get_alert_settings::Error),
    #[error(transparent)]
    Devices_CreateOrUpdateAlertSettings(#[from] devices::create_or_update_alert_settings::Error),
    #[error(transparent)]
    Backups_ListByDevice(#[from] backups::list_by_device::Error),
    #[error(transparent)]
    Backups_Delete(#[from] backups::delete::Error),
    #[error(transparent)]
    Backups_Clone(#[from] backups::clone::Error),
    #[error(transparent)]
    BackupScheduleGroups_ListByDevice(#[from] backup_schedule_groups::list_by_device::Error),
    #[error(transparent)]
    BackupScheduleGroups_Get(#[from] backup_schedule_groups::get::Error),
    #[error(transparent)]
    BackupScheduleGroups_CreateOrUpdate(#[from] backup_schedule_groups::create_or_update::Error),
    #[error(transparent)]
    BackupScheduleGroups_Delete(#[from] backup_schedule_groups::delete::Error),
    #[error(transparent)]
    ChapSettings_ListByDevice(#[from] chap_settings::list_by_device::Error),
    #[error(transparent)]
    ChapSettings_Get(#[from] chap_settings::get::Error),
    #[error(transparent)]
    ChapSettings_CreateOrUpdate(#[from] chap_settings::create_or_update::Error),
    #[error(transparent)]
    ChapSettings_Delete(#[from] chap_settings::delete::Error),
    #[error(transparent)]
    Devices_Deactivate(#[from] devices::deactivate::Error),
    #[error(transparent)]
    IscsiDisks_ListByDevice(#[from] iscsi_disks::list_by_device::Error),
    #[error(transparent)]
    Devices_DownloadUpdates(#[from] devices::download_updates::Error),
    #[error(transparent)]
    Devices_Failover(#[from] devices::failover::Error),
    #[error(transparent)]
    Devices_ListFailoverTarget(#[from] devices::list_failover_target::Error),
    #[error(transparent)]
    FileServers_ListByDevice(#[from] file_servers::list_by_device::Error),
    #[error(transparent)]
    FileServers_Get(#[from] file_servers::get::Error),
    #[error(transparent)]
    FileServers_CreateOrUpdate(#[from] file_servers::create_or_update::Error),
    #[error(transparent)]
    FileServers_Delete(#[from] file_servers::delete::Error),
    #[error(transparent)]
    FileServers_BackupNow(#[from] file_servers::backup_now::Error),
    #[error(transparent)]
    FileServers_ListMetrics(#[from] file_servers::list_metrics::Error),
    #[error(transparent)]
    FileServers_ListMetricDefinition(#[from] file_servers::list_metric_definition::Error),
    #[error(transparent)]
    FileShares_ListByFileServer(#[from] file_shares::list_by_file_server::Error),
    #[error(transparent)]
    FileShares_Get(#[from] file_shares::get::Error),
    #[error(transparent)]
    FileShares_CreateOrUpdate(#[from] file_shares::create_or_update::Error),
    #[error(transparent)]
    FileShares_Delete(#[from] file_shares::delete::Error),
    #[error(transparent)]
    FileShares_ListMetrics(#[from] file_shares::list_metrics::Error),
    #[error(transparent)]
    FileShares_ListMetricDefinition(#[from] file_shares::list_metric_definition::Error),
    #[error(transparent)]
    Devices_InstallUpdates(#[from] devices::install_updates::Error),
    #[error(transparent)]
    IscsiServers_ListByDevice(#[from] iscsi_servers::list_by_device::Error),
    #[error(transparent)]
    IscsiServers_Get(#[from] iscsi_servers::get::Error),
    #[error(transparent)]
    IscsiServers_CreateOrUpdate(#[from] iscsi_servers::create_or_update::Error),
    #[error(transparent)]
    IscsiServers_Delete(#[from] iscsi_servers::delete::Error),
    #[error(transparent)]
    IscsiServers_BackupNow(#[from] iscsi_servers::backup_now::Error),
    #[error(transparent)]
    IscsiDisks_ListByIscsiServer(#[from] iscsi_disks::list_by_iscsi_server::Error),
    #[error(transparent)]
    IscsiDisks_Get(#[from] iscsi_disks::get::Error),
    #[error(transparent)]
    IscsiDisks_CreateOrUpdate(#[from] iscsi_disks::create_or_update::Error),
    #[error(transparent)]
    IscsiDisks_Delete(#[from] iscsi_disks::delete::Error),
    #[error(transparent)]
    IscsiDisks_ListMetrics(#[from] iscsi_disks::list_metrics::Error),
    #[error(transparent)]
    IscsiDisks_ListMetricDefinition(#[from] iscsi_disks::list_metric_definition::Error),
    #[error(transparent)]
    IscsiServers_ListMetrics(#[from] iscsi_servers::list_metrics::Error),
    #[error(transparent)]
    IscsiServers_ListMetricDefinition(#[from] iscsi_servers::list_metric_definition::Error),
    #[error(transparent)]
    Jobs_ListByDevice(#[from] jobs::list_by_device::Error),
    #[error(transparent)]
    Jobs_Get(#[from] jobs::get::Error),
    #[error(transparent)]
    Devices_ListMetrics(#[from] devices::list_metrics::Error),
    #[error(transparent)]
    Devices_ListMetricDefinition(#[from] devices::list_metric_definition::Error),
    #[error(transparent)]
    Devices_GetNetworkSettings(#[from] devices::get_network_settings::Error),
    #[error(transparent)]
    Devices_ScanForUpdates(#[from] devices::scan_for_updates::Error),
    #[error(transparent)]
    Devices_CreateOrUpdateSecuritySettings(#[from] devices::create_or_update_security_settings::Error),
    #[error(transparent)]
    Alerts_SendTestEmail(#[from] alerts::send_test_email::Error),
    #[error(transparent)]
    FileShares_ListByDevice(#[from] file_shares::list_by_device::Error),
    #[error(transparent)]
    Devices_GetTimeSettings(#[from] devices::get_time_settings::Error),
    #[error(transparent)]
    Devices_GetUpdateSummary(#[from] devices::get_update_summary::Error),
    #[error(transparent)]
    Managers_GetEncryptionSettings(#[from] managers::get_encryption_settings::Error),
    #[error(transparent)]
    Managers_GetExtendedInfo(#[from] managers::get_extended_info::Error),
    #[error(transparent)]
    Managers_CreateExtendedInfo(#[from] managers::create_extended_info::Error),
    #[error(transparent)]
    Managers_UpdateExtendedInfo(#[from] managers::update_extended_info::Error),
    #[error(transparent)]
    Managers_DeleteExtendedInfo(#[from] managers::delete_extended_info::Error),
    #[error(transparent)]
    FileServers_ListByManager(#[from] file_servers::list_by_manager::Error),
    #[error(transparent)]
    Managers_GetEncryptionKey(#[from] managers::get_encryption_key::Error),
    #[error(transparent)]
    IscsiServers_ListByManager(#[from] iscsi_servers::list_by_manager::Error),
    #[error(transparent)]
    Jobs_ListByManager(#[from] jobs::list_by_manager::Error),
    #[error(transparent)]
    Managers_ListMetrics(#[from] managers::list_metrics::Error),
    #[error(transparent)]
    Managers_ListMetricDefinition(#[from] managers::list_metric_definition::Error),
    #[error(transparent)]
    StorageAccountCredentials_ListByManager(#[from] storage_account_credentials::list_by_manager::Error),
    #[error(transparent)]
    StorageAccountCredentials_Get(#[from] storage_account_credentials::get::Error),
    #[error(transparent)]
    StorageAccountCredentials_CreateOrUpdate(#[from] storage_account_credentials::create_or_update::Error),
    #[error(transparent)]
    StorageAccountCredentials_Delete(#[from] storage_account_credentials::delete::Error),
    #[error(transparent)]
    StorageDomains_ListByManager(#[from] storage_domains::list_by_manager::Error),
    #[error(transparent)]
    StorageDomains_Get(#[from] storage_domains::get::Error),
    #[error(transparent)]
    StorageDomains_CreateOrUpdate(#[from] storage_domains::create_or_update::Error),
    #[error(transparent)]
    StorageDomains_Delete(#[from] storage_domains::delete::Error),
}
pub mod managers {
use super::{models, API_VERSION};
/// List all StorSimple managers in the subscription.
///
/// Sends `GET {base}/subscriptions/{id}/providers/Microsoft.StorSimple/managers`
/// with the `api-version` query parameter, attaching a bearer token when a
/// credential is configured. A 200 response is deserialized into
/// `models::ManagerList`; any other status becomes `list::Error::DefaultResponse`.
pub async fn list(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
) -> std::result::Result<models::ManagerList, list::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/providers/Microsoft.StorSimple/managers",
        operation_config.base_path(),
        subscription_id
    );
    let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Authentication is optional: only add the Authorization header when a
    // token credential has been configured on the OperationConfig.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::ManagerList =
                serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            // Non-200: the service returned an error document.
            let rsp_body = rsp.body();
            let rsp_value: models::Error =
                serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(list::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for the [`list`] operation.
pub mod list {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// List the StorSimple managers within one resource group.
///
/// `GET .../resourceGroups/{rg}/providers/Microsoft.StorSimple/managers`;
/// 200 deserializes to `models::ManagerList`, anything else to
/// `list_by_resource_group::Error::DefaultResponse`.
pub async fn list_by_resource_group(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
) -> std::result::Result<models::ManagerList, list_by_resource_group::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers",
        operation_config.base_path(),
        subscription_id,
        resource_group_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_by_resource_group::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(list_by_resource_group::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_by_resource_group::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::ManagerList = serde_json::from_slice(rsp_body)
                .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::Error = serde_json::from_slice(rsp_body)
                .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(list_by_resource_group::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for the [`list_by_resource_group`] operation.
pub mod list_by_resource_group {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Fetch a single StorSimple manager by name.
///
/// `GET .../managers/{manager_name}`; 200 deserializes to `models::Manager`,
/// any other status to `get::Error::DefaultResponse`.
pub async fn get(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    manager_name: &str,
) -> std::result::Result<models::Manager, get::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        manager_name
    );
    let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::Manager =
                serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::Error =
                serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for the [`get`] operation.
pub mod get {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Create a StorSimple manager, or update it if it already exists.
///
/// `PUT .../managers/{manager_name}` with `manager` serialized as the JSON
/// body. 200 maps to `Response::Ok200` (updated), 201 to `Response::Created201`
/// (created); anything else is `create_or_update::Error::DefaultResponse`.
pub async fn create_or_update(
    operation_config: &crate::OperationConfig,
    manager: &models::Manager,
    subscription_id: &str,
    resource_group_name: &str,
    manager_name: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        manager_name
    );
    let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    // Bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(create_or_update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(manager).map_err(create_or_update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(create_or_update::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::Manager = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Ok200(rsp_value))
        }
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: models::Manager = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Created201(rsp_value))
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::Error = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(create_or_update::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Response and error types for the [`create_or_update`] operation.
pub mod create_or_update {
    use super::{models, API_VERSION};
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::Manager),
        Created201(models::Manager),
    }
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Apply a partial update (patch) to an existing StorSimple manager.
///
/// `PATCH .../managers/{manager_name}` with `parameters` serialized as the JSON
/// body; 200 deserializes to the updated `models::Manager`, anything else to
/// `update::Error::DefaultResponse`.
pub async fn update(
    operation_config: &crate::OperationConfig,
    parameters: &models::ManagerPatch,
    subscription_id: &str,
    resource_group_name: &str,
    manager_name: &str,
) -> std::result::Result<models::Manager, update::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        manager_name
    );
    let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PATCH);
    // Bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::Manager =
                serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::Error =
                serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(update::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for the [`update`] operation.
pub mod update {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Delete a StorSimple manager.
///
/// `DELETE .../managers/{manager_name}` with an empty body; 200 maps to
/// `Response::Ok200`, 204 to `Response::NoContent204`, anything else to
/// `delete::Error::DefaultResponse`.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    manager_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        manager_name
    );
    let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    // Bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(delete::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => Ok(delete::Response::Ok200),
        http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::Error =
                serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(delete::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Response and error types for the [`delete`] operation.
pub mod delete {
    use super::{models, API_VERSION};
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        NoContent204,
    }
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Upload a registration certificate for the manager.
///
/// `PUT .../managers/{manager_name}/certificates/{certificate_name}` with the
/// request serialized as the JSON body; 200 deserializes to
/// `models::UploadCertificateResponse`, anything else to
/// `upload_registration_certificate::Error::DefaultResponse`.
/// (The doubled "requestrequest" parameter name is generator output.)
pub async fn upload_registration_certificate(
    operation_config: &crate::OperationConfig,
    certificate_name: &str,
    upload_certificate_requestrequest: &models::UploadCertificateRequest,
    subscription_id: &str,
    resource_group_name: &str,
    manager_name: &str,
) -> std::result::Result<models::UploadCertificateResponse, upload_registration_certificate::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/certificates/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        manager_name,
        certificate_name
    );
    let mut url = url::Url::parse(url_str).map_err(upload_registration_certificate::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    // Bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(upload_registration_certificate::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body =
        azure_core::to_json(upload_certificate_requestrequest).map_err(upload_registration_certificate::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(upload_registration_certificate::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(upload_registration_certificate::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::UploadCertificateResponse = serde_json::from_slice(rsp_body)
                .map_err(|source| upload_registration_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::Error = serde_json::from_slice(rsp_body)
                .map_err(|source| upload_registration_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(upload_registration_certificate::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for the [`upload_registration_certificate`] operation.
pub mod upload_registration_certificate {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Fetch the manager's encryption settings.
///
/// `GET .../managers/{manager_name}/encryptionSettings/default`; 200
/// deserializes to `models::EncryptionSettings`, anything else to
/// `get_encryption_settings::Error::DefaultResponse`.
pub async fn get_encryption_settings(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    manager_name: &str,
) -> std::result::Result<models::EncryptionSettings, get_encryption_settings::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/encryptionSettings/default",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        manager_name
    );
    let mut url = url::Url::parse(url_str).map_err(get_encryption_settings::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_encryption_settings::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(get_encryption_settings::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_encryption_settings::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::EncryptionSettings = serde_json::from_slice(rsp_body)
                .map_err(|source| get_encryption_settings::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::Error = serde_json::from_slice(rsp_body)
                .map_err(|source| get_encryption_settings::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_encryption_settings::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for the [`get_encryption_settings`] operation.
pub mod get_encryption_settings {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_extended_info(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::ManagerExtendedInfo, get_extended_info::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/extendedInformation/vaultExtendedInfo",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(get_extended_info::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_extended_info::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_extended_info::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_extended_info::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ManagerExtendedInfo = serde_json::from_slice(rsp_body)
.map_err(|source| get_extended_info::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| get_extended_info::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_extended_info::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_extended_info {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_extended_info(
operation_config: &crate::OperationConfig,
manager_extended_info: &models::ManagerExtendedInfo,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::ManagerExtendedInfo, create_extended_info::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/extendedInformation/vaultExtendedInfo",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(create_extended_info::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_extended_info::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(manager_extended_info).map_err(create_extended_info::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_extended_info::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_extended_info::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ManagerExtendedInfo = serde_json::from_slice(rsp_body)
.map_err(|source| create_extended_info::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| create_extended_info::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_extended_info::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_extended_info {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update_extended_info(
operation_config: &crate::OperationConfig,
manager_extended_info: &models::ManagerExtendedInfo,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
if_match: &str,
) -> std::result::Result<models::ManagerExtendedInfo, update_extended_info::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/extendedInformation/vaultExtendedInfo",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(update_extended_info::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_extended_info::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(manager_extended_info).map_err(update_extended_info::Error::SerializeError)?;
req_builder = req_builder.header("If-Match", if_match);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update_extended_info::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_extended_info::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ManagerExtendedInfo = serde_json::from_slice(rsp_body)
.map_err(|source| update_extended_info::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| update_extended_info::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update_extended_info::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update_extended_info {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete_extended_info(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<(), delete_extended_info::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/extendedInformation/vaultExtendedInfo",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(delete_extended_info::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete_extended_info::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete_extended_info::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(delete_extended_info::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| delete_extended_info::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete_extended_info::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete_extended_info {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_encryption_key(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::SymmetricEncryptedSecret, get_encryption_key::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/getEncryptionKey",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(get_encryption_key::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_encryption_key::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_encryption_key::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_encryption_key::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SymmetricEncryptedSecret = serde_json::from_slice(rsp_body)
.map_err(|source| get_encryption_key::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| get_encryption_key::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_encryption_key::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_encryption_key {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_metrics(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::MetricList, list_metrics::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/metrics",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(list_metrics::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_metrics::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_metrics::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_metrics::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::MetricList =
serde_json::from_slice(rsp_body).map_err(|source| list_metrics::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list_metrics::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_metrics::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_metrics {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_metric_definition(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::MetricDefinitionList, list_metric_definition::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/metricsDefinitions",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(list_metric_definition::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_metric_definition::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_metric_definition::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_metric_definition::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::MetricDefinitionList = serde_json::from_slice(rsp_body)
.map_err(|source| list_metric_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_metric_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_metric_definition::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_metric_definition {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod available_provider_operations {
use super::{models, API_VERSION};
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::AvailableProviderOperations, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.StorSimple/operations", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AvailableProviderOperations =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod access_control_records {
use super::{models, API_VERSION};
pub async fn list_by_manager(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::AccessControlRecordList, list_by_manager::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/accessControlRecords",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_manager::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_manager::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_manager::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_manager::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AccessControlRecordList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_manager::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_manager {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
access_control_record_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::AccessControlRecord, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/accessControlRecords/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
access_control_record_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AccessControlRecord =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
access_control_record_name: &str,
access_control_record: &models::AccessControlRecord,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/accessControlRecords/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
access_control_record_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(access_control_record).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AccessControlRecord = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::AccessControlRecord),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
access_control_record_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/accessControlRecords/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
access_control_record_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod alerts {
// NOTE: `API_VERSION` is imported for parity with sibling modules; the
// request builders below reference `super::API_VERSION` directly.
use super::{models, API_VERSION};
/// Lists the alerts raised under the given manager.
///
/// `filter` is forwarded verbatim as the OData `$filter` query parameter
/// when present. Any non-200 status is decoded as a service `models::Error`
/// and returned via `DefaultResponse`.
pub async fn list_by_manager(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::AlertList, list_by_manager::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/alerts",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_manager::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_manager::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
// GET carries no payload.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_manager::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_manager::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AlertList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Anything other than 200 is treated as a service error payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_manager::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type returned by [`list_by_manager`].
pub mod list_by_manager {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Clears the alerts identified in `request` for the given manager.
///
/// POSTs the serialized `ClearAlertRequest` to the manager's `clearAlerts`
/// endpoint; 204 No Content signals success and yields `()`.
pub async fn clear(
operation_config: &crate::OperationConfig,
request: &models::ClearAlertRequest,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<(), clear::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/clearAlerts",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(clear::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(clear::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(request).map_err(clear::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(clear::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(clear::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
// Anything other than 204 is treated as a service error payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| clear::Error::DeserializeError(source, rsp_body.clone()))?;
Err(clear::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type returned by [`clear`].
pub mod clear {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Sends a test alert email from the named device.
///
/// POSTs the serialized `SendTestAlertEmailRequest` to the device's
/// `sendTestAlertEmail` endpoint; 204 No Content signals success.
pub async fn send_test_email(
operation_config: &crate::OperationConfig,
device_name: &str,
request: &models::SendTestAlertEmailRequest,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<(), send_test_email::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/sendTestAlertEmail",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(send_test_email::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(send_test_email::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(request).map_err(send_test_email::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(send_test_email::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(send_test_email::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
// Anything other than 204 is treated as a service error payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| send_test_email::Error::DeserializeError(source, rsp_body.clone()))?;
Err(send_test_email::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type returned by [`send_test_email`].
pub mod send_test_email {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod backups {
// NOTE: `API_VERSION` is imported for parity with sibling modules; the
// request builders below reference `super::API_VERSION` directly.
use super::{models, API_VERSION};
/// Lists the backups under the given manager.
///
/// `filter` is forwarded verbatim as the OData `$filter` query parameter
/// when present.
pub async fn list_by_manager(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::BackupList, list_by_manager::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/backups",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_manager::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_manager::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
// GET carries no payload.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_manager::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_manager::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::BackupList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Anything other than 200 is treated as a service error payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_manager::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type returned by [`list_by_manager`].
pub mod list_by_manager {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists the backups on a single device.
///
/// `for_failover` and `filter` are forwarded as the `forFailover` and
/// OData `$filter` query parameters when present.
pub async fn list_by_device(
operation_config: &crate::OperationConfig,
device_name: &str,
for_failover: Option<bool>,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::BackupList, list_by_device::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/backups",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_device::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_device::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(for_failover) = for_failover {
url.query_pairs_mut().append_pair("forFailover", for_failover.to_string().as_str());
}
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
// GET carries no payload.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_device::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_device::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::BackupList =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Anything other than 200 is treated as a service error payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_device::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type returned by [`list_by_device`].
pub mod list_by_device {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Deletes a named backup from a device.
///
/// 202 or 204 counts as success; any other status is decoded as a
/// service `models::Error`.
pub async fn delete(
operation_config: &crate::OperationConfig,
device_name: &str,
backup_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/backups/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
backup_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// DELETE carries no payload.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for [`delete`].
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Clones a backup element.
///
/// POSTs the serialized `CloneRequest` to the backup element's `clone`
/// endpoint. 200 or 202 counts as success.
pub async fn clone(
operation_config: &crate::OperationConfig,
device_name: &str,
backup_name: &str,
element_name: &str,
clone_request: &models::CloneRequest,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<clone::Response, clone::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/backups/{}/elements/{}/clone",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
backup_name,
element_name
);
let mut url = url::Url::parse(url_str).map_err(clone::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(clone::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(clone_request).map_err(clone::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(clone::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(clone::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(clone::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(clone::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| clone::Error::DeserializeError(source, rsp_body.clone()))?;
Err(clone::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for [`clone`].
pub mod clone {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod devices {
use super::{models, API_VERSION};
pub async fn list_by_manager(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
expand: Option<&str>,
) -> std::result::Result<models::DeviceList, list_by_manager::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_manager::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_manager::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_manager::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_manager::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_manager::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_manager {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
expand: Option<&str>,
) -> std::result::Result<models::Device, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Device =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn patch(
operation_config: &crate::OperationConfig,
device_name: &str,
device_patch: &models::DevicePatch,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<patch::Response, patch::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(patch::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(patch::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(device_patch).map_err(patch::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(patch::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(patch::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Device =
serde_json::from_slice(rsp_body).map_err(|source| patch::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(patch::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(patch::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| patch::Error::DeserializeError(source, rsp_body.clone()))?;
Err(patch::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod patch {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::Device),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_alert_settings(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::AlertSettings, get_alert_settings::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/alertSettings/default",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(get_alert_settings::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_alert_settings::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_alert_settings::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_alert_settings::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AlertSettings = serde_json::from_slice(rsp_body)
.map_err(|source| get_alert_settings::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| get_alert_settings::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_alert_settings::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_alert_settings {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update_alert_settings(
operation_config: &crate::OperationConfig,
device_name: &str,
alert_settings: &models::AlertSettings,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<create_or_update_alert_settings::Response, create_or_update_alert_settings::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/alertSettings/default",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update_alert_settings::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update_alert_settings::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(alert_settings).map_err(create_or_update_alert_settings::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(create_or_update_alert_settings::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update_alert_settings::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AlertSettings = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update_alert_settings::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update_alert_settings::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(create_or_update_alert_settings::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update_alert_settings::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update_alert_settings::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update_alert_settings {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::AlertSettings),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn deactivate(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<deactivate::Response, deactivate::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/deactivate",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(deactivate::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(deactivate::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(deactivate::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(deactivate::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(deactivate::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(deactivate::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| deactivate::Error::DeserializeError(source, rsp_body.clone()))?;
Err(deactivate::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod deactivate {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn download_updates(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<download_updates::Response, download_updates::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/download",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(download_updates::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(download_updates::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(download_updates::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(download_updates::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(download_updates::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(download_updates::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| download_updates::Error::DeserializeError(source, rsp_body.clone()))?;
Err(download_updates::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod download_updates {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn failover(
operation_config: &crate::OperationConfig,
device_name: &str,
failover_request: &models::FailoverRequest,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<failover::Response, failover::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/failover",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(failover::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(failover::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(failover_request).map_err(failover::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(failover::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(failover::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(failover::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(failover::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| failover::Error::DeserializeError(source, rsp_body.clone()))?;
Err(failover::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod failover {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_failover_target(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
expand: Option<&str>,
) -> std::result::Result<models::DeviceList, list_failover_target::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/failoverTargets",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(list_failover_target::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_failover_target::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_failover_target::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_failover_target::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceList = serde_json::from_slice(rsp_body)
.map_err(|source| list_failover_target::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_failover_target::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_failover_target::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_failover_target {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn install_updates(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<install_updates::Response, install_updates::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/install",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(install_updates::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(install_updates::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(install_updates::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(install_updates::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(install_updates::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(install_updates::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| install_updates::Error::DeserializeError(source, rsp_body.clone()))?;
Err(install_updates::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod install_updates {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_metrics(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::MetricList, list_metrics::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/metrics",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(list_metrics::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_metrics::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_metrics::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_metrics::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::MetricList =
serde_json::from_slice(rsp_body).map_err(|source| list_metrics::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list_metrics::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_metrics::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_metrics {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_metric_definition(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::MetricDefinitionList, list_metric_definition::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/metricsDefinitions",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(list_metric_definition::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_metric_definition::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_metric_definition::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_metric_definition::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::MetricDefinitionList = serde_json::from_slice(rsp_body)
.map_err(|source| list_metric_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_metric_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_metric_definition::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_metric_definition {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_network_settings(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::NetworkSettings, get_network_settings::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/networkSettings/default",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(get_network_settings::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_network_settings::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_network_settings::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_network_settings::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::NetworkSettings = serde_json::from_slice(rsp_body)
.map_err(|source| get_network_settings::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| get_network_settings::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_network_settings::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_network_settings {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn scan_for_updates(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<scan_for_updates::Response, scan_for_updates::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/scanForUpdates",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(scan_for_updates::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(scan_for_updates::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(scan_for_updates::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(scan_for_updates::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(scan_for_updates::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(scan_for_updates::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| scan_for_updates::Error::DeserializeError(source, rsp_body.clone()))?;
Err(scan_for_updates::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod scan_for_updates {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update_security_settings(
operation_config: &crate::OperationConfig,
device_name: &str,
security_settings: &models::SecuritySettings,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<create_or_update_security_settings::Response, create_or_update_security_settings::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/securitySettings/default/update",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update_security_settings::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update_security_settings::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(security_settings).map_err(create_or_update_security_settings::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(create_or_update_security_settings::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update_security_settings::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(create_or_update_security_settings::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(create_or_update_security_settings::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update_security_settings::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update_security_settings::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update_security_settings {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_time_settings(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::TimeSettings, get_time_settings::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/timeSettings/default",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(get_time_settings::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_time_settings::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_time_settings::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_time_settings::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TimeSettings = serde_json::from_slice(rsp_body)
.map_err(|source| get_time_settings::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| get_time_settings::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_time_settings::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_time_settings {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_update_summary(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::Updates, get_update_summary::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/updateSummary/default",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(get_update_summary::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_update_summary::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_update_summary::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_update_summary::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Updates = serde_json::from_slice(rsp_body)
.map_err(|source| get_update_summary::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| get_update_summary::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_update_summary::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_update_summary {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod backup_schedule_groups {
use super::{models, API_VERSION};
pub async fn list_by_device(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::BackupScheduleGroupList, list_by_device::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/backupScheduleGroups",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_device::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_device::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_device::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_device::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::BackupScheduleGroupList =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_device::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_device {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
device_name: &str,
schedule_group_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::BackupScheduleGroup, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/backupScheduleGroups/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
schedule_group_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::BackupScheduleGroup =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
device_name: &str,
schedule_group_name: &str,
schedule_group: &models::BackupScheduleGroup,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/backupScheduleGroups/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
schedule_group_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(schedule_group).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::BackupScheduleGroup = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::BackupScheduleGroup),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
device_name: &str,
schedule_group_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/backupScheduleGroups/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
schedule_group_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod chap_settings {
use super::{models, API_VERSION};
pub async fn list_by_device(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::ChapSettingsList, list_by_device::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/chapSettings",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_device::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_device::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_device::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_device::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ChapSettingsList =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_device::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_device {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
device_name: &str,
chap_user_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::ChapSettings, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/chapSettings/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
chap_user_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ChapSettings =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
device_name: &str,
chap_user_name: &str,
chap_setting: &models::ChapSettings,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/chapSettings/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
chap_user_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(chap_setting).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ChapSettings = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::ChapSettings),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
device_name: &str,
chap_user_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/chapSettings/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
chap_user_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod iscsi_disks {
use super::{models, API_VERSION};
/// GET all iSCSI disks of a device
/// (`.../managers/{manager}/devices/{device}/disks`).
///
/// A 200 response is decoded as `models::IscsiDiskList`; any other status is
/// decoded as the service's `models::Error` and returned as
/// `list_by_device::Error::DefaultResponse`.
pub async fn list_by_device(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::IscsiDiskList, list_by_device::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/disks",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_device::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: only attached when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_device::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// The URI is read from `url` only after the api-version query pair is appended above.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_device::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_device::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IscsiDiskList =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any other status: surface the service's own error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_device::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for `iscsi_disks::list_by_device`.
pub mod list_by_device {
use super::{models, API_VERSION};
// `DefaultResponse` carries the service-supplied error payload for any
// unexpected HTTP status; the remaining variants cover the client pipeline.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET the iSCSI disks of one iSCSI server
/// (`.../devices/{device}/iscsiservers/{server}/disks`).
///
/// 200 decodes to `models::IscsiDiskList`; anything else becomes
/// `list_by_iscsi_server::Error::DefaultResponse` with the decoded service error.
pub async fn list_by_iscsi_server(
operation_config: &crate::OperationConfig,
device_name: &str,
iscsi_server_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::IscsiDiskList, list_by_iscsi_server::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/iscsiservers/{}/disks",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
iscsi_server_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_iscsi_server::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_iscsi_server::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// URI set after query pairs are appended so the api-version parameter is included.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_iscsi_server::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_iscsi_server::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IscsiDiskList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_iscsi_server::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any other status: surface the service's own error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_iscsi_server::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_iscsi_server::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for `iscsi_disks::list_by_iscsi_server`.
pub mod list_by_iscsi_server {
use super::{models, API_VERSION};
// Standard generated error enum: pipeline failures plus `DefaultResponse`
// wrapping the service's error payload for unexpected statuses.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET a single iSCSI disk
/// (`.../devices/{device}/iscsiservers/{server}/disks/{disk}`).
///
/// 200 decodes to `models::IscsiDisk`; anything else becomes
/// `get::Error::DefaultResponse` with the decoded service error.
pub async fn get(
operation_config: &crate::OperationConfig,
device_name: &str,
iscsi_server_name: &str,
disk_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::IscsiDisk, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/iscsiservers/{}/disks/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
iscsi_server_name,
disk_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// URI set after query pairs are appended so the api-version parameter is included.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IscsiDisk =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any other status: surface the service's own error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for `iscsi_disks::get`.
pub mod get {
use super::{models, API_VERSION};
// Standard generated error enum: pipeline failures plus `DefaultResponse`
// wrapping the service's error payload for unexpected statuses.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// PUT (create or update) an iSCSI disk
/// (`.../devices/{device}/iscsiservers/{server}/disks/{disk}`).
///
/// The `iscsi_disk` payload is serialized as JSON. 200 returns the updated
/// resource (`Response::Ok200`); 202 signals an accepted long-running
/// operation (`Response::Accepted202`); anything else becomes
/// `create_or_update::Error::DefaultResponse`.
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
device_name: &str,
iscsi_server_name: &str,
disk_name: &str,
iscsi_disk: &models::IscsiDisk,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/iscsiservers/{}/disks/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
iscsi_server_name,
disk_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Bearer auth only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// JSON request body, unlike the body-less GET/DELETE siblings.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(iscsi_disk).map_err(create_or_update::Error::SerializeError)?;
// URI set after query pairs are appended so the api-version parameter is included.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IscsiDisk = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
// Any other status: surface the service's own error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for `iscsi_disks::create_or_update`.
pub mod create_or_update {
use super::{models, API_VERSION};
// 200 carries the created/updated resource; 202 means the operation was
// accepted and completes asynchronously.
#[derive(Debug)]
pub enum Response {
Ok200(models::IscsiDisk),
Accepted202,
}
// Standard generated error enum: pipeline failures plus `DefaultResponse`
// wrapping the service's error payload for unexpected statuses.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// DELETE an iSCSI disk
/// (`.../devices/{device}/iscsiservers/{server}/disks/{disk}`).
///
/// 202 = deletion accepted (long-running), 204 = already gone; any other
/// status becomes `delete::Error::DefaultResponse` with the decoded service error.
pub async fn delete(
operation_config: &crate::OperationConfig,
device_name: &str,
iscsi_server_name: &str,
disk_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/iscsiservers/{}/disks/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
iscsi_server_name,
disk_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
// Bearer auth only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// URI set after query pairs are appended so the api-version parameter is included.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
// Any other status: surface the service's own error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for `iscsi_disks::delete`.
pub mod delete {
use super::{models, API_VERSION};
// 202 = accepted (asynchronous deletion), 204 = nothing to delete.
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
// Standard generated error enum: pipeline failures plus `DefaultResponse`
// wrapping the service's error payload for unexpected statuses.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET the metrics of an iSCSI disk
/// (`.../iscsiservers/{server}/disks/{disk}/metrics`).
///
/// `filter` is an optional OData `$filter` expression appended to the query
/// string. 200 decodes to `models::MetricList`; anything else becomes
/// `list_metrics::Error::DefaultResponse`.
pub async fn list_metrics(
operation_config: &crate::OperationConfig,
device_name: &str,
iscsi_server_name: &str,
disk_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::MetricList, list_metrics::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/iscsiservers/{}/disks/{}/metrics",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
iscsi_server_name,
disk_name
);
let mut url = url::Url::parse(url_str).map_err(list_metrics::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_metrics::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// The optional OData filter is URL-encoded by `append_pair`.
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// URI set after all query pairs are appended so both parameters are included.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_metrics::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_metrics::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::MetricList =
serde_json::from_slice(rsp_body).map_err(|source| list_metrics::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any other status: surface the service's own error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list_metrics::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_metrics::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for `iscsi_disks::list_metrics`.
pub mod list_metrics {
use super::{models, API_VERSION};
// Standard generated error enum: pipeline failures plus `DefaultResponse`
// wrapping the service's error payload for unexpected statuses.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET the metric definitions of an iSCSI disk
/// (`.../iscsiservers/{server}/disks/{disk}/metricsDefinitions`).
///
/// 200 decodes to `models::MetricDefinitionList`; anything else becomes
/// `list_metric_definition::Error::DefaultResponse` with the decoded service error.
pub async fn list_metric_definition(
operation_config: &crate::OperationConfig,
device_name: &str,
iscsi_server_name: &str,
disk_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::MetricDefinitionList, list_metric_definition::Error> {
let http_client = operation_config.http_client();
// Reformatted from a single-line `format!` for consistency with the sibling
// operations; the format string and argument order are unchanged.
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/iscsiservers/{}/disks/{}/metricsDefinitions",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
iscsi_server_name,
disk_name
);
let mut url = url::Url::parse(url_str).map_err(list_metric_definition::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_metric_definition::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// URI set after query pairs are appended so the api-version parameter is included.
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_metric_definition::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_metric_definition::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::MetricDefinitionList = serde_json::from_slice(rsp_body)
.map_err(|source| list_metric_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any other status: surface the service's own error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_metric_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_metric_definition::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for `iscsi_disks::list_metric_definition`.
pub mod list_metric_definition {
use super::{models, API_VERSION};
// Standard generated error enum: pipeline failures plus `DefaultResponse`
// wrapping the service's error payload for unexpected statuses.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod file_servers {
use super::{models, API_VERSION};
/// GET all file servers of a device
/// (`.../managers/{manager}/devices/{device}/fileservers`).
///
/// 200 decodes to `models::FileServerList`; anything else becomes
/// `list_by_device::Error::DefaultResponse` with the decoded service error.
pub async fn list_by_device(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::FileServerList, list_by_device::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/fileservers",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_device::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_device::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// URI set after query pairs are appended so the api-version parameter is included.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_device::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_device::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FileServerList =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any other status: surface the service's own error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_device::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for `file_servers::list_by_device`.
pub mod list_by_device {
use super::{models, API_VERSION};
// Standard generated error enum: pipeline failures plus `DefaultResponse`
// wrapping the service's error payload for unexpected statuses.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET a single file server
/// (`.../devices/{device}/fileservers/{fileserver}`).
///
/// 200 decodes to `models::FileServer`; anything else becomes
/// `get::Error::DefaultResponse` with the decoded service error.
pub async fn get(
operation_config: &crate::OperationConfig,
device_name: &str,
file_server_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::FileServer, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/fileservers/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
file_server_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// URI set after query pairs are appended so the api-version parameter is included.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FileServer =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any other status: surface the service's own error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for `file_servers::get`.
pub mod get {
use super::{models, API_VERSION};
// Standard generated error enum: pipeline failures plus `DefaultResponse`
// wrapping the service's error payload for unexpected statuses.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// PUT (create or update) a file server
/// (`.../devices/{device}/fileservers/{fileserver}`).
///
/// The `file_server` payload is serialized as JSON. 200 returns the updated
/// resource (`Response::Ok200`); 202 signals an accepted long-running
/// operation (`Response::Accepted202`); anything else becomes
/// `create_or_update::Error::DefaultResponse`.
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
device_name: &str,
file_server_name: &str,
file_server: &models::FileServer,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/fileservers/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
file_server_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Bearer auth only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// JSON request body, unlike the body-less GET/DELETE siblings.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(file_server).map_err(create_or_update::Error::SerializeError)?;
// URI set after query pairs are appended so the api-version parameter is included.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FileServer = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
// Any other status: surface the service's own error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for `file_servers::create_or_update`.
pub mod create_or_update {
use super::{models, API_VERSION};
// 200 carries the created/updated resource; 202 means the operation was
// accepted and completes asynchronously.
#[derive(Debug)]
pub enum Response {
Ok200(models::FileServer),
Accepted202,
}
// Standard generated error enum: pipeline failures plus `DefaultResponse`
// wrapping the service's error payload for unexpected statuses.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// DELETE a file server
/// (`.../devices/{device}/fileservers/{fileserver}`).
///
/// 202 = deletion accepted (long-running), 204 = already gone; any other
/// status becomes `delete::Error::DefaultResponse` with the decoded service error.
pub async fn delete(
operation_config: &crate::OperationConfig,
device_name: &str,
file_server_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/fileservers/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
file_server_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
// Bearer auth only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// URI set after query pairs are appended so the api-version parameter is included.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
// Any other status: surface the service's own error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for `file_servers::delete`.
pub mod delete {
use super::{models, API_VERSION};
// 202 = accepted (asynchronous deletion), 204 = nothing to delete.
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
// Standard generated error enum: pipeline failures plus `DefaultResponse`
// wrapping the service's error payload for unexpected statuses.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// POST a backup request for a file server
/// (`.../devices/{device}/fileservers/{fileserver}/backup`).
///
/// Body-less POST: an explicit `Content-Length: 0` header is set. 202 =
/// backup accepted (long-running), 204 = no content; any other status
/// becomes `backup_now::Error::DefaultResponse`.
pub async fn backup_now(
operation_config: &crate::OperationConfig,
device_name: &str,
file_server_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<backup_now::Response, backup_now::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/fileservers/{}/backup",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
file_server_name
);
let mut url = url::Url::parse(url_str).map_err(backup_now::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Bearer auth only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(backup_now::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// Explicit zero Content-Length for the empty POST body.
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
// URI set after query pairs are appended so the api-version parameter is included.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(backup_now::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(backup_now::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(backup_now::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(backup_now::Response::NoContent204),
// Any other status: surface the service's own error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| backup_now::Error::DeserializeError(source, rsp_body.clone()))?;
Err(backup_now::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod backup_now {
    use super::{models, API_VERSION};
    /// Successful outcomes of `backup_now`: the service acknowledges the
    /// asynchronous backup with 202 Accepted or 204 No Content.
    #[derive(Debug)]
    pub enum Response {
        Accepted202,
        NoContent204,
    }
    /// Failure modes of `backup_now`, one variant per phase of the request
    /// lifecycle (URL building, auth, transport, body handling).
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected HTTP status; `value` holds
        /// the error body parsed from the response.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Declared for symmetry across generated operations; this operation
        /// sends an empty body and never constructs it.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Keeps the raw response bytes alongside the decode error for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn list_metrics(
operation_config: &crate::OperationConfig,
device_name: &str,
file_server_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::MetricList, list_metrics::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/fileservers/{}/metrics",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
file_server_name
);
let mut url = url::Url::parse(url_str).map_err(list_metrics::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_metrics::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_metrics::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_metrics::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::MetricList =
serde_json::from_slice(rsp_body).map_err(|source| list_metrics::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list_metrics::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_metrics::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_metrics {
    use super::{models, API_VERSION};
    /// Error type for the `list_metrics` operation; one variant per phase of
    /// the request lifecycle (URL building, auth, transport, body handling).
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected HTTP status; `value` holds
        /// the error body parsed from the response.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Declared for symmetry across generated operations; this GET sends
        /// an empty body and never constructs it.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Keeps the raw response bytes alongside the decode error for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn list_metric_definition(
operation_config: &crate::OperationConfig,
device_name: &str,
file_server_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::MetricDefinitionList, list_metric_definition::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/fileservers/{}/metricsDefinitions",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
file_server_name
);
let mut url = url::Url::parse(url_str).map_err(list_metric_definition::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_metric_definition::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_metric_definition::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_metric_definition::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::MetricDefinitionList = serde_json::from_slice(rsp_body)
.map_err(|source| list_metric_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_metric_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_metric_definition::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_metric_definition {
    use super::{models, API_VERSION};
    /// Error type for the `list_metric_definition` operation; one variant per
    /// phase of the request lifecycle (URL building, auth, transport, body handling).
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected HTTP status; `value` holds
        /// the error body parsed from the response.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Declared for symmetry across generated operations; this GET sends
        /// an empty body and never constructs it.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Keeps the raw response bytes alongside the decode error for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn list_by_manager(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::FileServerList, list_by_manager::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/fileservers",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_manager::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_manager::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_manager::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_manager::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FileServerList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_manager::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_manager {
    use super::{models, API_VERSION};
    /// Error type for the `list_by_manager` operation; one variant per phase
    /// of the request lifecycle (URL building, auth, transport, body handling).
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected HTTP status; `value` holds
        /// the error body parsed from the response.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Declared for symmetry across generated operations; this GET sends
        /// an empty body and never constructs it.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Keeps the raw response bytes alongside the decode error for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
pub mod file_shares {
use super::{models, API_VERSION};
pub async fn list_by_file_server(
operation_config: &crate::OperationConfig,
device_name: &str,
file_server_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::FileShareList, list_by_file_server::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/fileservers/{}/shares",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
file_server_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_file_server::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_file_server::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_file_server::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_file_server::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FileShareList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_file_server::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_file_server::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_file_server::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_file_server {
    use super::{models, API_VERSION};
    /// Error type for the `list_by_file_server` operation; one variant per
    /// phase of the request lifecycle (URL building, auth, transport, body handling).
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected HTTP status; `value` holds
        /// the error body parsed from the response.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Declared for symmetry across generated operations; this GET sends
        /// an empty body and never constructs it.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Keeps the raw response bytes alongside the decode error for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get(
operation_config: &crate::OperationConfig,
device_name: &str,
file_server_name: &str,
share_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::FileShare, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/fileservers/{}/shares/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
file_server_name,
share_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FileShare =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
    use super::{models, API_VERSION};
    /// Error type for the `get` operation; one variant per phase of the
    /// request lifecycle (URL building, auth, transport, body handling).
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected HTTP status; `value` holds
        /// the error body parsed from the response.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Declared for symmetry across generated operations; this GET sends
        /// an empty body and never constructs it.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Keeps the raw response bytes alongside the decode error for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
device_name: &str,
file_server_name: &str,
share_name: &str,
file_share: &models::FileShare,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/fileservers/{}/shares/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
file_server_name,
share_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(file_share).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FileShare = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
    use super::{models, API_VERSION};
    /// Successful outcomes of `create_or_update`: 200 returns the stored
    /// share; 202 means the change was accepted for asynchronous processing.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::FileShare),
        Accepted202,
    }
    /// Failure modes of `create_or_update`, one variant per phase of the
    /// request lifecycle (URL building, auth, serialization, transport).
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected HTTP status; `value` holds
        /// the error body parsed from the response.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Produced when the `FileShare` body cannot be serialized to JSON.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Keeps the raw response bytes alongside the decode error for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn delete(
operation_config: &crate::OperationConfig,
device_name: &str,
file_server_name: &str,
share_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/fileservers/{}/shares/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
file_server_name,
share_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
    use super::{models, API_VERSION};
    /// Successful outcomes of `delete`: 202 Accepted (asynchronous deletion
    /// started) or 204 No Content.
    #[derive(Debug)]
    pub enum Response {
        Accepted202,
        NoContent204,
    }
    /// Failure modes of `delete`, one variant per phase of the request
    /// lifecycle (URL building, auth, transport, body handling).
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected HTTP status; `value` holds
        /// the error body parsed from the response.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Declared for symmetry across generated operations; this operation
        /// sends an empty body and never constructs it.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Keeps the raw response bytes alongside the decode error for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn list_metrics(
operation_config: &crate::OperationConfig,
device_name: &str,
file_server_name: &str,
share_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::MetricList, list_metrics::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/fileservers/{}/shares/{}/metrics",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
file_server_name,
share_name
);
let mut url = url::Url::parse(url_str).map_err(list_metrics::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_metrics::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_metrics::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_metrics::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::MetricList =
serde_json::from_slice(rsp_body).map_err(|source| list_metrics::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list_metrics::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_metrics::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_metrics {
    use super::{models, API_VERSION};
    /// Error type for the `list_metrics` operation; one variant per phase of
    /// the request lifecycle (URL building, auth, transport, body handling).
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected HTTP status; `value` holds
        /// the error body parsed from the response.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Declared for symmetry across generated operations; this GET sends
        /// an empty body and never constructs it.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Keeps the raw response bytes alongside the decode error for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Retrieves the metric definitions of all metrics aggregated at the file share.
///
/// `GET .../fileservers/{file_server_name}/shares/{share_name}/metricsDefinitions`.
/// A 200 response is decoded as `models::MetricDefinitionList`; any other
/// status is decoded as `models::Error` and returned as `DefaultResponse`.
pub async fn list_metric_definition(
    operation_config: &crate::OperationConfig,
    device_name: &str,
    file_server_name: &str,
    share_name: &str,
    subscription_id: &str,
    resource_group_name: &str,
    manager_name: &str,
) -> std::result::Result<models::MetricDefinitionList, list_metric_definition::Error> {
    let http_client = operation_config.http_client();
    // Build the ARM resource URL (multi-line format! for consistency with the
    // sibling operations in this file).
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/fileservers/{}/shares/{}/metricsDefinitions",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        manager_name,
        device_name,
        file_server_name,
        share_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_metric_definition::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token when the configuration carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_metric_definition::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(list_metric_definition::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_metric_definition::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::MetricDefinitionList = serde_json::from_slice(rsp_body)
                .map_err(|source| list_metric_definition::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            // Non-200: surface the parsed service error together with the status.
            let rsp_body = rsp.body();
            let rsp_value: models::Error = serde_json::from_slice(rsp_body)
                .map_err(|source| list_metric_definition::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(list_metric_definition::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod list_metric_definition {
    use super::{models, API_VERSION};
    /// Error type for the `list_metric_definition` operation; one variant per
    /// phase of the request lifecycle (URL building, auth, transport, body handling).
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected HTTP status; `value` holds
        /// the error body parsed from the response.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Declared for symmetry across generated operations; this GET sends
        /// an empty body and never constructs it.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Keeps the raw response bytes alongside the decode error for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn list_by_device(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::FileShareList, list_by_device::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/shares",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_device::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_device::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_device::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_device::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FileShareList =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_device::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_device {
    use super::{models, API_VERSION};
    /// Error type for the `list_by_device` operation; one variant per phase
    /// of the request lifecycle (URL building, auth, transport, body handling).
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected HTTP status; `value` holds
        /// the error body parsed from the response.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Declared for symmetry across generated operations; this GET sends
        /// an empty body and never constructs it.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Keeps the raw response bytes alongside the decode error for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
pub mod iscsi_servers {
use super::{models, API_VERSION};
pub async fn list_by_device(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::IscsiServerList, list_by_device::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/iscsiservers",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_device::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_device::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_device::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_device::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IscsiServerList =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_device::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_device {
    use super::{models, API_VERSION};
    /// Error type for the `list_by_device` operation; one variant per phase
    /// of the request lifecycle (URL building, auth, transport, body handling).
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected HTTP status; `value` holds
        /// the error body parsed from the response.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Declared for symmetry across generated operations; this GET sends
        /// an empty body and never constructs it.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Keeps the raw response bytes alongside the decode error for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Fetches a single iSCSI server by name from a StorSimple device.
///
/// Sends `GET .../devices/{device_name}/iscsiservers/{iscsi_server_name}`.
/// A 200 response is deserialized into [`models::IscsiServer`]; any other
/// status becomes `get::Error::DefaultResponse`.
pub async fn get(
operation_config: &crate::OperationConfig,
device_name: &str,
iscsi_server_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::IscsiServer, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/iscsiservers/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
iscsi_server_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// GET request: the body is intentionally empty.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IscsiServer =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status: decode the service error payload and surface it.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Creates or updates an iSCSI server on a StorSimple device.
///
/// Sends `PUT .../devices/{device_name}/iscsiservers/{iscsi_server_name}` with
/// `iscsi_server` serialized as the JSON body. A 200 response is deserialized
/// into `Response::Ok200(models::IscsiServer)`; a 202 maps to
/// `Response::Accepted202` with no body; anything else becomes
/// `create_or_update::Error::DefaultResponse`.
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
device_name: &str,
iscsi_server_name: &str,
iscsi_server: &models::IscsiServer,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/iscsiservers/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
iscsi_server_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// PUT carries a JSON payload, so advertise the content type explicitly.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(iscsi_server).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IscsiServer = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
// 202 carries no body worth decoding here.
http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
// Any other status: decode the service error payload and surface it.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::IscsiServer),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Deletes an iSCSI server from a StorSimple device.
///
/// Sends `DELETE .../devices/{device_name}/iscsiservers/{iscsi_server_name}`.
/// 202 maps to `Response::Accepted202`, 204 to `Response::NoContent204`;
/// anything else becomes `delete::Error::DefaultResponse`.
pub async fn delete(
operation_config: &crate::OperationConfig,
device_name: &str,
iscsi_server_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/iscsiservers/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
iscsi_server_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// DELETE request: the body is intentionally empty.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
// Any other status: decode the service error payload and surface it.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Triggers an immediate backup of an iSCSI server.
///
/// Sends `POST .../iscsiservers/{iscsi_server_name}/backup` with an empty body
/// and an explicit `Content-Length: 0`. 202 maps to `Response::Accepted202`,
/// 204 to `Response::NoContent204`; anything else becomes
/// `backup_now::Error::DefaultResponse`.
pub async fn backup_now(
operation_config: &crate::OperationConfig,
device_name: &str,
iscsi_server_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<backup_now::Response, backup_now::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/iscsiservers/{}/backup",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
iscsi_server_name
);
let mut url = url::Url::parse(url_str).map_err(backup_now::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(backup_now::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// Body-less POST: declare a zero Content-Length explicitly.
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(backup_now::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(backup_now::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(backup_now::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(backup_now::Response::NoContent204),
// Any other status: decode the service error payload and surface it.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| backup_now::Error::DeserializeError(source, rsp_body.clone()))?;
Err(backup_now::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod backup_now {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists metrics for an iSCSI server, optionally restricted by an OData filter.
///
/// Sends `GET .../iscsiservers/{iscsi_server_name}/metrics`, appending
/// `$filter` to the query string only when `filter` is `Some`. A 200 response
/// is deserialized into [`models::MetricList`]; anything else becomes
/// `list_metrics::Error::DefaultResponse`.
pub async fn list_metrics(
operation_config: &crate::OperationConfig,
device_name: &str,
iscsi_server_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::MetricList, list_metrics::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/iscsiservers/{}/metrics",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
iscsi_server_name
);
let mut url = url::Url::parse(url_str).map_err(list_metrics::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_metrics::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// The $filter query parameter is optional.
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
// GET request: the body is intentionally empty.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_metrics::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_metrics::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::MetricList =
serde_json::from_slice(rsp_body).map_err(|source| list_metrics::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status: decode the service error payload and surface it.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list_metrics::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_metrics::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_metrics {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists the metric definitions available for an iSCSI server.
///
/// Sends `GET .../iscsiservers/{iscsi_server_name}/metricsDefinitions`.
/// A 200 response is deserialized into [`models::MetricDefinitionList`];
/// anything else becomes `list_metric_definition::Error::DefaultResponse`.
pub async fn list_metric_definition(
operation_config: &crate::OperationConfig,
device_name: &str,
iscsi_server_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::MetricDefinitionList, list_metric_definition::Error> {
let http_client = operation_config.http_client();
// Reformatted from a machine-collapsed one-liner to match the multi-line
// format! convention used by every sibling operation in this module.
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/iscsiservers/{}/metricsDefinitions",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
iscsi_server_name
);
let mut url = url::Url::parse(url_str).map_err(list_metric_definition::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_metric_definition::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// GET request: the body is intentionally empty.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_metric_definition::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_metric_definition::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::MetricDefinitionList = serde_json::from_slice(rsp_body)
.map_err(|source| list_metric_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status: decode the service error payload and surface it.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_metric_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_metric_definition::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_metric_definition {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists all iSCSI servers under a StorSimple manager (across devices).
///
/// Sends `GET .../managers/{manager_name}/iscsiservers`. A 200 response is
/// deserialized into [`models::IscsiServerList`]; anything else becomes
/// `list_by_manager::Error::DefaultResponse`.
pub async fn list_by_manager(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::IscsiServerList, list_by_manager::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/iscsiservers",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_manager::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_manager::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// GET request: the body is intentionally empty.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_manager::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_manager::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IscsiServerList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status: decode the service error payload and surface it.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_manager::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_manager {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Operations on StorSimple jobs: list per device, fetch by name, and list
/// per manager. Each function builds the REST request by hand against the
/// `Microsoft.StorSimple` resource provider.
pub mod jobs {
use super::{models, API_VERSION};
/// Lists jobs on a device, optionally restricted by an OData `$filter`.
///
/// Sends `GET .../devices/{device_name}/jobs`; 200 deserializes into
/// [`models::JobList`], anything else becomes
/// `list_by_device::Error::DefaultResponse`.
pub async fn list_by_device(
operation_config: &crate::OperationConfig,
device_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::JobList, list_by_device::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/jobs",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_device::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_device::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// The $filter query parameter is optional.
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
// GET request: the body is intentionally empty.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_device::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_device::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::JobList =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status: decode the service error payload and surface it.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list_by_device::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_device::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for `list_by_device`.
pub mod list_by_device {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
// Service returned a non-success status code.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Fetches a single job by name from a device.
///
/// Sends `GET .../devices/{device_name}/jobs/{job_name}`; 200 deserializes
/// into [`models::Job`], anything else becomes `get::Error::DefaultResponse`.
pub async fn get(
operation_config: &crate::OperationConfig,
device_name: &str,
job_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::Job, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/devices/{}/jobs/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
device_name,
job_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// GET request: the body is intentionally empty.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Job =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status: decode the service error payload and surface it.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for `get`.
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
// Service returned a non-success status code.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists jobs under a manager, optionally restricted by an OData `$filter`.
///
/// Sends `GET .../managers/{manager_name}/jobs`; 200 deserializes into
/// [`models::JobList`], anything else becomes
/// `list_by_manager::Error::DefaultResponse`.
pub async fn list_by_manager(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::JobList, list_by_manager::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/jobs",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_manager::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_manager::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// The $filter query parameter is optional.
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
// GET request: the body is intentionally empty.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_manager::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_manager::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::JobList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status: decode the service error payload and surface it.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_manager::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for `list_by_manager`.
pub mod list_by_manager {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
// Service returned a non-success status code.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod storage_account_credentials {
use super::{models, API_VERSION};
/// Lists all storage account credentials registered with a manager.
///
/// Sends `GET .../managers/{manager_name}/storageAccountCredentials`. A 200
/// response is deserialized into [`models::StorageAccountCredentialList`];
/// anything else becomes `list_by_manager::Error::DefaultResponse`.
pub async fn list_by_manager(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::StorageAccountCredentialList, list_by_manager::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/storageAccountCredentials",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_manager::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_manager::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// GET request: the body is intentionally empty.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_manager::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_manager::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::StorageAccountCredentialList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status: decode the service error payload and surface it.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_manager::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_manager {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Fetches a single storage account credential by name.
///
/// Sends `GET .../managers/{manager_name}/storageAccountCredentials/{credential_name}`.
/// A 200 response is deserialized into [`models::StorageAccountCredential`];
/// anything else becomes `get::Error::DefaultResponse`.
pub async fn get(
operation_config: &crate::OperationConfig,
credential_name: &str,
subscription_id: &str,
resource_group_name: &str,
manager_name: &str,
) -> std::result::Result<models::StorageAccountCredential, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/storageAccountCredentials/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
manager_name,
credential_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// GET request: the body is intentionally empty.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::StorageAccountCredential =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status: decode the service error payload and surface it.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `get` operation.
pub mod get {
    use super::{models, API_VERSION};
    /// Failure modes of the operation, from URL construction through response deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-success HTTP status; `value` carries its error payload.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Creates or updates the named storage account credential (HTTP PUT).
/// Returns `Ok200` with the stored credential, or `Accepted202` when the
/// service processes the change asynchronously.
pub async fn create_or_update(
    operation_config: &crate::OperationConfig,
    credential_name: &str,
    storage_account: &models::StorageAccountCredential,
    subscription_id: &str,
    resource_group_name: &str,
    manager_name: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
    let http_client = operation_config.http_client();
    // Build the resource URL from the configured base path and the path parameters.
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/storageAccountCredentials/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        manager_name,
        credential_name
    );
    let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    // Attach a bearer token when a token credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(create_or_update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // PUT body is the JSON-serialized credential.
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(storage_account).map_err(create_or_update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(create_or_update::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::StorageAccountCredential = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Ok200(rsp_value))
        }
        // 202: accepted for asynchronous processing; no body to parse.
        http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
        // Any other status: parse the service's error payload and surface it.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::Error = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(create_or_update::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Response and error types for the `create_or_update` operation.
pub mod create_or_update {
    use super::{models, API_VERSION};
    /// Success outcomes: a stored credential (200) or async acceptance (202).
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::StorageAccountCredential),
        Accepted202,
    }
    /// Failure modes of the operation, from URL construction through response deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-success HTTP status; `value` carries its error payload.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Deletes the named storage account credential (HTTP DELETE).
/// Returns `Accepted202` for asynchronous deletion or `NoContent204` when done.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    credential_name: &str,
    subscription_id: &str,
    resource_group_name: &str,
    manager_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = operation_config.http_client();
    // Build the resource URL from the configured base path and the path parameters.
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/storageAccountCredentials/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        manager_name,
        credential_name
    );
    let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    // Attach a bearer token when a token credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(delete::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // DELETE requests carry an empty body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
        http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        // Any other status: parse the service's error payload and surface it.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::Error =
                serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(delete::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Response and error types for the `delete` operation.
pub mod delete {
    use super::{models, API_VERSION};
    /// Success outcomes: async acceptance (202) or immediate completion (204).
    #[derive(Debug)]
    pub enum Response {
        Accepted202,
        NoContent204,
    }
    /// Failure modes of the operation, from URL construction through response deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-success HTTP status; `value` carries its error payload.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
/// Operations on StorSimple storage domains: list, get, create/update, delete.
pub mod storage_domains {
    use super::{models, API_VERSION};
    /// Lists all storage domains under the given manager.
    pub async fn list_by_manager(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        manager_name: &str,
    ) -> std::result::Result<models::StorageDomainList, list_by_manager::Error> {
        let http_client = operation_config.http_client();
        // Build the collection URL from the configured base path and the path parameters.
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/storageDomains",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            manager_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_manager::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token when a token credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_manager::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // GET requests carry an empty body.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_by_manager::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_manager::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::StorageDomainList = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Any other status: parse the service's error payload and surface it.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::Error = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_manager::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list_by_manager::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error types for the `list_by_manager` operation.
    pub mod list_by_manager {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service answered with a non-success HTTP status; `value` carries its error payload.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::Error,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Fetches the properties of the named storage domain.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        storage_domain_name: &str,
        subscription_id: &str,
        resource_group_name: &str,
        manager_name: &str,
    ) -> std::result::Result<models::StorageDomain, get::Error> {
        let http_client = operation_config.http_client();
        // Build the resource URL from the configured base path and the path parameters.
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/storageDomains/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            manager_name,
            storage_domain_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token when a token credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // GET requests carry an empty body.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::StorageDomain =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Any other status: parse the service's error payload and surface it.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::Error =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error types for the `get` operation.
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service answered with a non-success HTTP status; `value` carries its error payload.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::Error,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates or updates the named storage domain (HTTP PUT).
    /// Returns `Ok200` with the stored domain, or `Accepted202` when the
    /// service processes the change asynchronously.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        storage_domain_name: &str,
        storage_domain: &models::StorageDomain,
        subscription_id: &str,
        resource_group_name: &str,
        manager_name: &str,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = operation_config.http_client();
        // Build the resource URL from the configured base path and the path parameters.
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/storageDomains/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            manager_name,
            storage_domain_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // Attach a bearer token when a token credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // PUT body is the JSON-serialized storage domain.
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(storage_domain).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::StorageDomain = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            // 202: accepted for asynchronous processing; no body to parse.
            http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
            // Any other status: parse the service's error payload and surface it.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::Error = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(create_or_update::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for the `create_or_update` operation.
    pub mod create_or_update {
        use super::{models, API_VERSION};
        /// Success outcomes: a stored domain (200) or async acceptance (202).
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::StorageDomain),
            Accepted202,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service answered with a non-success HTTP status; `value` carries its error payload.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::Error,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Deletes the named storage domain (HTTP DELETE).
    /// Returns `Accepted202` for asynchronous deletion or `NoContent204` when done.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        storage_domain_name: &str,
        subscription_id: &str,
        resource_group_name: &str,
        manager_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        // Build the resource URL from the configured base path and the path parameters.
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.StorSimple/managers/{}/storageDomains/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            manager_name,
            storage_domain_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        // Attach a bearer token when a token credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // DELETE requests carry an empty body.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            // Any other status: parse the service's error payload and surface it.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::Error =
                    serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(delete::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for the `delete` operation.
    pub mod delete {
        use super::{models, API_VERSION};
        /// Success outcomes: async acceptance (202) or immediate completion (204).
        #[derive(Debug)]
        pub enum Response {
            Accepted202,
            NoContent204,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service answered with a non-success HTTP status; `value` carries its error payload.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::Error,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
|
// Diesel schema for the `user` table, keyed by the `id` column.
// All columns map to SQL VARCHAR; `password` presumably stores a hash —
// NOTE(review): confirm it is never a plaintext password.
table! {
    user (id) {
        id -> Varchar,
        username -> Varchar,
        nickname -> Varchar,
        phone -> Varchar,
        email -> Varchar,
        password -> Varchar,
    }
}
|
#![feature(stmt_expr_attributes)]
#![cfg_attr(feature="benchmark", feature(test))]
#[cfg(feature = "benchmark")]
extern crate criterion;
#[cfg(feature = "benchmark")]
extern crate test;
extern crate passenger;
#[cfg(feature = "benchmark")]
use criterion::Criterion;
#[cfg(feature = "benchmark")]
mod benches {
    use std::sync::mpsc::sync_channel;
    use std::thread;
    use test::black_box;
    use passenger::BoundedSpscQueue;
    use criterion::Bencher;

    /// Cross-thread receive benchmark for `std::sync::mpsc::sync_channel`:
    /// a producer thread floods the channel, the bench measures `recv`.
    pub fn simple_sync_std(b: &mut Bencher) {
        let (sender, receiver) = sync_channel::<i32>(1000);
        // Producer runs until the receiver is dropped and `send` fails.
        thread::spawn(move || while sender.send(0).is_ok() {});
        // Consume one item first so the producer is known to be running.
        let _ = receiver.recv();
        b.iter(|| black_box(receiver.recv()));
    }

    /// Cross-thread receive benchmark for `BoundedSpscQueue`, mirroring
    /// `simple_sync_std` for a like-for-like comparison.
    pub fn simple_bounded_spsc(b: &mut Bencher) {
        let (mut sender, mut receiver) = BoundedSpscQueue::new(1000);
        // Producer runs until the receiver is dropped and `send` fails.
        thread::spawn(move || while sender.send(0).is_ok() {});
        // Consume one item first so the producer is known to be running.
        let _ = receiver.recv();
        b.iter(|| black_box(receiver.recv()));
    }

    /// Single-threaded send+recv round trip for `sync_channel`.
    pub fn simple_single_thread_sync_std(b: &mut Bencher) {
        let (sender, receiver) = sync_channel::<i32>(1000);
        b.iter(|| {
            sender.send(0).unwrap();
            receiver.recv().unwrap();
        });
    }

    /// Single-threaded send+recv round trip for `BoundedSpscQueue`.
    pub fn simple_single_thread_bounded_spsc(b: &mut Bencher) {
        let (mut sender, mut receiver) = BoundedSpscQueue::new(1000);
        b.iter(|| {
            sender.send(0).unwrap();
            receiver.recv().unwrap();
        });
    }
}
#[cfg(feature = "benchmark")]
use benches::*;
/// Registers and runs all benchmarks; a no-op unless the `benchmark`
/// feature is enabled.
fn main() {
    #[cfg(feature = "benchmark")]
    {
        let mut criterion = Criterion::default();
        // Registration order matches the report order.
        criterion.bench_function("std single thread", simple_single_thread_sync_std);
        criterion.bench_function("bounded spsc single thread", simple_single_thread_bounded_spsc);
        criterion.bench_function("std", simple_sync_std);
        criterion.bench_function("bounded spsc", simple_bounded_spsc);
    }
}
|
mod primitives;
pub use self::primitives::DEFAULT_PROFILE_IDX;
|
use std::collections::HashMap;
const INPUT: &str = include_str!("../input.txt");
/// Parses the puzzle input into passports (key/value maps) and keeps only
/// those that contain all seven required fields ("cid" is optional).
fn get_passports() -> impl Iterator<Item = HashMap<&'static str, &'static str>> {
    // The seven mandatory passport fields.
    const REQUIRED: [&str; 7] = ["byr", "iyr", "eyr", "hgt", "hcl", "ecl", "pid"];
    INPUT
        // Records are separated by blank lines.
        .split("\n\n")
        .map(|record| {
            // Fields within a record are separated by spaces or newlines,
            // each written as "key:value".
            record
                .split(|c| c == '\n' || c == ' ')
                .map(|field| {
                    let mut parts = field.split(':');
                    (parts.next().unwrap(), parts.next().unwrap())
                })
                .collect::<HashMap<_, _>>()
        })
        .filter(|passport| REQUIRED.iter().all(|key| passport.contains_key(key)))
}
/// Validates one passport field according to the part-2 rules.
/// Unknown keys (e.g. "cid") are always considered valid.
fn is_valid(key: &str, value: &str) -> bool {
    match key {
        // Four-digit years within fixed inclusive ranges.
        "byr" => matches!(value.parse::<u64>(), Ok(1920..=2002)),
        "iyr" => matches!(value.parse::<u64>(), Ok(2010..=2020)),
        "eyr" => matches!(value.parse::<u64>(), Ok(2020..=2030)),
        // Height: a number followed by "cm" or "in"; the valid range
        // depends on the unit. Anything else is invalid.
        "hgt" => {
            if let Some(height) = value.strip_suffix("cm") {
                matches!(height.parse::<u64>(), Ok(150..=193))
            } else if let Some(height) = value.strip_suffix("in") {
                matches!(height.parse::<u64>(), Ok(59..=76))
            } else {
                false
            }
        }
        // Hair color: '#' followed by exactly six hex digits.
        // `strip_prefix` replaces `split_at(1)`, which panicked on an empty
        // value (or a value whose first char is multi-byte UTF-8).
        // `is_ascii_hexdigit` already accepts both cases, matching the old
        // `to_ascii_lowercase` behavior.
        "hcl" => match value.strip_prefix('#') {
            Some(rest) => rest.len() == 6 && rest.chars().all(|c| c.is_ascii_hexdigit()),
            None => false,
        },
        "ecl" => matches!(value, "amb" | "blu" | "brn" | "gry" | "grn" | "hzl" | "oth"),
        // Passport id: exactly nine ASCII digits. Checking digits directly
        // (rather than `parse::<u64>().is_ok()`) rejects sign-prefixed
        // strings like "+12345678" that parse but are not nine digits.
        "pid" => value.len() == 9 && value.bytes().all(|b| b.is_ascii_digit()),
        _ => true,
    }
}
/// Part 1: number of passports with all required fields present
/// (`get_passports` already filters on presence; values are not validated).
fn part1() -> usize {
    get_passports().count()
}
/// Part 2: number of passports whose fields are all individually valid.
fn part2() -> usize {
    get_passports()
        // Keep passports with no invalid field (De Morgan of "all valid").
        .filter(|passport| !passport.iter().any(|(key, value)| !is_valid(key, value)))
        .count()
}
/// Solves both puzzle parts and prints the answers.
fn main() {
    let answer1 = part1();
    let answer2 = part2();
    println!("part 1: {}", answer1);
    println!("part 2: {}", answer2);
}
#[cfg(test)]
mod tests {
    use super::{part1, part2};
    // Regression tests pinned to the committed input.txt; the expected
    // counts must be updated if the input file changes.
    #[test]
    fn test_part1() {
        assert_eq!(part1(), 233);
    }
    #[test]
    fn test_part2() {
        assert_eq!(part2(), 111);
    }
}
|
use fastping_rs::Pinger;
use fastping_rs::PingResult::{Idle, Receive};
use std::collections::HashMap;
use std::sync::mpsc::{Sender, Receiver};
use std::time::SystemTime;
use crate::{PingResult, Target};
/// Runs one measurement round: collects `nping` ping results per target,
/// aggregates per-target statistics, and sends the result map (with a
/// nanosecond timestamp) to the main thread via `results_tx`.
///
/// `ips` is now a slice (`&[Target]` instead of `&Vec<Target>`, clippy
/// `ptr_arg`); existing `&vec` call sites keep working via deref coercion.
///
/// Panics if the pinger worker threads disconnect or the main-thread
/// receiver is gone.
pub fn run_ping(pinger: &Pinger,
                results: &Receiver<fastping_rs::PingResult>,
                ips: &[Target],
                results_tx: &Sender<(HashMap<String, PingResult>, u128)>,
                nping: u32) {
    // One result slot per target, sized up front.
    let mut res: HashMap<String, PingResult> = HashMap::with_capacity(ips.len());
    let ping_count = nping as usize * ips.len();
    // Pre-fill so only known targets are updated below.
    for ip in ips {
        res.insert(ip.target.to_string(), PingResult::new());
    }
    pinger.run_pinger();
    // Expect exactly `nping` results (replies or idle timeouts) per target.
    for _ in 0..ping_count {
        match results.recv() {
            Ok(result) => {
                let addr = match result {
                    Idle { addr } => addr,
                    Receive { addr, rtt: _ } => addr,
                };
                // `and_modify` without `or_insert` silently ignores
                // addresses that are not in the target set.
                res.entry(addr.to_string()).and_modify(|e| e.handle(result));
            }
            Err(_) => panic!("Worker threads disconnected!"),
        }
    }
    pinger.stop_pinger();
    // Derive aggregate statistics for each target from the raw samples.
    for r in res.values_mut() {
        r.update(nping);
    }
    let time_ns = SystemTime::UNIX_EPOCH.elapsed().unwrap().as_nanos();
    // Forward the finished round to the main thread.
    results_tx.send((res, time_ns)).unwrap();
}
|
use wasm_bindgen::prelude::*;
// https://rustwasm.github.io/docs/wasm-bindgen/reference/types/boxed-jsvalue-slice.html
// FFI bindings to the JavaScript event-queue module.
#[wasm_bindgen(module = "js/event-queue.js")]
extern "C" {
    /// Pops the next event from the JS-side queue and returns it as a string.
    // NOTE(review): empty-queue behavior is defined in js/event-queue.js — confirm there.
    #[wasm_bindgen]
    pub fn pop_event() -> String;
    // #[wasm_bindgen(method)]
    // fn render(this: &MyClass) -> String;
}
|
// svd2rust-generated reader type aliases for the OTG_DSTS register fields.
#[doc = "Register `OTG_DSTS` reader"]
pub type R = crate::R<OTG_DSTS_SPEC>;
#[doc = "Field `SUSPSTS` reader - SUSPSTS"]
pub type SUSPSTS_R = crate::BitReader;
#[doc = "Field `ENUMSPD` reader - ENUMSPD"]
pub type ENUMSPD_R = crate::FieldReader;
#[doc = "Field `EERR` reader - EERR"]
pub type EERR_R = crate::BitReader;
// FNSOF is the only field wider than 8 bits (14 bits), hence the u16 reader.
#[doc = "Field `FNSOF` reader - FNSOF"]
pub type FNSOF_R = crate::FieldReader<u16>;
#[doc = "Field `DEVLNSTS` reader - DEVLNSTS"]
pub type DEVLNSTS_R = crate::FieldReader;
// Field accessors: each shifts the raw 32-bit register value to the field's
// LSB and masks to the field width.
impl R {
    #[doc = "Bit 0 - SUSPSTS"]
    #[inline(always)]
    pub fn suspsts(&self) -> SUSPSTS_R {
        SUSPSTS_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bits 1:2 - ENUMSPD"]
    #[inline(always)]
    pub fn enumspd(&self) -> ENUMSPD_R {
        // 2-bit field at bits 1..=2.
        ENUMSPD_R::new(((self.bits >> 1) & 3) as u8)
    }
    #[doc = "Bit 3 - EERR"]
    #[inline(always)]
    pub fn eerr(&self) -> EERR_R {
        EERR_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bits 8:21 - FNSOF"]
    #[inline(always)]
    pub fn fnsof(&self) -> FNSOF_R {
        // 14-bit field at bits 8..=21.
        FNSOF_R::new(((self.bits >> 8) & 0x3fff) as u16)
    }
    #[doc = "Bits 22:23 - DEVLNSTS"]
    #[inline(always)]
    pub fn devlnsts(&self) -> DEVLNSTS_R {
        // 2-bit field at bits 22..=23.
        DEVLNSTS_R::new(((self.bits >> 22) & 3) as u8)
    }
}
#[doc = "This register indicates the status of the core with respect to USB-related events. It must be read on interrupts from the device all interrupts (OTG_DAINT) register.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`otg_dsts::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct OTG_DSTS_SPEC;
// 32-bit register.
impl crate::RegisterSpec for OTG_DSTS_SPEC {
    type Ux = u32;
}
// Read-only: only `Readable` is implemented, no `Writable`.
#[doc = "`read()` method returns [`otg_dsts::R`](R) reader structure"]
impl crate::Readable for OTG_DSTS_SPEC {}
#[doc = "`reset()` method sets OTG_DSTS to value 0x10"]
impl crate::Resettable for OTG_DSTS_SPEC {
    const RESET_VALUE: Self::Ux = 0x10;
}
|
#![no_main]
extern crate abxml;
#[macro_use]
extern crate libfuzzer_sys;
use abxml::chunks::ResourceWrapper;
// libFuzzer entry point: feed arbitrary bytes into the resource parser.
fuzz_target!(|data: &[u8]| {
    let rw = ResourceWrapper::new(data);
    // Result is intentionally ignored — the fuzzer only checks for
    // panics/crashes/UB while parsing untrusted input.
    rw.get_resources();
});
|
use serde::Serialize;
use std::io::Write;
pub mod auth;
pub mod internal_server;
pub mod validation;
#[derive(Serialize, Debug)]
// `untagged`: each variant serializes as its inner payload, with no
// wrapping variant name in the JSON.
#[serde(untagged)]
/// Represents all the different error categories
pub enum ErrorCategories {
    ValidationError(validation::ValidationError),
    InternalServerError(internal_server::InternalServerError),
    AuthError(auth::AuthError),
}
/// The actual error
#[derive(Serialize, Debug)]
pub struct ApplicationError {
    // Short machine-readable error discriminator (static string).
    pub kind: &'static str,
    // Category-specific error payload.
    pub body: ErrorCategories,
}
/// Wrapper struct so we can have `{"error":{...}}` instead of just `{...}`
#[derive(Serialize, Debug)]
pub struct Error {
    pub error: ApplicationError,
}
// Human-readable rendering; uses the Debug form of the body.
// NOTE: the multi-line format string is emitted verbatim, including its
// embedded indentation — do not reformat it.
impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
            "Error {{
error: ApplicationError {{
kind: {},
body: {:#?}
}},
}}",
            self.error.kind, self.error.body
        )
    }
}
impl std::error::Error for Error {}
|
extern crate hyper;
extern crate rustc_serialize;
extern crate websocket;
//mod webpage;
mod chatserver;
mod newwebpage;
/// Application entry point: starts the chat server, then serves the web page.
// NOTE(review): assumes `chatserver::start()` returns (e.g. after spawning
// its own threads); otherwise `newwebpage::serve()` is never reached — confirm.
fn main() {
    chatserver::start();
    // webpage::serve();
    newwebpage::serve();
}
|
//! [`Recorder`] implementation finding the starts of all matches.
//! Faster than a full [`NodesRecorder`](super::nodes::NodesRecorder).
use super::*;
use std::cell::RefCell;
/// Recorder that saves only the start indices to the [`Sink`].
pub struct IndexRecorder<'s, S> {
    // RefCell gives interior mutability so `record_match(&self)` can push
    // into the mutably-borrowed sink.
    sink: RefCell<&'s mut S>,
}
impl<'s, S> IndexRecorder<'s, S> {
    /// Wraps a mutable sink reference for the lifetime of the query run.
    #[inline]
    pub(crate) fn new(sink: &'s mut S) -> Self {
        Self {
            sink: RefCell::new(sink),
        }
    }
}
// Block boundaries are irrelevant when only match start indices are
// reported, so this is a no-op implementation.
impl<'s, B: Deref<Target = [u8]>, S> InputRecorder<B> for IndexRecorder<'s, S>
where
    S: Sink<MatchIndex>,
{
    #[inline(always)]
    fn record_block_start(&self, _new_block: B) {
        // Intentionally left empty.
    }
}
impl<'s, B: Deref<Target = [u8]>, S> Recorder<B> for IndexRecorder<'s, S>
where
    S: Sink<MatchIndex>,
{
    /// Forwards only the match start index to the sink; depth and node type
    /// are not needed for index-only output.
    #[inline]
    fn record_match(&self, idx: usize, _depth: Depth, _ty: MatchedNodeType) -> Result<(), EngineError> {
        self.sink
            .borrow_mut()
            .add_match(idx)
            .map_err(|err| EngineError::SinkError(Box::new(err)))
    }
    /// No-op: match ends are irrelevant when recording start indices only.
    #[inline]
    fn record_value_terminator(&self, _idx: usize, _depth: Depth) -> Result<(), EngineError> {
        // Intentionally left empty.
        Ok(())
    }
}
|
use crate::{
Result, OG_SUDO_STANDARD_LECTURE, PASSWORD, SUDOERS_ROOT_ALL, SUDOERS_USER_ALL_ALL,
SUDOERS_USER_ALL_NOPASSWD, USERNAME,
};
use sudo_test::{Command, Env, User};
/* cases where password input is expected */
// `sudo -n` (non-interactive) must fail with exit code 1 when a password
// would be required, without ever emitting a password prompt.
#[test]
fn fails_if_password_needed() -> Result<()> {
    // Policy allows the user everything, but does not waive the password.
    let env = Env(SUDOERS_USER_ALL_ALL).user(USERNAME).build()?;
    let output = Command::new("sudo")
        .args(["-n", "true"])
        .as_user(USERNAME)
        .output(&env)?;
    assert!(!output.status().success());
    assert_eq!(Some(1), output.status().code());
    let stderr = output.stderr();
    // Prompt text differs between original sudo and the re-implementation.
    let password_prompt = if sudo_test::is_original_sudo() {
        "password for ferris"
    } else {
        "Password:"
    };
    // Non-interactive mode must not prompt...
    assert_not_contains!(stderr, password_prompt);
    let diagnostic = if sudo_test::is_original_sudo() {
        "sudo: a password is required"
    } else {
        "interaction required"
    };
    // ...it must explain why it failed instead.
    assert_contains!(stderr, diagnostic);
    Ok(())
}
// Combining `-k` (invalidate cached credentials) with a command and `-n`
// must fail: the timestamp reset means authentication would be needed again.
#[test]
fn flag_remove_timestamp_plus_command_fails() -> Result<()> {
    let env = Env(SUDOERS_USER_ALL_ALL).user(USERNAME).build()?;
    // First authenticate (caching credentials), then run `sudo -n -k true`.
    let output = Command::new("sh")
        .arg("-c")
        .arg(format!(
            "echo {PASSWORD} | sudo -S true 2>/dev/null; sudo -n -k true"
        ))
        .as_user(USERNAME)
        .output(&env)?;
    assert!(!output.status().success());
    assert_eq!(Some(1), output.status().code());
    let stderr = output.stderr();
    // Prompt text differs between original sudo and the re-implementation.
    let password_prompt = if sudo_test::is_original_sudo() {
        "password for ferris"
    } else {
        "Password:"
    };
    // Non-interactive mode must not prompt...
    assert_not_contains!(stderr, password_prompt);
    let diagnostic = if sudo_test::is_original_sudo() {
        "sudo: a password is required"
    } else {
        "interaction required"
    };
    // ...it must explain why it failed instead.
    assert_contains!(stderr, diagnostic);
    Ok(())
}
/* cases where password input is not required */
// root never needs a password, so `sudo -n` succeeds.
#[test]
fn root() -> Result<()> {
    let env = Env(SUDOERS_ROOT_ALL).build()?;
    Command::new("sudo")
        .args(["-n", "true"])
        .output(&env)?
        .assert_success()
}
// A NOPASSWD policy entry makes `sudo -n` succeed for a regular user.
#[test]
fn nopasswd() -> Result<()> {
    let env = Env(SUDOERS_USER_ALL_NOPASSWD).user(USERNAME).build()?;
    Command::new("sudo")
        .args(["-n", "true"])
        .as_user(USERNAME)
        .output(&env)?
        .assert_success()
}
// After a successful password authentication the credential is cached,
// so an immediately following `sudo -n` succeeds without a prompt.
#[test]
fn cached_credential() -> Result<()> {
    let env = Env(SUDOERS_USER_ALL_ALL)
        .user(User(USERNAME).password(PASSWORD))
        .build()?;
    Command::new("sh")
        .arg("-c")
        .arg(format!("echo {PASSWORD} | sudo -S true; sudo -n true"))
        .as_user(USERNAME)
        .output(&env)?
        .assert_success()
}
/* misc */
// Non-interactive mode must also suppress the first-use sudo lecture.
#[test]
fn lecture_is_not_shown() -> Result<()> {
    let env = Env(SUDOERS_USER_ALL_ALL).user(USERNAME).build()?;
    let output = Command::new("sudo")
        .args(["-n", "true"])
        .as_user(USERNAME)
        .output(&env)?;
    assert!(!output.status().success());
    assert_eq!(Some(1), output.status().code());
    assert_not_contains!(output.stderr(), OG_SUDO_STANDARD_LECTURE);
    Ok(())
}
|
/// An IR type: void, 1-/32-bit integers, pointers, and function types.
#[derive(Debug, Clone, PartialEq)]
pub enum Type {
    Void,
    Int1,
    Int32,
    Pointer(Box<Type>),
    Function(Box<FunctionType>),
}

/// A function signature: one return type plus zero or more parameter types.
#[derive(Debug, Clone, PartialEq)]
pub struct FunctionType {
    pub ret_ty: Type,
    pub params_ty: Vec<Type>,
}

impl Type {
    /// Builds a `Type::Function` from a return type and parameter types.
    pub fn func_ty(ret_ty: Type, params_ty: Vec<Type>) -> Self {
        Type::Function(Box::new(FunctionType::new(ret_ty, params_ty)))
    }

    /// Returns the inner signature, or `None` for non-function types.
    pub fn get_function_ty(&self) -> Option<&FunctionType> {
        if let Type::Function(func) = self {
            Some(func.as_ref())
        } else {
            None
        }
    }

    /// Returns a pointer type whose pointee is a clone of `self`.
    pub fn get_pointer_ty(&self) -> Type {
        Type::Pointer(Box::new(self.clone()))
    }

    /// Returns the pointee type, or `None` if `self` is not a pointer.
    pub fn get_element_ty(&self) -> Option<&Type> {
        if let Type::Pointer(elem) = self {
            Some(elem.as_ref())
        } else {
            None
        }
    }

    /// Renders the type in LLVM-like textual form (e.g. `i32`, `i32*`).
    // NOTE(review): an inherent `to_string` shadows the `ToString`
    // convention; implementing `Display` would be more idiomatic.
    pub fn to_string(&self) -> String {
        match self {
            Type::Void => String::from("void"),
            Type::Int1 => String::from("i1"),
            Type::Int32 => String::from("i32"),
            Type::Pointer(pointee) => pointee.to_string() + "*",
            Type::Function(func) => func.to_string(),
        }
    }
}

impl FunctionType {
    /// Creates a signature from a return type and parameter list.
    pub fn new(ret_ty: Type, params_ty: Vec<Type>) -> Self {
        Self { ret_ty, params_ty }
    }

    /// Renders the signature as `ret (p0, p1, )`.
    /// Every parameter — including the last — is followed by `", "`,
    /// preserving the established output format.
    pub fn to_string(&self) -> String {
        let params: String = self
            .params_ty
            .iter()
            .map(|p| p.to_string() + ", ")
            .collect();
        format!("{} ({})", self.ret_ty.to_string(), params)
    }
}
|
#![recursion_limit = "256"]
pub mod meta;
|
use std::path::PathBuf;
use thiserror::Error;
/// Crate-wide result alias defaulting the error type to [`BoilrError`].
pub type StandardResult<T> = std::result::Result<T, BoilrError>;

/// All error conditions this crate can report.
///
/// Variants with a field named `source` rely on thiserror's implicit
/// source detection (no `#[source]` attribute needed); `#[from]` variants
/// additionally get a `From` conversion for use with `?`.
#[derive(Error, Debug)]
pub enum BoilrError {
    #[error("Error cannot format {0:?}")]
    FormatDisplayError(#[from] std::fmt::Error),
    #[error("Error cannot read from {path:?}")]
    ReadError {
        source: std::io::Error,
        path: PathBuf,
    },
    #[error("Error cannot write to {path:?}")]
    WriteError {
        source: std::io::Error,
        path: PathBuf,
    },
    // Boxed because a copy failure wraps another BoilrError (read or write).
    #[error("Error cannot copy from {from_path:?} {to_path:?}")]
    CopyError {
        source: Box<BoilrError>,
        from_path: PathBuf,
        to_path: PathBuf,
    },
    #[error("Error cannot delete {path:?}")]
    DeleteError {
        source: std::io::Error,
        path: PathBuf,
    },
    #[error("Error while deserializing from {path:?}")]
    TomlDeserializeError {
        source: toml::de::Error,
        path: PathBuf,
    },
    #[error("Error while serializing at {path:?}")]
    TomlSerializeError {
        source: toml::ser::Error,
        path: PathBuf,
    },
    #[error("Error while displaying on terminal")]
    TerminalError { source: std::io::Error },
    #[error("Error while parsing files using Tera")]
    TeraTemplateError(#[from] tera::Error),
    #[error("Error while parsing directories")]
    WalkDirError(#[from] walkdir::Error),
    #[error("Internal path stripping error")]
    StripPrefixError(#[from] std::path::StripPrefixError),
    #[error("Cannot convert to String")]
    StrError,
    #[error("Cannot find home dir")]
    HomeDirNotFoundError,
    #[error("Cannot access current directory")]
    AccessCurrentDirError,
    #[error("Error arg not found in clap args")]
    ArgNotFoundError,
    #[error("{path:?} is not a directory")]
    NotADirectoryError { path: PathBuf },
    // Catch-all with an optional free-form message.
    #[error("Generic error")]
    UnspecifiedError(Option<String>),
}
|
use derivatives::Differentiable;
use std::collections::{BTreeMap, VecDeque};
use std::collections::btree_map::{Entry};
/// The outgoing transitions of one DFA state.
#[derive(Debug, Clone)]
pub struct Transition {
    // Explicit successors per character; values index into `Dfa::transitions`.
    pub by_char: BTreeMap<char, u32>,
    // Successor state for every character not listed in `by_char`.
    pub default: u32,
}

/// A DFA built from regex derivatives; state `i` is `transitions[i]`.
/// By construction in `from_derivatives`, the initial regexes occupy the
/// first indices.
#[derive(Debug, Clone)]
pub struct Dfa {
    pub transitions: Vec<Transition>,
}
/// Rewriting of a value into a canonical (normalized) form.
pub trait Normalize {
    fn normalize(self) -> Self;
}

impl<R: Normalize> Normalize for Vec<R> {
    /// Normalizes every element in order.
    ///
    /// Bug fix: the previous body called `Vec::map_in_place`, which was
    /// removed from the standard library before Rust 1.0 and no longer
    /// compiles; the owning iterator pipeline below is the supported
    /// equivalent.
    fn normalize(self) -> Self {
        self.into_iter().map(Normalize::normalize).collect()
    }
}
impl Dfa {
    /// Builds a DFA by repeatedly taking derivatives, starting from the
    /// regexes in `initial`. Returns the DFA together with the mapping from
    /// each normalized regex to its state index; the regexes of `initial`
    /// occupy the first indices in order of first occurrence.
    pub fn from_derivatives<R: Differentiable + Normalize + Ord + Clone>(
        initial: Vec<R>,
    ) -> (Dfa, BTreeMap<R, u32>) {
        // Returns the state index of `re`, registering it (and queueing it
        // for exploration) if it has not been seen before.
        fn index<R: Ord + Clone>(worklist: &mut (BTreeMap<R, u32>, VecDeque<R>), re: R) -> u32 {
            // Fast path: already indexed — no clone needed (this resolves the
            // old FIXME about an unnecessary allocation on every lookup).
            if let Some(&existing) = worklist.0.get(&re) {
                return existing;
            }
            let next_index = worklist.0.len() as u32;
            // One clone is unavoidable for new entries: the map owns the key
            // and the exploration queue owns a copy of the value.
            worklist.0.insert(re.clone(), next_index);
            worklist.1.push_back(re);
            next_index
        }

        let mut result = Dfa { transitions: Vec::new() };
        // (regex -> state index, queue of states still to explore)
        let mut worklist = (BTreeMap::new(), VecDeque::new());
        for r in initial {
            index(&mut worklist, r.normalize());
        }
        while let Some(re) = worklist.1.pop_front() {
            let d = re.derivative();
            let mut by_char = BTreeMap::new();
            for (chars, dre) in d.d {
                let ix = index(&mut worklist, dre.normalize());
                for ch in chars {
                    by_char.insert(ch, ix);
                }
            }
            // Derivative for every character not covered above.
            let default = index(&mut worklist, d.rest.normalize());
            result.transitions.push(Transition { by_char, default });
        }
        (result, worklist.0)
    }
}
|
use super::{operate, BytesArgument};
use nu_engine::CallExt;
use nu_protocol::ast::Call;
use nu_protocol::ast::CellPath;
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::Category;
use nu_protocol::{Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Value};
/// Arguments to `bytes reverse`.
struct Arguments {
    // Column paths to operate on; `None` means "apply to the whole input".
    column_paths: Option<Vec<CellPath>>,
}
impl BytesArgument for Arguments {
    fn take_column_paths(&mut self) -> Option<Vec<CellPath>> {
        // `take` hands ownership to the caller and leaves `None` behind.
        self.column_paths.take()
    }
}
/// The `bytes reverse` command (unit struct; all state lives in `run`'s args).
#[derive(Clone)]
pub struct BytesReverse;
impl Command for BytesReverse {
    fn name(&self) -> &str {
        "bytes reverse"
    }

    fn signature(&self) -> Signature {
        Signature::build("bytes reverse")
            .rest(
                "rest",
                SyntaxShape::CellPath,
                "optionally matches prefix of text by column paths",
            )
            .category(Category::Bytes)
    }

    fn usage(&self) -> &str {
        "Reverse every bytes in the pipeline"
    }

    fn search_terms(&self) -> Vec<&str> {
        vec!["convert", "inverse", "flip"]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        // An empty list of cell paths means "operate on the whole input".
        let paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
        let column_paths = if paths.is_empty() { None } else { Some(paths) };
        operate(
            reverse,
            Arguments { column_paths },
            input,
            call.head,
            engine_state.ctrlc.clone(),
        )
    }

    fn examples(&self) -> Vec<Example> {
        // Expected results mirror the input bytes in reverse order.
        vec![
            Example {
                description: "Reverse bytes `0x[1F FF AA AA]`",
                example: "0x[1F FF AA AA] | bytes reverse",
                result: Some(Value::Binary {
                    val: vec![0xAA, 0xAA, 0xFF, 0x1F],
                    span: Span::test_data(),
                }),
            },
            Example {
                description: "Reverse bytes `0x[FF AA AA]`",
                example: "0x[FF AA AA] | bytes reverse",
                result: Some(Value::Binary {
                    val: vec![0xAA, 0xAA, 0xFF],
                    span: Span::test_data(),
                }),
            },
        ]
    }
}
/// Callback for `operate`: returns the input bytes in reverse order.
fn reverse(input: &[u8], _args: &Arguments, span: Span) -> Value {
    let val: Vec<u8> = input.iter().rev().copied().collect();
    Value::Binary { val, span }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Runs the command's declared `examples()` through the engine and checks
    // each produces its stated result.
    #[test]
    fn test_examples() {
        use crate::test_examples;

        test_examples(BytesReverse {})
    }
}
|
//! FBX data.
use std::{cell::RefCell, path::Path, rc::Rc};
use fbxcel::pull_parser::{self as fbxbin, any::AnyParser};
use gtk::{prelude::*, Window};
use crate::{
widgets::{FbxAttributeTable, FbxNodeTree, Logs},
WINDOW_TITLE_BASE,
};
pub use self::attribute::{Attribute, AttributeLoader};
mod attribute;
/// Loads the given FBX binary file.
///
/// Updates the window title, clears the log/tree/attribute widgets, then
/// parses `path` and populates them. Errors are reported through the `logs`
/// widget and stdout rather than returned; only FBX 7.4 documents are fully
/// loaded, other versions produce an "Unsupported FBX version" log entry.
pub fn load_fbx_binary<P: AsRef<Path>>(
    path: P,
    window: &Window,
    logs: &Logs,
    node_tree: &FbxNodeTree,
    node_attrs: &FbxAttributeTable,
) {
    use std::fs::File;
    use std::io::BufReader;
    let path = path.as_ref();
    println!("FBX binary path = {}", path.display());
    window.set_title(&format!("{} - {}", WINDOW_TITLE_BASE, path.display()));
    // Drop any state left over from a previously loaded file.
    logs.clear();
    node_tree.clear();
    node_attrs.clear();
    let mut file = match File::open(path) {
        Ok(file) => BufReader::new(file),
        Err(err) => {
            println!("Cannot open file {}: {}", path.display(), err);
            logs.set_store(&vec![], Some(&err));
            return;
        }
    };
    // Auto-detects the FBX version and returns a matching parser variant.
    let parser = match fbxbin::any::from_seekable_reader(&mut file) {
        Ok(v) => v,
        Err(err) => {
            println!("Cannot open file {} as FBX binary: {}", path.display(), err);
            logs.set_store(&vec![], Some(&err));
            return;
        }
    };
    node_tree.append(None, "(FBX header)", None, 0);
    println!(
        "FBX version: {}.{}",
        parser.fbx_version().major(),
        parser.fbx_version().minor()
    );
    match parser {
        AnyParser::V7400(mut parser) => {
            // Parser warnings are collected here and shown in the log view.
            let warnings = Rc::new(RefCell::new(Vec::new()));
            {
                // Weak reference so the handler does not keep the warning
                // list alive beyond this load.
                let warnings = Rc::downgrade(&warnings);
                parser.set_warning_handler(move |warning, syn_pos| {
                    if let Some(rc) = warnings.upgrade() {
                        rc.borrow_mut().push((warning, syn_pos.clone()));
                    }
                    Ok(())
                });
            }
            match load_fbx_binary_v7400(parser, node_tree, node_attrs) {
                Ok(()) => {
                    logs.set_store(warnings.borrow().iter(), None);
                }
                Err(err) => {
                    println!("Failed to parse FBX file: {}", err);
                    logs.set_store(warnings.borrow().iter(), Some(&err));
                }
            }
        }
        // Any other (non-7.4) version: report and bail out.
        parser => {
            let ver = format!(
                "{}.{}",
                parser.fbx_version().major(),
                parser.fbx_version().minor()
            );
            println!("Unsupported FBX version: {}", ver);
            let err: Box<dyn std::error::Error> =
                format!("Unsupported FBX version: {}", ver).into();
            logs.set_store(&vec![], Some(err.as_ref()));
        }
    }
}
/// Walks a parsed FBX 7.4 document and fills the node tree and the flat
/// attribute table.
///
/// `open_nodes_iter` is the stack of tree iterators for currently open
/// nodes; `attr_index` is the running offset of each node's first attribute
/// within the attribute table.
fn load_fbx_binary_v7400<R: fbxbin::ParserSource>(
    mut parser: fbxbin::v7400::Parser<R>,
    node_tree: &FbxNodeTree,
    node_attrs: &FbxAttributeTable,
) -> fbxbin::Result<()> {
    let mut open_nodes_iter = Vec::new();
    let mut attr_index = 0;
    'load_nodes: loop {
        use fbxbin::v7400::*;
        match parser.next_event()? {
            Event::StartNode(node) => {
                let name = node.name().to_owned();
                let mut attributes = node.attributes();
                // Append under the innermost open node (root when the stack
                // is empty).
                let tree_iter = node_tree.append(
                    open_nodes_iter.last(),
                    &name,
                    attributes.total_count(),
                    attr_index,
                );
                attr_index += attributes.total_count();
                open_nodes_iter.push(tree_iter);
                while let Some(attr) = attributes.load_next(AttributeLoader)? {
                    node_attrs.push_attribute(attr);
                }
            }
            Event::EndNode => {
                open_nodes_iter.pop();
            }
            Event::EndFbx(footer_res) => {
                node_tree.append(None, "(FBX footer)", None, 0);
                // Propagate a footer parse error only after the footer row
                // has been added to the tree.
                let _ = footer_res?;
                break 'load_nodes;
            }
        }
    }
    Ok(())
}
|
/// Entry point: reads the orbit map named by the first CLI argument and
/// prints both puzzle answers.
fn main() {
    let edges = orbits::read_from_input().unwrap();
    let total_orbits = orbits::count_indirect_edges(&edges);
    println!("Part 1: {}", total_orbits);
    let transfers = orbits::shortest_path_length(&edges, "YOU", "SAN").unwrap();
    println!("Part 2: {}", transfers);
}
mod orbits {
    use std::collections::BTreeSet;
    use std::env;
    use std::fs;

    /// One orbit relation from the input line `src)dst`: `dst` orbits `src`.
    #[derive(Debug)]
    pub struct Edge {
        src: String,
        dst: String,
    }

    impl Edge {
        /// Builds an edge from borrowed node names.
        pub fn new(src: &str, dst: &str) -> Self {
            Edge { src: src.to_string(), dst: dst.to_string() }
        }
    }

    /// Parses one `A)B` line into an `Edge`.
    pub fn parse_edge(s: &str) -> Result<Edge, &'static str> {
        let parts: Vec<&str> = s.split(')').collect();
        if parts.len() == 2 {
            Ok(Edge::new(parts[0], parts[1]))
        } else {
            Err("Bad input line")
        }
    }

    /// Reads the edge list from the file named by the first CLI argument.
    ///
    /// Robustness fix: a missing argument is now reported as an `Err`
    /// instead of panicking inside `unwrap`.
    pub fn read_from_input() -> Result<Vec<Edge>, &'static str> {
        let input_path = env::args().nth(1).ok_or("missing input file argument")?;
        let input_data = fs::read_to_string(input_path).map_err(|_| "error reading input file")?;
        read_from_string(&input_data)
    }

    /// Parses one edge per line; fails on the first malformed line.
    pub fn read_from_string(s: &str) -> Result<Vec<Edge>, &'static str> {
        s.lines().map(parse_edge).collect()
    }

    /// Counts all direct and indirect orbits reachable downstream of `node`.
    fn count_node_edges(edges: &[Edge], node: &str) -> u32 {
        let mut stack: Vec<&str> = vec![node];
        let mut count: u32 = 0;
        while let Some(current) = stack.pop() {
            for edge in edges {
                if edge.src == current {
                    count += 1;
                    stack.push(&edge.dst);
                }
            }
        }
        count
    }

    /// Total number of direct plus indirect orbits in the whole map
    /// (AoC 2019 day 6, part 1).
    pub fn count_indirect_edges(edges: &[Edge]) -> u32 {
        // Borrow the node names instead of cloning every String.
        let src_nodes: BTreeSet<&str> = edges.iter().map(|e| e.src.as_str()).collect();
        src_nodes.iter().map(|n| count_node_edges(edges, n)).sum()
    }

    /// All immediate neighbours (in either edge direction) of `start_nodes`.
    pub fn nodes_reachable_from(edges: &[Edge], start_nodes: BTreeSet<String>) -> BTreeSet<String> {
        let mut result: BTreeSet<String> = BTreeSet::new();
        for edge in edges {
            if start_nodes.contains(&edge.src) {
                result.insert(edge.dst.clone());
            }
            if start_nodes.contains(&edge.dst) {
                result.insert(edge.src.clone());
            }
        }
        result
    }

    /// Number of orbital transfers between the bodies `start` and `end`
    /// orbit (undirected BFS distance minus the two endpoint hops).
    ///
    /// Bug fix: the previous version had no visited set and bailed out with
    /// an arbitrary `count > 1000` cap, so it both wasted work re-visiting
    /// layers and returned `None` for any genuine path longer than 1000.
    /// BFS with a visited frontier terminates exactly when `end` is found or
    /// no unvisited nodes remain. `checked_sub` also avoids the underflow
    /// the old `count - 2` hit when the endpoints were fewer than two hops
    /// apart.
    pub fn shortest_path_length(edges: &[Edge], start: &str, end: &str) -> Option<u32> {
        let mut count: u32 = 0;
        let mut visited: BTreeSet<String> = BTreeSet::new();
        let mut frontier: BTreeSet<String> = BTreeSet::new();
        frontier.insert(start.to_string());
        while !frontier.contains(end) {
            count += 1;
            visited.extend(frontier.iter().cloned());
            let reachable = nodes_reachable_from(edges, frontier);
            // Only genuinely new nodes form the next BFS layer.
            frontier = reachable.difference(&visited).cloned().collect();
            if frontier.is_empty() {
                return None;
            }
        }
        count.checked_sub(2)
    }

    #[test]
    fn provided_count_test() {
        let input = read_from_string(
            "COM)B\nB)C\nC)D\nD)E\nE)F\nB)G\nG)H\nD)I\nE)J\nJ)K\nK)L",
        )
        .unwrap();
        assert_eq!(count_indirect_edges(&input[..]), 42);
    }

    #[test]
    fn test_shortest_path() {
        let edges = read_from_string(
            "COM)B\nB)C\nC)D\nD)E\nE)F\nB)G\nG)H\nD)I\nE)J\nJ)K\nK)L\nK)YOU\nI)SAN",
        )
        .unwrap();
        assert_eq!(shortest_path_length(&edges, "YOU", "SAN"), Some(4));
    }
}
use crate::automata::conversion::LTL2Automaton;
use crate::automata::{CoBuchiAutomaton, State};
use crate::specification::Specification;
use hyperltl::{HyperLTL, Op};
use smtlib::{IdentKind, Identifier, Instance, QuantKind, Sort, Term, TermKind};
use std::collections::HashMap;
use std::process;
/// SMT-based bounded-synthesis ("BoSy") encoding of a specification.
pub(crate) struct BoSyEncoding<'a> {
    // The LTL/HyperLTL specification being synthesized.
    specification: &'a Specification,
    // NOTE(review): `solve` builds its own local `Instance`; this field
    // appears unused in the visible code — confirm before removing.
    instance: Instance,
}
impl<'a> BoSyEncoding<'a> {
pub(crate) fn new(specification: &'a Specification) -> Self {
BoSyEncoding {
specification,
instance: Instance::new(),
}
}
pub(crate) fn solve(&mut self, bound: usize, bounds: &[usize], file_name: &str) {
let linear = HyperLTL::Appl(Op::Negation, vec![self.specification.ltl()]);
println!("build automaton");
let automaton = match LTL2Automaton::Spot.to_ucw(&linear) {
Err(err) => {
eprintln!("failed to convert LTL to automaton");
eprintln!("{}", err);
process::exit(1);
}
Ok(automaton) => automaton,
};
//println!("{:?}", automaton);
println!("create encoding");
let mut constraints = Instance::new();
// Representation of the transition
//let states = constraints.declare_sort("S", 0);
let (states, state_values) = constraints.declare_enum(
"S",
&(0..bound)
.into_iter()
.map(|i| format!("s_{}", i))
.collect::<Vec<String>>(),
);
//let initial = constraints.declare_const("s_0", &states);
let tau = constraints.declare_fun(
"tau",
&vec![
vec![&states],
vec![Sort::BOOL; self.specification.inputs.len()],
]
.into_iter()
.flatten()
.collect::<Vec<&Sort>>(),
&states,
);
let labels: Vec<Identifier> = self
.specification
.outputs
.iter()
.map(|o| {
constraints.declare_fun(
o,
&vec![
vec![&states],
vec![Sort::BOOL; self.specification.inputs.len()],
]
.into_iter()
.flatten()
.collect::<Vec<&Sort>>(),
Sort::BOOL,
)
})
.collect();
// Representation of the automaton
let (aut_state, aut_states) = constraints.declare_enum(
"Q",
&automaton
.states()
.iter()
.enumerate()
.map(|(i, s)| {
s.name
.as_ref()
.map(|s| s.to_string())
.unwrap_or_else(|| format!("q{}", i))
})
.collect::<Vec<String>>(),
);
// representation of run graph
let lambda = constraints.declare_fun("lambda_", &vec![&states, &aut_state], Sort::BOOL);
let lambda_sharp =
constraints.declare_fun("lambda_sharp", &vec![&states, &aut_state], Sort::INT);
for (state, ident) in automaton.states().iter().zip(&aut_states) {
if state.initial {
constraints.assert(Term::new_appl(
lambda.clone(),
vec![Term::new_ident(&state_values[0]), Term::new_ident(&ident)],
))
}
for (target, term) in automaton.outgoing(state) {
constraints.assert(self.build_transitions(
&states,
&lambda,
&lambda_sharp,
target,
term,
&ident,
&aut_states,
&labels,
&tau,
None,
None,
None,
&vec![],
&vec![],
))
}
}
if let Some(spec) = self.specification.hyper.as_ref() {
self.encode_hyper(
spec,
&mut constraints,
&states,
&state_values,
&labels,
&tau,
bound,
bounds,
);
}
use std::io::BufWriter;
use std::io::Write;
println!("write encoding to file");
let mut file = std::fs::File::create(file_name).expect("file creation failed");
let mut buf_writer = BufWriter::new(file);
writeln!(
buf_writer,
"(set-option :smt.ematching false)\n(set-option :smt.mbqi true)\n{}\n(check-sat)\n",
constraints
);
//println!("{}\n(check-sat)\n", constraints);
//unimplemented!();
}
    /// Builds one universally quantified constraint encoding a single
    /// automaton transition `ident --term--> target`.
    ///
    /// Plain LTL case (`path_vars == None`): quantifies one system state `s`,
    /// its successor `s_p`, and one copy of the inputs. HyperLTL case:
    /// quantifies a state/successor pair per path variable, the existential
    /// strategy states and their successors, and one input copy per
    /// universally quantified path variable; inputs of existential paths are
    /// produced by the strategy label functions in `strat_labels`.
    /// `strat_taus` pairs each strategy transition function with the number
    /// of input arguments it consumes (prefix of the global input list).
    fn build_transitions(
        &self,
        states: &Sort,
        lambda: &Identifier,
        lambda_sharp: &Identifier,
        target: &State<Term>,
        term: &Term,
        ident: &Identifier,
        aut_states: &[Identifier],
        labels: &[Identifier],
        tau: &Identifier,
        path_vars: Option<&[&String]>,
        univ_path_vars: Option<&[&String]>,
        strat_states: Option<&[Sort]>,
        strat_labels: &[(Vec<Identifier>, String, usize)],
        strat_taus: &[(Identifier, usize)],
    ) -> Term {
        //println!("target {:?}", target);
        // Quantified variables, in exactly the order the closure below will
        // receive their identifiers.
        let mut forall_quant: Vec<(String, &Sort)> = Vec::new();
        if let Some(path_vars) = path_vars {
            let univ_path_vars = univ_path_vars.unwrap();
            //println!("paths {:?} {:?}", path_vars, univ_path_vars);
            // S and S'
            forall_quant.extend(path_vars.iter().map(|pvar| (format!("s_{}", pvar), states)));
            forall_quant.extend(
                path_vars
                    .iter()
                    .map(|pvar| (format!("s_p_{}", pvar), states)),
            );
            // strat and strat'
            let strat_states = strat_states.unwrap();
            forall_quant.extend(
                strat_states
                    .iter()
                    .enumerate()
                    .map(|(i, sort)| (format!("s_exists_{}", i), sort)),
            );
            forall_quant.extend(
                strat_states
                    .iter()
                    .enumerate()
                    .map(|(i, sort)| (format!("s_exists_p_{}", i), sort)),
            );
            // One copy of every input per universal path variable.
            for path_var in univ_path_vars {
                forall_quant.extend(
                    self.specification
                        .inputs
                        .iter()
                        .map(|i| (format!("{}_{}", i, path_var), Sort::BOOL)),
                );
            }
        } else {
            // Plain LTL: a single state pair and a single input copy.
            forall_quant.push(("s".to_string(), states));
            forall_quant.push(("s_p".to_string(), states));
            forall_quant.extend(
                self.specification
                    .inputs
                    .iter()
                    .map(|i| (i.clone(), Sort::BOOL)),
            );
        }
        let t = Term::new_quant(QuantKind::Forall, &forall_quant, |identifier| {
            // Extract identifier for S, S', and inputs from closure argument
            let mut in_out_map: HashMap<String, Term> = HashMap::new();
            // `s`, `s_prime`, and `ins` are vectors corresponding to path variables
            let s: Vec<Identifier>;
            let s_prime: Vec<Identifier>;
            let mut ins: Vec<Vec<Identifier>>;
            let ins_appl: Vec<Vec<Term>>; // inputs used in the transition function of tau
            let mut strat_s: Vec<Identifier> = Vec::new();
            let mut strat_s_prime: Vec<Identifier> = Vec::new();
            let mut strat_tau_equal: Term = Term::TRUE;
            // Consumed front-to-front via split_off, mirroring the order in
            // which `forall_quant` was filled above.
            let mut identifier = Vec::from(identifier);
            if let Some(path_vars) = path_vars {
                let univ_path_vars = univ_path_vars.unwrap();
                let strat_states = strat_states.unwrap();
                let mut other: Vec<Identifier>;
                // S
                other = identifier.split_off(path_vars.len());
                s = identifier;
                identifier = other;
                // S'
                other = identifier.split_off(path_vars.len());
                s_prime = identifier;
                identifier = other;
                // S-exists
                other = identifier.split_off(strat_states.len());
                strat_s = identifier;
                identifier = other;
                // S-exists-prime
                other = identifier.split_off(strat_states.len());
                strat_s_prime = identifier;
                identifier = other;
                // inputs
                let global_ins = identifier.clone(); // stores all inputs in-order, used to apply to strat_tau and strat_label using slicing
                ins = Vec::new();
                assert!(identifier.len() >= self.specification.inputs.len());
                while identifier.len() > self.specification.inputs.len() {
                    other = identifier.split_off(self.specification.inputs.len());
                    ins.push(identifier);
                    identifier = other;
                }
                ins.push(identifier);
                assert_eq!(ins.len(), univ_path_vars.len());
                // used to build `ins_appl`
                let mut ins_appl_builder: HashMap<String, Vec<Term>> = HashMap::new();
                // universally controlled path variables
                for (path_var, inputs) in univ_path_vars.iter().zip(&ins) {
                    // build parameter
                    let path_ins_appl: Vec<Term> =
                        inputs.iter().map(|i| Term::new_ident(i)).collect();
                    ins_appl_builder.insert(path_var.to_string(), path_ins_appl);
                    // build replacer for transition function
                    for (i, ident) in self.specification.inputs.iter().zip(inputs) {
                        in_out_map.insert(
                            format!(
                                "{}",
                                HyperLTL::Prop(i.to_string(), Some(path_var.to_string()))
                            ),
                            Term::new_ident(&ident),
                        );
                    }
                }
                // existentially controlled path variables
                assert_eq!(strat_s.len(), strat_labels.len());
                for ((in_labels, path_var, slice_length), curr_strat_s) in
                    strat_labels.iter().zip(&strat_s)
                {
                    assert_eq!(in_labels.len(), self.specification.inputs.len());
                    let mut path_ins_appl: Vec<Term> = Vec::new();
                    for (i, in_label) in self.specification.inputs.iter().zip(in_labels) {
                        // Each existential input is the strategy label applied
                        // to the strategy state and the visible input prefix.
                        let mut in_label_args: Vec<Term> = vec![Term::new_ident(curr_strat_s)];
                        in_label_args.extend(
                            global_ins
                                .split_at(*slice_length)
                                .0
                                .iter()
                                .map(|ele| Term::new_ident(ele)),
                        );
                        in_out_map.insert(
                            format!("{}", HyperLTL::Prop(i.to_string(), Some(path_var.clone()))),
                            Term::new_appl(in_label.clone(), in_label_args.clone()),
                        );
                        path_ins_appl.push(Term::new_appl(in_label.clone(), in_label_args));
                    }
                    ins_appl_builder.insert(path_var.to_string(), path_ins_appl);
                }
                ins_appl = path_vars
                    .iter()
                    .map(|pvar| ins_appl_builder[*pvar].clone())
                    .collect();
                assert_eq!(path_vars.len(), ins_appl.len());
                assert_eq!(path_vars.len(), s.len());
                // define labels of transition system
                for ((path_var, inputs), s) in path_vars.iter().zip(&ins_appl).zip(&s) {
                    for (o, ident) in self.specification.outputs.iter().zip(labels) {
                        let mut label_appl: Vec<Term> = vec![Term::new_ident(&s)];
                        label_appl.extend(inputs.clone());
                        in_out_map.insert(
                            format!(
                                "{}",
                                HyperLTL::Prop(o.to_string(), Some(path_var.to_string()))
                            ),
                            Term::new_appl(ident.clone(), label_appl),
                        );
                    }
                }
                // build the transition equality for existential path strategy
                assert_eq!(strat_s.len(), strat_s_prime.len());
                assert_eq!(strat_s.len(), strat_taus.len());
                for ((current_strat_s, current_strat_s_p), (tau, slice_length)) in
                    strat_s.iter().zip(&strat_s_prime).zip(strat_taus)
                {
                    let mut tau_args: Vec<Term> = vec![Term::new_ident(current_strat_s)];
                    tau_args.extend(
                        global_ins
                            .split_at(*slice_length)
                            .0
                            .iter()
                            .map(|ele| Term::new_ident(ele)),
                    );
                    let tau_next = Term::new_appl(tau.clone(), tau_args);
                    let tau_next_equal = Term::new_appl(
                        Identifier::EQ,
                        vec![tau_next, Term::new_ident(current_strat_s_p)],
                    );
                    strat_tau_equal = strat_tau_equal & tau_next_equal;
                }
            } else {
                let mut other: Vec<Identifier>;
                // S
                other = identifier.split_off(1);
                s = identifier;
                identifier = other;
                // S'
                other = identifier.split_off(1);
                s_prime = identifier;
                identifier = other;
                // inputs
                ins_appl = vec![identifier.iter().map(|i| Term::new_ident(i)).collect()];
                ins = vec![identifier];
                for (o, ident) in self.specification.outputs.iter().zip(labels) {
                    let mut label_appl: Vec<Term> = vec![Term::new_ident(&s[0])];
                    label_appl.extend(ins[0].iter().map(|i| Term::new_ident(&i)));
                    in_out_map.insert(o.to_string(), Term::new_appl(ident.clone(), label_appl));
                }
                for (i, ident) in self.specification.inputs.iter().zip(&ins[0]) {
                    in_out_map.insert(i.to_string(), Term::new_ident(&ident));
                }
            }
            // Substitute every input/output proposition in the guard with the
            // SMT term built for it above.
            let transformed = term.convert(&|t| match &t.kind {
                TermKind::Ident(i) => match &i.kind {
                    IdentKind::Custom(decl) => {
                        if let Some(term) = in_out_map.get(decl.name()) {
                            Some(term.clone())
                        } else {
                            None
                        }
                    }
                    _ => None,
                },
                _ => None,
            });
            //println!("{}", transformed);
            assert_eq!(s.len(), s_prime.len());
            assert_eq!(s.len(), ins_appl.len());
            // Successor relation: tau(s, inputs) == s_p for every path copy.
            let mut tau_next_constraint = Term::TRUE;
            for ((current_s, current_s_p), inputs) in s.iter().zip(&s_prime).zip(&ins_appl) {
                assert_eq!(inputs.len(), self.specification.inputs.len());
                let mut tau_appl: Vec<Term> = vec![Term::new_ident(current_s)];
                tau_appl.extend(inputs.iter().map(|i| i.clone()));
                let tau_next = Term::new_appl(tau.clone(), tau_appl);
                let tau_next_equal =
                    Term::new_appl(Identifier::EQ, vec![tau_next, Term::new_ident(current_s_p)]);
                tau_next_constraint = tau_next_constraint & tau_next_equal;
            }
            // lambda(s..., q, strat_s...) — run-graph membership premise.
            let mut lambda_appl: Vec<Term> = s.iter().map(|s| Term::new_ident(&s)).collect();
            lambda_appl.push(Term::new_ident(ident));
            lambda_appl.extend(strat_s.iter().map(|s| Term::new_ident(&s)));
            let lambda_current = Term::new_appl(lambda.clone(), lambda_appl);
            Term::new_appl(
                Identifier::IMPL,
                vec![
                    lambda_current & transformed & tau_next_constraint & strat_tau_equal,
                    self.next_state(
                        lambda,
                        lambda_sharp,
                        &s,
                        &s_prime,
                        &strat_s,
                        &strat_s_prime,
                        ident,
                        &aut_states[target.id],
                        target.rejecting,
                    ),
                ],
            )
        });
        t
    }
fn next_state(
&self,
lambda: &Identifier,
lambda_sharp: &Identifier,
s: &[Identifier],
s_prime: &[Identifier],
strat_s: &[Identifier],
strat_s_prime: &[Identifier],
source: &Identifier,
target: &Identifier,
rejecting: bool,
) -> Term {
let mut lambda_appl: Vec<Term> = s_prime.iter().map(|s| Term::new_ident(&s)).collect();
lambda_appl.push(Term::new_ident(target));
lambda_appl.extend(strat_s_prime.iter().map(|s| Term::new_ident(&s)));
let lambda_next_appl = lambda_appl.clone();
let l = Term::new_appl(lambda.clone(), lambda_appl);
let fun = if rejecting {
Identifier::LT
} else {
Identifier::LE
};
let mut lambda_curr_appl: Vec<Term> = s.iter().map(|s| Term::new_ident(&s)).collect();
lambda_curr_appl.push(Term::new_ident(source));
lambda_curr_appl.extend(strat_s.iter().map(|s| Term::new_ident(&s)));
let greater = Term::new_appl(
fun,
vec![
Term::new_appl(lambda_sharp.clone(), lambda_curr_appl),
Term::new_appl(lambda_sharp.clone(), lambda_next_appl),
],
);
l & greater
}
fn encode_hyper(
&self,
spec: &[HyperLTL],
constraints: &mut Instance,
states: &Sort,
state_values: &[Identifier],
labels: &[Identifier],
tau: &Identifier,
bound: usize,
bounds: &[usize],
) {
assert_eq!(
spec.len(),
bounds.len(),
"The bounds have to match the number of HyperLTL specifications"
);
for (i, hyper) in spec.iter().enumerate() {
//println!("{}", hyper);
//println!("{}", hyper.get_body());
let negated_hyper = HyperLTL::Appl(Op::Negation, vec![hyper.get_body().clone()]);
let automaton = match LTL2Automaton::Spot.to_ucw(&negated_hyper) {
Err(err) => {
eprintln!("failed to convert LTL to automaton");
eprintln!("{}", err);
process::exit(1);
}
Ok(automaton) => automaton,
};
// Representation of the automaton
let (aut_state, aut_states) = constraints.declare_enum(
&format!("Q_{}", i),
&automaton
.states()
.iter()
.enumerate()
.map(|(j, s)| {
s.name
.as_ref()
.map(|s| format!("q_{}_{}", i, s))
.unwrap_or_else(|| format!("q_{}_j", j))
})
.collect::<Vec<String>>(),
);
let quantifier = hyper.get_quantifier();
let path_vars: Vec<&String> = quantifier
.iter()
.map(|(_, params)| params)
.flatten()
.collect();
let num_quant = path_vars.len();
/*println!(
"num-quant {}, paths-vars {:?}, quant {:?}",
num_quant, path_vars, quantifier
);*/
// build the strategy for existential path quantifier
let mut universal_pvars: Vec<&String> = Vec::new(); // stores the dependent path quantifier, i.e., earlier bound
let mut strat_sorts: Vec<Sort> = Vec::new(); // stores the states of the strategies
let mut strat_initial: Vec<Identifier> = Vec::new(); // stores the initial states
let mut strat_labels: Vec<(Vec<Identifier>, String, usize)> = Vec::new(); // stores the labels representing inputs, second parameter is path_variable, third parameter number of univ path vars * inputs
let mut strat_taus: Vec<(Identifier, usize)> = Vec::new(); // stores the transition functions, second parameter number of univ path vars * inputs
for (j, (kind, param)) in quantifier.iter().enumerate() {
match kind {
hyperltl::QuantKind::Forall => universal_pvars.extend(param),
hyperltl::QuantKind::Exists => {
let (strat_sort, strat_states) = constraints.declare_enum(
&format!("S_{}_{}", i, j),
&(0..bounds[i])
.into_iter()
.map(|k| format!("s_{}_{}_{}", i, j, k))
.collect::<Vec<String>>(),
);
let tau = constraints.declare_fun(
&format!("tau_{}_{}", i, j),
&vec![
vec![&strat_sort],
vec![
Sort::BOOL;
self.specification.inputs.len() * universal_pvars.len()
],
]
.into_iter()
.flatten()
.collect::<Vec<&Sort>>(),
&strat_sort,
);
strat_taus
.push((tau, self.specification.inputs.len() * universal_pvars.len()));
for pvar in param {
let labels: Vec<Identifier> = self
.specification
.inputs
.iter()
.map(|inp| {
constraints.declare_fun(
&format!("out_{}_{}_{}_{}_", i, j, inp, pvar),
&vec![
vec![&strat_sort],
vec![
Sort::BOOL;
self.specification.inputs.len()
* universal_pvars.len()
],
]
.into_iter()
.flatten()
.collect::<Vec<&Sort>>(),
Sort::BOOL,
)
})
.collect();
strat_labels.push((
labels,
pvar.clone(),
self.specification.inputs.len() * universal_pvars.len(),
));
}
strat_sorts.push(strat_sort);
strat_initial.push(strat_states[0].clone());
}
}
}
let mut lambda_args: Vec<&Sort> = vec![&states; num_quant];
lambda_args.push(&aut_state);
lambda_args.extend(&strat_sorts);
let lambda =
constraints.declare_fun(&format!("lambda_{}", i), &lambda_args, Sort::BOOL);
let lambda_sharp =
constraints.declare_fun(&format!("lambda_{}_sharp", i), &lambda_args, Sort::INT);
for (state, ident) in automaton.states().iter().zip(&aut_states) {
if state.initial {
let mut lambda_init_args: Vec<Term> =
vec![Term::new_ident(&state_values[0]); num_quant];
lambda_init_args.push(Term::new_ident(&ident));
lambda_init_args.extend(strat_initial.iter().map(|s| Term::new_ident(s)));
constraints.assert(Term::new_appl(lambda.clone(), lambda_init_args))
}
for (target, term) in automaton.outgoing(state) {
constraints.assert(self.build_transitions(
&states,
&lambda,
&lambda_sharp,
target,
term,
&ident,
&aut_states,
&labels,
&tau,
Some(&path_vars),
Some(&universal_pvars),
Some(&strat_sorts),
&strat_labels,
&strat_taus,
))
}
}
}
}
}
|
use secp256k1::{Message, SecretKey, PublicKey, sign, Signature as Sig, RecoveryId, recover};
use hex::{FromHex, encode};
use k256::{
ecdsa::{
recoverable,
signature::{Signer, DigestSigner},
SigningKey, VerifyingKey,
Signature,
},
Secp256k1,
elliptic_curve::{FieldBytes, sec1::ToEncodedPoint},
};
use sha3::{Digest, Keccak256};
/// Signs the same Keccak-256 digest with both `libsecp256k1` and `k256` and
/// checks that the resulting (r, s) scalars agree.
#[test]
fn sig_is_not_the_same() {
    let mut priv_bytes = [0u8; 32];
    // Bug fix: `decode_to_slice` returns a Result that was silently ignored;
    // a malformed literal would have left the buffer as all zeroes.
    hex::decode_to_slice(
        "e6c09c13a38db68df81c12e599bc2a1b3cbf8f1c225b2c816fbb75bb5d30246a",
        &mut priv_bytes as &mut [u8],
    )
    .unwrap();
    let mut data = [0u8; 32];
    hex::decode_to_slice(
        "bcf48d55045cc3f9add32a7d40e74758e50fbe01e75033bb257dec20ed2e6c27",
        &mut data as &mut [u8],
    )
    .unwrap();
    let mut digest = Keccak256::new();
    digest.update(data);
    let secret_key = SecretKey::parse(&priv_bytes).unwrap();
    let message = Message::parse(&digest.clone().finalize().into());
    // The recovery id is unused in this comparison.
    let (sig1, _id) = sign(&message, &secret_key);
    let r1 = hex::encode(&sig1.r.b32()[..]);
    let s1 = hex::encode(&sig1.s.b32()[..]);
    println!("r1: {:?}", r1);
    println!("s1: {:?}", s1);
    let signing_key = SigningKey::from_bytes(&priv_bytes[..]).unwrap();
    let sig2: recoverable::Signature = signing_key.sign_digest(digest);
    let r2: FieldBytes<Secp256k1> = sig2.r().into();
    let s2: FieldBytes<Secp256k1> = sig2.s().into();
    println!("r2 {:?}", hex::encode(r2));
    println!("s2 {:?}", hex::encode(s2));
    assert_eq!(r1, hex::encode(r2));
    assert_eq!(s1, hex::encode(s2));
}
/// Recovers the public key from a `libsecp256k1` signature with both
/// libraries and checks the recovered keys match.
#[test]
fn check_recovery() {
    let mut priv_bytes = [0u8; 32];
    // Bug fix: the Result of `decode_to_slice` was silently ignored here too.
    hex::decode_to_slice(
        "e6c09c13a38db68df81c12e599bc2a1b3cbf8f1c225b2c816fbb75bb5d30246a",
        &mut priv_bytes as &mut [u8],
    )
    .unwrap();
    let mut data = [0u8; 32];
    hex::decode_to_slice(
        "bcf48d55045cc3f9add32a7d40e74758e50fbe01e75033bb257dec20ed2e6c27",
        &mut data as &mut [u8],
    )
    .unwrap();
    let mut digest = Keccak256::new();
    digest.update(data);
    let secret_key = SecretKey::parse(&priv_bytes).unwrap();
    let message = Message::parse(&digest.clone().finalize().into());
    let (sig1, id1) = sign(&message, &secret_key);
    let pub_key = PublicKey::from_secret_key(&secret_key);
    println!("pubKey orig {:?}", hex::encode(&pub_key.serialize()[..]));
    let recovered_pub_key = recover(&message, &sig1, &id1).unwrap();
    println!("pubKey recovered {:?}", hex::encode(&recovered_pub_key.serialize()[..]));
    // Rebuild the same signature as a k256 recoverable signature.
    let sig2 = Signature::from_scalars(sig1.r.b32(), sig1.s.b32()).unwrap();
    let id2 = recoverable::Id::new(id1.serialize()).unwrap();
    let sig2rec = recoverable::Signature::new(&sig2, id2).unwrap();
    let rec_pub_key = sig2rec.recover_verify_key_from_digest(digest).unwrap();
    println!("pubkey2 recovered {:?}", hex::encode(&rec_pub_key.to_encoded_point(false).as_bytes()));
    // Both libraries serialize to the uncompressed SEC1 encoding here.
    assert_eq!(&recovered_pub_key.serialize()[..], rec_pub_key.to_encoded_point(false).as_bytes());
}
|
use actix::prelude::*;
/// Fire-and-forget actor message (`rtype = "()"`, no reply value) carrying a
/// consumer info payload.
#[derive(Message)]
#[rtype(result = "()")]
pub struct Info {
    pub info:
        crate::participants::consumer_folder::consumer_structs::Info,
}
/// Fire-and-forget actor message reporting the outcome of a purchase.
/// NOTE(review): field semantics inferred from names only — confirm with the
/// sending actor.
#[derive(Message)]
#[rtype(result = "()")]
pub struct PurchaseResult {
    pub expense: f64,
    pub balance: f64,
    pub purchased: f64,
}
/// Fire-and-forget actor message carrying (name, producer participant)
/// pairs for a turn.
#[derive(Message)]
#[rtype(result = "()")]
pub struct TurnList {
    pub list: Vec<(String, crate::participants::producer_folder::producer_structs::Participant)>,
}
|
//! The compile module is responsible for taking a `Pattern` and compiling it into a `StateGraph`
//! (as defined in `state`). It is recommended to optimize Patterns before compiling them.
//!
//! The output of `to_state()`, which is implemented for Pattern and some subtypes, is a
//! `StateGraph`, which itself is a vector of `State` objects, each of which in turn is a state of
//! the generated state machine. The states reference each other by their indices in the
//! `StateGraph`.
//!
//! `start_compile()` is the entry point and public API of this module.
use crate::matcher::{self, wrap_matcher};
use crate::repr::{AnchorLocation, Pattern, Repetition};
use crate::state::{State, StateGraph, StateRef, Submatch};
/// Types implementing Compile can be compiled into a state graph.
pub trait Compile {
    /// to_state returns the start node of a subgraph, and a list of pointers that need to be
    /// connected to the following subgraph. The list can contain the first tuple element.
    /// All returned references are indices into `sg`, which this method
    /// appends the newly created states to.
    fn to_state(&self, sg: &mut StateGraph) -> (StateRef, Vec<StateRef>);
}
/// start_compile takes a parsed regex as RETree and returns the first node of a directed graph
/// representing the regex.
pub fn start_compile(re: &Pattern) -> StateGraph {
    let mut graph = Vec::with_capacity(64);

    // Entry node: opens the implicit whole-pattern submatch group. It is the
    // first element of the graph, so its index is 0.
    let mut entry_state = State::default();
    entry_state.sub = Some(Submatch::Start);
    let entry = 0;
    graph.push(entry_state);

    // Compile the pattern itself and hook it up behind the entry node.
    let (first, loose_ends) = re.to_state(&mut graph);
    graph[entry].out = Some(first);

    // Exit node: closes the implicit whole-pattern submatch group.
    let mut exit_state = State::default();
    exit_state.sub = Some(Submatch::End);
    let exit = graph.len();
    graph.push(exit_state);

    // Route every dangling out-pointer of the compiled subgraph to the exit.
    for dangling in loose_ends {
        graph[dangling].patch(exit);
    }
    graph
}
impl Compile for Pattern {
    /// Compiles this pattern into states appended to `sg`.
    ///
    /// Returns the entry state of the generated subgraph and the loose ends —
    /// states whose outgoing edges must be patched to whatever follows.
    fn to_state(&self, sg: &mut StateGraph) -> (StateRef, Vec<StateRef>) {
        match *self {
            Pattern::Concat(ref ps) => {
                if ps.is_empty() {
                    panic!("invalid Concat len: 0")
                } else if ps.len() == 1 {
                    return ps[0].to_state(sg);
                }
                // Compile the first sub-pattern, then chain each following
                // subgraph onto the loose ends of the previous one.
                let (init, mut lastp) = ps[0].to_state(sg);
                for p in ps.iter().skip(1) {
                    let (next, nextp) = p.to_state(sg);
                    // Connect all loose ends with the new node.
                    for l in lastp {
                        sg[l].patch(next);
                    }
                    // Remember the loose ends of this one.
                    lastp = nextp;
                }
                (init, lastp)
            }
            Pattern::Any => {
                // Single leaf state that matches any character.
                let s = State {
                    out: None,
                    out1: None,
                    matcher: wrap_matcher(Box::new(matcher::AnyMatcher)),
                    sub: None,
                };
                let sref = sg.len();
                sg.push(s);
                (sref, vec![sref])
            }
            Pattern::Char(c) => {
                let s = State {
                    out: None,
                    out1: None,
                    matcher: wrap_matcher(Box::new(matcher::CharMatcher(c))),
                    sub: None,
                };
                let sref = sg.len();
                sg.push(s);
                (sref, vec![sref])
            }
            Pattern::Str(ref s) => {
                let s = State {
                    out: None,
                    out1: None,
                    matcher: wrap_matcher(Box::new(matcher::StringMatcher::new(s))),
                    sub: None,
                };
                let sref = sg.len();
                sg.push(s);
                (sref, vec![sref])
            }
            Pattern::CharRange(from, to) => {
                let s = State {
                    out: None,
                    out1: None,
                    matcher: wrap_matcher(Box::new(matcher::CharRangeMatcher(from, to))),
                    sub: None,
                };
                let sref = sg.len();
                sg.push(s);
                (sref, vec![sref])
            }
            Pattern::CharSet(ref set) => {
                let s = State {
                    out: None,
                    out1: None,
                    matcher: wrap_matcher(Box::new(matcher::CharSetMatcher(set.clone()))),
                    sub: None,
                };
                let sref = sg.len();
                sg.push(s);
                (sref, vec![sref])
            }
            // `r` already dereferences to a slice: no need for the former
            // double reference `&r`, nor for allocating an empty Vec
            // (was `&vec![]`) — an empty slice literal suffices.
            Pattern::Alternate(ref r) => alternate(sg, r, &[]),
            Pattern::Submatch(ref p) => {
                // Wrap the inner subgraph between Submatch::Start/End markers.
                let (s, sp) = p.to_state(sg);
                let before = State {
                    out: Some(s),
                    out1: None,
                    matcher: None,
                    sub: Some(Submatch::Start),
                };
                let after = State {
                    out: None,
                    out1: None,
                    matcher: None,
                    sub: Some(Submatch::End),
                };
                let beforeref = sg.len();
                sg.push(before);
                let afterref = sg.len();
                sg.push(after);
                for p in sp {
                    sg[p].patch(afterref);
                }
                (beforeref, vec![afterref])
            }
            Pattern::Repeated(ref p) => p.to_state(sg),
            Pattern::Anchor(ref loc) => {
                // Select the matcher as an expression instead of mutating a
                // default value.
                let m = match loc {
                    &AnchorLocation::End => matcher::AnchorMatcher::End,
                    _ => matcher::AnchorMatcher::Begin,
                };
                let s = State {
                    out: None,
                    out1: None,
                    matcher: wrap_matcher(Box::new(m)),
                    sub: None,
                };
                let sref = sg.len();
                sg.push(s);
                (sref, vec![sref])
            }
        }
    }
}
/// alternate compiles a list of patterns into a graph that accepts any one of the patterns.
///
/// `to_patch` lists states whose dangling edges should be connected to the
/// entry of the generated subgraph. `ps` must be non-empty. Returns the entry
/// state and the loose ends of the alternation.
fn alternate(
    sg: &mut StateGraph,
    ps: &[Pattern],
    to_patch: &[StateRef],
) -> (StateRef, Vec<StateRef>) {
    if ps.len() == 1 {
        let (s, sp) = ps[0].to_state(sg);
        for e in to_patch {
            sg[*e].patch(s);
        }
        (s, sp)
    } else {
        // Split the alternatives in half and join the two halves with a fresh
        // epsilon state, building a binary tree of alternations.
        let mut init = State {
            out: None,
            out1: None,
            matcher: None,
            sub: None,
        };
        let mid = ps.len() / 2;
        // Empty slices instead of allocating fresh Vecs (was `&vec![]`).
        let (left, mut leftpatch) = alternate(sg, &ps[..mid], &[]);
        let (right, mut rightpatch) = alternate(sg, &ps[mid..], &[]);
        init.patch(left);
        init.patch(right);
        leftpatch.append(&mut rightpatch);
        let initref = sg.len();
        sg.push(init);
        // Connect incoming dangling edges to the new entry state. The
        // original dropped `to_patch` on this branch; all current callers
        // pass an empty slice, so this only fixes the latent case.
        for e in to_patch {
            sg[*e].patch(initref);
        }
        (initref, leftpatch)
    }
}
impl Compile for Repetition {
    /// Compiles this repetition into states appended to `sg`; see
    /// `Compile::to_state` for the return contract.
    fn to_state(&self, sg: &mut StateGraph) -> (StateRef, Vec<StateRef>) {
        match *self {
            Repetition::ZeroOrOnce(ref p) => {
                // before --out--> p --> after, with a bypass edge
                // before --out1--> after for the "zero" case.
                let (s, to_patch) = p.to_state(sg);
                let after = State {
                    out: None,
                    out1: None,
                    matcher: None,
                    sub: None,
                };
                let afterref = sg.len();
                sg.push(after);
                let before = State {
                    out: Some(s),
                    out1: Some(afterref),
                    matcher: None,
                    sub: None,
                };
                let beforeref = sg.len();
                sg.push(before);
                for p in to_patch {
                    sg[p].patch(afterref);
                }
                (beforeref, vec![afterref])
            }
            Repetition::ZeroOrMore(ref p) => {
                // `StateRef` is a plain index (it indexes `sg` directly), so
                // the former `s.clone()` calls were redundant copies.
                let (s, to_patch) = p.to_state(sg);
                let before = State {
                    out: Some(s),
                    out1: None,
                    matcher: None,
                    sub: None,
                };
                let beforeref = sg.len();
                sg.push(before);
                // `after` loops back into `p` for further repetitions.
                let after = State {
                    out: Some(s),
                    out1: None,
                    matcher: None,
                    sub: None,
                };
                let afterref = sg.len();
                sg.push(after);
                // Bypass edge: zero occurrences are allowed.
                sg[beforeref].patch(afterref);
                for p in to_patch {
                    sg[p].patch(afterref);
                }
                (beforeref, vec![afterref])
            }
            Repetition::OnceOrMore(ref p) => {
                // p --> after, where after loops back into p; no bypass edge,
                // so at least one occurrence is required.
                let (s, to_patch) = p.to_state(sg);
                let after = State {
                    out: Some(s),
                    out1: None,
                    matcher: None,
                    sub: None,
                };
                let afterref = sg.len();
                sg.push(after);
                for p in to_patch {
                    sg[p].patch(afterref);
                }
                (s, vec![afterref])
            }
            // Specific is 'min' concatenations of a simple state and 'max - min' concatenations of
            // a ZeroOrOnce state.
            Repetition::Specific(ref p, min, max_) => {
                let cap = max_.unwrap_or(min) as usize;
                assert!(cap >= min as usize);
                let mut repetition = Vec::with_capacity(cap);
                // Append the minimum required number of occurrences.
                for _ in 0..min {
                    repetition.push(p.clone());
                }
                // If an upper limit is set, append max-min repetitions of ZeroOrOnce states for
                // the repeated pattern.
                if let Some(max) = max_ {
                    for _ in 0..(max - min) {
                        repetition.push(Pattern::Repeated(Box::new(Repetition::ZeroOrOnce(
                            p.clone(),
                        ))));
                    }
                } else {
                    // If no upper limit is set, append a ZeroOrMore state for the repeated
                    // pattern.
                    repetition.push(Pattern::Repeated(Box::new(Repetition::ZeroOrMore(
                        p.clone(),
                    ))));
                }
                Pattern::Concat(repetition).to_state(sg)
            }
        }
    }
}
|
use std::str;
use std::net;
use std::thread;
/// UDP echo-style server: binds to 127.0.0.1:3999 and answers each datagram
/// from a spawned worker thread.
pub fn main() {
    let udp_socket = net::UdpSocket::bind("127.0.0.1:3999")
        .expect("Unable to bind to port");
    let mut buff = [0; 1024];
    loop {
        // Clone the socket so the worker thread can own its own handle for
        // sending the reply.
        let udp_socket_new = udp_socket.try_clone()
            .expect("Unable to clone socket");
        match udp_socket_new.recv_from(&mut buff) {
            Ok((num_bytes, src_addr)) => {
                // `buff` is a Copy array, so the closure captures its own copy.
                thread::spawn(move || {
                    let send_buff = &buff[..num_bytes];
                    // Lossy conversion: the previous `str::from_utf8(...).unwrap()`
                    // panicked the worker on invalid UTF-8 from the network.
                    let received = String::from_utf8_lossy(send_buff);
                    println!("Received from client: {}", received);
                    let response_string = format!("Received this: {}", received);
                    // `as_bytes()` already yields `&[u8]`; no extra borrow needed.
                    udp_socket_new.send_to(response_string.as_bytes(), &src_addr)
                        .expect("Error in sending datagram")
                });
            }
            Err(e) => {
                println!("Error in receiving datagrams over UDP: {}", e);
            }
        }
    }
}
/*
Copyright (c) 2023 Uber Technologies, Inc.
<p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a copy of the License at
<p>http://www.apache.org/licenses/LICENSE-2.0
<p>Unless required by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing permissions and
limitations under the License.
*/
#![allow(deprecated)] // This prevents cargo clippy throwing warning for deprecated use.
use models::{
edit::Edit, filter::Filter, matches::Match, outgoing_edges::OutgoingEdges,
piranha_arguments::PiranhaArguments, piranha_output::PiranhaOutputSummary, rule::Rule,
rule_graph::RuleGraph, source_code_unit::SourceCodeUnit,
};
#[macro_use]
extern crate lazy_static;
pub mod df;
pub mod models;
#[cfg(test)]
mod tests;
pub mod utilities;
use std::{collections::HashMap, fs::File, io::Write, path::PathBuf};
use itertools::Itertools;
use log::{debug, info};
use crate::models::rule_store::RuleStore;
use pyo3::prelude::{pyfunction, pymodule, wrap_pyfunction, PyModule, PyResult, Python};
use tempdir::TempDir;
/// PyO3 module entry point: registers Piranha's `execute_piranha` function and
/// the Python-facing argument/result classes on the `polyglot_piranha` module.
#[pymodule]
fn polyglot_piranha(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
    // Forward Rust `log` records to Python's `logging` framework.
    pyo3_log::init();
    m.add_function(wrap_pyfunction!(execute_piranha, m)?)?;
    m.add_class::<PiranhaArguments>()?;
    m.add_class::<PiranhaOutputSummary>()?;
    m.add_class::<Edit>()?;
    m.add_class::<Match>()?;
    m.add_class::<RuleGraph>()?;
    m.add_class::<Rule>()?;
    m.add_class::<OutgoingEdges>()?;
    m.add_class::<Filter>()?;
    Ok(())
}
/// Executes piranha for the given `piranha_arguments`.
///
/// # Arguments:
/// * piranha_arguments: Piranha Arguments
///
/// Returns Piranha Output Summary for each file touched or analyzed by Piranha.
/// For each file, it reports its content after the rewrite, the list of matches and the list of rewrites.
#[pyfunction]
pub fn execute_piranha(piranha_arguments: &PiranhaArguments) -> Vec<PiranhaOutputSummary> {
    info!("Executing Polyglot Piranha !!!");

    // Run the cleanup over the configured codebase / code snippet.
    let mut piranha = Piranha::new(piranha_arguments);
    piranha.perform_cleanup();

    // Build one summary per file that was matched or rewritten.
    let updated_files = piranha.get_updated_files();
    let summaries: Vec<PiranhaOutputSummary> = updated_files
        .iter()
        .map(PiranhaOutputSummary::new)
        .collect();

    log_piranha_output_summaries(&summaries);
    summaries
}
/// Logs per-file and aggregate counts of matches and rewrites.
///
/// Takes `&[PiranhaOutputSummary]` rather than `&Vec<...>` (clippy
/// `ptr_arg`); existing callers passing `&Vec` still work via deref coercion.
fn log_piranha_output_summaries(summaries: &[PiranhaOutputSummary]) {
    let mut total_number_of_matches: usize = 0;
    let mut total_number_of_rewrites: usize = 0;
    for summary in summaries {
        // No need to borrow the `usize` lengths (former needless `&`).
        let number_of_rewrites = summary.rewrites().len();
        let number_of_matches = summary.matches().len();
        info!("File : {:?}", &summary.path());
        info!(" # Rewrites : {}", number_of_rewrites);
        info!(" # Matches : {}", number_of_matches);
        total_number_of_rewrites += number_of_rewrites;
        total_number_of_matches += number_of_matches;
    }
    info!("Total files affected/matched {}", summaries.len());
    info!("Total number of matches {}", total_number_of_matches);
    info!("Total number of rewrites {}", total_number_of_rewrites);
}
// Maintains the state of Piranha and the updated content of files in the source code.
struct Piranha {
    // Maintains Piranha's state: the rule graph and the set of global rules.
    rule_store: RuleStore,
    // Files updated by Piranha, keyed by path; doubles as a cache so each
    // file is parsed into a `SourceCodeUnit` at most once per run.
    relevant_files: HashMap<PathBuf, SourceCodeUnit>,
    // Piranha Arguments (owned clone of the caller's arguments).
    piranha_arguments: PiranhaArguments,
}
impl Piranha {
    /// Returns the source code units that were actually matched or rewritten.
    fn get_updated_files(&self) -> Vec<SourceCodeUnit> {
        self
            .relevant_files
            .values()
            .filter(|r| !r.matches().is_empty() || !r.rewrites().is_empty())
            .cloned()
            .collect_vec()
    }

    /// Performs cleanup related to stale flags
    ///
    /// Repeatedly scans the relevant files and applies rules; a full rescan is
    /// triggered whenever applying rules discovers a new `global` rule.
    fn perform_cleanup(&mut self) {
        // Setup the parser for the specific language
        let piranha_args = &self.piranha_arguments;
        let mut parser = piranha_args.language().parser();
        let mut paths_to_codebase = self.piranha_arguments.paths_to_codebase().clone();

        // When a code snippet is supplied, write it into a temp dir and scan
        // that directory instead of the configured codebase paths.
        let temp_dir = if !self.piranha_arguments.code_snippet().is_empty() {
            let td = self.write_code_snippet_to_temp();
            paths_to_codebase = vec![td.path().to_str().unwrap_or_default().to_string()];
            Some(td)
        } else {
            None
        };
        let mut current_global_substitutions = piranha_args.input_substitutions();
        // Keep looping until new `global` rules are added.
        loop {
            let current_rules = self.rule_store.global_rules().clone();
            debug!("\n # Global rules {}", current_rules.len());
            // Iterate over each file containing the usage of the feature flag API
            for (path, content) in self.rule_store.get_relevant_files(
                &paths_to_codebase,
                piranha_args.include(),
                piranha_args.exclude(),
            ) {
                // Get the `SourceCodeUnit` for the file `path` from the cache `relevant_files`.
                // In case of miss, lazily insert a new `SourceCodeUnit`.
                // (The two `&current_...` arguments below were mojibake —
                // `¤t_...` — in the original file; repaired.)
                let source_code_unit = self
                    .relevant_files
                    .entry(path.to_path_buf())
                    .or_insert_with(|| {
                        SourceCodeUnit::new(
                            &mut parser,
                            content,
                            &current_global_substitutions,
                            path.as_path(),
                            piranha_args,
                        )
                    });
                // Apply the rules in this `SourceCodeUnit`
                source_code_unit.apply_rules(&mut self.rule_store, &current_rules, &mut parser, None);
                // Add the substitutions for the global tags to the `current_global_substitutions`
                current_global_substitutions.extend(source_code_unit.global_substitutions());
                // Break when a new `global` rule is added
                if self.rule_store.global_rules().len() > current_rules.len() {
                    debug!("Found a new global rule. Will start scanning all the files again.");
                    break;
                }
            }
            // If no new `global_rules` were added, break.
            if self.rule_store.global_rules().len() == current_rules.len() {
                break;
            }
        }
        // Delete the temp dir inside which the input code snippet was copied
        if let Some(t) = temp_dir {
            _ = t.close();
        } else {
            // Only persist rewrites to disk when operating on a real codebase
            // (snippet mode keeps results in memory for the summaries).
            let source_code_units = self.get_updated_files();
            for scu in source_code_units.iter() {
                scu.persist();
            }
        }
    }

    /// Instantiate Flag-cleaner
    fn new(piranha_arguments: &PiranhaArguments) -> Self {
        let graph_rule_store = RuleStore::new(piranha_arguments);
        Self {
            rule_store: graph_rule_store,
            relevant_files: HashMap::new(),
            piranha_arguments: piranha_arguments.clone(),
        }
    }

    /// Write the input code snippet into a temp directory.
    /// Returns: A temporary directory containing the created input code snippet as a file
    /// This function panics if it finds that neither `code_snippet` nor `path_to_configuration` are provided
    fn write_code_snippet_to_temp(&self) -> TempDir {
        let temp_dir = TempDir::new_in(".", "tmp").unwrap();
        let temp_dir_path = temp_dir.path();
        let sample_file = temp_dir_path.join(format!(
            "sample.{}",
            self.piranha_arguments.language().extension()
        ));
        let mut file = File::create(sample_file).unwrap();
        file
            .write_all(self.piranha_arguments.code_snippet().as_bytes())
            .unwrap();
        temp_dir
    }
}
|
use util::*;
/// Advent of Code day 3: count trees (`#`) hit on each slope of a wrapping
/// grid, then print the product of the five counts.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let timer = Timer::new();
    let grid = input::matrix::<char>(&std::env::args().nth(1).unwrap(), "");

    // Trees hit descending at (right, down): row i*down pairs with column
    // i*right, wrapping horizontally via modulo.
    let trees_hit = |right: usize, down: usize| {
        (0..grid.len())
            .step_by(down)
            .zip((0..).step_by(right))
            .filter(|&(row, col)| grid[row][col % grid[0].len()] == '#')
            .count()
    };

    let answer: usize = [(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)]
        .iter()
        .map(|&(right, down)| trees_hit(right, down))
        .product();

    timer.print();
    println!("{}", answer);
    Ok(())
}
|
use clap::{App, AppSettings, Arg, SubCommand};
use std::fs::{self, OpenOptions};
use std::io::{self, Read, Write};
use std::process;
use cbm::disk::directory::FileType;
use cbm::disk::file::{File, FileOps, Scheme};
use cbm::disk::geos::GEOSFile;
use cbm::disk::{self, D64, D71, D81, DiskType};
use cbm::Petscii;
// Possible exit codes (the underscore prefix marks _EXIT_SUCCESS as
// intentionally unused: success currently exits implicitly with 0).
const _EXIT_SUCCESS: i32 = 0;
const EXIT_FAILURE: i32 = 1;
/// If a dash is specified for a filename, this indicates that the user wants
/// to read from standard input or write to standard output.
const STDINOUT_PSEUDOFILENAME: &str = "-";
/// CLI entry point: builds the clap argument tree, dispatches the chosen
/// subcommand to its `cmd_*` handler, and reports any error on stderr.
fn main() {
    // Parse command-line arguments
    let app = App::new("Commodore Disk Image Utility")
        .version(clap::crate_version!())
        .about("Read, write, and understand D64/D71/D81 disk images.")
        .setting(AppSettings::SubcommandRequiredElseHelp)
        // The disk image path is a required positional common to all subcommands.
        .arg(Arg::with_name("diskimage").required(true))
        .subcommand(
            SubCommand::with_name("bam")
                .about("Block Availability Map (BAM) commands")
                .setting(AppSettings::SubcommandRequiredElseHelp)
                .subcommand(
                    SubCommand::with_name("show").about("Show the Block Availability Map (BAM)"),
                )
                .subcommand(
                    SubCommand::with_name("allocate")
                        .about("Mark block(s) as allocated in the BAM.")
                        // track/sector accept a number or the literal "all";
                        // validated up-front so the parsers below can unwrap.
                        .arg(
                            Arg::with_name("track")
                                .validator(optional_track_validator)
                                .required(true),
                        )
                        .arg(
                            Arg::with_name("sector")
                                .validator(optional_sector_validator)
                                .required(true),
                        ),
                )
                .subcommand(
                    SubCommand::with_name("free")
                        .about("Mark block(s) as free in the BAM.")
                        .arg(
                            Arg::with_name("track")
                                .validator(optional_track_validator)
                                .required(true),
                        )
                        .arg(
                            Arg::with_name("sector")
                                .validator(optional_sector_validator)
                                .required(true),
                        ),
                ),
        )
        .subcommand(
            SubCommand::with_name("read")
                .about("Read a file from a disk image.")
                .arg(Arg::with_name("source_filename").required(true))
                .arg(Arg::with_name("destination_filename").required(false)),
        )
        .subcommand(
            SubCommand::with_name("write")
                .about("Write a file to a disk image.")
                .arg(
                    Arg::with_name("type")
                        .short("t")
                        .long("type")
                        .takes_value(true)
                        .possible_values(&["prg", "seq", "usr"])
                        .default_value("seq")
                        .help("CBM file type"),
                )
                .arg(Arg::with_name("source_filename").required(true))
                .arg(Arg::with_name("destination_filename").required(false)),
        )
        .subcommand(
            SubCommand::with_name("append")
                .about("Append to a file.")
                .arg(Arg::with_name("source_filename").required(true))
                .arg(Arg::with_name("destination_filename").required(false)),
        )
        .subcommand(
            SubCommand::with_name("geosread")
                .about("Read a file.")
                .arg(Arg::with_name("source_filename").required(true))
                .arg(Arg::with_name("destination_filename").required(false)),
        )
        .subcommand(SubCommand::with_name("create").about("Create a blank disk image"))
        .subcommand(
            SubCommand::with_name("dir")
                .about("Show a directory listing")
                .arg(
                    Arg::with_name("verbose")
                        .short("v")
                        .long("verbose")
                        .multiple(true)
                        .help("Show more detail"),
                ),
        )
        .subcommand(
            SubCommand::with_name("format")
                .about("Format a disk image")
                .arg(Arg::with_name("name").required(true))
                .arg(Arg::with_name("id").required(true)),
        )
        .subcommand(
            SubCommand::with_name("rename")
                .about("Rename a file.")
                .arg(Arg::with_name("original_filename").required(true))
                .arg(Arg::with_name("new_filename").required(true)),
        )
        .subcommand(
            SubCommand::with_name("delete")
                .about("Delete (scratch) a file.")
                .arg(Arg::with_name("filename").required(true)),
        )
        .subcommand(
            SubCommand::with_name("dump")
                .about("Provide a hex dump of a disk image or file.")
                .arg(Arg::with_name("filename").required(false)),
        )
        .subcommand(SubCommand::with_name("validate").about("Validate a disk image."));
    // `get_matches` consumes `app`, so keep a clone around to print help when
    // the dispatch below falls through to the catch-all arms.
    let mut app_clone = app.clone();
    let matches = app.get_matches();
    let diskimage = matches.value_of("diskimage").unwrap();
    // Dispatch to the selected subcommand; every handler returns io::Result.
    // The `.unwrap()` calls on values are safe: clap enforced `required(true)`.
    let result = match matches.subcommand() {
        ("bam", Some(m)) => match m.subcommand() {
            ("show", Some(_)) => cmd_bam_show(diskimage),
            ("allocate", Some(m)) => cmd_bam_edit(
                diskimage,
                optional_track_parser(m.value_of("track").unwrap().to_string()),
                optional_sector_parser(m.value_of("sector").unwrap().to_string()),
                true,
            ),
            ("free", Some(m)) => cmd_bam_edit(
                diskimage,
                optional_track_parser(m.value_of("track").unwrap().to_string()),
                optional_sector_parser(m.value_of("sector").unwrap().to_string()),
                false,
            ),
            _ => {
                app_clone.print_help().unwrap();
                println!();
                process::exit(EXIT_FAILURE);
            }
        },
        ("read", Some(m)) => cmd_read(
            diskimage,
            m.value_of("source_filename").unwrap(),
            m.value_of("destination_filename"),
        ),
        ("write", Some(m)) => cmd_write(
            diskimage,
            m.value_of("source_filename").unwrap(),
            m.value_of("destination_filename"),
            m.value_of("type")
                .and_then(FileType::from_string)
                .unwrap_or(FileType::SEQ),
        ),
        ("append", Some(m)) => cmd_append(
            diskimage,
            m.value_of("source_filename").unwrap(),
            m.value_of("destination_filename"),
        ),
        ("geosread", Some(m)) => cmd_geosread(
            diskimage,
            m.value_of("source_filename").unwrap(),
            m.value_of("destination_filename"),
        ),
        ("create", Some(_)) => cmd_create(diskimage),
        ("dir", Some(m)) => cmd_dir(diskimage, m.occurrences_of("verbose")),
        ("format", Some(m)) => cmd_format(
            diskimage,
            m.value_of("name").unwrap(),
            m.value_of("id").unwrap(),
        ),
        ("rename", Some(m)) => cmd_rename(
            diskimage,
            m.value_of("original_filename").unwrap(),
            m.value_of("new_filename").unwrap(),
        ),
        ("delete", Some(m)) => cmd_delete(diskimage, m.value_of("filename").unwrap()),
        ("dump", Some(m)) => cmd_dump(diskimage, m.value_of("filename")),
        ("validate", Some(_)) => cmd_validate(diskimage),
        _ => {
            app_clone.print_help().unwrap();
            println!();
            process::exit(EXIT_FAILURE);
        }
    };
    // Errors are reported but the process still exits 0 —
    // NOTE(review): consider exiting with EXIT_FAILURE here.
    if let Err(e) = result {
        eprintln!("Error: {}", e);
    }
}
/// Parse a numeric argument in `[min, max]`, where the literal "all" maps to
/// `None`. Any other out-of-range or non-numeric input is an error.
fn optional_u8_parser(v: String, min: u8, max: u8) -> Result<Option<u8>, ()> {
    if v == "all" {
        return Ok(None);
    }
    v.parse::<u8>()
        .ok()
        .filter(|n| (min..=max).contains(n))
        .map(Some)
        .ok_or(())
}
/// clap validator wrapping `optional_u8_parser`: accepts values the parser
/// accepts, otherwise reports the allowed range.
fn optional_u8_validator(v: String, min: u8, max: u8) -> Result<(), String> {
    optional_u8_parser(v, min, max)
        .map(|_| ())
        .map_err(|_| format!("Expected a value from {}-{}, or \"all\".", min, max))
}
/// Require a track argument to be a number in the range 1-40 or "all".
fn optional_track_validator(v: String) -> Result<(), String> {
    optional_u8_validator(v, 1, 40)
}
/// Parse a track argument; `None` means "all". The `unwrap` cannot fire in
/// practice because clap runs `optional_track_validator` on the value first.
fn optional_track_parser(v: String) -> Option<u8> {
    optional_u8_parser(v, 1, 40).unwrap()
}
/// Require a sector argument to be a number in the range 0-20 or "all".
fn optional_sector_validator(v: String) -> Result<(), String> {
    optional_u8_validator(v, 0, 20)
}
/// Parse a sector argument; `None` means "all". The `unwrap` cannot fire in
/// practice because clap runs `optional_sector_validator` on the value first.
fn optional_sector_parser(v: String) -> Option<u8> {
    optional_u8_parser(v, 0, 20).unwrap()
}
/// A FillReader is a reader that yields exactly `size` copies of `fill_byte`
/// and then reports end-of-file.
struct FillReader {
    fill_byte: u8,
    size: usize,
}

impl FillReader {
    /// Create a reader producing `size` bytes of `fill_byte`.
    fn new(fill_byte: u8, size: usize) -> FillReader {
        FillReader { fill_byte, size }
    }
}

impl Read for FillReader {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        // Never hand out more than the remaining budget.
        let n = self.size.min(buf.len());
        buf[..n].fill(self.fill_byte);
        self.size -= n;
        Ok(n)
    }
}
/// Print the image's Block Availability Map (opened read-only).
fn cmd_bam_show(diskimage: &str) -> io::Result<()> {
    let disk = disk::open(diskimage, false)?;
    let bam = disk.bam()?;
    print!("{:?}", bam.borrow());
    Ok(())
}
/// Mark blocks as allocated (`allocate == true`) or free in the BAM.
/// `None` for `track`/`sector` means "all tracks"/"all sectors".
fn cmd_bam_edit(
    diskimage: &str,
    track: Option<u8>,
    sector: Option<u8>,
    allocate: bool,
) -> io::Result<()> {
    let disk = disk::open(diskimage, true)?;
    let format = disk.disk_format()?.clone();
    let bam = disk.bam()?;
    let mut bam = bam.borrow_mut();
    let track_range = match track {
        Some(t) => t..=t,
        // NOTE(review): "all" is hard-coded to tracks 1-35 (D64 geometry);
        // D71/D81 images have more tracks — confirm whether this bound should
        // come from `format` instead.
        None => 1..=35,
    };
    for track in track_range {
        let entry = bam.entry_mut(track)?;
        let sector_range = match sector {
            Some(s) => s..=s,
            // Sectors per track vary; take the bound from the disk format.
            None => 0..=(format.sectors_in_track(track) - 1),
        };
        for sector in sector_range {
            if allocate {
                entry.allocate(sector);
            } else {
                entry.free(sector);
            }
        }
    }
    // Write the updated BAM
    bam.flush()
}
/// Open an existing CBM file from the specified disk image for reading.
fn open_cbm_file(diskimage: &str, filename: &str) -> io::Result<File> {
    // Open the CBM file
    let disk = disk::open(diskimage, false)?;
    disk.open_file(&filename.into())
}
/// Open a file on a CBM disk image for reading.
fn open_cbm_reader(diskimage: &str, filename: &str) -> io::Result<Box<dyn Read>> {
    Ok(Box::new(open_cbm_file(diskimage, filename)?.reader()?))
}
/// Open a file on a CBM disk image for appending.
/// NOTE(review): this goes through `open_cbm_file`, which opens the image
/// with writable = false — confirm that appended data is actually persisted.
fn open_cbm_appender(diskimage: &str, filename: &str) -> io::Result<Box<dyn Write>> {
    Ok(Box::new(open_cbm_file(diskimage, filename)?.writer()?))
}
/// Open an existing GEOS file from the specified disk image for reading.
///
/// # Errors
/// Returns an `InvalidData` error when the named file exists but is not a
/// GEOS file. (The original hit `unreachable!()` on this path, aborting on
/// plain user input such as `geosread` of a regular CBM file.)
fn open_geos_file(diskimage: &str, filename: &str) -> io::Result<GEOSFile> {
    // Open the GEOS file
    let disk = disk::open(diskimage, false)?;
    let file = disk.open_file(&filename.into())?;
    match file {
        // Both GEOS variants carry a GEOSFile payload.
        File::GEOSVLIR(f) | File::GEOSSequential(f) => Ok(f),
        _ => Err(io::Error::new(
            io::ErrorKind::InvalidData,
            format!("{} is not a GEOS file", filename),
        )),
    }
}
/// Open a GEOS file for reading.
fn open_geos_reader(diskimage: &str, filename: &str) -> io::Result<Box<dyn Read>> {
    Ok(Box::new(open_geos_file(diskimage, filename)?.reader()?))
}
/// Open a file on a CBM disk image for writing.
fn open_cbm_writer(diskimage: &str, filename: &str, file_type: FileType) -> io::Result<Box<dyn Write>> {
    let mut disk = disk::open(diskimage, true)?;
    let file = disk.create_file(&filename.into(), file_type, Scheme::Linear)?;
    // `Scheme::Linear` was requested, so `create_file` returning any other
    // variant would be a library invariant violation — hence unreachable.
    let file = match file {
        File::Linear(f) => f,
        _ => unreachable!(),
    };
    let writer = file.writer()?;
    Ok(Box::new(writer))
}
/// Open a file for reading; the pseudo-filename "-" selects standard input.
fn open_fs_reader(filename: &str) -> io::Result<Box<dyn Read>> {
    match filename {
        STDINOUT_PSEUDOFILENAME => Ok(Box::new(io::stdin())),
        path => Ok(Box::new(fs::File::open(path)?)),
    }
}
/// Open a file for writing; the pseudo-filename "-" selects standard output.
fn open_fs_writer(filename: &str) -> io::Result<Box<dyn Write>> {
    match filename {
        STDINOUT_PSEUDOFILENAME => Ok(Box::new(io::stdout())),
        path => Ok(Box::new(fs::File::create(path)?)),
    }
}
/// Copy a file out of the disk image to the local filesystem (or stdout via
/// "-"). The destination name defaults to the source name.
fn cmd_read(
    diskimage: &str,
    source_filename: &str,
    destination_filename: Option<&str>,
) -> io::Result<()> {
    let destination_filename = destination_filename.unwrap_or(source_filename);
    let mut reader = open_cbm_reader(diskimage, source_filename)?;
    let mut writer = open_fs_writer(destination_filename)?;
    io::copy(&mut reader, &mut writer)?;
    writer.flush()
}
/// Copy a local file (or stdin via "-") into the disk image as `file_type`.
/// The destination name defaults to the source name.
fn cmd_write(
    diskimage: &str,
    source_filename: &str,
    destination_filename: Option<&str>,
    file_type: FileType,
) -> io::Result<()> {
    let destination_filename = destination_filename.unwrap_or(source_filename);
    let mut reader = open_fs_reader(source_filename)?;
    let mut writer = open_cbm_writer(diskimage, destination_filename, file_type)?;
    io::copy(&mut reader, &mut writer)?;
    writer.flush()
}
/// Append a local file (or stdin via "-") to an existing file in the image.
fn cmd_append(
    diskimage: &str,
    source_filename: &str,
    destination_filename: Option<&str>,
) -> io::Result<()> {
    let destination_filename = destination_filename.unwrap_or(source_filename);
    let mut reader = open_fs_reader(source_filename)?;
    let mut writer = open_cbm_appender(diskimage, destination_filename)?;
    io::copy(&mut reader, &mut writer)?;
    writer.flush()
}
/// Copy a GEOS file out of the disk image to the local filesystem (or stdout).
fn cmd_geosread(
    diskimage: &str,
    source_filename: &str,
    destination_filename: Option<&str>,
) -> io::Result<()> {
    let destination_filename = destination_filename.unwrap_or(source_filename);
    let mut reader = open_geos_reader(diskimage, source_filename)?;
    let mut writer = open_fs_writer(destination_filename)?;
    io::copy(&mut reader, &mut writer)?;
    writer.flush()
}
/// Create a blank, zero-filled disk image whose size is chosen from the file
/// extension (.d64/.d71/.d81; unknown extensions fall back to D64).
fn cmd_create(diskimage: &str) -> io::Result<()> {
    // Determine what kind of disk image to create based on the file extension.
    let geometry = match DiskType::from_extension(diskimage) {
        Some(DiskType::D64) => D64::geometry(false),
        Some(DiskType::D71) => D71::geometry(false),
        Some(DiskType::D81) => D81::geometry(false),
        None => {
            println!("Unknown file extension. Assuming D64...");
            D64::geometry(false)
        }
    };
    // `create_new` refuses to clobber an existing image file.
    let mut file = OpenOptions::new()
        .write(true)
        .create_new(true)
        .open(diskimage)?;
    io::copy(&mut FillReader::new(0x00, geometry.size()), &mut file)?;
    file.flush()
}
/// Show a directory listing. Verbosity 0 prints plain entries, 1 switches to
/// the alternate (`{:#}`) entry format, and 2+ additionally prints per-file
/// details. Ends with the free-block count from the BAM.
fn cmd_dir(diskimage: &str, verbosity: u64) -> io::Result<()> {
    let disk = disk::open(diskimage, false)?;
    // Disk header (name/id) first.
    println!("{}", disk);
    for entry in disk.iter() {
        if verbosity > 0 {
            let entry = entry?;
            println!("{:#}", entry);
            if verbosity > 1 {
                let file = disk.open_file(&entry.filename)?;
                // Detail level scales with extra -v flags beyond the second.
                file.details(&mut io::stdout(), (verbosity as usize) - 2)?;
                println!();
            }
        } else {
            println!("{}", entry?);
        }
    }
    let bam = disk.bam()?;
    let bam = bam.borrow();
    println!("{} blocks free.", bam.blocks_free());
    Ok(())
}
/// Format (re-initialize) a disk image with the given name and id, converting
/// both to PETSCII.
fn cmd_format(diskimage: &str, name: &str, id: &str) -> io::Result<()> {
    let id: Petscii = id.into();
    let id_bytes: &[u8] = id.as_bytes();
    let mut disk = disk::open(diskimage, true)?;
    disk.write_format(&name.into(), &id_bytes.into())
}
/// Rename a file on the disk image.
fn cmd_rename(diskimage: &str, original_filename: &str, new_filename: &str) -> io::Result<()> {
    let mut disk = disk::open(diskimage, true)?;
    disk.rename(&original_filename.into(), &new_filename.into())
}
/// Delete (scratch) a file from the disk image.
fn cmd_delete(diskimage: &str, filename: &str) -> io::Result<()> {
    let disk = disk::open(diskimage, true)?;
    let mut file = disk.open_file(&filename.into())?;
    file.delete()
}
/// Hex-dump a single file from the image, or the entire image when no
/// filename is given.
fn cmd_dump(diskimage: &str, filename: Option<&str>) -> io::Result<()> {
    // NOTE(review): the image is opened writable even though dumping only
    // reads — confirm whether read-only (`false`) would suffice here.
    let mut disk = disk::open(diskimage, true)?;
    match filename {
        Some(filename) => {
            let file = disk.open_file(&filename.into())?;
            file.dump(&mut io::stdout())?;
        }
        None => disk.dump(&mut io::stdout())?,
    }
    io::stdout().flush()
}
/// Validate a disk image, printing every problem found; returns an error
/// summarizing the count when any problems exist.
fn cmd_validate(diskimage: &str) -> io::Result<()> {
    let disk = disk::open(diskimage, true)?;
    let errors = disk.validate()?;
    for e in &errors {
        println!("{}", e);
    }
    if errors.is_empty() {
        println!("Disk validates successfully.");
        return Ok(());
    }
    Err(io::Error::new(
        io::ErrorKind::Other,
        format!("{} errors found during validation.", errors.len()),
    ))
}
|
use serde::{Serialize, Deserialize};
/// Serializable snapshot of the PPU's full state, produced by
/// `Ppu::save_state` and consumed by `Ppu::load_state`.
#[derive(Serialize, Deserialize, Debug)]
pub struct PpuData {
    // Frame timing: dot within the scanline, scanline index, frame counter.
    line_cycle: usize,
    scanline: usize,
    frame: usize,
    // Internal address/scroll registers — presumably the NES "loopy"
    // v/t/fine-x/write-toggle set; confirm against the Ppu definition.
    v: u16,
    t: u16,
    x: u8,
    w: u8,
    // VRAM contents: four nametables plus palette RAM.
    nametable_a: Vec<u8>,
    nametable_b: Vec<u8>,
    nametable_c: Vec<u8>,
    nametable_d: Vec<u8>,
    palette_ram: Vec<u8>,
    // Background rendering pipeline: shift registers and fetch latches.
    background_pattern_sr_low: u16,
    background_pattern_sr_high: u16,
    nametable_byte: u8,
    attribute_table_byte: u8,
    low_pattern_table_byte: u8,
    high_pattern_table_byte: u8,
    background_palette_sr_low: u8,
    background_palette_sr_high: u8,
    background_palette_latch: u8,
    // Sprite state: OAM buffers plus per-sprite latches/counters/shifters.
    primary_oam: Vec<u8>,
    secondary_oam: Vec<u8>,
    sprite_attribute_latches: Vec<u8>,
    sprite_counters: Vec<u8>,
    sprite_indexes: Vec<u8>,
    sprite_pattern_table_srs: Vec<(u8, u8)>,
    num_sprites: usize,
    // Settings derived from the control/mask registers.
    address_increment: u16,
    sprite_pattern_table_base: usize,
    background_pattern_table_base:usize,
    oam_address: usize,
    sprite_size: u8,
    grayscale: bool,
    show_background_left: bool,
    show_sprites_left: bool,
    show_background: bool,
    show_sprites: bool,
    emphasize_red: bool,
    emphasize_green: bool,
    emphasize_blue: bool,
    // Status flags and NMI bookkeeping.
    sprite_overflow: bool,
    sprite_zero_hit: bool,
    should_generate_nmi: bool,
    vertical_blank: bool,
    trigger_nmi: bool,
    previous_nmi: bool,
    nmi_delay: usize,
    // Miscellaneous read-buffer / bus state.
    read_buffer: u8,
    recent_bits: u8,
    previous_a12: u8,
}
impl super::Ppu {
    /// Capture a serializable snapshot of the PPU: a straight field-by-field
    /// copy, cloning the buffer fields. Must mirror `load_state` exactly.
    pub fn save_state(&self) -> PpuData {
        PpuData{
            line_cycle: self.line_cycle,
            scanline: self.scanline,
            frame: self.frame,
            v: self.v,
            t: self.t,
            x: self.x,
            w: self.w,
            nametable_a: self.nametable_a.clone(),
            nametable_b: self.nametable_b.clone(),
            nametable_c: self.nametable_c.clone(),
            nametable_d: self.nametable_d.clone(),
            palette_ram: self.palette_ram.clone(),
            background_pattern_sr_low: self.background_pattern_sr_low,
            background_pattern_sr_high: self.background_pattern_sr_high,
            nametable_byte: self.nametable_byte,
            attribute_table_byte: self.attribute_table_byte,
            low_pattern_table_byte: self.low_pattern_table_byte,
            high_pattern_table_byte: self.high_pattern_table_byte,
            background_palette_sr_low: self.background_palette_sr_low,
            background_palette_sr_high: self.background_palette_sr_high,
            background_palette_latch: self.background_palette_latch,
            primary_oam: self.primary_oam.clone(),
            secondary_oam: self.secondary_oam.clone(),
            sprite_attribute_latches: self.sprite_attribute_latches.clone(),
            sprite_counters: self.sprite_counters.clone(),
            sprite_indexes: self.sprite_indexes.clone(),
            sprite_pattern_table_srs: self.sprite_pattern_table_srs.clone(),
            num_sprites: self.num_sprites,
            address_increment: self.address_increment,
            sprite_pattern_table_base: self.sprite_pattern_table_base,
            background_pattern_table_base: self.background_pattern_table_base,
            oam_address: self.oam_address,
            sprite_size: self.sprite_size,
            grayscale: self.grayscale,
            show_background_left: self.show_background_left,
            show_sprites_left: self.show_sprites_left,
            show_background: self.show_background,
            show_sprites: self.show_sprites,
            emphasize_red: self.emphasize_red,
            emphasize_green: self.emphasize_green,
            emphasize_blue: self.emphasize_blue,
            sprite_overflow: self.sprite_overflow,
            sprite_zero_hit: self.sprite_zero_hit,
            should_generate_nmi: self.should_generate_nmi,
            vertical_blank: self.vertical_blank,
            trigger_nmi: self.trigger_nmi,
            previous_nmi: self.previous_nmi,
            nmi_delay: self.nmi_delay,
            read_buffer: self.read_buffer,
            recent_bits: self.recent_bits,
            previous_a12: self.previous_a12,
        }
    }
    /// Restore the PPU from a snapshot produced by `save_state`: a straight
    /// field-by-field move back into `self`. Must mirror `save_state` exactly.
    pub fn load_state(&mut self, data: PpuData) {
        self.line_cycle = data.line_cycle;
        self.scanline = data.scanline;
        self.frame = data.frame;
        self.v = data.v;
        self.t = data.t;
        self.x = data.x;
        self.w = data.w;
        self.nametable_a = data.nametable_a;
        self.nametable_b = data.nametable_b;
        self.nametable_c = data.nametable_c;
        self.nametable_d = data.nametable_d;
        self.palette_ram = data.palette_ram;
        self.background_pattern_sr_low = data.background_pattern_sr_low;
        self.background_pattern_sr_high = data.background_pattern_sr_high;
        self.nametable_byte = data.nametable_byte;
        self.attribute_table_byte = data.attribute_table_byte;
        self.low_pattern_table_byte = data.low_pattern_table_byte;
        self.high_pattern_table_byte = data.high_pattern_table_byte;
        self.background_palette_sr_low = data.background_palette_sr_low;
        self.background_palette_sr_high = data.background_palette_sr_high;
        self.background_palette_latch = data.background_palette_latch;
        self.primary_oam = data.primary_oam;
        self.secondary_oam = data.secondary_oam;
        self.sprite_attribute_latches = data.sprite_attribute_latches;
        self.sprite_counters = data.sprite_counters;
        self.sprite_indexes = data.sprite_indexes;
        self.sprite_pattern_table_srs = data.sprite_pattern_table_srs;
        self.num_sprites = data.num_sprites;
        self.address_increment = data.address_increment;
        self.sprite_pattern_table_base = data.sprite_pattern_table_base;
        self.background_pattern_table_base = data.background_pattern_table_base;
        self.oam_address = data.oam_address;
        self.sprite_size = data.sprite_size;
        self.grayscale = data.grayscale;
        self.show_background_left = data.show_background_left;
        self.show_sprites_left = data.show_sprites_left;
        self.show_background = data.show_background;
        self.show_sprites = data.show_sprites;
        self.emphasize_red = data.emphasize_red;
        self.emphasize_green = data.emphasize_green;
        self.emphasize_blue = data.emphasize_blue;
        self.sprite_overflow = data.sprite_overflow;
        self.sprite_zero_hit = data.sprite_zero_hit;
        self.should_generate_nmi = data.should_generate_nmi;
        self.vertical_blank = data.vertical_blank;
        self.trigger_nmi = data.trigger_nmi;
        self.previous_nmi = data.previous_nmi;
        self.nmi_delay = data.nmi_delay;
        self.read_buffer = data.read_buffer;
        self.recent_bits = data.recent_bits;
        self.previous_a12 = data.previous_a12;
    }
}
|
//! This module corresponds to `mach/mach_host.h`
use kern_return::kern_return_t;
use mach_types::{host_t, clock_serv_t};
use clock_types::clock_id_t;
extern "C" {
pub fn host_get_clock_service(host: host_t, clock_id: clock_id_t, clock_serv: *mut clock_serv_t) -> kern_return_t;
}
|
/*
* Slack Web API
*
* One way to interact with the Slack platform is its HTTP RPC-based Web API, a collection of methods requiring OAuth 2.0-based user, bot, or workspace tokens blessed with related OAuth scopes.
*
* The version of the OpenAPI document: 1.7.0
*
* Generated by: https://openapi-generator.tech
*/
use reqwest;
use crate::apis::ResponseContent;
use super::{Error, configuration};
/// Typed error payload for [`admin_conversations_archive`]; `untagged` so the first matching shape wins.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum AdminConversationsArchiveError {
/// Slack's generic error envelope, captured as an arbitrary JSON object.
DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
/// Fallback for any response shape that does not match the above.
UnknownValue(serde_json::Value),
}
/// Typed error payload for [`admin_conversations_convert_to_private`]; `untagged` so the first matching shape wins.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum AdminConversationsConvertToPrivateError {
/// Slack's generic error envelope, captured as an arbitrary JSON object.
DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
/// Fallback for any response shape that does not match the above.
UnknownValue(serde_json::Value),
}
/// Typed error payload for [`admin_conversations_create`]; `untagged` so the first matching shape wins.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum AdminConversationsCreateError {
/// Slack's generic error envelope, captured as an arbitrary JSON object.
DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
/// Fallback for any response shape that does not match the above.
UnknownValue(serde_json::Value),
}
/// Typed error payload for [`admin_conversations_delete`]; `untagged` so the first matching shape wins.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum AdminConversationsDeleteError {
/// Slack's generic error envelope, captured as an arbitrary JSON object.
DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
/// Fallback for any response shape that does not match the above.
UnknownValue(serde_json::Value),
}
/// Typed error payload for [`admin_conversations_disconnect_shared`]; `untagged` so the first matching shape wins.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum AdminConversationsDisconnectSharedError {
/// Slack's generic error envelope, captured as an arbitrary JSON object.
DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
/// Fallback for any response shape that does not match the above.
UnknownValue(serde_json::Value),
}
/// Typed error payload for [`admin_conversations_get_conversation_prefs`]; `untagged` so the first matching shape wins.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum AdminConversationsGetConversationPrefsError {
/// Slack's generic error envelope, captured as an arbitrary JSON object.
DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
/// Fallback for any response shape that does not match the above.
UnknownValue(serde_json::Value),
}
/// Typed error payload for [`admin_conversations_get_teams`]; `untagged` so the first matching shape wins.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum AdminConversationsGetTeamsError {
/// Slack's generic error envelope, captured as an arbitrary JSON object.
DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
/// Fallback for any response shape that does not match the above.
UnknownValue(serde_json::Value),
}
/// Typed error payload for [`admin_conversations_invite`]; `untagged` so the first matching shape wins.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum AdminConversationsInviteError {
/// Slack's generic error envelope, captured as an arbitrary JSON object.
DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
/// Fallback for any response shape that does not match the above.
UnknownValue(serde_json::Value),
}
/// Typed error payload for [`admin_conversations_rename`]; `untagged` so the first matching shape wins.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum AdminConversationsRenameError {
/// Slack's generic error envelope, captured as an arbitrary JSON object.
DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
/// Fallback for any response shape that does not match the above.
UnknownValue(serde_json::Value),
}
/// Typed error payload for [`admin_conversations_search`]; `untagged` so the first matching shape wins.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum AdminConversationsSearchError {
/// Slack's generic error envelope, captured as an arbitrary JSON object.
DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
/// Fallback for any response shape that does not match the above.
UnknownValue(serde_json::Value),
}
/// Typed error payload for [`admin_conversations_set_conversation_prefs`]; `untagged` so the first matching shape wins.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum AdminConversationsSetConversationPrefsError {
/// Slack's generic error envelope, captured as an arbitrary JSON object.
DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
/// Fallback for any response shape that does not match the above.
UnknownValue(serde_json::Value),
}
/// Typed error payload for [`admin_conversations_set_teams`]; `untagged` so the first matching shape wins.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum AdminConversationsSetTeamsError {
/// Slack's generic error envelope, captured as an arbitrary JSON object.
DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
/// Fallback for any response shape that does not match the above.
UnknownValue(serde_json::Value),
}
/// Typed error payload for [`admin_conversations_unarchive`]; `untagged` so the first matching shape wins.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum AdminConversationsUnarchiveError {
/// Slack's generic error envelope, captured as an arbitrary JSON object.
DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
/// Fallback for any response shape that does not match the above.
UnknownValue(serde_json::Value),
}
/// Archive a public or private channel.
pub async fn admin_conversations_archive(configuration: &configuration::Configuration, token: &str, channel_id: &str) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<AdminConversationsArchiveError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/admin.conversations.archive", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.header("token", token.to_string());
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let mut local_var_form_params = std::collections::HashMap::new();
local_var_form_params.insert("channel_id", channel_id.to_string());
local_var_req_builder = local_var_req_builder.form(&local_var_form_params);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<AdminConversationsArchiveError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Convert a public channel to a private channel.
pub async fn admin_conversations_convert_to_private(configuration: &configuration::Configuration, token: &str, channel_id: &str) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<AdminConversationsConvertToPrivateError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/admin.conversations.convertToPrivate", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.header("token", token.to_string());
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let mut local_var_form_params = std::collections::HashMap::new();
local_var_form_params.insert("channel_id", channel_id.to_string());
local_var_req_builder = local_var_req_builder.form(&local_var_form_params);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<AdminConversationsConvertToPrivateError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Create a public or private channel-based conversation.
pub async fn admin_conversations_create(configuration: &configuration::Configuration, token: &str, name: &str, is_private: bool, description: Option<&str>, org_wide: Option<bool>, team_id: Option<&str>) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<AdminConversationsCreateError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/admin.conversations.create", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.header("token", token.to_string());
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let mut local_var_form_params = std::collections::HashMap::new();
local_var_form_params.insert("name", name.to_string());
if let Some(local_var_param_value) = description {
local_var_form_params.insert("description", local_var_param_value.to_string());
}
local_var_form_params.insert("is_private", is_private.to_string());
if let Some(local_var_param_value) = org_wide {
local_var_form_params.insert("org_wide", local_var_param_value.to_string());
}
if let Some(local_var_param_value) = team_id {
local_var_form_params.insert("team_id", local_var_param_value.to_string());
}
local_var_req_builder = local_var_req_builder.form(&local_var_form_params);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<AdminConversationsCreateError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Delete a public or private channel.
pub async fn admin_conversations_delete(configuration: &configuration::Configuration, token: &str, channel_id: &str) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<AdminConversationsDeleteError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/admin.conversations.delete", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.header("token", token.to_string());
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let mut local_var_form_params = std::collections::HashMap::new();
local_var_form_params.insert("channel_id", channel_id.to_string());
local_var_req_builder = local_var_req_builder.form(&local_var_form_params);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<AdminConversationsDeleteError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Disconnect a connected channel from one or more workspaces.
pub async fn admin_conversations_disconnect_shared(configuration: &configuration::Configuration, token: &str, channel_id: &str, leaving_team_ids: Option<&str>) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<AdminConversationsDisconnectSharedError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/admin.conversations.disconnectShared", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.header("token", token.to_string());
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let mut local_var_form_params = std::collections::HashMap::new();
local_var_form_params.insert("channel_id", channel_id.to_string());
if let Some(local_var_param_value) = leaving_team_ids {
local_var_form_params.insert("leaving_team_ids", local_var_param_value.to_string());
}
local_var_req_builder = local_var_req_builder.form(&local_var_form_params);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<AdminConversationsDisconnectSharedError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Get conversation preferences for a public or private channel.
pub async fn admin_conversations_get_conversation_prefs(configuration: &configuration::Configuration, token: &str, channel_id: &str) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<AdminConversationsGetConversationPrefsError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/admin.conversations.getConversationPrefs", configuration.base_path);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
local_var_req_builder = local_var_req_builder.query(&[("channel_id", &channel_id.to_string())]);
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.header("token", token.to_string());
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<AdminConversationsGetConversationPrefsError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Get all the workspaces a given public or private channel is connected to within this Enterprise org.
pub async fn admin_conversations_get_teams(configuration: &configuration::Configuration, token: &str, channel_id: &str, cursor: Option<&str>, limit: Option<i32>) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<AdminConversationsGetTeamsError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/admin.conversations.getTeams", configuration.base_path);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
local_var_req_builder = local_var_req_builder.query(&[("channel_id", &channel_id.to_string())]);
if let Some(ref local_var_str) = cursor {
local_var_req_builder = local_var_req_builder.query(&[("cursor", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = limit {
local_var_req_builder = local_var_req_builder.query(&[("limit", &local_var_str.to_string())]);
}
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.header("token", token.to_string());
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<AdminConversationsGetTeamsError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Invite a user to a public or private channel.
pub async fn admin_conversations_invite(configuration: &configuration::Configuration, token: &str, user_ids: &str, channel_id: &str) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<AdminConversationsInviteError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/admin.conversations.invite", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.header("token", token.to_string());
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let mut local_var_form_params = std::collections::HashMap::new();
local_var_form_params.insert("user_ids", user_ids.to_string());
local_var_form_params.insert("channel_id", channel_id.to_string());
local_var_req_builder = local_var_req_builder.form(&local_var_form_params);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<AdminConversationsInviteError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Rename a public or private channel.
pub async fn admin_conversations_rename(configuration: &configuration::Configuration, token: &str, channel_id: &str, name: &str) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<AdminConversationsRenameError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/admin.conversations.rename", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.header("token", token.to_string());
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let mut local_var_form_params = std::collections::HashMap::new();
local_var_form_params.insert("channel_id", channel_id.to_string());
local_var_form_params.insert("name", name.to_string());
local_var_req_builder = local_var_req_builder.form(&local_var_form_params);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<AdminConversationsRenameError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Search for public or private channels in an Enterprise organization.
pub async fn admin_conversations_search(configuration: &configuration::Configuration, token: &str, team_ids: Option<&str>, query: Option<&str>, limit: Option<i32>, cursor: Option<&str>, search_channel_types: Option<&str>, sort: Option<&str>, sort_dir: Option<&str>) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<AdminConversationsSearchError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/admin.conversations.search", configuration.base_path);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_str) = team_ids {
local_var_req_builder = local_var_req_builder.query(&[("team_ids", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = query {
local_var_req_builder = local_var_req_builder.query(&[("query", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = limit {
local_var_req_builder = local_var_req_builder.query(&[("limit", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = cursor {
local_var_req_builder = local_var_req_builder.query(&[("cursor", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = search_channel_types {
local_var_req_builder = local_var_req_builder.query(&[("search_channel_types", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = sort {
local_var_req_builder = local_var_req_builder.query(&[("sort", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = sort_dir {
local_var_req_builder = local_var_req_builder.query(&[("sort_dir", &local_var_str.to_string())]);
}
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.header("token", token.to_string());
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<AdminConversationsSearchError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Set the posting permissions for a public or private channel.
pub async fn admin_conversations_set_conversation_prefs(configuration: &configuration::Configuration, token: &str, channel_id: &str, prefs: &str) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<AdminConversationsSetConversationPrefsError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/admin.conversations.setConversationPrefs", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.header("token", token.to_string());
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let mut local_var_form_params = std::collections::HashMap::new();
local_var_form_params.insert("channel_id", channel_id.to_string());
local_var_form_params.insert("prefs", prefs.to_string());
local_var_req_builder = local_var_req_builder.form(&local_var_form_params);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<AdminConversationsSetConversationPrefsError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Set the workspaces in an Enterprise grid org that connect to a public or private channel.
pub async fn admin_conversations_set_teams(configuration: &configuration::Configuration, token: &str, channel_id: &str, team_id: Option<&str>, target_team_ids: Option<&str>, org_channel: Option<bool>) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<AdminConversationsSetTeamsError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/admin.conversations.setTeams", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.header("token", token.to_string());
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let mut local_var_form_params = std::collections::HashMap::new();
local_var_form_params.insert("channel_id", channel_id.to_string());
if let Some(local_var_param_value) = team_id {
local_var_form_params.insert("team_id", local_var_param_value.to_string());
}
if let Some(local_var_param_value) = target_team_ids {
local_var_form_params.insert("target_team_ids", local_var_param_value.to_string());
}
if let Some(local_var_param_value) = org_channel {
local_var_form_params.insert("org_channel", local_var_param_value.to_string());
}
local_var_req_builder = local_var_req_builder.form(&local_var_form_params);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<AdminConversationsSetTeamsError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Unarchive a public or private channel.
pub async fn admin_conversations_unarchive(configuration: &configuration::Configuration, token: &str, channel_id: &str) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<AdminConversationsUnarchiveError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/admin.conversations.unarchive", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.header("token", token.to_string());
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let mut local_var_form_params = std::collections::HashMap::new();
local_var_form_params.insert("channel_id", channel_id.to_string());
local_var_req_builder = local_var_req_builder.form(&local_var_form_params);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<AdminConversationsUnarchiveError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
|
use sack::{SackType, SackStorable, TokenLike, SackBacker};
/// This is a sack that can decay.
///
/// NOTE(review): `decay` mutably transforms the implementor and yields
/// references (tied to lifetime `'a`) to two successor sacks built from the
/// `C2/D2/B2` and `C3/D3/B3` type families. The token parameters `T1`-`T3`
/// are constrained but do not appear in `decay`'s signature — presumably
/// used by implementors; confirm against the `sack` crate.
pub trait Decayer<'a, C1: 'a, C2: 'a, C3: 'a, D1: 'a, D2: 'a, D3: 'a, B1: 'a, B2: 'a, B3: 'a, T1: 'a, T2: 'a, T3: 'a>
where C1: SackStorable,
C2: SackStorable,
C3: SackStorable,
D1: SackStorable,
D2: SackStorable,
D3: SackStorable,
T1: TokenLike,
T2: TokenLike,
T3: TokenLike,
B1: SackBacker,
B2: SackBacker,
B3: SackBacker
{
// Consumes the current state conceptually and returns the two decayed sacks.
fn decay(&mut self) -> (&'a SackType<C2, D2, B2>, &'a SackType<C3, D3, B3>);
}
|
pub mod signal;
pub mod graph; |
use crate::{bgp_type::AutonomousSystemNumber, error::ConvertBytesToBgpMessageError};
use std::collections::BTreeSet;
use std::net::Ipv4Addr;
use std::path::Path;
use bytes::{BytesMut, BufMut};
use anyhow::Context;
/// A BGP UPDATE path attribute. Type codes below match `from_u8_slice`.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum PathAttribute {
/// ORIGIN (type code 1).
Origin(Origin),
/// AS_PATH (type code 2).
AsPath(AsPath),
/// NEXT_HOP (type code 3).
NextHop(Ipv4Addr),
/// Any unrecognized attribute, kept as its raw encoded bytes.
DontKnow(Vec<u8>),
}
impl PathAttribute {
    /// Total encoded size of this path attribute in bytes: attribute flag,
    /// type code, the 1- or 2-octet length field, and the value itself.
    pub fn bytes_len(&self) -> usize {
        // Length of the attribute *value* alone.
        let value_len = match self {
            PathAttribute::Origin(_) => 1,
            PathAttribute::AsPath(a) => a.bytes_len(),
            PathAttribute::NextHop(_) => 4,
            PathAttribute::DontKnow(v) => v.len(),
        };
        // Flag + type code are always 2 octets; the length field takes
        // 2 octets (extended length) when the value exceeds 255 bytes.
        let header_len = if value_len > 255 { 4 } else { 3 };
        value_len + header_len
    }

    /// Parses a back-to-back sequence of encoded path attributes from `bytes`.
    ///
    /// NOTE(review): indexing assumes the buffer is well-formed; a truncated
    /// attribute header or value will panic rather than return an error.
    pub fn from_u8_slice(bytes: &[u8]) -> Result<Vec<PathAttribute>, ConvertBytesToBgpMessageError> {
        let mut path_attributes = vec![];
        let mut i = 0;
        while i < bytes.len() {
            let attribute_flag = bytes[i];
            // Bit 4 of the flag selects the extended (2-octet) length encoding.
            let attribute_length_octets = ((attribute_flag & 0b00010000) >> 4) + 1;
            let attribute_type_code = bytes[i + 1];
            let attribute_length = if attribute_length_octets == 1 {
                bytes[i + 2] as usize
            } else {
                u16::from_be_bytes(
                    bytes[i + 2..i + 4]
                        .try_into()
                        .context("failed to read 2-octet path attribute length")?,
                ) as usize
            };
            // The value begins after the flag, type code and length field.
            let attribute_start_index = i + 1 + attribute_length_octets as usize + 1;
            let attribute_end_index = attribute_start_index + attribute_length;
            let path_attribute = match attribute_type_code {
                1 => PathAttribute::Origin(Origin::try_from(bytes[attribute_start_index])?),
                2 => PathAttribute::AsPath(AsPath::try_from(
                    &bytes[attribute_start_index..attribute_end_index],
                )?),
                3 => {
                    let addr = Ipv4Addr::new(
                        bytes[attribute_start_index],
                        bytes[attribute_start_index + 1],
                        bytes[attribute_start_index + 2],
                        bytes[attribute_start_index + 3],
                    );
                    PathAttribute::NextHop(addr)
                }
                // Unknown attribute: keep the whole raw encoding (flag,
                // header and value) so it can be echoed back verbatim.
                _ => PathAttribute::DontKnow(bytes[i..attribute_end_index].to_owned()),
            };
            path_attributes.push(path_attribute);
            i = attribute_end_index;
        }
        Ok(path_attributes)
    }
}
impl From<&PathAttribute> for BytesMut {
    /// Serializes the attribute as flag, type code, length, value.
    fn from(p: &PathAttribute) -> Self {
        let mut bytes = BytesMut::new();
        match p {
            PathAttribute::Origin(o) => {
                // Well-known transitive flag; value is a single octet.
                let attribute_flag = 0b01000000;
                let attribute_type_code = 1;
                let attribute_length = 1;
                let attribute = match o {
                    Origin::Igp => 0,
                    Origin::Egp => 1,
                    Origin::Incomplete => 2,
                };
                bytes.put_u8(attribute_flag);
                bytes.put_u8(attribute_type_code);
                bytes.put_u8(attribute_length);
                bytes.put_u8(attribute);
            }
            // Renamed from `p` to avoid shadowing the function parameter.
            PathAttribute::AsPath(as_path) => {
                let mut attribute_flag = 0b01000000;
                let attribute_type_code = 2;
                let attribute_length = as_path.bytes_len() as u16;
                let mut attribute_length_bytes = BytesMut::new();
                if attribute_length < 256 {
                    attribute_length_bytes.put_u8(attribute_length as u8);
                } else {
                    // Too long for one octet: set the extended-length flag bit
                    // and emit a 2-octet length field.
                    attribute_flag += 0b00010000;
                    attribute_length_bytes.put_u16(attribute_length);
                }
                let attribute = BytesMut::from(as_path);
                bytes.put_u8(attribute_flag);
                bytes.put_u8(attribute_type_code);
                bytes.put(attribute_length_bytes);
                bytes.put(attribute);
            }
            PathAttribute::NextHop(i) => {
                // Was `let mut`, but the flag is never modified in this arm.
                let attribute_flag = 0b01000000;
                let attribute_type_code = 3;
                let attribute_length = 4;
                let attribute = i.octets();
                bytes.put_u8(attribute_flag);
                bytes.put_u8(attribute_type_code);
                bytes.put_u8(attribute_length);
                bytes.put(&attribute[..]);
            }
            // Unknown attributes already carry their full on-wire form.
            PathAttribute::DontKnow(v) => bytes.put(&v[..]),
        }
        bytes
    }
}
/// Value of the ORIGIN path attribute; the on-wire encoding is
/// 0 = IGP, 1 = EGP, 2 = INCOMPLETE (see `TryFrom<u8>` below).
#[derive(Debug, PartialEq, Eq, Clone, Hash, Copy)]
pub enum Origin {
    Igp,
    Egp,
    Incomplete,
}
impl TryFrom<u8> for Origin {
    type Error = anyhow::Error;

    /// Decodes the 1-octet ORIGIN value (0 = IGP, 1 = EGP, 2 = INCOMPLETE).
    fn try_from(value: u8) -> Result<Self, Self::Error> {
        match value {
            0 => Ok(Origin::Igp),
            1 => Ok(Origin::Egp),
            2 => Ok(Origin::Incomplete),
            // `anyhow!` takes format args directly; the inner `format!` was redundant.
            _ => Err(anyhow::anyhow!("value: {} cannot convert to Origin", value)),
        }
    }
}
/// One AS_PATH segment: either an ordered sequence or an unordered set of
/// autonomous system numbers.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum AsPath {
    AsSequence(Vec<AutonomousSystemNumber>),
    AsSet(BTreeSet<AutonomousSystemNumber>),
}
impl AsPath {
    /// On-wire size of this segment: segment type (1) + AS count (1) +
    /// two octets per AS number.
    pub fn bytes_len(&self) -> usize {
        let as_count = match self {
            AsPath::AsSequence(seq) => seq.len(),
            AsPath::AsSet(set) => set.len(),
        };
        2 + 2 * as_count
    }

    /// Returns true when `as_path` already appears in this segment.
    pub fn does_contain(&self, as_path: AutonomousSystemNumber) -> bool {
        match self {
            AsPath::AsSequence(seq) => seq.contains(&as_path),
            AsPath::AsSet(set) => set.contains(&as_path),
        }
    }

    /// Appends `as_path` to the segment (set insertion for AS_SET).
    pub fn push(&mut self, as_path: AutonomousSystemNumber) {
        match self {
            AsPath::AsSequence(seq) => seq.push(as_path),
            AsPath::AsSet(set) => {
                set.insert(as_path);
            }
        }
    }
}
impl From<&AsPath> for BytesMut {
fn from(as_path: &AsPath) -> Self {
match as_path {
AsPath::AsSet(s) => {
let mut bytes = BytesMut::new();
let path_segment_type = 1;
let number_of_ases = s.len();
bytes.put_u8(path_segment_type);
bytes.put_u8(number_of_ases as u8);
bytes.put(
&s.iter()
.flat_map(|a| u16::from(*a).to_be_bytes())
.collect::<Vec<u8>>()[..],
);
bytes
},
AsPath::AsSequence(s) => {
let mut bytes = BytesMut::new();
let path_segment_type = 2;
let number_of_ases = s.len();
bytes.put_u8(path_segment_type);
bytes.put_u8(number_of_ases as u8);
bytes.put(
&s.iter()
.flat_map(|a| u16::from(*a).to_be_bytes())
.collect::<Vec<u8>>()[..],
);
bytes
},
}
}
}
impl TryFrom<&[u8]> for AsPath {
type Error = anyhow::Error;
fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
match value[0] {
1 => {
let mut ases = BTreeSet::new();
let mut i = 2;
while i < value.len() {
ases.insert(u16::from_be_bytes(
value[i..i+2].try_into()?
).into());
i += 2;
}
Ok(AsPath::AsSet(ases))
},
2 => {
let mut ases = vec![];
let mut i = 2;
while i < value.len() {
ases.push(u16::from_be_bytes(
value[i..i+2].try_into()?
).into());
i += 2;
}
Ok(AsPath::AsSequence((ases)))
},
_ => Err(anyhow::anyhow!(format!("value: {:?} cannot convert to AsPath", &value)))
}
}
} |
#[cfg(feature = "std")]
use crate::grid::config::{ColoredConfig, VerticalLine as VLine};
use super::Line;
/// A vertical split line which can be used to set a border.
#[cfg_attr(not(feature = "std"), allow(dead_code))]
#[derive(Debug, Clone)]
pub struct VerticalLine {
    // Column index the line is attached to.
    pub(crate) index: usize,
    // Character set used to draw the line.
    pub(crate) line: Line,
}
impl VerticalLine {
    /// Creates a new vertical split line for the given column index.
    pub const fn new(index: usize, line: Line) -> Self {
        Self { index, line }
    }
    /// Sets the main character the line is drawn with.
    pub const fn main(mut self, c: Option<char>) -> Self {
        self.line.main = c;
        self
    }
    /// Sets an intersection character.
    pub const fn intersection(mut self, c: Option<char>) -> Self {
        self.line.intersection = c;
        self
    }
    /// Sets a top character.
    pub const fn top(mut self, c: Option<char>) -> Self {
        self.line.connector1 = c;
        self
    }
    /// Sets a bottom character.
    pub const fn bottom(mut self, c: Option<char>) -> Self {
        self.line.connector2 = c;
        self
    }
}
#[cfg(feature = "std")]
impl<R, D> crate::settings::TableOption<R, D, ColoredConfig> for VerticalLine {
    // Registers this line in the config; records and dimensions are untouched.
    fn change(self, _: &mut R, cfg: &mut ColoredConfig, _: &mut D) {
        cfg.insert_vertical_line(self.index, VLine::from(self.line));
    }
}
|
use ::core::core_msg::ToCoreThreadMsg;
use ::file::file_msg::{FileThreadId, ToFileThreadMsg, SaveResult};
use ::file::text::{Line, Point};
use std::fs::File;
use std::io::{Read, Write};
use std::path::PathBuf;
use std::sync::mpsc::{Sender, Receiver};
use std::thread;
/// A file thread represents one open file. It contains all the information about the data within
/// that file and listens for messages to manipulate the data within the file.
pub struct FileThread {
    // Identifier the core thread uses to address this file.
    id: FileThreadId,
    // Channel for messages back to the core thread.
    core_sender: Sender<ToCoreThreadMsg>,
    // Incoming requests for this file.
    core_receiver: Receiver<ToFileThreadMsg>,
    // File contents, one entry per line.
    data: Vec<Line>,
    // Backing file on disk; `None` for an untitled buffer.
    path: Option<PathBuf>,
}
impl FileThread {
    /// Call to open a new file thread. The `path` parameter is the `Path` to the file to edit.
    /// If `path` is `None`, an empty, untitled file is opened.
    /// If the file at the path does not exist, the file is created when the file is saved.
    pub fn start(id: FileThreadId,
                 path: Option<PathBuf>,
                 sender: Sender<ToCoreThreadMsg>,
                 receiver: Receiver<ToFileThreadMsg>) {
        thread::spawn(move || {
            println!("Spawning file thread.");
            let mut file_thread = FileThread {
                id,
                core_sender: sender,
                core_receiver: receiver,
                data: Vec::new(),
                path,
            };
            file_thread.load_file();
            // Guarantee at least one (empty) line so line indexing is safe.
            if file_thread.data.is_empty() {
                file_thread.data.push(Line::new("".into()));
            }
            file_thread.run();
        });
    }

    /// Runs the event loop for the `FileThread`; returns when the channel closes.
    pub fn run(&mut self) {
        while let Ok(msg) = self.core_receiver.recv() {
            match msg {
                ToFileThreadMsg::ReplaceText(begin, end, text) =>
                    self.handle_replace_text(begin, end, text),
                ToFileThreadMsg::ClearAllText =>
                    self.handle_clear_all_text(),
                ToFileThreadMsg::Save(sender) =>
                    self.handle_save(sender),
            }
        }
    }

    /// Loads `self.path` into `data`, one `Line` per text line.
    /// A missing or invalid path is logged and leaves `data` untouched.
    fn load_file(&mut self) {
        if let Some(ref path) = self.path {
            let path = path.as_path();
            if !path.exists() || !path.is_file() {
                return warn!("Illegal path, cannot load file.");
            }
            // TODO(Connor): Handle file opening failure.
            let mut file = File::open(path).unwrap();
            let mut data = String::new();
            // The Result was previously dropped silently; surface read failures.
            if let Err(e) = file.read_to_string(&mut data) {
                return warn!("Failed to read file {:?}: {}", path, e);
            }
            for line in data.lines() {
                self.data.push(Line::new(line.into()));
            }
            info!("Loaded file from path: {:?}", path);
        }
    }

    /// Replaces the text between `begin` and `end` with `text`.
    /// A `None` end means the edit starts and ends at `begin`.
    fn handle_replace_text(&mut self, begin: Point, end: Option<Point>, text: String) {
        info!("Replacing text between {:?} and {:?} with {:?}", begin, end, text);
        let end = end.unwrap_or(begin);
        let lines = {
            // Keep the untouched prefix of the first line and suffix of the last.
            let before_text = &self.data[begin.line][..begin.index];
            let after_text = &self.data[end.line][end.index..];
            let text = format!("{}{}{}", before_text, text, after_text);
            let lines: Vec<Line> = text.lines().map(|line| Line::new(line.into())).collect();
            lines
        };
        // Replace the affected lines in place. The previous code drained an
        // exclusive range (leaving the last affected line duplicated) and then
        // clobbered the entire buffer with `self.data = lines;`, discarding all
        // unedited lines.
        self.data.splice(begin.line..=end.line, lines);
    }

    /// Resets the buffer to a single empty line.
    fn handle_clear_all_text(&mut self) {
        self.data.clear();
        self.data.push(Line::new("".into()));
    }

    /// Writes the buffer to `self.path`, one line per entry, each followed by
    /// a newline. With no path set, asks the requester to prompt for one.
    fn handle_save(&self, sender: Sender<SaveResult>) {
        match self.path {
            Some(ref path) => {
                info!("Saving file at path: {:?}", path);
                let path = path.as_path();
                // `File::create` both creates and truncates, so the previous
                // exists/is_file branching (whose two arms were identical) is gone.
                // TODO(Connor): Handle file opening failure.
                let mut file = File::create(path).unwrap();
                let mut data = String::new();
                for line in &self.data {
                    data.push_str(&line.text);
                    data.push('\n');
                }
                // TODO(Connor): Handle file writing failure.
                file.write_all(data.as_bytes()).expect("Failed to write.");
            },
            None => {
                info!("Could not save file. Path is not set.");
                let _ = sender.send(SaveResult::PromptForPath);
            },
        }
    }
}
|
// Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(never_type)]
#![warn(clippy::diverging_sub_expression)]
#![allow(clippy::match_same_arms, clippy::logic_bug)]
#[allow(clippy::empty_loop)] // the empty loop is deliberate: it makes the fn diverge
fn diverge() -> ! {
    loop {} // never returns; the `!` return type advertises divergence
}
struct A; // unit struct used to exercise the lint on diverging method calls
impl A {
    fn foo(&self) -> ! { // diverging method, so the lint fires on `A.foo()` too
        diverge()
    }
}
#[allow(unused_variables, clippy::unnecessary_operation, clippy::short_circuit_statement)]
fn main() {
    let b = true;
    b || diverge(); // rhs diverges: clippy::diverging_sub_expression expected here
    b || A.foo(); // same, via a diverging method call
}
#[allow(dead_code, unused_variables)]
fn foobar() {
    loop {
        // Arms that escape the enclosing loop/function must NOT trigger the
        // lint; only genuinely diverging sub-expressions should.
        let x = match 5 {
            4 => return,
            5 => continue,
            6 => true || return,
            7 => true || continue,
            8 => break,
            9 => diverge(), // diverging call: lint expected
            3 => true || diverge(), // diverging sub-expression: lint expected
            10 => match 42 {
                99 => return,
                _ => true || panic!("boo"), // panic diverges: lint expected
            },
            _ => true || break,
        };
    }
}
|
// Copyright 2021 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashSet;
use serde::Serialize;
use thiserror::Error;
use url::Url;
use crate::config::OAuth2ClientConfig;
/// An OAuth2 client known to this authorization server.
#[derive(Debug, Clone, Serialize)]
pub struct Client {
    pub client_id: String,
    // Registered redirect URIs; `None` means any suggested URI is accepted
    // (see `resolve_redirect_uri`).
    redirect_uris: Option<HashSet<Url>>,
}
/// Error returned when no client matches the requested client id.
#[derive(Debug, Error)]
#[error("Could not find client")]
pub struct ClientLookupError;
/// Error returned when a redirect URI cannot be resolved or is not registered.
#[derive(Debug, Error)]
#[error("Invalid redirect URI")]
pub struct InvalidRedirectUriError;
impl Client {
    /// Picks the redirect URI to use for this client:
    /// * no suggestion, no registered URIs -> error
    /// * no suggestion, registered URIs    -> an arbitrary registered URI
    /// * suggestion, no registered URIs    -> the suggestion
    /// * suggestion, registered URIs       -> the suggestion, iff registered
    pub fn resolve_redirect_uri<'a>(
        &'a self,
        suggested_uri: &'a Option<Url>,
    ) -> Result<&'a Url, InvalidRedirectUriError> {
        let registered = self.redirect_uris.as_ref();
        match suggested_uri {
            None => registered
                .and_then(|uris| uris.iter().next())
                .ok_or(InvalidRedirectUriError),
            Some(uri) => {
                let allowed = match registered {
                    None => true,
                    Some(uris) => uris.contains(uri),
                };
                if allowed {
                    Ok(uri)
                } else {
                    Err(InvalidRedirectUriError)
                }
            }
        }
    }
}
impl<T> super::Storage<T> {
    /// Registers the statically-configured OAuth2 clients, consuming and
    /// returning `self` builder-style.
    pub fn with_static_clients(mut self, clients: &[OAuth2ClientConfig]) -> Self {
        for config in clients {
            let client = Client {
                client_id: config.client_id.clone(),
                redirect_uris: config
                    .redirect_uris
                    .as_ref()
                    .map(|uris| uris.iter().cloned().collect()),
            };
            // TODO: we could warn about duplicate clients here
            self.clients.insert(config.client_id.clone(), client);
        }
        self
    }

    /// Looks up a registered client by id, returning an owned copy.
    pub async fn lookup_client(&self, client_id: &str) -> Result<Client, ClientLookupError> {
        match self.clients.get(client_id) {
            Some(client) => Ok(client.clone()),
            None => Err(ClientLookupError),
        }
    }
}
|
use day10::part_1;
/// Entry point: runs day 10 part 1 and reports the result.
fn main() {
    println!(
        "The number of asteroids detected in the best position is {}",
        part_1()
    );
}
|
use std::{cmp::Reverse, sync::Arc};
use chrono::{Duration, Utc};
use eyre::Report;
use hashbrown::HashMap;
use rosu_v2::prelude::{GameMode, OsuError, Username};
use crate::{
commands::{
check_user_mention,
osu::{get_user, UserArgs},
},
database::OsuData,
embeds::{EmbedData, SnipedDiffEmbed},
pagination::{Pagination, SnipedDiffPagination},
util::{
constants::{GENERAL_ISSUE, HUISMETBENEN_ISSUE, OSU_API_ISSUE},
numbers, MessageExt,
},
BotResult, CommandData, Context, MessageBuilder,
};
/// Shared implementation of the `snipedgain`/`snipedloss` commands: fetches a
/// user's national #1 changes of the past week from huismetbenen and replies
/// with a (possibly paginated) embed.
pub(super) async fn _sniped_diff(
    ctx: Arc<Context>,
    data: CommandData<'_>,
    diff: Difference,
    name: Option<Username>,
) -> BotResult<()> {
    // Without a name we need a linked osu! account.
    let name = match name {
        Some(name) => name,
        None => return super::require_link(&ctx, &data).await,
    };
    // Request the user
    let user_args = UserArgs::new(name.as_str(), GameMode::STD);
    let mut user = match get_user(&ctx, &user_args).await {
        Ok(user) => user,
        Err(OsuError::NotFound) => {
            let content = format!("Could not find user `{name}`");
            return data.error(&ctx, content).await;
        }
        Err(why) => {
            let _ = data.error(&ctx, OSU_API_ISSUE).await;
            return Err(why.into());
        }
    };
    // Overwrite default mode
    user.mode = GameMode::STD;
    // Snipe data only exists for countries tracked by huismetbenen.
    if !ctx.contains_country(user.country_code.as_str()) {
        let content = format!(
            "`{}`'s country {} is not supported :(",
            user.username, user.country_code
        );
        return data.error(&ctx, content).await;
    }
    let client = &ctx.clients.custom;
    let now = Utc::now();
    let week_ago = now - Duration::weeks(1);
    // Request the scores
    let scores_fut = match diff {
        Difference::Gain => client.get_national_snipes(&user, true, week_ago, now),
        Difference::Loss => client.get_national_snipes(&user, false, week_ago, now),
    };
    let mut scores = match scores_fut.await {
        Ok(scores) => scores,
        Err(why) => {
            let _ = data.error(&ctx, HUISMETBENEN_ISSUE).await;
            return Err(why.into());
        }
    };
    if scores.is_empty() {
        let content = format!(
            "`{name}` didn't {diff} national #1s in the last week.",
            name = user.username,
            diff = match diff {
                Difference::Gain => "gain any new",
                Difference::Loss => "lose any",
            }
        );
        let builder = MessageBuilder::new().embed(content);
        data.create_message(&ctx, builder).await?;
        return Ok(());
    }
    // Newest scores first.
    scores.sort_unstable_by_key(|s| Reverse(s.date));
    // 5 scores per embed page.
    let pages = numbers::div_euclid(5, scores.len());
    let mut maps = HashMap::new();
    let data_fut = SnipedDiffEmbed::new(&user, diff, &scores, 0, (1, pages), &mut maps, &ctx);
    let builder = match data_fut.await {
        Ok(data) => data.into_builder().build().into(),
        Err(why) => {
            let _ = data.error(&ctx, GENERAL_ISSUE).await;
            return Err(why);
        }
    };
    // Creating the embed
    let response_raw = data.create_message(&ctx, builder).await?;
    // Skip pagination if too few entries
    if scores.len() <= 5 {
        return Ok(());
    }
    let response = response_raw.model().await?;
    // Pagination
    let pagination =
        SnipedDiffPagination::new(response, user, diff, scores, maps, Arc::clone(&ctx));
    let owner = data.author()?.id;
    // Drive the pagination in the background; errors are only logged.
    tokio::spawn(async move {
        if let Err(err) = pagination.start(&ctx, owner, 60).await {
            warn!("{:?}", Report::new(err));
        }
    });
    Ok(())
}
#[command]
#[short_desc("Display a user's recently acquired national #1 scores")]
#[long_desc(
    "Display a user's national #1 scores that they acquired within the last week.\n\
    All data originates from [Mr Helix](https://osu.ppy.sh/users/2330619)'s \
    website [huismetbenen](https://snipe.huismetbenen.nl/)."
)]
#[usage("[username]")]
#[example("badewanne3")]
#[aliases("sg", "snipegain", "snipesgain")]
#[bucket("snipe")]
/// Prefix/slash entry point; delegates to `_sniped_diff` with `Difference::Gain`.
async fn snipedgain(ctx: Arc<Context>, data: CommandData) -> BotResult<()> {
    match data {
        CommandData::Message { msg, mut args, num } => {
            // Resolve the target: an explicit argument (possibly a user
            // mention) first, otherwise the author's linked osu! account.
            let name = match args.next() {
                Some(arg) => match check_user_mention(&ctx, arg).await {
                    Ok(Ok(osu)) => Some(osu.into_username()),
                    Ok(Err(content)) => return msg.error(&ctx, content).await,
                    Err(why) => {
                        let _ = msg.error(&ctx, GENERAL_ISSUE).await;
                        return Err(why);
                    }
                },
                None => match ctx.psql().get_user_osu(msg.author.id).await {
                    Ok(osu) => osu.map(OsuData::into_username),
                    Err(why) => {
                        let _ = msg.error(&ctx, GENERAL_ISSUE).await;
                        return Err(why);
                    }
                },
            };
            let data = CommandData::Message { msg, args, num };
            _sniped_diff(ctx, data, Difference::Gain, name).await
        }
        CommandData::Interaction { command } => super::slash_snipe(ctx, *command).await,
    }
}
#[command]
#[short_desc("Display a user's recently lost national #1 scores")]
#[long_desc(
    "Display a user's national #1 scores that they lost within the last week.\n\
    All data originates from [Mr Helix](https://osu.ppy.sh/users/2330619)'s \
    website [huismetbenen](https://snipe.huismetbenen.nl/)."
)]
#[usage("[username]")]
#[example("badewanne3")]
#[aliases(
    "sl",
    "snipeloss",
    "snipesloss",
    "snipedlost",
    "snipelost",
    "snipeslost"
)]
#[bucket("snipe")]
/// Prefix/slash entry point; delegates to `_sniped_diff` with `Difference::Loss`.
async fn snipedloss(ctx: Arc<Context>, data: CommandData) -> BotResult<()> {
    match data {
        CommandData::Message { msg, mut args, num } => {
            // Resolve the target: an explicit argument (possibly a user
            // mention) first, otherwise the author's linked osu! account.
            let name = match args.next() {
                Some(arg) => match check_user_mention(&ctx, arg).await {
                    Ok(Ok(osu)) => Some(osu.into_username()),
                    Ok(Err(content)) => return msg.error(&ctx, content).await,
                    Err(why) => {
                        let _ = msg.error(&ctx, GENERAL_ISSUE).await;
                        return Err(why);
                    }
                },
                None => match ctx.psql().get_user_osu(msg.author.id).await {
                    Ok(osu) => osu.map(OsuData::into_username),
                    Err(why) => {
                        let _ = msg.error(&ctx, GENERAL_ISSUE).await;
                        return Err(why);
                    }
                },
            };
            let data = CommandData::Message { msg, args, num };
            _sniped_diff(ctx, data, Difference::Loss, name).await
        }
        CommandData::Interaction { command } => super::slash_snipe(ctx, *command).await,
    }
}
/// Whether to look at gained or lost national #1 scores.
#[derive(Copy, Clone)]
pub enum Difference {
    Gain,
    Loss,
}
|
/// Returns a reference to the sole element of a one-element array.
fn bar(x: &[u64; 1]) -> &u64 {
    // Destructure the fixed-size array; the binding borrows the element.
    let [first] = x;
    first
}
fn main() {}
/*
thread 'rustc' panicked at 'internal error: entered unreachable code: __t4 := builtin$havoc_ref()', prusti-viper/src/encoder/foldunfold/mod.rs:455:26
stack backtrace:
0: rust_begin_unwind
at /rustc/8007b506ac5da629f223b755f5a5391edd5f6d01/library/std/src/panicking.rs:517:5
1: core::panicking::panic_fmt
at /rustc/8007b506ac5da629f223b755f5a5391edd5f6d01/library/core/src/panicking.rs:93:14
2: <core::iter::adapters::map::Map<I,F> as core::iter::traits::iterator::Iterator>::fold
3: prusti_viper::encoder::foldunfold::process_expire_borrows::<impl prusti_viper::encoder::foldunfold::FoldUnfold>::process_expire_borrows
4: <prusti_viper::encoder::foldunfold::FoldUnfold as vir::legacy::cfg::visitor::CfgReplacer<prusti_viper::encoder::foldunfold::path_ctxt::PathCtxt,prusti_viper::encoder::foldunfold::ActionVec>>::replace_stmt
5: <prusti_viper::encoder::foldunfold::FoldUnfold as vir::legacy::cfg::visitor::CfgReplacer<prusti_viper::encoder::foldunfold::path_ctxt::PathCtxt,prusti_viper::encoder::foldunfold::ActionVec>>::replace_stmt
6: prusti_viper::encoder::foldunfold::add_fold_unfold
7: prusti_viper::encoder::procedure_encoder::ProcedureEncoder::encode
8: prusti_viper::encoder::encoder::Encoder::encode_procedure
9: prusti_viper::encoder::encoder::Encoder::process_encoding_queue
10: prusti_viper::verifier::Verifier::verify
11: prusti_driver::verifier::verify
12: <prusti_driver::callbacks::PrustiCompilerCalls as rustc_driver::Callbacks>::after_analysis
13: rustc_interface::queries::<impl rustc_interface::interface::Compiler>::enter
14: rustc_span::with_source_map
15: rustc_interface::interface::create_compiler_and_run
16: scoped_tls::ScopedKey<T>::set
*/
|
// svd2rust-generated reader/writer proxy types for the BSEC_OTP_CONFIG
// register fields (bit offsets and widths come from the SVD description).
#[doc = "Register `BSEC_OTP_CONFIG` reader"]
pub type R = crate::R<BSEC_OTP_CONFIG_SPEC>;
#[doc = "Register `BSEC_OTP_CONFIG` writer"]
pub type W = crate::W<BSEC_OTP_CONFIG_SPEC>;
#[doc = "Field `PWRUP` reader - PWRUP"]
pub type PWRUP_R = crate::BitReader;
#[doc = "Field `PWRUP` writer - PWRUP"]
pub type PWRUP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FRC` reader - FRC"]
pub type FRC_R = crate::FieldReader;
#[doc = "Field `FRC` writer - FRC"]
pub type FRC_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `PRGWIDTH` reader - PRGWIDTH"]
pub type PRGWIDTH_R = crate::FieldReader;
#[doc = "Field `PRGWIDTH` writer - PRGWIDTH"]
pub type PRGWIDTH_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `TREAD` reader - TREAD"]
pub type TREAD_R = crate::FieldReader;
#[doc = "Field `TREAD` writer - TREAD"]
pub type TREAD_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
// Read accessors: each getter shifts and masks its field out of the cached
// 32-bit register value.
impl R {
    #[doc = "Bit 0 - PWRUP"]
    #[inline(always)]
    pub fn pwrup(&self) -> PWRUP_R {
        PWRUP_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bits 1:2 - FRC"]
    #[inline(always)]
    pub fn frc(&self) -> FRC_R {
        FRC_R::new(((self.bits >> 1) & 3) as u8)
    }
    #[doc = "Bits 3:6 - PRGWIDTH"]
    #[inline(always)]
    pub fn prgwidth(&self) -> PRGWIDTH_R {
        PRGWIDTH_R::new(((self.bits >> 3) & 0x0f) as u8)
    }
    #[doc = "Bits 7:8 - TREAD"]
    #[inline(always)]
    pub fn tread(&self) -> TREAD_R {
        TREAD_R::new(((self.bits >> 7) & 3) as u8)
    }
}
// Write accessors: each returns a field-writer proxy fixed to the field's
// bit offset (the const generic parameter).
impl W {
    #[doc = "Bit 0 - PWRUP"]
    #[inline(always)]
    #[must_use]
    pub fn pwrup(&mut self) -> PWRUP_W<BSEC_OTP_CONFIG_SPEC, 0> {
        PWRUP_W::new(self)
    }
    #[doc = "Bits 1:2 - FRC"]
    #[inline(always)]
    #[must_use]
    pub fn frc(&mut self) -> FRC_W<BSEC_OTP_CONFIG_SPEC, 1> {
        FRC_W::new(self)
    }
    #[doc = "Bits 3:6 - PRGWIDTH"]
    #[inline(always)]
    #[must_use]
    pub fn prgwidth(&mut self) -> PRGWIDTH_W<BSEC_OTP_CONFIG_SPEC, 3> {
        PRGWIDTH_W::new(self)
    }
    #[doc = "Bits 7:8 - TREAD"]
    #[inline(always)]
    #[must_use]
    pub fn tread(&mut self) -> TREAD_W<BSEC_OTP_CONFIG_SPEC, 7> {
        TREAD_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "BSEC OTP configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bsec_otp_config::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bsec_otp_config::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct BSEC_OTP_CONFIG_SPEC;
// 32-bit register.
impl crate::RegisterSpec for BSEC_OTP_CONFIG_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`bsec_otp_config::R`](R) reader structure"]
impl crate::Readable for BSEC_OTP_CONFIG_SPEC {}
#[doc = "`write(|w| ..)` method takes [`bsec_otp_config::W`](W) writer structure"]
impl crate::Writable for BSEC_OTP_CONFIG_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets BSEC_OTP_CONFIG to value 0x0e"]
impl crate::Resettable for BSEC_OTP_CONFIG_SPEC {
    const RESET_VALUE: Self::Ux = 0x0e;
}
|
use nu_protocol::ast::{Call, Expr, Expression, ImportPatternMember};
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::{
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Value,
};
/// The `hide` parser-keyword command.
#[derive(Clone)]
pub struct Hide;
impl Command for Hide {
    fn name(&self) -> &str {
        "hide"
    }
    fn signature(&self) -> nu_protocol::Signature {
        Signature::build("hide")
            .required("pattern", SyntaxShape::ImportPattern, "import pattern")
            .category(Category::Core)
    }
    fn usage(&self) -> &str {
        "Hide definitions in the current scope"
    }
    fn extra_usage(&self) -> &str {
        r#"Definitions are hidden by priority: First aliases, then custom commands.
This command is a parser keyword. For details, check:
https://www.nushell.sh/book/thinking_in_nu.html"#
    }
    // Hiding of aliases/commands happens at parse time; `run` below only
    // mirrors the effect for environment variables, which exist at run time.
    fn is_parser_keyword(&self) -> bool {
        true
    }
    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        _input: PipelineData,
    ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
        // The parser guarantees the first positional is an import pattern.
        let import_pattern = if let Some(Expression {
            expr: Expr::ImportPattern(pat),
            ..
        }) = call.positional_nth(0)
        {
            pat
        } else {
            return Err(ShellError::GenericError(
                "Unexpected import".into(),
                "import pattern not supported".into(),
                Some(call.head),
                None,
                Vec::new(),
            ));
        };
        let head_name_str = if let Ok(s) = String::from_utf8(import_pattern.head.name.clone()) {
            s
        } else {
            return Err(ShellError::NonUtf8(import_pattern.head.span));
        };
        if let Some(module_id) = engine_state.find_module(&import_pattern.head.name, &[]) {
            // The first word is a module
            let module = engine_state.get_module(module_id);
            // Work out which of the module's env vars the pattern selects.
            let env_vars_to_hide = if import_pattern.members.is_empty() {
                module.env_vars_with_head(&import_pattern.head.name)
            } else {
                match &import_pattern.members[0] {
                    ImportPatternMember::Glob { .. } => module.env_vars(),
                    ImportPatternMember::Name { name, span } => {
                        let mut output = vec![];
                        if let Some((name, id)) =
                            module.env_var_with_head(name, &import_pattern.head.name)
                        {
                            output.push((name, id));
                        } else if !(module.has_alias(name) || module.has_decl(name)) {
                            // The name matches neither an env var nor an
                            // alias/command: nothing hideable exists.
                            return Err(ShellError::EnvVarNotFoundAtRuntime(
                                String::from_utf8_lossy(name).into(),
                                *span,
                            ));
                        }
                        output
                    }
                    ImportPatternMember::List { names } => {
                        let mut output = vec![];
                        for (name, span) in names {
                            if let Some((name, id)) =
                                module.env_var_with_head(name, &import_pattern.head.name)
                            {
                                output.push((name, id));
                            } else if !(module.has_alias(name) || module.has_decl(name)) {
                                return Err(ShellError::EnvVarNotFoundAtRuntime(
                                    String::from_utf8_lossy(name).into(),
                                    *span,
                                ));
                            }
                        }
                        output
                    }
                }
            };
            // Remove each selected env var from the stack.
            for (name, _) in env_vars_to_hide {
                let name = if let Ok(s) = String::from_utf8(name.clone()) {
                    s
                } else {
                    return Err(ShellError::NonUtf8(import_pattern.span()));
                };
                if stack.remove_env_var(engine_state, &name).is_none() {
                    return Err(ShellError::NotFound(
                        call.positional_nth(0)
                            .expect("already checked for present positional")
                            .span,
                    ));
                }
            }
        } else if !import_pattern.hidden.contains(&import_pattern.head.name)
            && stack.remove_env_var(engine_state, &head_name_str).is_none()
        {
            // TODO: we may want to error in the future
        }
        Ok(PipelineData::new(call.head))
    }
    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Hide the alias just defined",
                example: r#"alias lll = ls -l; hide lll"#,
                result: None,
            },
            Example {
                description: "Hide a custom command",
                example: r#"def say-hi [] { echo 'Hi!' }; hide say-hi"#,
                result: None,
            },
            Example {
                description: "Hide an environment variable",
                example: r#"let-env HZ_ENV_ABC = 1; hide HZ_ENV_ABC; 'HZ_ENV_ABC' in (env).name"#,
                result: Some(Value::boolean(false, Span::test_data())),
            },
        ]
    }
}
|
use std::collections::{BTreeMap, BTreeSet};
use std::fmt;
use std::io;
use crate::base::Part;
/// Part 1: counts samples that match at least three opcodes (see `solve`).
pub fn part1(r: &mut dyn io::Read) -> Result<String, String> {
    solve(r, Part::One)
}
/// Part 2: executes the program after deducing the opcode mapping (see `solve`).
pub fn part2(r: &mut dyn io::Read) -> Result<String, String> {
    solve(r, Part::Two)
}
/// Reads the puzzle input from `r` and solves the requested part.
fn solve(r: &mut dyn io::Read, part: Part) -> Result<String, String> {
    let mut input = String::new();
    r.read_to_string(&mut input).map_err(|e| e.to_string())?;
    let (samples, program) = parse_input(&input);
    match part {
        Part::One => {
            // Count samples whose behavior is consistent with >= 3 opcodes.
            let count = samples
                .iter()
                .filter(|sample| sample.candidates().len() >= 3)
                .count();
            Ok(count.to_string())
        }
        Part::Two => {
            // Deduce the numeric-opcode mapping, then run the program on
            // four registers all starting at 0; the answer is register 0.
            let opcode_map = determine_opcodes(&samples);
            let final_registers = program.iter().fold(vec![0; 4], |registers, &instruction| {
                let opcode = opcode_map[&instruction.opcode];
                opcode.apply(instruction, &registers)
            });
            Ok(final_registers[0].to_string())
        }
    }
}
/// Works out which `Opcode` each numeric opcode stands for by intersecting
/// the candidate sets of all samples sharing that number, then repeatedly
/// assigning any numeric opcode left with exactly one candidate.
fn determine_opcodes(samples: &[Sample]) -> BTreeMap<usize, Opcode> {
    // Group samples by their numeric opcode.
    let mut samples_by_opcode = BTreeMap::new();
    for sample in samples {
        samples_by_opcode
            .entry(sample.instruction.opcode)
            .or_insert_with(Vec::new)
            .push(sample);
    }
    // Candidates for a numeric opcode = intersection over all its samples.
    let mut candidates_by_opcode = samples_by_opcode
        .iter()
        .map(|(&opcode, samples)| {
            let candidates = samples
                .iter()
                .fold(Opcode::all(), |acc, sample| &acc & &sample.candidates());
            (opcode, candidates)
        })
        .collect::<BTreeMap<usize, BTreeSet<Opcode>>>();
    // Constraint propagation: settle any opcode with a unique candidate and
    // remove that candidate from every other set until nothing is left.
    let mut determined = BTreeMap::new();
    // `.find(..)` replaces the previous `.filter(..).next()` (clippy::filter_next).
    while let Some((opcode, single_candidate_set)) = candidates_by_opcode
        .iter()
        .find(|(_opcode, candidates)| candidates.len() == 1)
    {
        let candidate = *single_candidate_set.iter().next().unwrap();
        determined.insert(*opcode, candidate);
        for candidate_set in candidates_by_opcode.values_mut() {
            candidate_set.remove(&candidate);
        }
    }
    determined
}
/// Device register file (four registers in this puzzle — see `solve`).
pub type Registers = Vec<usize>;
/// A program is a flat list of instructions.
pub type Program = Vec<Instruction>;
/// One machine instruction: numeric opcode plus operands `a`, `b` and
/// output register `c`.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct Instruction {
    pub opcode: usize,
    pub a: usize,
    pub b: usize,
    pub c: usize,
}
impl fmt::Display for Instruction {
    // Renders as the puzzle's input format: "opcode a b c".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{} {} {} {}", self.opcode, self.a, self.b, self.c)
    }
}
/// An observed execution: register state before, the instruction, and the
/// register state after.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
struct Sample {
    before: Registers,
    instruction: Instruction,
    after: Registers,
}
impl Sample {
    /// All opcodes whose semantics reproduce this sample's before -> after
    /// transition.
    fn candidates(&self) -> BTreeSet<Opcode> {
        Opcode::all()
            .into_iter()
            .filter(|opcode| opcode.apply(self.instruction, &self.before) == self.after)
            .collect()
    }
}
impl fmt::Display for Sample {
    // Renders in the puzzle's input layout: Before / instruction / After.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f, "Before: {:?}", self.before)?;
        writeln!(f, "{}", self.instruction)?;
        write!(f, "After: {:?}", self.after)
    }
}
/// How an instruction operand is interpreted.
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
enum Mode {
    /// Operand is unused by the operation.
    Ignored,
    /// Operand is a literal value.
    Immediate,
    /// Operand is an index into the registers.
    Register,
}
/// The arithmetic/logical operation an opcode performs; comparison ops
/// produce 1 or 0.
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
enum OpType {
    Addition,
    Multiplication,
    BitwiseAnd,
    BitwiseOr,
    Assignment,
    GreaterThanTesting,
    EqualityTesting,
}
/// A fully-specified operation: what to compute plus how to interpret
/// operands `a` and `b`.
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
struct Op {
    op_type: OpType,
    a_mode: Mode,
    b_mode: Mode,
}
impl Op {
    /// Executes the op against `registers` and returns the updated copy.
    /// Operands resolve per their mode; `c` is always a register index.
    fn apply(&self, instruction: Instruction, registers: &Registers) -> Registers {
        let a = match self.a_mode {
            Mode::Ignored => None,
            Mode::Immediate => Some(instruction.a),
            Mode::Register => Some(registers[instruction.a]),
        };
        let b = match self.b_mode {
            Mode::Ignored => None,
            Mode::Immediate => Some(instruction.b),
            Mode::Register => Some(registers[instruction.b]),
        };
        let c = instruction.c;
        // The `unwrap`s are safe by construction: `Opcode::op` never pairs an
        // op type with an `Ignored` mode for an operand that op type reads.
        let output = match self.op_type {
            OpType::Addition => a.unwrap() + b.unwrap(),
            OpType::Multiplication => a.unwrap() * b.unwrap(),
            OpType::BitwiseAnd => a.unwrap() & b.unwrap(),
            OpType::BitwiseOr => a.unwrap() | b.unwrap(),
            OpType::Assignment => a.unwrap(),
            OpType::GreaterThanTesting => usize::from(a.unwrap() > b.unwrap()),
            OpType::EqualityTesting => usize::from(a.unwrap() == b.unwrap()),
        };
        let mut new_registers = registers.clone();
        new_registers[c] = output;
        new_registers
    }
}
/// The sixteen device opcodes; the suffix encodes the operand modes
/// (`r` = register, `i` = immediate) — see `Opcode::op` for the mapping.
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Opcode {
    Addr,
    Addi,
    Mulr,
    Muli,
    Banr,
    Bani,
    Borr,
    Bori,
    Setr,
    Seti,
    Gtir,
    Gtri,
    Gtrr,
    Eqir,
    Eqri,
    Eqrr,
}
impl Opcode {
    /// The full set of sixteen opcodes.
    fn all() -> BTreeSet<Opcode> {
        // Collect straight from the array; the previous manual
        // loop-and-insert was equivalent but noisier.
        [
            Opcode::Addr,
            Opcode::Addi,
            Opcode::Mulr,
            Opcode::Muli,
            Opcode::Banr,
            Opcode::Bani,
            Opcode::Borr,
            Opcode::Bori,
            Opcode::Setr,
            Opcode::Seti,
            Opcode::Gtir,
            Opcode::Gtri,
            Opcode::Gtrr,
            Opcode::Eqir,
            Opcode::Eqri,
            Opcode::Eqrr,
        ]
        .iter()
        .copied()
        .collect()
    }

    /// Translates the opcode into its operation + operand modes
    /// (`r` suffix = register, `i` suffix = immediate).
    fn op(&self) -> Op {
        let (op_type, a_mode, b_mode) = match *self {
            Opcode::Addr => (OpType::Addition, Mode::Register, Mode::Register),
            Opcode::Addi => (OpType::Addition, Mode::Register, Mode::Immediate),
            Opcode::Mulr => (OpType::Multiplication, Mode::Register, Mode::Register),
            Opcode::Muli => (OpType::Multiplication, Mode::Register, Mode::Immediate),
            Opcode::Banr => (OpType::BitwiseAnd, Mode::Register, Mode::Register),
            Opcode::Bani => (OpType::BitwiseAnd, Mode::Register, Mode::Immediate),
            Opcode::Borr => (OpType::BitwiseOr, Mode::Register, Mode::Register),
            Opcode::Bori => (OpType::BitwiseOr, Mode::Register, Mode::Immediate),
            Opcode::Setr => (OpType::Assignment, Mode::Register, Mode::Ignored),
            Opcode::Seti => (OpType::Assignment, Mode::Immediate, Mode::Ignored),
            Opcode::Gtir => (OpType::GreaterThanTesting, Mode::Immediate, Mode::Register),
            Opcode::Gtri => (OpType::GreaterThanTesting, Mode::Register, Mode::Immediate),
            Opcode::Gtrr => (OpType::GreaterThanTesting, Mode::Register, Mode::Register),
            Opcode::Eqir => (OpType::EqualityTesting, Mode::Immediate, Mode::Register),
            Opcode::Eqri => (OpType::EqualityTesting, Mode::Register, Mode::Immediate),
            Opcode::Eqrr => (OpType::EqualityTesting, Mode::Register, Mode::Register),
        };
        Op {
            op_type,
            a_mode,
            b_mode,
        }
    }

    /// Executes this opcode against `registers`, returning the updated copy.
    pub fn apply(&self, instruction: Instruction, registers: &Registers) -> Registers {
        self.op().apply(instruction, registers)
    }
}
/// Splits the puzzle input into the list of observed samples and the test
/// program that follows them.
///
/// Samples are blocks of three lines ("Before", instruction, "After")
/// separated by single blank lines; a run of blank lines separates the last
/// sample from the program listing.
///
/// Panics if the input does not follow that layout.
fn parse_input(input: &str) -> (Vec<Sample>, Program) {
    let mut lines = input.lines().peekable();
    let mut samples = Vec::new();
    // A blank line where a "Before:" line is expected means the samples are
    // exhausted and the program section begins.
    while !lines.peek().unwrap().is_empty() {
        let before = parse_registers(lines.next().unwrap());
        let instruction = parse_instruction(lines.next().unwrap());
        let after = parse_registers(lines.next().unwrap());
        samples.push(Sample {
            before,
            instruction,
            after,
        });
        // Advance past the expected empty line after each sample.
        lines.next();
    }
    // Skip the remaining blank separator lines; everything after is program.
    let program = lines
        .skip_while(|line| line.is_empty())
        .map(parse_instruction)
        .collect();
    (samples, program)
}
/// Parses a `Before: [a, b, c, d]` / `After: [a, b, c, d]` line into a
/// four-register file.
///
/// Panics if a register value fails to parse or if more than four values
/// are present.
fn parse_registers(line: &str) -> Registers {
    let is_bracket = |c| c == '[' || c == ']';
    // First strip everything outside the brackets, then the brackets
    // themselves, leaving just "a, b, c, d".
    let inner = line
        .trim_matches(|c| !is_bracket(c))
        .trim_matches(is_bracket);
    let mut registers = vec![0; 4];
    for (index, field) in inner.split(", ").enumerate() {
        registers[index] = field.parse::<usize>().unwrap();
    }
    registers
}
/// Parses a whitespace-separated `opcode a b c` line into an `Instruction`.
///
/// Panics if the line has fewer than four fields or a field is not a number.
fn parse_instruction(line: &str) -> Instruction {
    let fields: Vec<&str> = line.split_whitespace().collect();
    let number = |index: usize| fields[index].parse::<usize>().unwrap();
    Instruction {
        opcode: number(0),
        a: number(1),
        b: number(2),
        c: number(3),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::test;
    mod part1 {
        use super::*;
        // Runs the solver against the committed puzzle input and pins the
        // known-good part-1 answer ("596"). `test!` is a crate-local macro.
        test!(actual, file "../../../inputs/2018/16", "596", part1);
    }
    mod part2 {
        use super::*;
        // Same input, known-good part-2 answer ("554").
        test!(actual, file "../../../inputs/2018/16", "554", part2);
    }
}
|
use std::collections::HashMap;
/// Decodes International Morse Code messages.
///
/// Within a word, letters are separated by a single space; words are
/// separated by three spaces.
struct MorseDecoder {
    /// Maps a Morse sequence (e.g. "....") to its text form (e.g. "H").
    morse_code: HashMap<String, String>,
}
impl MorseDecoder {
    /// Builds a decoder preloaded with the International Morse table
    /// (letters, digits, punctuation) plus the special "...---..." -> "SOS".
    ///
    /// Fix over the previous version: "---..." now maps to ":" (its actual
    /// Morse meaning); it was wrongly mapped to "," (which "--..--" already
    /// covers).
    fn new() -> MorseDecoder {
        let table: &[(&str, &str)] = &[
            ("....-", "4"), ("--..--", ","), (".--", "W"), (".-.-.-", "."), ("..---", "2"),
            (".", "E"), ("--..", "Z"), (".----", "1"), (".-..", "L"),
            (".--.", "P"), (".-.", "R"), ("...", "S"), ("-.--", "Y"), ("...--", "3"),
            (".....", "5"), ("--.", "G"), ("-.--.", "("), ("-....", "6"),
            (".-.-.", "+"), ("...-..-", "$"), (".--.-.", "@"), ("...---...", "SOS"),
            ("..--.-", "_"), ("-.", "N"), ("-..-", "X"), ("-----", "0"),
            ("....", "H"), ("-...", "B"), (".---", "J"), ("---...", ":"), ("-", "T"),
            ("---..", "8"), ("-..-.", "/"), ("--.-", "Q"), ("...-", "V"),
            ("----.", "9"), ("--", "M"), ("-.-.-.", ";"), ("-.-.--", "!"), ("..-.", "F"),
            ("..--..", "?"), ("-...-", "="), ("..-", "U"), (".----.", "'"),
            ("---", "O"), ("-.--.-", ")"), ("..", "I"), ("-....-", "-"), (".-..-.", "\""),
            (".-", "A"), ("-.-.", "C"), ("-..", "D"), (".-...", "&"),
            ("--...", "7"), ("-.-", "K"),
        ];
        MorseDecoder {
            morse_code: table
                .iter()
                .map(|&(code, text)| (code.to_string(), text.to_string()))
                .collect(),
        }
    }
    /// Decodes `encoded`, ignoring surrounding whitespace.
    ///
    /// Returns the empty string for blank input. Panics if a sign is not in
    /// the Morse table (unchanged from the previous behavior).
    fn decode_morse(&self, encoded: &str) -> String {
        let trimmed = encoded.trim();
        if trimmed.is_empty() {
            return String::new();
        }
        trimmed
            .split("   ") // words are separated by three spaces
            .map(|word| {
                word.split(' ') // letters by a single space
                    .map(|sign| self.morse_code[sign].as_str())
                    .collect::<String>()
            })
            .collect::<Vec<_>>()
            .join(" ")
    }
}
// Acceptance tests (Codewars "Decode the Morse code" kata): letters are
// separated by one space, words by three spaces, and surrounding
// whitespace must be ignored.
#[test]
fn test0() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse(".... . -.--"), "HEY");
}
// Leading and trailing whitespace is trimmed.
#[test]
fn test1() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse("   .... . -.--   "), "HEY");
}
#[test]
fn test2() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse("   .... . -.--"), "HEY");
}
#[test]
fn test3() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse(".... . -.--   "), "HEY");
}
// Three spaces separate words, producing a single space in the output.
#[test]
fn test4() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse(".... . -.--   .--- ..- -.. ."), "HEY JUDE");
}
#[test]
fn test5() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse("   .... . -.--   .--- ..- -.. ."), "HEY JUDE");
}
// Single spaces only: all letters belong to one word.
#[test]
fn test6() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse(".... . -.-- .... . -.--"), "HEYHEY");
}
#[test]
fn test7() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse("... --- ..."), "SOS");
}
// "...---..." is a single table entry mapping directly to "SOS".
#[test]
fn test8() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse("   ...---...   "), "SOS");
}
#[test]
fn test9() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse("...---..."), "SOS");
}
// Nothing to do at runtime; this file exists for its tests.
fn main() {
}
|
//! Serial Peripheral Interface (SPI) bus
use hal::spi::{FullDuplex, Mode , Phase, Polarity};
use nb;
use tm4c123x::{SSI0,SSI1, SSI2, SSI3};
use gpio::gpioa::{PA2, PA4, PA5};
use gpio::gpiob::{PB4, PB6, PB7};
use gpio::gpiod::{PD0, PD2, PD3};
use sysctl::Clocks;
use sysctl;
use gpio::{AF1, AF2, AlternateFunction, OutputMode};
use time::Hertz;
/// SPI error
#[derive(Debug)]
pub enum Error {
    // Never constructed today; reserves room to add real error variants
    // later without a breaking change.
    #[doc(hidden)] _Extensible,
}
// FIXME these should be "closed" traits
/// SCK pin -- DO NOT IMPLEMENT THIS TRAIT
pub unsafe trait SckPin<SPI> {}
/// MISO pin -- DO NOT IMPLEMENT THIS TRAIT
pub unsafe trait MisoPin<SPI> {}
/// MOSI pin -- DO NOT IMPLEMENT THIS TRAIT
pub unsafe trait MosiPin<SPI> {}
// Pin-to-peripheral mappings: each `unsafe impl` asserts that the given
// GPIO pin, in the given alternate function, carries the named SSI signal.
// NOTE(review): pin/AF assignments should be cross-checked against the
// TM4C123x data sheet pin-mux tables.
// SSI0
unsafe impl<T> SckPin<SSI0> for PA2<AlternateFunction<AF2, T>> where T:OutputMode, {}
unsafe impl<T> MisoPin<SSI0> for PA4<AlternateFunction<AF2, T>> where T:OutputMode, {}
unsafe impl<T> MosiPin<SSI0> for PA5<AlternateFunction<AF2, T>> where T:OutputMode, {}
// SSI1
unsafe impl<T> SckPin<SSI1> for PD0<AlternateFunction<AF2, T>> where T:OutputMode, {}
unsafe impl<T> MisoPin<SSI1> for PD2<AlternateFunction<AF2, T>> where T:OutputMode, {}
unsafe impl<T> MosiPin<SSI1> for PD3<AlternateFunction<AF2, T>> where T:OutputMode, {}
// SSI2
unsafe impl<T> SckPin<SSI2> for PB4<AlternateFunction<AF2, T>> where T:OutputMode, {}
unsafe impl<T> MisoPin<SSI2> for PB6<AlternateFunction<AF2, T>> where T:OutputMode, {}
unsafe impl<T> MosiPin<SSI2> for PB7<AlternateFunction<AF2, T>> where T:OutputMode, {}
// SSI3 (shares the PD0/PD2/PD3 pins with SSI1, but via AF1 instead of AF2)
unsafe impl<T> SckPin<SSI3> for PD0<AlternateFunction<AF1, T>> where T:OutputMode, {}
unsafe impl<T> MisoPin<SSI3> for PD2<AlternateFunction<AF1, T>> where T:OutputMode, {}
unsafe impl<T> MosiPin<SSI3> for PD3<AlternateFunction<AF1, T>> where T:OutputMode, {}
/// SPI peripheral operating in full duplex master mode
pub struct Spi<SPI, PINS> {
    // Owned register block for the peripheral.
    spi: SPI,
    // Owned (SCK, MISO, MOSI) pins; returned to the caller by `free()`.
    pins: PINS,
}
// Spins until status-register flag `$flag` satisfies predicate `$op`,
// e.g. `busy_wait!(spi, bsy, bit_is_clear)` waits until the bus is idle.
macro_rules! busy_wait {
    ($spi:expr, $flag:ident, $op:ident) => {
        loop {
            let sr = $spi.sr.read();
            if sr.$flag().$op() {
                break;
            }
        }
    }
}
// Generates, for each SSI peripheral, a constructor, `free()`, and the
// embedded-hal `FullDuplex`/blocking-SPI implementations.
macro_rules! hal {
    ($($SPIX:ident: ($powerDomain:ident, $spiX:ident),)+) => {
        $(
            impl<SCK, MISO, MOSI> Spi<$SPIX, (SCK, MISO, MOSI)> {
                /// Configures the SPI peripheral to operate in full duplex master mode
                pub fn $spiX<F>(
                    spi: $SPIX,
                    pins: (SCK, MISO, MOSI),
                    mode: Mode,
                    freq: F,
                    clocks: &Clocks,
                    pc: &sysctl::PowerControl,
                ) -> Self
                where
                    F: Into<Hertz>,
                    SCK: SckPin<$SPIX>,
                    MISO: MisoPin<$SPIX>,
                    MOSI: MosiPin<$SPIX>,
                {
                    // power up
                    sysctl::control_power(
                        pc, sysctl::Domain::$powerDomain,
                        sysctl::RunMode::Run, sysctl::PowerState::On);
                    sysctl::reset(pc, sysctl::Domain::$powerDomain);
                    // write 0 (reset value) for master operation.
                    spi.cr1.write(|w| w);
                    // SSICC Clock setup
                    // set to reset value (0 = use system clock)
                    spi.cc.write(|w| w);
                    // Use Moto/SPI & 8bits data size
                    let scr: u8;
                    let mut cpsr = 2u32;
                    let target_bitrate : u32 = clocks.sysclk.0 / freq.into().0;
                    // Find solution for
                    // SSInClk = SysClk / (CPSDVSR * (1 + SCR))
                    // with:
                    //   CPSDVSR in [2,254]
                    //   SCR in [0,255]
                    loop {
                        let scr32 = (target_bitrate / cpsr) - 1;
                        // NOTE(review): `< 255` also rejects the valid SCR
                        // value 255, forcing a larger CPSDVSR instead —
                        // presumably conservative; confirm vs. the datasheet.
                        if scr32 < 255 {
                            scr = scr32 as u8;
                            break;
                        }
                        cpsr += 2;
                        // No valid divisor pair exists for this frequency.
                        assert!(cpsr <= 254);
                    }
                    let cpsr = cpsr as u8;
                    spi.cpsr.write(|w| unsafe {
                        w.cpsdvsr().bits(cpsr)
                    });
                    // Apply clock polarity/phase from `mode`, Motorola frame
                    // format, 8-bit data size, and the computed clock rate.
                    spi.cr0.modify(|_,w| unsafe {
                        w.spo().bit(mode.polarity == Polarity::IdleHigh)
                        .sph().bit(mode.phase == Phase::CaptureOnSecondTransition)
                        // FIXME: How to use FRFR::MOTO and DSS:: ?
                        .frf().bits(0)
                        .dss().bits(0x7)
                        .scr().bits(scr)
                    });
                    // Enable peripheral
                    spi.cr1.write(|w| w.sse().set_bit());
                    Spi { spi, pins }
                }
                /// Releases the SPI peripheral and associated pins
                pub fn free(self) -> ($SPIX, (SCK, MISO, MOSI)) {
                    (self.spi, self.pins)
                }
            }
            impl<PINS> FullDuplex<u8> for Spi<$SPIX, PINS> {
                type Error = Error;
                // Non-blocking read: returns WouldBlock until a byte is in
                // the receive FIFO.
                fn read(&mut self) -> nb::Result<u8, Error> {
                    // Receive FIFO Not Empty
                    if self.spi.sr.read().rne().bit_is_clear() {
                        Err(nb::Error::WouldBlock)
                    } else {
                        let r = self.spi.dr.read().data().bits() as u8;
                        Ok(r)
                    }
                }
                // Non-blocking send: queues a byte, then waits for the bus
                // to go idle so a subsequent `read` sees the reply.
                fn send(&mut self, byte: u8) -> nb::Result<(), Error> {
                    // Transmit FIFO Not Full
                    if self.spi.sr.read().tnf().bit_is_clear() {
                        Err(nb::Error::WouldBlock)
                    } else {
                        self.spi.dr.write(|w| unsafe {
                            w.data().bits(byte.into())
                        });
                        busy_wait!(self.spi, bsy, bit_is_clear);
                        Ok(())
                    }
                }
            }
            // Blocking transfer/write come for free from the embedded-hal
            // default implementations over FullDuplex.
            impl<PINS> ::hal::blocking::spi::transfer::Default<u8> for Spi<$SPIX, PINS> {}
            impl<PINS> ::hal::blocking::spi::write::Default<u8> for Spi<$SPIX, PINS> {}
        )+
    }
}
// Instantiate the HAL for all four SSI peripherals.
hal! {
    SSI0: (Ssi0, spi0),
    SSI1: (Ssi1, spi1),
    SSI2: (Ssi2, spi2),
    SSI3: (Ssi3, spi3),
}
|
use std::env;
use std::fs;
/// Deletes the file named by the first command-line argument and reports
/// the deletion on stdout.
///
/// Panics with a usage message when no argument is given (the previous
/// version panicked with a bare index-out-of-bounds); filesystem errors
/// (missing file, permissions) are propagated via `?`.
fn main() -> std::io::Result<()> {
    let path = env::args()
        .nth(1)
        .expect("usage: <program> <file-to-delete>");
    fs::remove_file(&path)?;
    println!("{} has been deleted", path);
    Ok(())
}
|
#[cfg(test)]
mod test {
    use rbs::value::map::ValueMap;
    use rbs::{value_map, Value};
    // Decoding an array of maps into a plain `Value` must round-trip the
    // array unchanged.
    #[test]
    fn test_decode_value() {
        let m = value_map! {
            1.to_string() => 1,
            2.to_string() => 2,
        };
        let m = Value::Map(m);
        let v: Value = rbatis::decode(Value::Array(vec![m.clone()])).unwrap();
        assert_eq!(v, Value::Array(vec![m]));
    }
    // A single-row, single-column result decodes to the bare column value.
    #[test]
    fn test_decode_one() {
        let date = rbdc::types::datetime::FastDateTime::now();
        let m = value_map! {
            1.to_string() => date.clone(),
        };
        let v: rbdc::types::datetime::FastDateTime =
            rbatis::decode(Value::Array(vec![Value::Map(m)])).unwrap();
        assert_eq!(v, date);
    }
    // Scalar decode into i32 from a one-row/one-column map.
    #[test]
    fn test_decode_i32() {
        let v: i32 = rbatis::decode(Value::Array(vec![Value::Map({
            let mut m = ValueMap::new();
            m.insert(Value::String("a".to_string()), Value::I64(1));
            m
        })]))
        .unwrap();
        assert_eq!(v, 1);
    }
    // Same shape as above, decoding into i64.
    #[test]
    fn test_decode_i64() {
        let v: i64 = rbatis::decode(Value::Array(vec![Value::Map({
            let mut m = ValueMap::new();
            m.insert(Value::String("a".to_string()), Value::I64(1));
            m
        })]))
        .unwrap();
        assert_eq!(v, 1i64);
    }
    // Multi-row results decode to a JSON array with one object per row.
    #[test]
    fn test_decode_json_array() {
        let m = value_map! {
            1.to_string() => 1,
            2.to_string() => 2,
        };
        let m = Value::Map(m);
        let v: serde_json::Value =
            rbatis::decode(Value::Array(vec![m.clone(), m.clone()])).unwrap();
        assert_eq!(
            v,
            serde_json::from_str::<serde_json::Value>(r#"[{"1":1,"2":2},{"1":1,"2":2}]"#).unwrap()
        );
    }
}
|
use crate::prelude::Token;
/// A top-level statement of the language.
pub enum Statement {
    /// `name` bound to the evaluated `expression`.
    Assign { name: Token, expression: Expression },
    /// Like `Assign`, but the expression is kept quoted.
    /// NOTE(review): semantics inferred from the name — confirm with the
    /// evaluator.
    AssignQuote { name: Token, expression: Expression },
    /// A bare expression evaluated for its value/effects.
    Expression(Expression),
}
/// The expression grammar (APL/K-style: monadic and dyadic verb calls,
/// quoting, and a spread/fold adverb).
pub enum Expression {
    /// A bare identifier reference.
    Ident {
        name: Token,
    },
    /// A literal list of tokens (e.g. a vector of numbers).
    List {
        value: Vec<Token>,
    },
    /// One-operand (monadic) verb application.
    MonadCall {
        op: Box<Expression>,
        lhs: Box<Expression>,
    },
    /// Two-operand (dyadic) verb application.
    /// NOTE(review): fields are declared rhs/op/lhs — presumably mirroring
    /// right-to-left evaluation order; confirm before relying on it.
    DyadCall {
        rhs: Box<Expression>,
        op: Box<Expression>,
        lhs: Box<Expression>,
    },
    /// `:expr` — a quoted (deferred) expression.
    Quote {
        colon: Token,
        expression: Box<Expression>,
    },
    /// `expr:` — applies/forces the expression.
    /// NOTE(review): semantics inferred from the field names — confirm.
    Call {
        expression: Box<Expression>,
        colon: Token,
    },
    /// `verb/` — spread (fold) of a verb over a list.
    /// NOTE(review): semantics inferred — confirm.
    Spread {
        verb: Box<Expression>,
        slash: Token,
    },
    /// `{ expr` — an anonymous function literal.
    Lambda {
        left_brace: Token,
        expression: Box<Expression>,
    },
}
/// Parser entry point.
///
/// TODO: parsing is not implemented yet; this always returns `None`.
pub fn parse() -> Option<Statement> {
    None
}
#[cfg(test)]
mod test {
    use crate::prelude::*;
    /// Scanning "x+y" must yield ident / builtin / ident tokens.
    #[test]
    fn parse1() {
        let s = Scanner::new("x+y");
        let tokens: Vec<_> = s.collect();
        let expected = vec![
            Token::no_span(TokenKind::Ident, "x"),
            Token::no_span(TokenKind::Builtin, "+"),
            Token::no_span(TokenKind::Ident, "y"),
        ];
        // A plain loop instead of `.map(assert).for_each(drop)`: iterator
        // adaptors should not be used for side effects. `zip` still stops at
        // the shorter sequence, matching the original comparison.
        for (actual, wanted) in tokens.into_iter().zip(expected.into_iter()) {
            assert!(actual.compare_no_span(wanted));
        }
    }
    /// Documents the intended AST shape for `a = 1 + 2`; the parser itself
    /// is not implemented yet, so nothing is asserted.
    #[test]
    fn parse2() {
        let _src = "a = 1 + 2";
        let _a = Statement::Assign {
            name: Token::no_span(TokenKind::Ident, "a"),
            expression: Expression::DyadCall {
                rhs: Box::new(Expression::List {
                    value: vec![Token::no_span(TokenKind::Int(1), "1")],
                }),
                op: Box::new(Expression::Ident {
                    name: Token::no_span(TokenKind::Ident, "+"),
                }),
                lhs: Box::new(Expression::List {
                    value: vec![Token::no_span(TokenKind::Int(2), "2")],
                }),
            },
        };
    }
}
|
// NOTE(review): svd2rust-generated accessors for the DLYB CFGR register.
// Field widths/offsets come from the SVD description; keep in sync with it
// rather than hand-editing.
#[doc = "Register `CFGR` reader"]
pub type R = crate::R<CFGR_SPEC>;
#[doc = "Register `CFGR` writer"]
pub type W = crate::W<CFGR_SPEC>;
#[doc = "Field `SEL` reader - Select the phase for the Output clock"]
pub type SEL_R = crate::FieldReader;
#[doc = "Field `SEL` writer - Select the phase for the Output clock"]
pub type SEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `UNIT` reader - Delay Defines the delay of a Unit delay cell"]
pub type UNIT_R = crate::FieldReader;
#[doc = "Field `UNIT` writer - Delay Defines the delay of a Unit delay cell"]
pub type UNIT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 7, O>;
#[doc = "Field `LNG` reader - Delay line length value"]
pub type LNG_R = crate::FieldReader<u16>;
#[doc = "Field `LNG` writer - Delay line length value"]
pub type LNG_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 12, O, u16>;
#[doc = "Field `LNGF` reader - Length valid flag"]
pub type LNGF_R = crate::BitReader;
#[doc = "Field `LNGF` writer - Length valid flag"]
pub type LNGF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each extracts its field from the cached register value.
impl R {
    #[doc = "Bits 0:3 - Select the phase for the Output clock"]
    #[inline(always)]
    pub fn sel(&self) -> SEL_R {
        SEL_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 8:14 - Delay Defines the delay of a Unit delay cell"]
    #[inline(always)]
    pub fn unit(&self) -> UNIT_R {
        UNIT_R::new(((self.bits >> 8) & 0x7f) as u8)
    }
    #[doc = "Bits 16:27 - Delay line length value"]
    #[inline(always)]
    pub fn lng(&self) -> LNG_R {
        LNG_R::new(((self.bits >> 16) & 0x0fff) as u16)
    }
    #[doc = "Bit 31 - Length valid flag"]
    #[inline(always)]
    pub fn lngf(&self) -> LNGF_R {
        LNGF_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// Write accessors: each returns a field writer positioned at its bit offset.
impl W {
    #[doc = "Bits 0:3 - Select the phase for the Output clock"]
    #[inline(always)]
    #[must_use]
    pub fn sel(&mut self) -> SEL_W<CFGR_SPEC, 0> {
        SEL_W::new(self)
    }
    #[doc = "Bits 8:14 - Delay Defines the delay of a Unit delay cell"]
    #[inline(always)]
    #[must_use]
    pub fn unit(&mut self) -> UNIT_W<CFGR_SPEC, 8> {
        UNIT_W::new(self)
    }
    #[doc = "Bits 16:27 - Delay line length value"]
    #[inline(always)]
    #[must_use]
    pub fn lng(&mut self) -> LNG_W<CFGR_SPEC, 16> {
        LNG_W::new(self)
    }
    #[doc = "Bit 31 - Length valid flag"]
    #[inline(always)]
    #[must_use]
    pub fn lngf(&mut self) -> LNGF_W<CFGR_SPEC, 31> {
        LNGF_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Bypasses per-field validation; the caller is responsible for a
        // value that is legal for every field.
        self.bits = bits;
        self
    }
}
#[doc = "DLYB configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfgr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfgr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CFGR_SPEC;
impl crate::RegisterSpec for CFGR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cfgr::R`](R) reader structure"]
impl crate::Readable for CFGR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cfgr::W`](W) writer structure"]
impl crate::Writable for CFGR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CFGR to value 0"]
impl crate::Resettable for CFGR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// NOTE(review): svd2rust-generated accessors for the CRC control register.
// Field widths/offsets come from the SVD description; do not hand-edit.
#[doc = "Register `CRC_CR` reader"]
pub type R = crate::R<CRC_CR_SPEC>;
#[doc = "Register `CRC_CR` writer"]
pub type W = crate::W<CRC_CR_SPEC>;
#[doc = "Field `RESET` reader - RESET"]
pub type RESET_R = crate::BitReader;
#[doc = "Field `RESET` writer - RESET"]
pub type RESET_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `POLYSIZE` reader - POLYSIZE"]
pub type POLYSIZE_R = crate::FieldReader;
#[doc = "Field `POLYSIZE` writer - POLYSIZE"]
pub type POLYSIZE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `REV_IN` reader - REV_IN"]
pub type REV_IN_R = crate::FieldReader;
#[doc = "Field `REV_IN` writer - REV_IN"]
pub type REV_IN_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `REV_OUT` reader - REV_OUT"]
pub type REV_OUT_R = crate::BitReader;
#[doc = "Field `REV_OUT` writer - REV_OUT"]
pub type REV_OUT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each extracts its field from the cached register value.
impl R {
    #[doc = "Bit 0 - RESET"]
    #[inline(always)]
    pub fn reset(&self) -> RESET_R {
        RESET_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bits 3:4 - POLYSIZE"]
    #[inline(always)]
    pub fn polysize(&self) -> POLYSIZE_R {
        POLYSIZE_R::new(((self.bits >> 3) & 3) as u8)
    }
    #[doc = "Bits 5:6 - REV_IN"]
    #[inline(always)]
    pub fn rev_in(&self) -> REV_IN_R {
        REV_IN_R::new(((self.bits >> 5) & 3) as u8)
    }
    #[doc = "Bit 7 - REV_OUT"]
    #[inline(always)]
    pub fn rev_out(&self) -> REV_OUT_R {
        REV_OUT_R::new(((self.bits >> 7) & 1) != 0)
    }
}
// Write accessors: each returns a field writer positioned at its bit offset.
impl W {
    #[doc = "Bit 0 - RESET"]
    #[inline(always)]
    #[must_use]
    pub fn reset(&mut self) -> RESET_W<CRC_CR_SPEC, 0> {
        RESET_W::new(self)
    }
    #[doc = "Bits 3:4 - POLYSIZE"]
    #[inline(always)]
    #[must_use]
    pub fn polysize(&mut self) -> POLYSIZE_W<CRC_CR_SPEC, 3> {
        POLYSIZE_W::new(self)
    }
    #[doc = "Bits 5:6 - REV_IN"]
    #[inline(always)]
    #[must_use]
    pub fn rev_in(&mut self) -> REV_IN_W<CRC_CR_SPEC, 5> {
        REV_IN_W::new(self)
    }
    #[doc = "Bit 7 - REV_OUT"]
    #[inline(always)]
    #[must_use]
    pub fn rev_out(&mut self) -> REV_OUT_W<CRC_CR_SPEC, 7> {
        REV_OUT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Bypasses per-field validation; the caller is responsible for a
        // value that is legal for every field.
        self.bits = bits;
        self
    }
}
#[doc = "CRC control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`crc_cr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`crc_cr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CRC_CR_SPEC;
impl crate::RegisterSpec for CRC_CR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`crc_cr::R`](R) reader structure"]
impl crate::Readable for CRC_CR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`crc_cr::W`](W) writer structure"]
impl crate::Writable for CRC_CR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CRC_CR to value 0"]
impl crate::Resettable for CRC_CR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
//! andn
/// Logical and not
pub trait Andn {
    /// Bitwise logical `AND` of inverted `self` with `y`.
    ///
    /// # Instructions
    ///
    /// - [`ANDN`](http://www.felixcloutier.com/x86/ANDN.html):
    ///   - Description: Logical and not.
    ///   - Architecture: x86.
    ///   - Instruction set: BMI.
    ///   - Registers: 32/64 bit.
    ///
    /// # Example
    ///
    /// ```
    /// # use bitintr::*;
    /// assert_eq!(0.andn(0), 0);
    /// assert_eq!(0.andn(1), 1);
    /// assert_eq!(1.andn(0), 0);
    /// assert_eq!(1.andn(1), 0);
    ///
    /// assert_eq!(0b0000_0000u8.andn(0b0000_0000u8), 0b0000_0000u8);
    /// assert_eq!(0b0000_0000u8.andn(0b1111_1111u8), 0b1111_1111u8);
    /// assert_eq!(0b1111_1111u8.andn(0b0000_0000u8), 0b0000_0000u8);
    /// assert_eq!(0b1111_1111u8.andn(0b1111_1111u8), 0b0000_0000u8);
    ///
    /// assert_eq!(0b0100_0000u8.andn(0b0101_1101u8), 0b0001_1101u8);
    /// ```
    fn andn(self, y: Self) -> Self;
}
// Implements `Andn` for one primitive integer type as the plain bitwise
// expression `!self & y` (the software fallback for the hardware ANDN).
macro_rules! impl_andn {
    ($id:ident) => {
        impl Andn for $id {
            #[inline]
            fn andn(self, y: Self) -> Self {
                !self & y
            }
        }
    };
}
// `impl_all!` (a crate-local helper macro) expands `impl_andn` once for
// every listed integer type.
impl_all!(impl_andn: u8, u16, u32, u64, i8, i16, i32, i64);
|
use crate::feed_processor::{FeedObjectMapping, FeedProcessor, FeedProcessor as FeedProcessorT};
use crate::{self as pallet_feeds};
use codec::{Compact, CompactLen, Decode, Encode};
use frame_support::parameter_types;
use frame_support::traits::{ConstU16, ConstU32, ConstU64};
use scale_info::TypeInfo;
use sp_core::H256;
use sp_runtime::testing::Header;
use sp_runtime::traits::{BlakeTwo256, IdentityLookup};
// Mock runtime scaffolding for pallet_feeds unit tests.
type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>;
type Block = frame_system::mocking::MockBlock<Test>;
// Feeds are identified by plain u64s in tests.
type FeedId = u64;
// Assemble a runtime containing only the system pallet and the pallet
// under test.
frame_support::construct_runtime!(
    pub struct Test where
        Block = Block,
        NodeBlock = Block,
        UncheckedExtrinsic = UncheckedExtrinsic,
    {
        System: frame_system::{Pallet, Call, Config, Storage, Event<T>},
        Feeds: pallet_feeds::{Pallet, Call, Storage, Event<T>}
    }
);
// Minimal frame_system configuration: unit/default types everywhere,
// u64 account ids with identity lookup, no account data.
impl frame_system::Config for Test {
    type BaseCallFilter = frame_support::traits::Everything;
    type BlockWeights = ();
    type BlockLength = ();
    type DbWeight = ();
    type RuntimeOrigin = RuntimeOrigin;
    type RuntimeCall = RuntimeCall;
    type Index = u64;
    type BlockNumber = u64;
    type Hash = H256;
    type Hashing = BlakeTwo256;
    type AccountId = u64;
    type Lookup = IdentityLookup<Self::AccountId>;
    type Header = Header;
    type RuntimeEvent = RuntimeEvent;
    type BlockHashCount = ConstU64<250>;
    type Version = ();
    type PalletInfo = PalletInfo;
    type AccountData = ();
    type OnNewAccount = ();
    type OnKilledAccount = ();
    type SystemWeightInfo = ();
    type SS58Prefix = ConstU16<42>;
    type OnSetCode = ();
    type MaxConsumers = ConstU32<16>;
}
parameter_types! {
    pub const ExistentialDeposit: u64 = 1;
    // Tests allow at most one feed per owner.
    pub const MaxFeeds: u32 = 1;
}
/// Selects which mock `FeedProcessor` implementation a test feed uses.
#[derive(Default, Debug, Copy, Clone, Encode, Decode, TypeInfo, Eq, PartialEq)]
pub enum MockFeedProcessorKind {
    /// The default processor (the `()` implementation).
    #[default]
    Content,
    /// Processor that indexes content nested within a `ContentEnum` object.
    ContentWithin,
    /// Processor that indexes under a fixed, caller-supplied 32-byte key.
    Custom([u8; 32]),
}
impl pallet_feeds::Config for Test {
    type RuntimeEvent = RuntimeEvent;
    type FeedId = FeedId;
    type FeedProcessorKind = MockFeedProcessorKind;
    type MaxFeeds = MaxFeeds;
    // Maps each mock kind to a boxed processor instance.
    fn feed_processor(
        feed_processor_kind: Self::FeedProcessorKind,
    ) -> Box<dyn FeedProcessorT<Self::FeedId>> {
        match feed_processor_kind {
            MockFeedProcessorKind::Content => Box::new(()),
            MockFeedProcessorKind::ContentWithin => Box::new(ContentEnumFeedProcessor),
            MockFeedProcessorKind::Custom(key) => {
                Box::new(CustomContentFeedProcessor(key.to_vec()))
            }
        }
    }
}
/// Builds fresh test externalities with the block number preset to 1.
/// NOTE(review): presumably so deposited events are retained (events are
/// not recorded at block 0) — confirm against the frame_system docs.
pub fn new_test_ext() -> sp_io::TestExternalities {
    let t = frame_system::GenesisConfig::default()
        .build_storage::<Test>()
        .unwrap();
    let mut t: sp_io::TestExternalities = t.into();
    t.execute_with(|| System::set_block_number(1));
    t
}
/// Same as default except key is not derived from object
struct CustomContentFeedProcessor(Vec<u8>);
impl FeedProcessor<FeedId> for CustomContentFeedProcessor {
    // Always emits one mapping at offset 0 under the fixed key captured at
    // construction time, regardless of the object's content.
    fn object_mappings(&self, _feed_id: FeedId, _object: &[u8]) -> Vec<FeedObjectMapping> {
        vec![FeedObjectMapping::Custom {
            key: self.0.clone(),
            offset: 0,
        }]
    }
}
// this is the content enum encoded as object for the put call
// we want to index content_a or content_b by an index either content addressable or name spaced key
#[derive(Debug, Clone, Encode, Decode)]
pub(crate) enum ContentEnum {
    ContentA(Vec<u8>),
    ContentB(Vec<u8>),
}
/// Processor that indexes the payload *inside* a SCALE-encoded `ContentEnum`
/// rather than the whole object.
struct ContentEnumFeedProcessor;
impl FeedProcessor<FeedId> for ContentEnumFeedProcessor {
    // Panics (via `expect`) if the object is not a valid `ContentEnum` —
    // acceptable in this test-only mock.
    fn object_mappings(&self, _feed_id: FeedId, object: &[u8]) -> Vec<FeedObjectMapping> {
        let content =
            ContentEnum::decode(&mut object.to_vec().as_slice()).expect("must decode to content");
        match content {
            ContentEnum::ContentA(_) | ContentEnum::ContentB(_) => {
                vec![FeedObjectMapping::Content {
                    // also need to consider the encoded length of the object
                    // encoded content_a or content_b starts at offset 1 due to enum variant
                    offset: 1 + Compact::<u32>::compact_len(&(object.len() as u32)) as u32,
                }]
            }
        }
    }
}
|
// NOTE(review): svd2rust-generated accessors for the OTG_FS device OUT
// endpoint interrupt mask register. Bit positions come from the SVD; do
// not hand-edit.
#[doc = "Register `DOEPMSK` reader"]
pub type R = crate::R<DOEPMSK_SPEC>;
#[doc = "Register `DOEPMSK` writer"]
pub type W = crate::W<DOEPMSK_SPEC>;
#[doc = "Field `XFRCM` reader - Transfer completed interrupt mask"]
pub type XFRCM_R = crate::BitReader;
#[doc = "Field `XFRCM` writer - Transfer completed interrupt mask"]
pub type XFRCM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EPDM` reader - Endpoint disabled interrupt mask"]
pub type EPDM_R = crate::BitReader;
#[doc = "Field `EPDM` writer - Endpoint disabled interrupt mask"]
pub type EPDM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `STUPM` reader - SETUP phase done mask"]
pub type STUPM_R = crate::BitReader;
#[doc = "Field `STUPM` writer - SETUP phase done mask"]
pub type STUPM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OTEPDM` reader - OUT token received when endpoint disabled mask"]
pub type OTEPDM_R = crate::BitReader;
#[doc = "Field `OTEPDM` writer - OUT token received when endpoint disabled mask"]
pub type OTEPDM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each extracts its flag from the cached register value.
impl R {
    #[doc = "Bit 0 - Transfer completed interrupt mask"]
    #[inline(always)]
    pub fn xfrcm(&self) -> XFRCM_R {
        XFRCM_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Endpoint disabled interrupt mask"]
    #[inline(always)]
    pub fn epdm(&self) -> EPDM_R {
        EPDM_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 3 - SETUP phase done mask"]
    #[inline(always)]
    pub fn stupm(&self) -> STUPM_R {
        STUPM_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - OUT token received when endpoint disabled mask"]
    #[inline(always)]
    pub fn otepdm(&self) -> OTEPDM_R {
        OTEPDM_R::new(((self.bits >> 4) & 1) != 0)
    }
}
// Write accessors: each returns a bit writer positioned at its offset.
impl W {
    #[doc = "Bit 0 - Transfer completed interrupt mask"]
    #[inline(always)]
    #[must_use]
    pub fn xfrcm(&mut self) -> XFRCM_W<DOEPMSK_SPEC, 0> {
        XFRCM_W::new(self)
    }
    #[doc = "Bit 1 - Endpoint disabled interrupt mask"]
    #[inline(always)]
    #[must_use]
    pub fn epdm(&mut self) -> EPDM_W<DOEPMSK_SPEC, 1> {
        EPDM_W::new(self)
    }
    #[doc = "Bit 3 - SETUP phase done mask"]
    #[inline(always)]
    #[must_use]
    pub fn stupm(&mut self) -> STUPM_W<DOEPMSK_SPEC, 3> {
        STUPM_W::new(self)
    }
    #[doc = "Bit 4 - OUT token received when endpoint disabled mask"]
    #[inline(always)]
    #[must_use]
    pub fn otepdm(&mut self) -> OTEPDM_W<DOEPMSK_SPEC, 4> {
        OTEPDM_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Bypasses per-field validation; the caller is responsible for a
        // value that is legal for every field.
        self.bits = bits;
        self
    }
}
#[doc = "OTG_FS device OUT endpoint common interrupt mask register (OTG_FS_DOEPMSK)\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doepmsk::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doepmsk::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DOEPMSK_SPEC;
impl crate::RegisterSpec for DOEPMSK_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`doepmsk::R`](R) reader structure"]
impl crate::Readable for DOEPMSK_SPEC {}
#[doc = "`write(|w| ..)` method takes [`doepmsk::W`](W) writer structure"]
impl crate::Writable for DOEPMSK_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DOEPMSK to value 0"]
impl crate::Resettable for DOEPMSK_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use chrono::Utc;
use hashbrown::HashMap;
use parking_lot::Mutex;
use std::{hash::Hash, str::FromStr};
/// One rate-limit bucket per `BucketName` variant, in declaration order.
pub struct Buckets([Mutex<Bucket>; 9]);
impl Buckets {
    /// Creates every bucket with its hard-coded delay / time-span / ticket
    /// parameters.
    pub fn new() -> Self {
        // Small helper so the table below stays one line per bucket.
        fn bucket(delay: i64, time_span: i64, limit: i32) -> Mutex<Bucket> {
            Mutex::new(Bucket::new(Ratelimit {
                delay,
                limit: Some((time_span, limit)),
            }))
        }
        Self([
            bucket(0, 9, 4),   // All
            bucket(1, 8, 2),   // BgBigger
            bucket(0, 10, 4),  // BgHint
            bucket(2, 20, 3),  // BgSkip
            bucket(30, 0, 1),  // Leaderboard
            bucket(15, 0, 1),  // MatchCompare
            bucket(5, 900, 3), // MatchLive
            bucket(0, 60, 10), // Snipe
            bucket(20, 0, 1),  // Songs
        ])
    }
    /// Returns the mutex-guarded bucket for `bucket`.
    pub fn get(&self, bucket: BucketName) -> &Mutex<Bucket> {
        let index = match bucket {
            BucketName::All => 0,
            BucketName::BgBigger => 1,
            BucketName::BgHint => 2,
            BucketName::BgSkip => 3,
            BucketName::Leaderboard => 4,
            BucketName::MatchCompare => 5,
            BucketName::MatchLive => 6,
            BucketName::Snipe => 7,
            BucketName::Songs => 8,
        };
        &self.0[index]
    }
}
/// Rate-limit parameters for one bucket.
pub struct Ratelimit {
    /// Minimum number of seconds between two accepted calls by one user.
    pub delay: i64,
    /// Optional `(time_span_secs, max_tickets)` cap within a rolling window.
    pub limit: Option<(i64, i32)>,
}
/// Per-user rate-limit bookkeeping. All timestamps are Unix seconds.
#[derive(Default)]
pub struct MemberRatelimit {
    /// Timestamp of the user's most recent accepted call.
    pub last_time: i64,
    /// Timestamp at which the current ticket window started.
    pub set_time: i64,
    /// Tickets consumed in the current window.
    pub tickets: i32,
}
/// A single bucket tracking the limit state of every user that has hit it.
pub struct Bucket {
    pub ratelimit: Ratelimit,
    pub users: HashMap<u64, MemberRatelimit>,
}
impl Bucket {
    /// Creates a bucket with the given limits and no users tracked yet.
    fn new(ratelimit: Ratelimit) -> Self {
        Self {
            ratelimit,
            users: HashMap::new(),
        }
    }
    /// Tries to take a ticket for `user_id`.
    ///
    /// Returns `0` when the call may proceed (recording the ticket), or the
    /// number of seconds the user must still wait — either because the
    /// per-window ticket limit is exhausted or the per-call delay has not
    /// elapsed.
    pub fn take(&mut self, user_id: u64) -> i64 {
        let time = Utc::now().timestamp();
        // `or_default()` replaces the former `or_insert_with(MemberRatelimit::default)`.
        let user = self.users.entry(user_id).or_default();
        if let Some((timespan, limit)) = self.ratelimit.limit {
            if user.tickets + 1 > limit {
                if time < user.set_time + timespan {
                    // Window still active and no tickets left: report cooldown.
                    return (user.set_time + timespan) - time;
                }
                // Window elapsed: start a fresh one.
                user.tickets = 0;
                user.set_time = time;
            }
        }
        if time < user.last_time + self.ratelimit.delay {
            // Per-call delay not yet satisfied.
            (user.last_time + self.ratelimit.delay) - time
        } else {
            user.tickets += 1;
            user.last_time = time;
            0
        }
    }
}
/// Identifies one of the nine rate-limit buckets held by `Buckets`.
///
/// The variant order matches the array order used inside `Buckets`.
#[derive(Debug, Eq, PartialEq, Copy, Clone, Hash)]
pub enum BucketName {
    All,
    BgBigger,
    BgHint,
    BgSkip,
    Leaderboard,
    MatchCompare,
    MatchLive,
    Snipe,
    Songs,
}
impl FromStr for BucketName {
type Err = &'static str;
fn from_str(name: &str) -> Result<Self, Self::Err> {
let bucket = match name {
"all" => BucketName::All,
"bg_bigger" => BucketName::BgBigger,
"bg_hint" => BucketName::BgHint,
"bg_skip" => BucketName::BgSkip,
"leaderboard" => BucketName::Leaderboard,
"match_compare" => BucketName::MatchCompare,
"match_live" => BucketName::MatchLive,
"snipe" => BucketName::Snipe,
"songs" => BucketName::Songs,
_ => return Err("Unknown bucket name"),
};
Ok(bucket)
}
}
|
#[doc = "Register `CFGR2` reader"]
pub type R = crate::R<CFGR2_SPEC>;
#[doc = "Register `CFGR2` writer"]
pub type W = crate::W<CFGR2_SPEC>;
#[doc = "Field `LOCKUP_LOCK` reader - Cortex-M0+ LOCKUP bit enable bit"]
pub type LOCKUP_LOCK_R = crate::BitReader;
#[doc = "Field `LOCKUP_LOCK` writer - Cortex-M0+ LOCKUP bit enable bit"]
pub type LOCKUP_LOCK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SRAM_PARITY_LOCK` reader - SRAM parity lock bit"]
pub type SRAM_PARITY_LOCK_R = crate::BitReader;
#[doc = "Field `SRAM_PARITY_LOCK` writer - SRAM parity lock bit"]
pub type SRAM_PARITY_LOCK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PVD_LOCK` reader - PVD lock enable bit"]
pub type PVD_LOCK_R = crate::BitReader;
#[doc = "Field `PVD_LOCK` writer - PVD lock enable bit"]
pub type PVD_LOCK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ECC_LOCK` reader - ECC error lock bit"]
pub type ECC_LOCK_R = crate::BitReader;
#[doc = "Field `ECC_LOCK` writer - ECC error lock bit"]
pub type ECC_LOCK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SRAM_PEF` reader - SRAM parity error flag"]
pub type SRAM_PEF_R = crate::BitReader;
#[doc = "Field `SRAM_PEF` writer - SRAM parity error flag"]
pub type SRAM_PEF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PA1_CDEN` reader - PA1_CDEN"]
pub type PA1_CDEN_R = crate::BitReader;
#[doc = "Field `PA1_CDEN` writer - PA1_CDEN"]
pub type PA1_CDEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PA3_CDEN` reader - PA3_CDEN"]
pub type PA3_CDEN_R = crate::BitReader;
#[doc = "Field `PA3_CDEN` writer - PA3_CDEN"]
pub type PA3_CDEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PA5_CDEN` reader - PA5_CDEN"]
pub type PA5_CDEN_R = crate::BitReader;
#[doc = "Field `PA5_CDEN` writer - PA5_CDEN"]
pub type PA5_CDEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PA6_CDEN` reader - PA6_CDEN"]
pub type PA6_CDEN_R = crate::BitReader;
#[doc = "Field `PA6_CDEN` writer - PA6_CDEN"]
pub type PA6_CDEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PA13_CDEN` reader - PA13_CDEN"]
pub type PA13_CDEN_R = crate::BitReader;
#[doc = "Field `PA13_CDEN` writer - PA13_CDEN"]
pub type PA13_CDEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PB0_CDEN` reader - PB0_CDEN"]
pub type PB0_CDEN_R = crate::BitReader;
#[doc = "Field `PB0_CDEN` writer - PB0_CDEN"]
pub type PB0_CDEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PB1_CDEN` reader - PB1_CDEN"]
pub type PB1_CDEN_R = crate::BitReader;
#[doc = "Field `PB1_CDEN` writer - PB1_CDEN"]
pub type PB1_CDEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PB2_CDEN` reader - PB2_CDEN"]
pub type PB2_CDEN_R = crate::BitReader;
#[doc = "Field `PB2_CDEN` writer - PB2_CDEN"]
pub type PB2_CDEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read-side accessors for CFGR2: each method tests a single bit of the raw
// register value with a shifted mask (`bits & (1 << n) != 0`), which is
// equivalent to the shift-then-mask form.
impl R {
    #[doc = "Bit 0 - Cortex-M0+ LOCKUP bit enable bit"]
    #[inline(always)]
    pub fn lockup_lock(&self) -> LOCKUP_LOCK_R {
        LOCKUP_LOCK_R::new(self.bits & (1 << 0) != 0)
    }
    #[doc = "Bit 1 - SRAM parity lock bit"]
    #[inline(always)]
    pub fn sram_parity_lock(&self) -> SRAM_PARITY_LOCK_R {
        SRAM_PARITY_LOCK_R::new(self.bits & (1 << 1) != 0)
    }
    #[doc = "Bit 2 - PVD lock enable bit"]
    #[inline(always)]
    pub fn pvd_lock(&self) -> PVD_LOCK_R {
        PVD_LOCK_R::new(self.bits & (1 << 2) != 0)
    }
    #[doc = "Bit 3 - ECC error lock bit"]
    #[inline(always)]
    pub fn ecc_lock(&self) -> ECC_LOCK_R {
        ECC_LOCK_R::new(self.bits & (1 << 3) != 0)
    }
    #[doc = "Bit 8 - SRAM parity error flag"]
    #[inline(always)]
    pub fn sram_pef(&self) -> SRAM_PEF_R {
        SRAM_PEF_R::new(self.bits & (1 << 8) != 0)
    }
    #[doc = "Bit 16 - PA1_CDEN"]
    #[inline(always)]
    pub fn pa1_cden(&self) -> PA1_CDEN_R {
        PA1_CDEN_R::new(self.bits & (1 << 16) != 0)
    }
    #[doc = "Bit 17 - PA3_CDEN"]
    #[inline(always)]
    pub fn pa3_cden(&self) -> PA3_CDEN_R {
        PA3_CDEN_R::new(self.bits & (1 << 17) != 0)
    }
    #[doc = "Bit 18 - PA5_CDEN"]
    #[inline(always)]
    pub fn pa5_cden(&self) -> PA5_CDEN_R {
        PA5_CDEN_R::new(self.bits & (1 << 18) != 0)
    }
    #[doc = "Bit 19 - PA6_CDEN"]
    #[inline(always)]
    pub fn pa6_cden(&self) -> PA6_CDEN_R {
        PA6_CDEN_R::new(self.bits & (1 << 19) != 0)
    }
    #[doc = "Bit 20 - PA13_CDEN"]
    #[inline(always)]
    pub fn pa13_cden(&self) -> PA13_CDEN_R {
        PA13_CDEN_R::new(self.bits & (1 << 20) != 0)
    }
    #[doc = "Bit 21 - PB0_CDEN"]
    #[inline(always)]
    pub fn pb0_cden(&self) -> PB0_CDEN_R {
        PB0_CDEN_R::new(self.bits & (1 << 21) != 0)
    }
    #[doc = "Bit 22 - PB1_CDEN"]
    #[inline(always)]
    pub fn pb1_cden(&self) -> PB1_CDEN_R {
        PB1_CDEN_R::new(self.bits & (1 << 22) != 0)
    }
    #[doc = "Bit 23 - PB2_CDEN"]
    #[inline(always)]
    pub fn pb2_cden(&self) -> PB2_CDEN_R {
        PB2_CDEN_R::new(self.bits & (1 << 23) != 0)
    }
}
// Write-side accessors for CFGR2: each method hands back a one-bit writer
// proxy whose const generic parameter pins the field's bit offset; the proxy
// mutates `self.bits` when the caller sets/clears the bit.
impl W {
    #[doc = "Bit 0 - Cortex-M0+ LOCKUP bit enable bit"]
    #[inline(always)]
    #[must_use]
    pub fn lockup_lock(&mut self) -> LOCKUP_LOCK_W<CFGR2_SPEC, 0> {
        LOCKUP_LOCK_W::new(self)
    }
    #[doc = "Bit 1 - SRAM parity lock bit"]
    #[inline(always)]
    #[must_use]
    pub fn sram_parity_lock(&mut self) -> SRAM_PARITY_LOCK_W<CFGR2_SPEC, 1> {
        SRAM_PARITY_LOCK_W::new(self)
    }
    #[doc = "Bit 2 - PVD lock enable bit"]
    #[inline(always)]
    #[must_use]
    pub fn pvd_lock(&mut self) -> PVD_LOCK_W<CFGR2_SPEC, 2> {
        PVD_LOCK_W::new(self)
    }
    #[doc = "Bit 3 - ECC error lock bit"]
    #[inline(always)]
    #[must_use]
    pub fn ecc_lock(&mut self) -> ECC_LOCK_W<CFGR2_SPEC, 3> {
        ECC_LOCK_W::new(self)
    }
    #[doc = "Bit 8 - SRAM parity error flag"]
    #[inline(always)]
    #[must_use]
    pub fn sram_pef(&mut self) -> SRAM_PEF_W<CFGR2_SPEC, 8> {
        SRAM_PEF_W::new(self)
    }
    #[doc = "Bit 16 - PA1_CDEN"]
    #[inline(always)]
    #[must_use]
    pub fn pa1_cden(&mut self) -> PA1_CDEN_W<CFGR2_SPEC, 16> {
        PA1_CDEN_W::new(self)
    }
    #[doc = "Bit 17 - PA3_CDEN"]
    #[inline(always)]
    #[must_use]
    pub fn pa3_cden(&mut self) -> PA3_CDEN_W<CFGR2_SPEC, 17> {
        PA3_CDEN_W::new(self)
    }
    #[doc = "Bit 18 - PA5_CDEN"]
    #[inline(always)]
    #[must_use]
    pub fn pa5_cden(&mut self) -> PA5_CDEN_W<CFGR2_SPEC, 18> {
        PA5_CDEN_W::new(self)
    }
    #[doc = "Bit 19 - PA6_CDEN"]
    #[inline(always)]
    #[must_use]
    pub fn pa6_cden(&mut self) -> PA6_CDEN_W<CFGR2_SPEC, 19> {
        PA6_CDEN_W::new(self)
    }
    #[doc = "Bit 20 - PA13_CDEN"]
    #[inline(always)]
    #[must_use]
    pub fn pa13_cden(&mut self) -> PA13_CDEN_W<CFGR2_SPEC, 20> {
        PA13_CDEN_W::new(self)
    }
    #[doc = "Bit 21 - PB0_CDEN"]
    #[inline(always)]
    #[must_use]
    pub fn pb0_cden(&mut self) -> PB0_CDEN_W<CFGR2_SPEC, 21> {
        PB0_CDEN_W::new(self)
    }
    #[doc = "Bit 22 - PB1_CDEN"]
    #[inline(always)]
    #[must_use]
    pub fn pb1_cden(&mut self) -> PB1_CDEN_W<CFGR2_SPEC, 22> {
        PB1_CDEN_W::new(self)
    }
    #[doc = "Bit 23 - PB2_CDEN"]
    #[inline(always)]
    #[must_use]
    pub fn pb2_cden(&mut self) -> PB2_CDEN_W<CFGR2_SPEC, 23> {
        PB2_CDEN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "SYSCFG configuration register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfgr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfgr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CFGR2_SPEC;
impl crate::RegisterSpec for CFGR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cfgr2::R`](R) reader structure"]
impl crate::Readable for CFGR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cfgr2::W`](W) writer structure"]
impl crate::Writable for CFGR2_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CFGR2 to value 0"]
impl crate::Resettable for CFGR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use crate::math::line_of_sight::line_of_sight;
use crate::types::{Position, Segment};
/// Returns the positions directly visible from `start`: the `destination`
/// plus every obstacle-segment endpoint, keeping only candidates that pass
/// the `line_of_sight` check against `obstacles`.
///
/// Yield order: `destination` first (if reachable), then segment endpoints in
/// the order the segments appear in `obstacles` (start before end).
pub fn reachable_positions<'a>(
    start: Position,
    obstacles: &'a [Segment],
    destination: Position,
) -> impl Iterator<Item = Position> + 'a {
    // Fully lazy pipeline: the original collected all endpoints into an
    // intermediate Vec only to iterate it again (clippy::needless_collect),
    // and allocated a `vec!` per segment; a plain array avoids both.
    std::iter::once(destination)
        .chain(obstacles.iter().flat_map(|segment| [segment.start, segment.end]))
        // Drop candidates occluded from `start` by any obstacle.
        .filter(move |&candidate| line_of_sight(start, obstacles, candidate))
}
// Unit tests for `reachable_positions`; note they assert on the exact yield
// order (destination first, then segment endpoints in declaration order).
#[cfg(test)]
mod tests {
    use super::*;
    // Two vertical walls between start and end; one endpoint of the second
    // wall is occluded by the first and must be filtered out.
    #[test]
    fn happy_path() {
        let start = Position { x: 0, y: 0 };
        let end = Position { x: 10, y: 10 };
        let segments = [
            Segment {
                start: Position { x: 5, y: -1 },
                end: Position { x: 5, y: 4 },
            }, Segment { // Partly hidden behind the other segment.
                start: Position { x: 6, y: 5 },
                end: Position { x: 6, y: 0 },
            },
        ];
        let got: Vec<Position> = reachable_positions(start, &segments, end).collect();
        assert_eq!(got, [
            Position { x: 10, y: 10 }, // End
            Position { x: 5, y: -1 }, // Segment 1
            Position { x: 5, y: 4 }, // Segment 1
            Position { x: 6, y: 5 }, // Segment 2 (the other is hidden behind segment 1)
        ]);
    }
    // Start boxed in by four overlapping walls: no candidate — not even a
    // wall endpoint — survives the line-of-sight filter.
    #[test]
    fn no_reachable_pos() {
        let start = Position { x: 5, y: 5 };
        let end = Position { x: 100, y: 100 };
        let segments = [
            Segment {
                start: Position { x: -1, y: 0 },
                end: Position { x: 11, y: 0 },
            },
            Segment {
                start: Position { x: 10, y: 11 },
                end: Position { x: 10, y: -1 },
            },
            Segment {
                start: Position { x: 11, y: 10 },
                end: Position { x: -1, y: 10 },
            },
            Segment {
                start: Position { x: 0, y: 11 },
                end: Position { x: 0, y: -1 },
            },
        ];
        let got: Vec<Position> = reachable_positions(start, &segments, end).collect();
        assert_eq!(got, []);
    }
}
|
// Copyright (c) 2016, <daggerbot@gmail.com>
// This software is available under the terms of the zlib license.
// See COPYING.md for more information.
use std::sync::Arc;
use x11_dl::xlib;
use error::Result;
// Global library instances.
lazy_static! {
    // Xlib is dlopen'ed at most once per process; both success and failure
    // are cached so every caller observes the same outcome.
    static ref XLIB: Result<Arc<xlib::Xlib>> = match xlib::Xlib::open() {
        Ok(xlib) => Ok(Arc::new(xlib)),
        Err(err) => Err(err!(LibraryError("xlib"): err)),
    };
}
/// Opens or returns a global Xlib instance.
///
/// Cheap on success: cloning bumps the `Arc` refcount rather than reloading
/// the library. On failure the cached error is cloned and returned again.
pub fn open_xlib () -> Result<Arc<xlib::Xlib>> {
    XLIB.clone()
}
|
//! Puck WebSocket support.
pub mod frame;
pub mod message;
pub mod send;
pub mod upgrade;
pub mod websocket;
pub use upgrade::*;
|
//error-pattern:begin_panic_fmt
// Deliberately failing assertion: this is a compile-test fixture whose panic
// output is matched against the `error-pattern` directive above. Do not fix
// the "wrong" values — the failure is the point.
fn main() {
    assert_eq!(5, 6);
}
|
use rocket::catch;
use rocket_contrib::{json, json::JsonValue};
/// 404 catcher: replies with a JSON body instead of Rocket's default HTML
/// error page.
#[catch(404)]
pub fn index() -> JsonValue {
    json!({ "message": "Language not found." })
}
|
use std::{collections::BTreeSet, fmt, hash};
use crate::{
AnyId, AtomId, Context, ExclusivelyContextual, InContext, Atomic, AcesError, AcesErrorKind, sat,
};
/// An identifier of a domain element used in c-e structures.
///
/// In line with the theory, the set of all dots is a shared resource
/// common to all c-e structures. On the other hand, properties of a
/// dot depend on a particular c-e structure, visualization method,
/// etc.
///
/// Therefore, there is no type `Dot` in _aces_. Instead, structural
/// information is stored in [`FusetHolder`] objects and accessed
/// through structural identifiers, [`PortId`], [`LinkId`], [`ForkId`]
/// and [`JoinId`]. Remaining dot-related data is retrieved through
/// `DotId`s from [`Context`] instances (many such instances may
/// coexist in a single process).
///
/// [`PortId`]: crate::PortId
/// [`LinkId`]: crate::LinkId
/// [`ForkId`]: crate::ForkId
/// [`JoinId`]: crate::JoinId
/// [`FusetHolder`]: crate::FusetHolder
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[repr(transparent)]
pub struct DotId(pub(crate) AnyId);
impl DotId {
    /// Returns the raw identifier this `DotId` wraps.
    #[inline]
    pub const fn get(self) -> AnyId {
        self.0
    }
}
impl From<AnyId> for DotId {
    #[inline]
    fn from(id: AnyId) -> Self {
        DotId(id)
    }
}
impl From<DotId> for AnyId {
    #[inline]
    fn from(id: DotId) -> Self {
        id.0
    }
}
impl ExclusivelyContextual for DotId {
    // Resolves this dot's human-readable name from the context; errors when
    // the id has no dot registered.
    fn format_locked(&self, ctx: &Context) -> Result<String, AcesError> {
        let name = ctx
            .get_dot_name(*self)
            .ok_or_else(|| AcesError::from(AcesErrorKind::DotMissingForId(*self)))?;
        Ok(name.to_owned())
    }
}
impl Atomic for DotId {
    // A `DotId` converts to a dot id trivially — it already is one.
    fn into_dot_id(this: InContext<Self>) -> Option<DotId> {
        Some(*this.get_thing())
    }
    fn into_sat_literal(self, negated: bool) -> sat::Literal {
        sat::Literal::from_atom_id(self.get(), negated)
    }
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum Polarity {
    Tx,
    Rx,
}
impl std::ops::Not for Polarity {
    type Output = Polarity;
    /// Flips the direction: `!Tx == Rx` and `!Rx == Tx`.
    fn not(self) -> Self::Output {
        if self == Polarity::Tx {
            Polarity::Rx
        } else {
            Polarity::Tx
        }
    }
}
impl fmt::Display for Polarity {
    /// Renders `Tx` as `">"` and `Rx` as `"<"`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let symbol = match self {
            Polarity::Tx => ">",
            Polarity::Rx => "<",
        };
        write!(f, "{}", symbol)
    }
}
/// An identifier of a [`Dotset`], a type derived from [`AtomId`].
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[repr(transparent)]
pub struct DotsetId(pub(crate) AtomId);
impl DotsetId {
    /// Returns the underlying atom identifier.
    #[inline]
    pub const fn get(self) -> AtomId {
        self.0
    }
}
impl From<AtomId> for DotsetId {
    #[inline]
    fn from(id: AtomId) -> Self {
        DotsetId(id)
    }
}
impl From<DotsetId> for AtomId {
    #[inline]
    fn from(id: DotsetId) -> Self {
        id.0
    }
}
impl ExclusivelyContextual for DotsetId {
    // Delegates formatting to the resolved `Dotset`; errors when the id is
    // not registered in the context.
    fn format_locked(&self, ctx: &Context) -> Result<String, AcesError> {
        let dotset = ctx
            .get_dotset(*self)
            .ok_or_else(|| AcesError::from(AcesErrorKind::DotsetMissingForId(*self)))?;
        dotset.format_locked(ctx)
    }
}
/// A set of dots: pre-set or post-set of a transition or a pit of a
/// wedge.
///
/// Represented as an ordered and deduplicated `Vec` of `DotId`s.
#[derive(Clone, Eq, Debug)]
pub struct Dotset {
    // `None` until the dotset is registered as an atom (see `get_atom_id`) —
    // NOTE(review): presumably set during atom registration; confirm.
    pub(crate) atom_id: Option<AtomId>,
    // Member dots, kept sorted and deduplicated.
    pub(crate) dot_ids: Vec<DotId>,
}
impl Dotset {
    /// [`Dotset`] constructor.
    ///
    /// See also [`Dotset::new_unchecked()`].
    pub fn new<I>(dot_ids: I) -> Self
    where
        I: IntoIterator<Item = DotId>,
    {
        // A BTreeSet both sorts and deduplicates, satisfying the invariant
        // `new_unchecked` assumes.
        let dot_ids: BTreeSet<_> = dot_ids.into_iter().collect();
        if dot_ids.is_empty() {
            // FIXME: empty input is not rejected here; it falls through to
            // `new_unchecked`, which only panics in debug builds.
        }
        Self::new_unchecked(dot_ids)
    }
    /// A more efficient variant of [`Dotset::new()`].
    ///
    /// Note: new [`Dotset`] is created under the assumption that
    /// `dot_ids` are nonempty and listed in ascending order. If
    /// the caller fails to provide an ordered dotset, the library
    /// may panic in some other call (the constructor itself panics
    /// immediately in debug mode).
    pub fn new_unchecked<I>(dot_ids: I) -> Self
    where
        I: IntoIterator<Item = DotId>,
    {
        let dot_ids: Vec<_> = dot_ids.into_iter().collect();
        trace!("New dotset: {:?}", dot_ids);
        // Debug-only invariant check: ids must be strictly ascending (hence
        // deduplicated) and nonempty.
        if cfg!(debug_assertions) {
            let mut niter = dot_ids.iter();
            if let Some(nid) = niter.next() {
                let mut prev_nid = *nid;
                for &nid in niter {
                    assert!(prev_nid < nid, "Unordered dotset");
                    prev_nid = nid;
                }
            } else {
                panic!("Empty dotset")
            }
        }
        Dotset { atom_id: None, dot_ids }
    }
    /// Returns the registered atom id.
    ///
    /// Panics if the dotset has not been registered yet (`atom_id` is `None`).
    #[inline]
    pub fn get_atom_id(&self) -> AtomId {
        self.atom_id.expect("Attempt to access an uninitialized dotset")
    }
    /// Non-panicking variant: `None` when unregistered.
    #[inline]
    pub fn get_id(&self) -> Option<DotsetId> {
        self.atom_id.map(DotsetId)
    }
    /// Borrows the sorted member dot ids.
    #[inline]
    pub fn get_dot_ids(&self) -> &[DotId] {
        self.dot_ids.as_slice()
    }
}
impl PartialEq for Dotset {
    // Identity is the dot list alone; the `atom_id` cache is excluded, which
    // keeps the `Hash` impl below consistent with equality.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.dot_ids == other.dot_ids
    }
}
impl hash::Hash for Dotset {
    // Hashes exactly the field compared by `eq`, as the `Hash` contract
    // requires (`a == b` implies equal hashes).
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.dot_ids.hash(state);
    }
}
impl ExclusivelyContextual for Dotset {
    // Formats as a parenthesized Debug list of member dot names; fails on the
    // first member id with no registered name.
    fn format_locked(&self, ctx: &Context) -> Result<String, AcesError> {
        let dot_names: Result<Vec<_>, AcesError> = self
            .dot_ids
            .iter()
            .map(|&dot_id| {
                ctx.get_dot_name(dot_id)
                    .ok_or_else(|| AcesErrorKind::DotMissingForId(dot_id).into())
            })
            .collect();
        Ok(format!("({:?})", dot_names?))
    }
}
/// Representation of a port.
///
/// This is an oriented dot, i.e. a dot coupled with a specific
/// [`Polarity`].
#[derive(Clone, Eq, Debug)]
pub struct Port {
    // `None` until the port is registered as an atom (see `get_atom_id`).
    pub(crate) atom_id: Option<AtomId>,
    // The port's direction (`Tx` or `Rx`; see `Polarity`).
    pub(crate) polarity: Polarity,
    // The dot this port belongs to.
    pub(crate) dot_id: DotId,
}
impl Port {
    // Ports start unregistered; `atom_id` is assigned elsewhere.
    pub(crate) fn new(polarity: Polarity, dot_id: DotId) -> Self {
        Self { atom_id: None, polarity, dot_id }
    }
    pub(crate) fn get_polarity(&self) -> Polarity {
        self.polarity
    }
    /// Returns the registered atom id.
    ///
    /// Panics if the port has not been registered yet (`atom_id` is `None`).
    pub fn get_atom_id(&self) -> AtomId {
        self.atom_id.expect("Attempt to access an uninitialized port")
    }
    /// Returns the dot this port is attached to.
    pub fn get_dot_id(&self) -> DotId {
        self.dot_id
    }
}
impl PartialEq for Port {
    // NOTE(review): equality considers `dot_id` only — two ports on the same
    // dot with opposite polarity compare equal. Confirm this is intentional.
    fn eq(&self, other: &Self) -> bool {
        self.dot_id == other.dot_id
    }
}
impl hash::Hash for Port {
    /// Hashes only `dot_id`, mirroring the `PartialEq` impl.
    ///
    /// The `Hash` contract requires `a == b` to imply equal hashes. `Port`
    /// equality ignores `polarity`, so also feeding `polarity` into the hash
    /// (as this impl previously did) made two equal ports hash differently,
    /// which breaks `HashMap`/`HashSet` lookups. Hashing a subset of the
    /// equality fields is always contract-safe.
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.dot_id.hash(state);
    }
}
impl ExclusivelyContextual for Port {
    // Formats as "[<dot-name> <polarity>]"; errors when the dot has no
    // registered name.
    fn format_locked(&self, ctx: &Context) -> Result<String, AcesError> {
        let dot_name = ctx.get_dot_name(self.get_dot_id()).ok_or_else(|| {
            AcesError::from(AcesErrorKind::DotMissingForPort(self.get_polarity()))
        })?;
        Ok(format!("[{} {}]", dot_name, self.get_polarity()))
    }
}
|
// This file is part of Darwinia.
//
// Copyright (C) 2018-2021 Darwinia Network
// SPDX-License-Identifier: GPL-3.0
//
// Darwinia is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Darwinia is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Darwinia. If not, see <https://www.gnu.org/licenses/>.
// --- std ---
use std::{error::Error, sync::Arc};
// --- crates.io ---
use futures::lock::Mutex;
// --- paritytech ---
use cumulus_client_consensus_aura::{
build_aura_consensus, BuildAuraConsensusParams, BuildVerifierParams, SlotProportion,
};
use cumulus_client_consensus_common::ParachainBlockImport;
use cumulus_client_consensus_relay_chain::{
BuildRelayChainConsensusParams, Verifier as RelayChainVerifier,
};
use cumulus_primitives_core::ParaId;
use cumulus_primitives_parachain_inherent::ParachainInherentData;
use sc_basic_authorship::ProposerFactory;
use sc_client_api::ExecutorProvider;
use sc_consensus::{import_queue::BasicQueue, DefaultImportQueue};
use sc_executor::{NativeElseWasmExecutor, NativeExecutionDispatch, NativeVersion};
use sc_service::{error::Result, Configuration, TFullClient, TaskManager};
use sc_telemetry::TelemetryHandle;
use sp_consensus::{CanAuthorWithNativeVersion, SlotData};
use sp_consensus_aura::sr25519;
// --- darwinia-network ---
use super::*;
use darwinia_collator_primitives::OpaqueBlock as Block;
use darwinia_parachain_runtime::{api, RuntimeApi};
/// Native executor instance.
///
/// Runtime-call dispatcher for `darwinia_parachain_runtime`, used as the
/// native side of `NativeElseWasmExecutor` in the service types below.
pub struct RuntimeExecutor;
impl NativeExecutionDispatch for RuntimeExecutor {
	// No additional host functions beyond the defaults.
	type ExtendHostFunctions = ();
	// Forwards a runtime API call, identified by method name, to the
	// natively-compiled runtime.
	fn dispatch(method: &str, data: &[u8]) -> Option<Vec<u8>> {
		api::dispatch(method, data)
	}
	fn native_version() -> NativeVersion {
		darwinia_parachain_runtime::native_version()
	}
}
/// Build the import queue for the `Darwinia Parachain` runtime.
///
/// Blocks are checked by a wrapping `Verifier` (defined in the parent module
/// via `use super::*`) holding two alternatives: an Aura verifier, built
/// lazily because its slot duration must be read through the runtime API,
/// and a plain relay-chain verifier that accepts blocks without extra
/// inherent checks. NOTE(review): which verifier applies per block is decided
/// inside `Verifier` — confirm the switching rule there.
pub fn build_import_queue(
	client: Arc<TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<RuntimeExecutor>>>,
	config: &Configuration,
	telemetry: Option<TelemetryHandle>,
	task_manager: &TaskManager,
) -> Result<
	DefaultImportQueue<
		Block,
		TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<RuntimeExecutor>>,
	>,
> {
	let client2 = client.clone();
	// Deferred construction: only runs the first time the Aura verifier is
	// actually needed.
	let aura_verifier = move || {
		let slot_duration = cumulus_client_consensus_aura::slot_duration(&*client2).unwrap();
		Box::new(cumulus_client_consensus_aura::build_verifier::<
			sr25519::AuthorityPair,
			_,
			_,
			_,
		>(BuildVerifierParams {
			client: client2.clone(),
			// Inherents used during verification: wall-clock timestamp plus
			// the Aura slot derived from it.
			create_inherent_data_providers: move |_, _| async move {
				let time = sp_timestamp::InherentDataProvider::from_system_time();
				let slot =
					sp_consensus_aura::inherents::InherentDataProvider::from_timestamp_and_duration(
						*time,
						slot_duration.slot_duration(),
					);
				Ok((time, slot))
			},
			can_author_with: CanAuthorWithNativeVersion::new(client2.executor().clone()),
			telemetry,
		})) as Box<_>
	};
	// Fallback verifier: no additional inherent data providers.
	let relay_chain_verifier = Box::new(RelayChainVerifier::new(client.clone(), |_, _| async {
		Ok(())
	})) as Box<_>;
	let verifier = Verifier {
		client: client.clone(),
		relay_chain_verifier,
		aura_verifier: BuildOnAccess::Uninitialized(Some(Box::new(aura_verifier))),
	};
	let registry = config.prometheus_registry().clone();
	let spawner = task_manager.spawn_essential_handle();
	Ok(BasicQueue::new(
		verifier,
		Box::new(ParachainBlockImport::new(client.clone())),
		None,
		&spawner,
		registry,
	))
}
/// Start a `Darwinia Parachain` parachain node.
///
/// Delegates to `start_node_impl` (parent module), providing a consensus
/// builder that wires up two backends: a lazily-built Aura consensus and a
/// relay-chain fallback. `WaitForAuraConsensus` holds both and selects
/// between them at runtime — NOTE(review): presumably once the runtime
/// exposes the Aura API; confirm in its implementation.
pub async fn start_node(
	parachain_config: Configuration,
	polkadot_config: Configuration,
	id: ParaId,
) -> Result<(
	TaskManager,
	Arc<TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<RuntimeExecutor>>>,
)> {
	start_node_impl::<RuntimeApi, RuntimeExecutor, _, _, _>(
		parachain_config,
		polkadot_config,
		id,
		|_| Ok(Default::default()),
		build_import_queue,
		|client,
		 prometheus_registry,
		 telemetry,
		 task_manager,
		 relay_chain_node,
		 transaction_pool,
		 sync_oracle,
		 keystore,
		 force_authoring| {
			let client2 = client.clone();
			let relay_chain_backend = relay_chain_node.backend.clone();
			let relay_chain_client = relay_chain_node.client.clone();
			let spawn_handle = task_manager.spawn_handle();
			let transaction_pool2 = transaction_pool.clone();
			let telemetry2 = telemetry.clone();
			let prometheus_registry2 = prometheus_registry.map(|r| (*r).clone());
			// Aura consensus needs the runtime's slot duration, so it is built
			// only on first access.
			let aura_consensus = BuildOnAccess::Uninitialized(Some(Box::new(move || {
				let slot_duration =
					cumulus_client_consensus_aura::slot_duration(&*client2).unwrap();
				let proposer_factory = ProposerFactory::with_proof_recording(
					spawn_handle,
					client2.clone(),
					transaction_pool2,
					prometheus_registry2.as_ref(),
					telemetry2.clone(),
				);
				let relay_chain_backend2 = relay_chain_backend.clone();
				let relay_chain_client2 = relay_chain_client.clone();
				build_aura_consensus::<sr25519::AuthorityPair, _, _, _, _, _, _, _, _, _>(
					BuildAuraConsensusParams {
						proposer_factory,
						// Inherents for authoring: parachain validation data,
						// timestamp, and the Aura slot derived from it.
						create_inherent_data_providers:
							move |_, (relay_parent, validation_data)| {
								let parachain_inherent =
									ParachainInherentData::create_at_with_client(
										relay_parent,
										&relay_chain_client,
										&*relay_chain_backend,
										&validation_data,
										id,
									);
								async move {
									let time =
										sp_timestamp::InherentDataProvider::from_system_time();
									let slot =
										sp_consensus_aura::inherents::InherentDataProvider::from_timestamp_and_duration(
											*time,
											slot_duration.slot_duration(),
										);
									let parachain_inherent =
										parachain_inherent.ok_or_else(|| {
											Box::<dyn Error + Send + Sync>::from(
												"Failed to create parachain inherent",
											)
										})?;
									Ok((time, slot, parachain_inherent))
								}
							},
						block_import: client2.clone(),
						relay_chain_client: relay_chain_client2,
						relay_chain_backend: relay_chain_backend2,
						para_client: client2.clone(),
						backoff_authoring_blocks: Option::<()>::None,
						sync_oracle,
						keystore,
						force_authoring,
						slot_duration,
						// We got around 500ms for proposing
						block_proposal_slot_portion: SlotProportion::new(1f32 / 24f32),
						// And a maximum of 750ms if slots are skipped
						max_block_proposal_slot_portion: Some(SlotProportion::new(1f32 / 16f32)),
						telemetry: telemetry2,
					},
				)
			})));
			let proposer_factory = ProposerFactory::with_proof_recording(
				task_manager.spawn_handle(),
				client.clone(),
				transaction_pool,
				prometheus_registry.clone(),
				telemetry.clone(),
			);
			let relay_chain_backend = relay_chain_node.backend.clone();
			let relay_chain_client = relay_chain_node.client.clone();
			// Fallback consensus; only needs the parachain inherent.
			let relay_chain_consensus =
				cumulus_client_consensus_relay_chain::build_relay_chain_consensus(
					BuildRelayChainConsensusParams {
						para_id: id,
						proposer_factory,
						block_import: client.clone(),
						relay_chain_client: relay_chain_node.client.clone(),
						relay_chain_backend: relay_chain_node.backend.clone(),
						create_inherent_data_providers:
							move |_, (relay_parent, validation_data)| {
								let parachain_inherent =
									ParachainInherentData::create_at_with_client(
										relay_parent,
										&relay_chain_client,
										&*relay_chain_backend,
										&validation_data,
										id,
									);
								async move {
									let parachain_inherent =
										parachain_inherent.ok_or_else(|| {
											Box::<dyn Error + Send + Sync>::from(
												"Failed to create parachain inherent",
											)
										})?;
									Ok(parachain_inherent)
								}
							},
					},
				);
			let parachain_consensus = Box::new(WaitForAuraConsensus {
				client: client.clone(),
				aura_consensus: Arc::new(Mutex::new(aura_consensus)),
				relay_chain_consensus: Arc::new(Mutex::new(relay_chain_consensus)),
			});
			Ok(parachain_consensus)
		},
	)
	.await
}
|
use crate::{BaseInterface, EthernetInterface};
/// Converts a nispor ethernet interface into an nmstate `EthernetInterface`.
///
/// Only the pre-built `base_iface` is carried over; the raw nispor iface is
/// accepted but unused — presumably to keep a uniform converter signature
/// across interface types (TODO confirm against the other converters).
pub(crate) fn np_ethernet_to_nmstate(
    _np_iface: nispor::Iface,
    base_iface: BaseInterface,
) -> EthernetInterface {
    EthernetInterface {
        base: base_iface,
        ..Default::default()
    }
}
|
use std::{
collections::{HashMap, HashSet},
iter::FromIterator,
};
use quote::ToTokens;
use syn::{
braced, bracketed,
parse::{Parse, ParseStream},
punctuated::Punctuated,
token::{self, Brace},
ExprLit, Ident, Lit, LitInt, LitStr, Result, Token,
};
use tabled::{
builder::Builder,
settings::{style::BorderSpanCorrection, Alignment, Margin, Modify, Padding, Span, Style},
Table,
};
// One bracketed matrix row: `[a, b, c]` or the repeated form `[elem; len]`.
struct MatrixRow {
    #[allow(dead_code)]
    bracket_token: token::Bracket,
    elems: MatrixRowElements,
}
// Row contents: an explicit comma-separated list, or one element repeated
// `len` times (array-repeat syntax).
enum MatrixRowElements {
    List(Punctuated<ExprVal, Token![,]>),
    Static {
        elem: ExprVal,
        #[allow(dead_code)]
        semi_token: Token![;],
        len: LitInt,
    },
}
impl Parse for MatrixRow {
    // Parses `[..]`. `peek2(Token![;])` detects the `[elem; len]` form before
    // any element is consumed; otherwise a trailing-comma-tolerant list is
    // read (value first, then separator).
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        let content;
        let bracket_token = bracketed!(content in input);
        if content.peek2(Token![;]) {
            return Ok(Self {
                bracket_token,
                elems: MatrixRowElements::Static {
                    elem: content.parse()?,
                    semi_token: content.parse()?,
                    len: content.parse()?,
                },
            });
        }
        let mut elems = Punctuated::new();
        while !content.is_empty() {
            let val = content.parse()?;
            elems.push_value(val);
            if content.is_empty() {
                break;
            }
            let punct = content.parse()?;
            elems.push_punct(punct);
        }
        Ok(Self {
            bracket_token,
            elems: MatrixRowElements::List(elems),
        })
    }
}
// One matrix cell: a bare literal, or a braced scope `{...}` whose contents
// (if any) are described by `ScopeVal`.
enum ExprVal {
    Lit(ExprLit),
    Scope {
        #[allow(dead_code)]
        brace_token: token::Brace,
        // `None` for an empty scope `{}`.
        expr: Option<ScopeVal>,
    },
}
// Contents of a braced cell scope: a single literal, a comma list, or a
// `elem; len` sized repetition.
enum ScopeVal {
    Expr(ExprLit),
    List(Punctuated<ExprLit, Token![,]>),
    Sized {
        elem: ExprLit,
        #[allow(dead_code)]
        semi_token: Token![;],
        len: LitInt,
    },
}
impl Parse for ExprVal {
    // Dispatches on the leading token: `{` opens a scope, anything else must
    // be a literal. Inside the scope, the sized form is detected with
    // `peek2(Token![;])` before the first element is consumed; a lone
    // literal becomes `Expr`, otherwise the list loop takes over (note it
    // parses the separator first because the initial element was already
    // consumed above).
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        if input.peek(Brace) {
            let content;
            let brace_token = braced!(content in input);
            if content.is_empty() {
                return Ok(ExprVal::Scope {
                    brace_token,
                    expr: None,
                });
            }
            if content.peek2(Token![;]) {
                return Ok(ExprVal::Scope {
                    brace_token,
                    expr: Some(ScopeVal::Sized {
                        elem: content.parse()?,
                        semi_token: content.parse()?,
                        len: content.parse()?,
                    }),
                });
            }
            let elem: ExprLit = content.parse()?;
            if content.is_empty() {
                return Ok(ExprVal::Scope {
                    brace_token,
                    expr: Some(ScopeVal::Expr(elem)),
                });
            }
            let mut elems = Punctuated::new();
            elems.push(elem);
            while !content.is_empty() {
                let punct: Token![,] = content.parse()?;
                elems.push_punct(punct);
                if content.is_empty() {
                    // trailing comma
                    break;
                }
                let val: ExprLit = content.parse()?;
                elems.push_value(val);
            }
            return Ok(ExprVal::Scope {
                brace_token,
                expr: Some(ScopeVal::List(elems)),
            });
        }
        Ok(Self::Lit(input.parse()?))
    }
}
// The whole bracketed matrix argument of the macro: `[[..], [..]]`, or one
// row repeated as `[[..]; len]`.
struct MatrixInput {
    #[allow(dead_code)]
    bracket_token: token::Bracket,
    data: MatrixData,
}
// Matrix body: explicit row list, or a single row repeated `len` times.
enum MatrixData {
    List(Punctuated<MatrixRow, Token![,]>),
    Static {
        elem: MatrixRow,
        #[allow(dead_code)]
        semi_token: Token![;],
        len: LitInt,
    },
}
impl Parse for MatrixInput {
    // Parses the outer brackets: empty input yields an empty row list; a `;`
    // after the first row selects the repeated form; otherwise rows are read
    // as a trailing-comma-tolerant list (separator parsed before each
    // subsequent row because the first row is already consumed).
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        let content;
        let bracket_token = bracketed!(content in input);
        if content.is_empty() {
            return Ok(Self {
                bracket_token,
                data: MatrixData::List(Punctuated::new()),
            });
        }
        let elem = content.parse()?;
        if content.peek(Token![;]) {
            return Ok(MatrixInput {
                bracket_token,
                data: MatrixData::Static {
                    elem,
                    semi_token: content.parse()?,
                    len: content.parse()?,
                },
            });
        }
        let mut elems = Punctuated::new();
        elems.push(elem);
        while !content.is_empty() {
            let punct: Token![,] = content.parse()?;
            elems.push_punct(punct);
            if content.is_empty() {
                // trailing comma
                break;
            }
            let val = content.parse()?;
            elems.push_value(val);
        }
        Ok(MatrixInput {
            bracket_token,
            data: MatrixData::List(elems),
        })
    }
}
// A `key = value` macro setting (e.g. `THEME = "MODERN"`).
struct KeyValue<V> {
    key: Ident,
    #[allow(dead_code)]
    token: Token!(=),
    value: V,
}
impl<V: Parse> Parse for KeyValue<V> {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        Ok(Self {
            key: input.parse()?,
            token: input.parse()?,
            value: input.parse()?,
        })
    }
}
// Full macro input: the matrix, optionally followed by a comma and a
// trailing-comma-tolerant list of `key = "value"` settings.
pub(crate) struct TableStruct {
    matrix: MatrixInput,
    comma_token: Option<Token![,]>,
    settings: Punctuated<KeyValue<LitStr>, Token!(,)>,
}
impl Parse for TableStruct {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        let matrix = input.parse()?;
        let mut comma_token = None;
        let mut settings = Punctuated::new();
        // Settings only exist after a comma following the matrix.
        if input.peek(Token![,]) {
            comma_token = Some(input.parse()?);
            while !input.is_empty() {
                let val = input.parse()?;
                settings.push_value(val);
                if input.is_empty() {
                    break;
                }
                let punct = input.parse()?;
                settings.push_punct(punct);
            }
        }
        Ok(Self {
            matrix,
            comma_token,
            settings,
        })
    }
}
// Four comma-separated values in `left, right, top, bottom` order, used by
// the PADDING/MARGIN settings (see `build_padding`/`build_margin`).
struct Pad<T> {
    left: T,
    #[allow(dead_code)]
    comma1_tk: Token!(,),
    right: T,
    #[allow(dead_code)]
    comma2_tk: Token!(,),
    top: T,
    #[allow(dead_code)]
    comma3_tk: Token!(,),
    bottom: T,
}
impl<T: Parse> Parse for Pad<T> {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        Ok(Self {
            left: input.parse()?,
            comma1_tk: input.parse()?,
            right: input.parse()?,
            comma2_tk: input.parse()?,
            top: input.parse()?,
            comma3_tk: input.parse()?,
            bottom: input.parse()?,
        })
    }
}
// Renders a literal as its cell text: strings unquoted, numeric literals as
// their base-10 digits, byte strings via `Debug`, chars/bytes/bools via
// their value, and verbatim literals via their token text.
fn expr_lit_to_string(expr_lit: &ExprLit) -> String {
    match &expr_lit.lit {
        Lit::Str(val) => val.value(),
        Lit::ByteStr(val) => format!("{:?}", val.value()),
        Lit::Int(val) => val.base10_digits().to_string(),
        Lit::Float(val) => val.base10_digits().to_string(),
        Lit::Char(val) => val.value().to_string(),
        Lit::Byte(val) => val.value().to_string(),
        Lit::Bool(val) => val.value().to_string(),
        Lit::Verbatim(val) => val.to_token_stream().to_string(),
    }
}
/// Flattens one cell expression into its textual cell values.
///
/// - A bare literal or `{lit}` scope yields one cell.
/// - `{a, b, c}` yields one cell per element.
/// - `{elem; len}` yields `len` cells: the first holds `elem`'s text, the
///   rest stay empty (they are covered by a span later).
/// - `{}` yields a single empty cell.
///
/// # Errors
/// Returns a parse error when a `len` literal is not a valid `usize`.
fn expr_val_to_list(expr_val: &ExprVal) -> Result<Vec<String>> {
    // Flattened patterns replace the former `Some`/`match` pyramid; also
    // `iter()` instead of `into_iter()` on a reference — same iteration,
    // clearer intent.
    match expr_val {
        ExprVal::Lit(lit) => Ok(vec![expr_lit_to_string(lit)]),
        ExprVal::Scope { expr: None, .. } => Ok(vec![String::new()]),
        ExprVal::Scope { expr: Some(ScopeVal::Expr(lit)), .. } => {
            Ok(vec![expr_lit_to_string(lit)])
        }
        ExprVal::Scope { expr: Some(ScopeVal::List(list)), .. } => {
            Ok(list.iter().map(expr_lit_to_string).collect())
        }
        ExprVal::Scope { expr: Some(ScopeVal::Sized { elem, len, .. }), .. } => {
            let len = len.base10_parse::<usize>()?;
            let mut data = vec![String::new(); len];
            // `first_mut` is a no-op for `len == 0`, avoiding the panic path
            // of direct indexing.
            if let Some(first) = data.first_mut() {
                *first = expr_lit_to_string(elem);
            }
            Ok(data)
        }
    }
}
// Materializes the parsed matrix into a row-major `Vec<Vec<String>>` of cell
// texts; the `[row; len]` form collects the row once and repeats it.
fn collect_matrix(matrix: &MatrixInput) -> Result<Vec<Vec<String>>> {
    match &matrix.data {
        MatrixData::List(list) => {
            let mut data = vec![];
            for row in list {
                let row = collect_row(&row.elems)?;
                data.push(row)
            }
            Ok(data)
        }
        MatrixData::Static { elem, len, .. } => {
            let data = collect_row(&elem.elems)?;
            let len = len.base10_parse::<usize>()?;
            Ok(vec![data; len])
        }
    }
}
// Flattens one row's cell expressions into cell texts; a single expression
// can contribute several columns (see `expr_val_to_list`), and the
// `[elem; len]` form repeats the expanded cells `len` times.
fn collect_row(elems: &MatrixRowElements) -> Result<Vec<String>> {
    let mut row = vec![];
    match elems {
        MatrixRowElements::List(list) => {
            for val in list {
                let vals = expr_val_to_list(val)?;
                row.extend(vals);
            }
        }
        MatrixRowElements::Static { elem, len, .. } => {
            let len = len.base10_parse::<usize>()?;
            let elem = expr_val_to_list(elem)?;
            let iter = std::iter::repeat(elem).take(len).flatten();
            row.extend(iter);
        }
    }
    Ok(row)
}
// Collects the spans contributed by `{elem; len}` cells, keyed by
// `(row, col)`: such a cell occupies `len` columns of its row, so it yields a
// span of width `len` at its starting column. NOTE(review): these widen a
// cell *within* its row despite the `vspan` name — confirm the naming against
// how the spans are applied to the table.
fn collect_vspan(matrix: &MatrixInput) -> Result<HashMap<(usize, usize), usize>> {
    let mut spans = HashMap::new();
    match &matrix.data {
        MatrixData::List(list) => {
            for (row, e) in list.iter().enumerate() {
                match &e.elems {
                    MatrixRowElements::List(list) => {
                        // `i` tracks the current column, advancing by the
                        // number of cells each expression expands to.
                        let mut i = 0;
                        for e in list {
                            match e {
                                ExprVal::Lit(_) => i += 1,
                                ExprVal::Scope { expr, .. } => match expr {
                                    Some(val) => match val {
                                        ScopeVal::Expr(_) => i += 1,
                                        ScopeVal::List(list) => i += list.len(),
                                        ScopeVal::Sized { len, .. } => {
                                            let len = len.base10_parse::<usize>()?;
                                            if len > 0 {
                                                spans.insert((row, i), len);
                                                i += len;
                                            }
                                        }
                                    },
                                    None => i += 1,
                                },
                            }
                        }
                    }
                    // `[elem; n]` row: only a sized scope produces spans,
                    // repeated at each of its `n` starting columns.
                    MatrixRowElements::Static { elem, len, .. } => {
                        let arr_len = len.base10_parse::<usize>()?;
                        match elem {
                            ExprVal::Lit(_) => {}
                            ExprVal::Scope { expr, .. } => match expr {
                                Some(val) => match val {
                                    ScopeVal::Expr(_) => {}
                                    ScopeVal::List(_) => {}
                                    ScopeVal::Sized { len, .. } => {
                                        let len = len.base10_parse::<usize>()?;
                                        if len > 0 {
                                            let iter = (0..arr_len).map(|i| ((row, i * len), len));
                                            spans.extend(iter);
                                        }
                                    }
                                },
                                None => {}
                            },
                        }
                    }
                }
            }
        }
        // `[row; n]` matrix: compute the spans of the single row, replicated
        // for every repeated row index.
        MatrixData::Static { elem, len, .. } => {
            let count_rows = len.base10_parse::<usize>()?;
            match &elem.elems {
                MatrixRowElements::List(list) => {
                    let mut i = 0;
                    for e in list {
                        match e {
                            ExprVal::Lit(_) => i += 1,
                            ExprVal::Scope { expr, .. } => match expr {
                                Some(val) => match val {
                                    ScopeVal::Expr(_) => i += 1,
                                    ScopeVal::List(list) => i += list.len(),
                                    ScopeVal::Sized { len, .. } => {
                                        let len = len.base10_parse::<usize>()?;
                                        if len > 0 {
                                            spans
                                                .extend((0..count_rows).map(|row| ((row, i), len)));
                                            i += len;
                                        }
                                    }
                                },
                                None => i += 1,
                            },
                        }
                    }
                }
                MatrixRowElements::Static { .. } => {}
            }
        }
    }
    Ok(spans)
}
// Collects downward spans keyed by `(row, col)`: a filled scope cell
// (`{expr}` or `{expr; n}`) extends over the consecutive empty-scope `{}`
// cells found in the same column of the following rows.
fn collect_hspan(matrix: &MatrixInput) -> Result<HashMap<(usize, usize), usize>> {
    // First pass: record span anchors (`filled`) and placeholder positions
    // (`empties`).
    let mut filled = HashSet::new();
    let mut empties = HashSet::new();
    match &matrix.data {
        MatrixData::List(list) => {
            for (row, e) in list.iter().enumerate() {
                match &e.elems {
                    MatrixRowElements::List(list) => {
                        // `col` tracks the current column, advancing by the
                        // number of cells each expression expands to.
                        let mut col = 0;
                        for e in list {
                            match e {
                                ExprVal::Lit(_) => col += 1,
                                ExprVal::Scope { expr, .. } => match expr {
                                    Some(val) => match val {
                                        ScopeVal::List(list) => col += list.len(),
                                        ScopeVal::Expr(_) => {
                                            filled.insert((row, col));
                                            col += 1;
                                        }
                                        ScopeVal::Sized { len, .. } => {
                                            filled.insert((row, col));
                                            let len = len.base10_parse::<usize>()?;
                                            col += len;
                                        }
                                    },
                                    None => {
                                        empties.insert((row, col));
                                        col += 1;
                                    }
                                },
                            }
                        }
                    }
                    // `[elem; n]` row: replicate the anchor/placeholder
                    // classification of the single element across the row.
                    MatrixRowElements::Static { elem, len, .. } => {
                        let arr_len = len.base10_parse::<usize>()?;
                        match elem {
                            ExprVal::Lit(_) => {}
                            ExprVal::Scope { expr, .. } => match expr {
                                Some(val) => match val {
                                    ScopeVal::List(_) => {}
                                    ScopeVal::Expr(_) => {
                                        filled.extend((0..arr_len).map(|col| (row, col)));
                                    }
                                    ScopeVal::Sized { len, .. } => {
                                        let len = len.base10_parse::<usize>()?;
                                        filled.extend((0..arr_len).map(|col| (row, col * len)));
                                    }
                                },
                                None => {
                                    empties.extend((0..arr_len).map(|col| (row, col)));
                                }
                            },
                        }
                    }
                }
            }
        }
        MatrixData::Static { .. } => {}
    }
    // Second pass: grow each anchor downward while placeholders continue in
    // its column; span size counts the anchor itself plus the placeholders.
    let mut spans = HashMap::new();
    for (row, col) in filled {
        let mut size = 0;
        for row in row + 1.. {
            if empties.contains(&(row, col)) {
                size += 1;
            } else {
                break;
            }
        }
        if size > 0 {
            spans.insert((row, col), size + 1);
        }
    }
    Ok(spans)
}
// todo: export the constants from crate so they could be highlighted by language servers.
// Yet this is unstable to do.
/// Whether `name` is one of the built-in style presets this macro accepts
/// (the same set `apply_theme` dispatches on).
fn is_supported_theme(name: &str) -> bool {
    const SUPPORTED: [&str; 12] = [
        "EMPTY",
        "BLANK",
        "ASCII",
        "ASCII_ROUNDED",
        "DOTS",
        "MODERN",
        "SHARP",
        "ROUNDED",
        "EXTENDED",
        "RE_STRUCTURED_TEXT",
        "MARKDOWN",
        "PSQL",
    ];
    SUPPORTED.contains(&name)
}
/// Applies the built-in `tabled` style named by `name` to `table`.
///
/// # Panics
///
/// Reaches `unreachable!` for unknown names; callers must validate
/// `name` with `is_supported_theme` first.
fn apply_theme(table: &mut Table, name: &str) {
    match name {
        "EMPTY" => table.with(Style::empty()),
        "BLANK" => table.with(Style::blank()),
        "ASCII" => table.with(Style::ascii()),
        "ASCII_ROUNDED" => table.with(Style::ascii_rounded()),
        "DOTS" => table.with(Style::dots()),
        "MODERN" => table.with(Style::modern()),
        "SHARP" => table.with(Style::sharp()),
        "ROUNDED" => table.with(Style::rounded()),
        "EXTENDED" => table.with(Style::extended()),
        "RE_STRUCTURED_TEXT" => table.with(Style::re_structured_text()),
        "MARKDOWN" => table.with(Style::markdown()),
        "PSQL" => table.with(Style::psql()),
        _ => unreachable!(),
    };
}
/// Parses the four `LitInt` sides of `pad` into a `Padding`
/// (left, right, top, bottom).
fn build_padding(pad: Pad<LitInt>) -> syn::Result<Padding> {
    Ok(Padding::new(
        pad.left.base10_parse::<usize>()?,
        pad.right.base10_parse::<usize>()?,
        pad.top.base10_parse::<usize>()?,
        pad.bottom.base10_parse::<usize>()?,
    ))
}
/// Parses the four `LitInt` sides of `pad` into a `Margin`
/// (left, right, top, bottom).
fn build_margin(pad: Pad<LitInt>) -> syn::Result<Margin> {
    Ok(Margin::new(
        pad.left.base10_parse::<usize>()?,
        pad.right.base10_parse::<usize>()?,
        pad.top.base10_parse::<usize>()?,
        pad.bottom.base10_parse::<usize>()?,
    ))
}
/// Aborts proc-macro expansion with a diagnostic pointing at the
/// unsupported theme literal, listing the supported themes.
fn panic_not_supported_theme(ident: &LitStr) {
    proc_macro_error::abort!(
        ident,
        "The given settings is not supported";
        note="custom themes are yet not supported";
        help = r#"Supported themes are [EMPTY, BLANK, ASCII, ASCII_ROUNDED, DOTS, MODERN, SHARP, ROUNDED, EXTENDED, RE_STRUCTURED_TEXT, MARKDOWN, PSQL]"#
    )
}
/// Aborts proc-macro expansion with a diagnostic pointing at the
/// unsupported alignment literal, listing the supported alignments.
fn panic_not_supported_alignment(ident: &LitStr) {
    proc_macro_error::abort!(
        ident,
        "The given settings is not supported";
        help = r#"Supported alignment are [LEFT, RIGHT, CENTER, CENTER_VERTICAL, TOP, BOTTOM]"#
    )
}
#[allow(dead_code)]
/// Aborts proc-macro expansion when a literal expected to be a boolean
/// flag is neither `TRUE` nor `FALSE`. Currently unused (see allow above).
fn panic_not_supported_bool(ident: &LitStr) {
    proc_macro_error::abort!(
        ident,
        "Unexpected bool value";
        help = r#"Expected to get [TRUE, FALSE]"#
    )
}
/// Aborts proc-macro expansion when a settings key is not one of the
/// recognized setting names.
fn panic_not_supported_settings(ident: &Ident) {
    proc_macro_error::abort!(
        ident,
        "The given settings is not supported";
        help = r#"Supported list is [THEME, PADDING, MARGIN]"#
    )
}
/// Renders the parsed macro input into the final table string.
///
/// The matrix is built first; if a trailing comma introduced a settings
/// list, those settings are applied; and when any cell spans exist the
/// border lines are corrected so merged cells render cleanly.
pub(crate) fn build_table(table_st: &TableStruct) -> Result<String> {
    let mut table = create_table(&table_st.matrix)?;
    // The comma token marks the start of the optional settings list.
    if table_st.comma_token.is_some() {
        apply_settings(&mut table, &table_st.settings)?;
    }
    let has_spans = table.get_config().has_column_spans() || table.get_config().has_row_spans();
    if has_spans {
        // Merged cells leave stray border fragments; this pass cleans them up.
        table.with(BorderSpanCorrection);
    }
    Ok(table.to_string())
}
/// Applies every `KEY = "VALUE"` setting from the macro input to `table`,
/// stopping at the first failure.
fn apply_settings(
    table: &mut Table,
    settings: &Punctuated<KeyValue<LitStr>, Token![,]>,
) -> Result<()> {
    settings.iter().try_for_each(|kv| config_table(table, kv))
}
/// Returns `true` when `name` is one of the alignment identifiers
/// accepted by the `ALIGNMENT` setting.
fn is_supported_alignment(name: &str) -> bool {
    ["LEFT", "RIGHT", "CENTER", "CENTER_VERTICAL", "TOP", "BOTTOM"]
        .iter()
        .any(|&candidate| candidate == name)
}
/// Applies the alignment named by `name` to `table`.
///
/// # Panics
///
/// Reaches `unreachable!` for unknown names; callers must validate
/// `name` with `is_supported_alignment` first.
fn apply_alignment(table: &mut Table, name: &str) {
    match name {
        "LEFT" => table.with(Alignment::left()),
        "RIGHT" => table.with(Alignment::right()),
        "CENTER" => table.with(Alignment::center()),
        "CENTER_VERTICAL" => table.with(Alignment::center_vertical()),
        "TOP" => table.with(Alignment::top()),
        "BOTTOM" => table.with(Alignment::bottom()),
        _ => unreachable!(),
    };
}
/// Applies one `KEY = "VALUE"` setting to `table`.
///
/// Recognized keys are `THEME`, `PADDING`, `MARGIN` and `ALIGNMENT`;
/// any other key aborts macro expansion with a help message.
fn config_table(table: &mut Table, kv: &KeyValue<LitStr>) -> Result<()> {
    if kv.key == "THEME" {
        let theme = kv.value.value();
        if !is_supported_theme(&theme) {
            // Aborts expansion; never returns.
            panic_not_supported_theme(&kv.value);
        }
        apply_theme(table, &theme);
    } else if kv.key == "PADDING" {
        // The string literal is re-parsed as a `Pad<LitInt>` group.
        let padding = kv.value.parse().and_then(build_padding)?;
        table.with(padding);
    } else if kv.key == "MARGIN" {
        let margin = kv.value.parse().and_then(build_margin)?;
        table.with(margin);
    } else if kv.key == "ALIGNMENT" {
        let alignment = kv.value.value();
        if !is_supported_alignment(&alignment) {
            // Aborts expansion; never returns.
            panic_not_supported_alignment(&kv.value);
        }
        apply_alignment(table, &alignment);
    } else {
        panic_not_supported_settings(&kv.key);
    }
    Ok(())
}
/// Builds a `Table` from the parsed matrix and applies any collected
/// column/row spans to the corresponding cells.
fn create_table(mat: &MatrixInput) -> Result<Table> {
    let cells = collect_matrix(mat)?;
    let column_spans = collect_vspan(mat)?;
    let row_spans = collect_hspan(mat)?;
    let mut table = Builder::from_iter(cells).build();
    column_spans.into_iter().for_each(|(pos, span)| {
        table.with(Modify::new(pos).with(Span::column(span)));
    });
    row_spans.into_iter().for_each(|(pos, span)| {
        table.with(Modify::new(pos).with(Span::row(span)));
    });
    Ok(table)
}
|
use std::collections::HashMap;
use apllodb_shared_components::Expression;
use apllodb_storage_engine_interface::{ColumnName, TableName};
use crate::{
condition::Condition,
sql_processor::query::query_plan::query_plan_tree::query_plan_node::node_id::QueryPlanNodeId,
};
#[derive(Clone, PartialEq, Debug)]
/// Root node of modification plan tree.
pub(crate) enum ModificationPlanNode {
    /// Plan for an INSERT statement.
    Insert(InsertNode),
    /// Plan for an UPDATE statement.
    Update(UpdateNode),
}
#[derive(Clone, PartialEq, Debug)]
/// Plan node for an INSERT statement: records produced by `child`
/// are inserted into `table_name`.
pub(crate) struct InsertNode {
    /// Target table receiving the inserted records.
    pub(crate) table_name: TableName,
    /// Records to insert are sometimes passed in SQL;
    ///
    /// ```sql
    /// INSERT INTO t (id, c) VALUES (1, "xyz"), (2, "abc");
    /// ```
    ///
    /// and other times fetched from tables.
    ///
    /// ```sql
    /// INSERT INTO t (id, c) SELECT c_id, d FROM s;
    /// ```
    pub(crate) child: QueryPlanNodeId,
}
#[derive(Clone, PartialEq, Debug)]
/// Plan node for an UPDATE statement.
pub(crate) struct UpdateNode {
    /// Table whose rows are updated.
    pub(crate) table_name: TableName,
    /// New value expression for each column being set.
    pub(crate) column_values: HashMap<ColumnName, Expression>,
    /// Optional WHERE filter — presumably `None` updates every row;
    /// confirm against the plan executor.
    pub(crate) where_condition: Option<Condition>,
}
|
// Reader/writer type aliases for the WRP2BR register and its two 8-bit
// fields (start/end offsets of Bank 2 WRP area B). Generated-style code:
// keep in sync with the register spec below.
#[doc = "Register `WRP2BR` reader"]
pub type R = crate::R<WRP2BR_SPEC>;
#[doc = "Register `WRP2BR` writer"]
pub type W = crate::W<WRP2BR_SPEC>;
#[doc = "Field `WRP2B_STRT` reader - Bank 2 WRP second area B start offset"]
pub type WRP2B_STRT_R = crate::FieldReader;
#[doc = "Field `WRP2B_STRT` writer - Bank 2 WRP second area B start offset"]
pub type WRP2B_STRT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
#[doc = "Field `WRP2B_END` reader - Bank 2 WRP second area B end offset"]
pub type WRP2B_END_R = crate::FieldReader;
#[doc = "Field `WRP2B_END` writer - Bank 2 WRP second area B end offset"]
pub type WRP2B_END_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
// Field read accessors: each extracts one 8-bit field from the 32-bit value.
impl R {
    #[doc = "Bits 0:7 - Bank 2 WRP second area B start offset"]
    #[inline(always)]
    pub fn wrp2b_strt(&self) -> WRP2B_STRT_R {
        // Low byte of the register value.
        WRP2B_STRT_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 16:23 - Bank 2 WRP second area B end offset"]
    #[inline(always)]
    pub fn wrp2b_end(&self) -> WRP2B_END_R {
        // Byte at bits 16..=23.
        WRP2B_END_R::new(((self.bits >> 16) & 0xff) as u8)
    }
}
// Field write proxies at bit offsets 0 and 16, plus a raw-bits escape hatch.
impl W {
    #[doc = "Bits 0:7 - Bank 2 WRP second area B start offset"]
    #[inline(always)]
    #[must_use]
    pub fn wrp2b_strt(&mut self) -> WRP2B_STRT_W<WRP2BR_SPEC, 0> {
        WRP2B_STRT_W::new(self)
    }
    #[doc = "Bits 16:23 - Bank 2 WRP second area B end offset"]
    #[inline(always)]
    #[must_use]
    pub fn wrp2b_end(&mut self) -> WRP2B_END_W<WRP2BR_SPEC, 16> {
        WRP2B_END_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // SAFETY contract: no validation is performed here; the caller must
    // ensure `bits` is a legal value for this register.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Flash Bank 2 WRP area B address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`wrp2br::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`wrp2br::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct WRP2BR_SPEC;
impl crate::RegisterSpec for WRP2BR_SPEC {
    // 32-bit underlying storage.
    type Ux = u32;
}
#[doc = "`read()` method returns [`wrp2br::R`](R) reader structure"]
impl crate::Readable for WRP2BR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`wrp2br::W`](W) writer structure"]
impl crate::Writable for WRP2BR_SPEC {
    // Both bitmaps are zero — presumably no write-1-to-clear/set fields;
    // confirm against the device SVD if editing by hand.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets WRP2BR to value 0xff00_ff00"]
impl crate::Resettable for WRP2BR_SPEC {
    const RESET_VALUE: Self::Ux = 0xff00_ff00;
}
|
use crate::physics::{physics_body::PhysicsBody, physics_collider::PhysicsCollider};
use ptgui::prelude::*;
use raylib::prelude::*;
use serde::Deserialize;
/// A rectangular game object with a position and size; drawable and
/// usable as a physics body/collider (see trait impls in this file).
#[derive(PartialEq, Copy, Clone, Deserialize, Debug)]
pub struct Entity {
    // (x, y) of the rectangle's corner — assumed top-left in screen
    // coordinates, matching raylib's draw_rectangle; confirm.
    position: Point,
    // (width, height) of the rectangle.
    size: Dimensions,
}
impl Entity {
    /// Creates an entity at `position` with the given `size`.
    pub fn new(position: Point, size: Dimensions) -> Self {
        Entity { position, size }
    }
}
impl Drawable for Entity {
    /// Draws the entity as a solid red rectangle at its current position.
    fn draw(&mut self, draw_handler: &mut RaylibDrawHandle) {
        draw_handler.draw_rectangle(
            self.position.0,
            self.position.1,
            self.size.0,
            self.size.1,
            Color::RED,
        );
    }
}
impl PhysicsCollider for Entity {
    /// Returns the entity's current position.
    fn get_pos(&self) -> Point {
        self.position
    }
    /// Returns the entity's bounding dimensions.
    fn get_size(&self) -> Dimensions {
        self.size
    }
}
impl PhysicsBody for Entity {
    /// Attempts to move the entity downwards by a fixed fall speed,
    /// reverting the move if it would collide with any of `others`.
    fn try_fall<T: PhysicsCollider>(&mut self, others: &[T]) {
        // Constant downward speed per tick.
        let fall_speed = 10;
        self.try_move((0, fall_speed), others);
    }
    /// Moves by `deltas`, then undoes the move when the new position
    /// collides with any collider in `others` (move-then-revert check).
    fn try_move<T: PhysicsCollider>(&mut self, deltas: Point, others: &[T]) {
        self.move_pos(deltas);
        // `any` replaces the previous map + for_each + mutable-flag pattern
        // and short-circuits on the first collision.
        if others.iter().any(|other| other.is_colliding(*self)) {
            self.move_pos((-deltas.0, -deltas.1));
        }
    }
    /// Translates the position by `deltas` without any collision check.
    fn move_pos(&mut self, deltas: Point) {
        self.position.0 += deltas.0;
        self.position.1 += deltas.1;
    }
    /// Teleports the entity to `position` without any collision check.
    fn set_pos(&mut self, position: Point) {
        self.position = position;
    }
}
|
// This file is part of rdma-core. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. No part of rdma-core, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2016 The developers of rdma-core. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT.
/// MXM context options, mirroring the C struct layout (bindgen-generated,
/// per the `__bindgen_ty_*` field types).
// NOTE(review): `Copy` is derived without `Clone`, which only compiles if a
// manual `Clone` impl exists elsewhere (bindgen's convention) — confirm.
#[repr(C)]
#[derive(Debug, Copy)]
pub struct mxm_context_opts
{
	pub async_mode: mxm_async_mode_t,
	// Nested anonymous-struct options (memory-related, per field name).
	pub mem: mxm_context_opts__bindgen_ty_1,
	pub init_hook: *mut c_char,
	pub is_thread_single: c_int,
	pub shm_kcopy_mode: mxm_shm_kcopy_mode_t,
	// Nested anonymous-struct options (IB-related, per field name).
	pub ib: mxm_context_opts__bindgen_ty_2,
}
|
use cosmwasm_std::{Decimal, StdError};
use thiserror::Error;
/// Errors returned by this contract's entry points.
#[derive(Error, Debug)]
pub enum ContractError {
    /// Errors bubbled up from `cosmwasm_std`.
    #[error("{0}")]
    Std(#[from] StdError),
    /// The sender is not allowed to perform the action.
    #[error("Unauthorized")]
    Unauthorized {},
    // Add any other custom errors you like here.
    // Look at https://docs.rs/thiserror/1.0.21/thiserror/ for details.
    /// Fewer than two users were supplied.
    #[error("Min 2 users are required")]
    MinUsers {},
    /// The same user was supplied more than once.
    #[error("Duplicate users")]
    DuplicateUsers {},
    /// The balance involved was empty.
    #[error("Empty Balance")]
    EmptyBalance {},
    /// An arithmetic calculation failed.
    #[error("Invalid math calc")]
    MathCalc {},
    /// The combined percentage `total` is not valid.
    #[error("Invalid total {total} percentage")]
    InvalidPercentage { total: Decimal },
}
|
//! Tests auto-converted from "sass-spec/spec/core_functions/string"
#[allow(unused)]
use super::rsass;
// From "sass-spec/spec/core_functions/string/index.hrx"
mod index {
#[allow(unused)]
use super::rsass;
#[test]
fn beginning() {
assert_eq!(
rsass(
"a {b: str-index(\"cde\", \"c\")}\
\n"
)
.unwrap(),
"a {\
\n b: 1;\
\n}\
\n"
);
}
#[test]
fn both_empty() {
assert_eq!(
rsass(
"a {b: str-index(\"\", \"\")}\
\n"
)
.unwrap(),
"a {\
\n b: 1;\
\n}\
\n"
);
}
#[test]
fn combining_character() {
assert_eq!(
rsass(
"// Sass does *not* treat strings as sequences of glyphs, so this string which\
\n// contains \"c\" followed by a combining umlaut should be considered two separate\
\n// characters even though it\'s rendered as only one.\
\na {b: str-index(\"c\\0308 a\", \"a\")}\
\n"
)
.unwrap(),
"a {\
\n b: 3;\
\n}\
\n"
);
}
#[test]
fn double_width_character() {
assert_eq!(
rsass(
"// Sass treats strings as sequences of Unicode codepoint; it doesn\'t care if a\
\n// character is represented as two UTF-16 code units.\
\na {b: str-index(\"👭a\", \"a\")}\
\n"
)
.unwrap(),
"a {\
\n b: 2;\
\n}\
\n"
);
}
#[test]
fn empty_substring() {
assert_eq!(
rsass(
"a {b: str-index(\"cde\", \"\")}\
\n"
)
.unwrap(),
"a {\
\n b: 1;\
\n}\
\n"
);
}
#[test]
fn end() {
assert_eq!(
rsass(
"a {b: str-index(\"cde\", \"e\")}\
\n"
)
.unwrap(),
"a {\
\n b: 3;\
\n}\
\n"
);
}
mod error {
#[allow(unused)]
use super::rsass;
// Ignoring "too_few_args", error tests are not supported yet.
// Ignoring "too_many_args", error tests are not supported yet.
mod test_type {
#[allow(unused)]
use super::rsass;
// Ignoring "string", error tests are not supported yet.
// Ignoring "substring", error tests are not supported yet.
}
}
#[test]
fn middle() {
assert_eq!(
rsass(
"a {b: str-index(\"cde\", \"d\")}\
\n"
)
.unwrap(),
"a {\
\n b: 2;\
\n}\
\n"
);
}
#[test]
fn named() {
assert_eq!(
rsass(
"a {b: str-index($string: \"cde\", $substring: \"c\")}\
\n"
)
.unwrap(),
"a {\
\n b: 1;\
\n}\
\n"
);
}
#[test]
fn not_found() {
assert_eq!(
rsass(
"a {b: inspect(str-index(\"cde\", \"f\"))}\
\n"
)
.unwrap(),
"a {\
\n b: null;\
\n}\
\n"
);
}
}
// From "sass-spec/spec/core_functions/string/insert.hrx"
mod insert {
#[allow(unused)]
use super::rsass;
#[test]
fn combining_character() {
assert_eq!(
rsass(
"// Sass does *not* treat strings as sequences of glyphs, so this string which\
\n// contains \"c\" followed by a combining umlaut should be considered two separate\
\n// characters even though it\'s rendered as only one and the \"d\" should be\
\n// injected between the two.\
\na {b: str-insert(\"c\\0308\", \"d\", 2)}\
\n"
)
.unwrap(),
"@charset \"UTF-8\";\
\na {\
\n b: \"cd\u{308}\";\
\n}\
\n"
);
}
#[test]
fn double_width_character() {
assert_eq!(
rsass(
"// Sass treats strings as sequences of Unicode codepoint; it doesn\'t care if a\
\n// character is represented as two UTF-16 code units, so inserting a character\
\n// at index 2 shouldn\'t break this emoji in two.\
\na {b: str-insert(\"👭\", \"c\", 2)}\
\n"
)
.unwrap(),
"@charset \"UTF-8\";\
\na {\
\n b: \"👭c\";\
\n}\
\n"
);
}
mod empty_destination {
#[allow(unused)]
use super::rsass;
#[test]
fn empty_source() {
assert_eq!(
rsass(
"a {b: str-insert(\"\", \"\", 1)}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
#[test]
fn index_0() {
assert_eq!(
rsass(
"a {b: str-insert(\"\", \"c\", 0)}\
\n"
)
.unwrap(),
"a {\
\n b: \"c\";\
\n}\
\n"
);
}
#[test]
fn index_1() {
assert_eq!(
rsass(
"a {b: str-insert(\"\", \"c\", 1)}\
\n"
)
.unwrap(),
"a {\
\n b: \"c\";\
\n}\
\n"
);
}
#[test]
fn index_2() {
assert_eq!(
rsass(
"a {b: str-insert(\"\", \"c\", 2)}\
\n"
)
.unwrap(),
"a {\
\n b: \"c\";\
\n}\
\n"
);
}
#[test]
fn index_negative_1() {
assert_eq!(
rsass(
"a {b: str-insert(\"\", \"c\", -1)}\
\n"
)
.unwrap(),
"a {\
\n b: \"c\";\
\n}\
\n"
);
}
}
#[test]
fn empty_insertion() {
assert_eq!(
rsass(
"a {b: str-insert(\"cde\", \"\", 1)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cde\";\
\n}\
\n"
);
}
mod error {
#[allow(unused)]
use super::rsass;
// Ignoring "decimal", error tests are not supported yet.
// Ignoring "too_few_args", error tests are not supported yet.
// Ignoring "too_many_args", error tests are not supported yet.
mod test_type {
#[allow(unused)]
use super::rsass;
// Ignoring "index", error tests are not supported yet.
// Ignoring "insert", error tests are not supported yet.
// Ignoring "string", error tests are not supported yet.
}
}
mod index {
#[allow(unused)]
use super::rsass;
mod negative {
#[allow(unused)]
use super::rsass;
#[test]
fn t1() {
assert_eq!(
rsass(
"a {b: str-insert(\"cde\", \"f\", -1)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cdef\";\
\n}\
\n"
);
}
#[test]
fn t2() {
assert_eq!(
rsass(
"a {b: str-insert(\"cde\", \"f\", -2)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cdfe\";\
\n}\
\n"
);
}
#[test]
fn after_last() {
assert_eq!(
rsass(
"a {b: str-insert(\"cde\", \"f\", -100)}\
\n"
)
.unwrap(),
"a {\
\n b: \"fcde\";\
\n}\
\n"
);
}
#[test]
fn last() {
assert_eq!(
rsass(
"a {b: str-insert(\"cde\", \"f\", -4)}\
\n"
)
.unwrap(),
"a {\
\n b: \"fcde\";\
\n}\
\n"
);
}
}
mod positive {
#[allow(unused)]
use super::rsass;
#[test]
fn t0() {
assert_eq!(
rsass(
"a {b: str-insert(\"cde\", \"f\", 0)}\
\n"
)
.unwrap(),
"a {\
\n b: \"fcde\";\
\n}\
\n"
);
}
#[test]
fn t1() {
assert_eq!(
rsass(
"a {b: str-insert(\"cde\", \"f\", 1)}\
\n"
)
.unwrap(),
"a {\
\n b: \"fcde\";\
\n}\
\n"
);
}
#[test]
fn t2() {
assert_eq!(
rsass(
"a {b: str-insert(\"cde\", \"f\", 2)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cfde\";\
\n}\
\n"
);
}
#[test]
fn after_last() {
assert_eq!(
rsass(
"a {b: str-insert(\"cde\", \"f\", 100)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cdef\";\
\n}\
\n"
);
}
#[test]
fn last() {
assert_eq!(
rsass(
"a {b: str-insert(\"cde\", \"f\", 4)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cdef\";\
\n}\
\n"
);
}
}
}
#[test]
fn named() {
assert_eq!(
rsass(
"a {b: str-insert($string: \"cde\", $insert: \"f\", $index: 2)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cfde\";\
\n}\
\n"
);
}
}
// From "sass-spec/spec/core_functions/string/length.hrx"
mod length {
#[allow(unused)]
use super::rsass;
#[test]
fn combining_character() {
assert_eq!(
rsass(
"// Sass does *not* treat strings as sequences of glyphs, so this string which\
\n// contains \"c\" followed by a combining umlaut should be considered two separate\
\n// characters even though it\'s rendered as only one.\
\na {b: str-length(\"c\\0308\")}\
\n"
)
.unwrap(),
"a {\
\n b: 2;\
\n}\
\n"
);
}
#[test]
fn double_width_character() {
assert_eq!(
rsass(
"// Sass treats strings as sequences of Unicode codepoint; it doesn\'t care if a\
\n// character is represented as two UTF-16 code units.\
\na {b: str-length(\"👭\")}\
\n"
)
.unwrap(),
"a {\
\n b: 1;\
\n}\
\n"
);
}
#[test]
fn empty() {
assert_eq!(
rsass(
"a {b: str-length(\"\")}\
\n"
)
.unwrap(),
"a {\
\n b: 0;\
\n}\
\n"
);
}
mod error {
#[allow(unused)]
use super::rsass;
// Ignoring "too_few_args", error tests are not supported yet.
// Ignoring "too_many_args", error tests are not supported yet.
// Ignoring "test_type", error tests are not supported yet.
}
#[test]
fn multiple_characters() {
assert_eq!(
rsass(
"a {b: str-length(\"fblthp abatement\")}\
\n"
)
.unwrap(),
"a {\
\n b: 16;\
\n}\
\n"
);
}
#[test]
fn named() {
assert_eq!(
rsass(
"a {b: str-length($string: \"c\")}\
\n"
)
.unwrap(),
"a {\
\n b: 1;\
\n}\
\n"
);
}
#[test]
fn one_character() {
assert_eq!(
rsass(
"a {b: str-length(\"c\")}\
\n"
)
.unwrap(),
"a {\
\n b: 1;\
\n}\
\n"
);
}
#[test]
fn unquoted() {
assert_eq!(
rsass(
"a {b: str-length(loofamonster)}\
\n"
)
.unwrap(),
"a {\
\n b: 12;\
\n}\
\n"
);
}
}
// From "sass-spec/spec/core_functions/string/quote.hrx"
mod quote {
#[allow(unused)]
use super::rsass;
mod error {
#[allow(unused)]
use super::rsass;
// Ignoring "too_few_args", error tests are not supported yet.
// Ignoring "too_many_args", error tests are not supported yet.
// Ignoring "test_type", error tests are not supported yet.
}
#[test]
fn escape() {
assert_eq!(
rsass(
"a {b: quote(\\0)}\
\n"
)
.unwrap(),
"a {\
\n b: \"\\\\0 \";\
\n}\
\n"
);
}
#[test]
fn named() {
assert_eq!(
rsass(
"a {b: quote($string: c)}\
\n"
)
.unwrap(),
"a {\
\n b: \"c\";\
\n}\
\n"
);
}
mod quote_unquoted_quote {
#[allow(unused)]
use super::rsass;
#[test]
fn double() {
assert_eq!(
rsass(
"// See sass/libsass#2873\
\na {b: quote(unquote(\'\"\') + unquote(\"\'\"))}\
\n"
)
.unwrap(),
"a {\
\n b: \"\\\"\'\";\
\n}\
\n"
);
}
#[test]
fn single() {
assert_eq!(
rsass(
"// See sass/libsass#2873\
\na {b: quote(unquote(\'\"\'))}\
\n"
)
.unwrap(),
"a {\
\n b: \'\"\';\
\n}\
\n"
);
}
}
#[test]
fn quoted_double() {
assert_eq!(
rsass(
"a {b: quote(\"c\")}\
\n"
)
.unwrap(),
"a {\
\n b: \"c\";\
\n}\
\n"
);
}
#[test]
fn quoted_single() {
assert_eq!(
rsass(
"a {b: quote(\'c\')}\
\n"
)
.unwrap(),
"a {\
\n b: \"c\";\
\n}\
\n"
);
}
#[test]
fn unquoted() {
assert_eq!(
rsass(
"a {b: quote(c)}\
\n"
)
.unwrap(),
"a {\
\n b: \"c\";\
\n}\
\n"
);
}
}
// From "sass-spec/spec/core_functions/string/slice.hrx"
mod slice {
#[allow(unused)]
use super::rsass;
#[test]
fn combining_character() {
assert_eq!(
rsass(
"// Sass does *not* treat strings as sequences of glyphs, so this string which\
\n// contains \"c\" followed by a combining umlaut should be considered two separate\
\n// characters even though it\'s rendered as only one and only the \"d\" should be\
\n// sliced out.\
\na {b: str-slice(\"cd\\0308e\", 2, 2)}\
\n"
)
.unwrap(),
"a {\
\n b: \"d\";\
\n}\
\n"
);
}
#[test]
fn double_width_character() {
assert_eq!(
rsass(
"// Sass treats strings as sequences of Unicode codepoint; it doesn\'t care if a\
\n// character is represented as two UTF-16 code units, so inserting a character\
\n// at index 2 shouldn\'t break this emoji in two.\
\na {b: str-slice(\"c👭d\", 2, 2)}\
\n"
)
.unwrap(),
"@charset \"UTF-8\";\
\na {\
\n b: \"👭\";\
\n}\
\n"
);
}
mod empty {
#[allow(unused)]
use super::rsass;
mod end {
#[allow(unused)]
use super::rsass;
#[test]
fn t0() {
assert_eq!(
rsass(
"a {b: str-slice(\"\", 1, 0)}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
#[test]
fn t1() {
assert_eq!(
rsass(
"a {b: str-slice(\"\", 1, 1)}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
#[test]
fn t2() {
assert_eq!(
rsass(
"a {b: str-slice(\"\", 1, 2)}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
}
mod start {
#[allow(unused)]
use super::rsass;
#[test]
fn t0() {
assert_eq!(
rsass(
"a {b: str-slice(\"\", 0)}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
#[test]
fn t1() {
assert_eq!(
rsass(
"a {b: str-slice(\"\", 1)}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
#[test]
fn t2() {
assert_eq!(
rsass(
"a {b: str-slice(\"\", 2)}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
#[test]
fn negative_1() {
assert_eq!(
rsass(
"a {b: str-slice(\"\", -1)}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
}
}
mod end {
#[allow(unused)]
use super::rsass;
mod negative {
#[allow(unused)]
use super::rsass;
#[test]
fn t1() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", 1, -1)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cde\";\
\n}\
\n"
);
}
#[test]
fn t2() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", 1, -2)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cd\";\
\n}\
\n"
);
}
#[test]
fn after_last() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", 1, -100)}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
#[test]
fn last() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", 1, -4)}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
}
mod positive {
#[allow(unused)]
use super::rsass;
#[test]
fn t0() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", 1, 0)}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
#[test]
fn t1() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", 1, 1)}\
\n"
)
.unwrap(),
"a {\
\n b: \"c\";\
\n}\
\n"
);
}
#[test]
fn t2() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", 1, 2)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cd\";\
\n}\
\n"
);
}
#[test]
fn after_last() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", 1, 100)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cde\";\
\n}\
\n"
);
}
#[test]
fn after_start() {
assert_eq!(
rsass(
"a {b: str-slice(\"cdef\", 2, 3)}\
\n"
)
.unwrap(),
"a {\
\n b: \"de\";\
\n}\
\n"
);
}
#[test]
fn last() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", 1, 3)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cde\";\
\n}\
\n"
);
}
}
}
mod error {
#[allow(unused)]
use super::rsass;
mod decimal {
#[allow(unused)]
use super::rsass;
// Ignoring "end", error tests are not supported yet.
// Ignoring "start", error tests are not supported yet.
}
// Ignoring "too_few_args", error tests are not supported yet.
// Ignoring "too_many_args", error tests are not supported yet.
mod test_type {
#[allow(unused)]
use super::rsass;
// Ignoring "end_at", error tests are not supported yet.
// Ignoring "start_at", error tests are not supported yet.
// Ignoring "string", error tests are not supported yet.
}
}
#[test]
fn named() {
assert_eq!(
rsass(
"a {b: str-slice($string: \"cde\", $start-at: 2, $end-at: 2)}\
\n"
)
.unwrap(),
"a {\
\n b: \"d\";\
\n}\
\n"
);
}
mod start {
#[allow(unused)]
use super::rsass;
mod negative {
#[allow(unused)]
use super::rsass;
#[test]
fn t1() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", -1)}\
\n"
)
.unwrap(),
"a {\
\n b: \"e\";\
\n}\
\n"
);
}
#[test]
fn t2() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", -2)}\
\n"
)
.unwrap(),
"a {\
\n b: \"de\";\
\n}\
\n"
);
}
#[test]
fn after_last() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", -100)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cde\";\
\n}\
\n"
);
}
#[test]
fn last() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", -3)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cde\";\
\n}\
\n"
);
}
}
mod positive {
#[allow(unused)]
use super::rsass;
#[test]
fn t0() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", 0)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cde\";\
\n}\
\n"
);
}
#[test]
fn t1() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", 1)}\
\n"
)
.unwrap(),
"a {\
\n b: \"cde\";\
\n}\
\n"
);
}
#[test]
fn t2() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", 2)}\
\n"
)
.unwrap(),
"a {\
\n b: \"de\";\
\n}\
\n"
);
}
#[test]
fn after_end() {
assert_eq!(
rsass(
"a {b: str-slice(\"cdef\", 3, 2)}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
#[test]
fn after_last() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", 100)}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
#[test]
fn last() {
assert_eq!(
rsass(
"a {b: str-slice(\"cde\", 4)}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
}
}
#[test]
fn unquoted() {
assert_eq!(
rsass(
"a {b: str-slice(cdefgh, 3, 5)}\
\n"
)
.unwrap(),
"a {\
\n b: efg;\
\n}\
\n"
);
}
}
// From "sass-spec/spec/core_functions/string/to_lower_case.hrx"
mod to_lower_case {
#[allow(unused)]
use super::rsass;
#[test]
fn alphabet() {
assert_eq!(
rsass(
"a {b: to-lower-case(\"ABCDEFGHIJKLMNOPQRSTUVQXYZ\")}\
\n"
)
.unwrap(),
"a {\
\n b: \"abcdefghijklmnopqrstuvqxyz\";\
\n}\
\n"
);
}
#[test]
fn empty() {
assert_eq!(
rsass(
"a {b: to-lower-case(\"\")}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
mod error {
#[allow(unused)]
use super::rsass;
// Ignoring "too_few_args", error tests are not supported yet.
// Ignoring "too_many_args", error tests are not supported yet.
// Ignoring "test_type", error tests are not supported yet.
}
#[test]
fn named() {
assert_eq!(
rsass(
"a {b: to-lower-case($string: abcDEF)}\
\n"
)
.unwrap(),
"a {\
\n b: abcdef;\
\n}\
\n"
);
}
#[test]
fn non_ascii() {
assert_eq!(
rsass(
"// Only ASCII characters have their case changed.\
\na {b: to-lower-case(\"ÄÇÐØÞ\")}\
\n"
)
.unwrap(),
"@charset \"UTF-8\";\
\na {\
\n b: \"ÄÇÐØÞ\";\
\n}\
\n"
);
}
#[test]
fn number() {
assert_eq!(
rsass(
"a {b: to-lower-case(\"1234567890\")}\
\n"
)
.unwrap(),
"a {\
\n b: \"1234567890\";\
\n}\
\n"
);
}
#[test]
fn unquoted() {
assert_eq!(
rsass(
"a {b: to-lower-case(aBcDeF)}\
\n"
)
.unwrap(),
"a {\
\n b: abcdef;\
\n}\
\n"
);
}
}
// From "sass-spec/spec/core_functions/string/to_upper_case.hrx"
mod to_upper_case {
#[allow(unused)]
use super::rsass;
#[test]
fn alphabet() {
assert_eq!(
rsass(
"a {b: to-upper-case(\"abcdefghijklmnopqrstuvqxyz\")}\
\n"
)
.unwrap(),
"a {\
\n b: \"ABCDEFGHIJKLMNOPQRSTUVQXYZ\";\
\n}\
\n"
);
}
#[test]
fn empty() {
assert_eq!(
rsass(
"a {b: to-upper-case(\"\")}\
\n"
)
.unwrap(),
"a {\
\n b: \"\";\
\n}\
\n"
);
}
mod error {
#[allow(unused)]
use super::rsass;
// Ignoring "too_few_args", error tests are not supported yet.
// Ignoring "too_many_args", error tests are not supported yet.
// Ignoring "test_type", error tests are not supported yet.
}
#[test]
fn named() {
assert_eq!(
rsass(
"a {b: to-upper-case($string: abcDEF)}\
\n"
)
.unwrap(),
"a {\
\n b: ABCDEF;\
\n}\
\n"
);
}
#[test]
fn non_ascii() {
assert_eq!(
rsass(
"// Only ASCII characters have their case changed.\
\na {b: to-upper-case(\"äçðøþ\")}\
\n"
)
.unwrap(),
"@charset \"UTF-8\";\
\na {\
\n b: \"äçðøþ\";\
\n}\
\n"
);
}
#[test]
fn number() {
assert_eq!(
rsass(
"a {b: to-upper-case(\"1234567890\")}\
\n"
)
.unwrap(),
"a {\
\n b: \"1234567890\";\
\n}\
\n"
);
}
#[test]
fn unquoted() {
assert_eq!(
rsass(
"a {b: to-upper-case(aBcDeF)}\
\n"
)
.unwrap(),
"a {\
\n b: ABCDEF;\
\n}\
\n"
);
}
}
// From "sass-spec/spec/core_functions/string/unique_id.hrx"
// Auto-converted spec fixture (see file header) — prefer regenerating from
// sass-spec over hand-editing these tests.
mod unique_id {
    #[allow(unused)]
    use super::rsass;
    mod error {
        #[allow(unused)]
        use super::rsass;
        // Ignoring "too_many_args", error tests are not supported yet.
    }
    // Every generated id must parse as a CSS class selector.
    #[test]
    fn is_identifier() {
        assert_eq!(
            rsass(
                "// Every call to unique-id() should return a valid CSS identifier. We can\'t test\
            \n// this directly, so we make sure it can parse as a class selector with\
            \n// selector-parse().\
            \n@for $i from 1 to 1000 {\
            \n    $_: selector-parse(\".#{unique-id()}\");\
            \n}\
            \n"
            )
            .unwrap(),
            ""
        );
    }
    // 1000 consecutive ids must be pairwise distinct.
    #[test]
    fn is_unique() {
        assert_eq!(
            rsass(
                "// As the name suggests, every call to unique-id() should return a different\
            \n// value.\
            \n$ids: ();\
            \n@for $i from 1 to 1000 {\
            \n    $id: unique-id();\
            \n    @if map-has-key($ids, $id) {\
            \n        @error \"#{$id} generated more than once\";\
            \n    }\
            \n\
            \n    $ids: map-merge($ids, ($id: null));\
            \n}\
            \n"
            )
            .unwrap(),
            ""
        );
    }
}
// From "sass-spec/spec/core_functions/string/unquote.hrx"
mod unquote {
#[allow(unused)]
use super::rsass;
#[test]
fn empty() {
assert_eq!(
rsass(
"$result: unquote(\"\");\
\na {\
\n result: $result; // This will not be emitted because the contents is empty.\
\n length: str-length($result);\
\n same: $result == \"\";\
\n}\
\n"
)
.unwrap(),
"a {\
\n length: 0;\
\n same: true;\
\n}\
\n"
);
}
mod error {
#[allow(unused)]
use super::rsass;
// Ignoring "too_few_args", error tests are not supported yet.
// Ignoring "too_many_args", error tests are not supported yet.
// Ignoring "test_type", error tests are not supported yet.
}
#[test]
fn escaped_backslash() {
assert_eq!(
rsass(
"$result: unquote(\"\\\\0 \");\
\na {\
\n result: $result;\
\n length: str-length($result);\
\n same-as-argument: $result == \"\\\\0 \";\
\n same-as-literal: $result == \\0 ;\
\n}\
\n"
)
.unwrap(),
"a {\
\n result: \\0 ;\
\n length: 3;\
\n same-as-argument: true;\
\n same-as-literal: true;\
\n}\
\n"
);
}
mod escaped_quotes {
#[allow(unused)]
use super::rsass;
#[test]
fn quoted() {
assert_eq!(
rsass(
"// Unquoting a quoted string returns an unquoted string with the same code\
\n// points. Code points such as quotes that need to be escaped in the original\
\n// don\'t need escaping in the output.\
\n$result: unquote(\"\\\"c\\\"\");\
\na {\
\n result: $result;\
\n length: str-length($result);\
\n same: $result == \"\\\"c\\\"\";\
\n}\
\n"
)
.unwrap(),
"a {\
\n result: \"c\";\
\n length: 3;\
\n same: true;\
\n}\
\n"
);
}
#[test]
fn unquoted() {
assert_eq!(
rsass(
"// Unquoting an unquoted string returns it exactly as-is, leaving escapes\
\n// totally unchanged (whether they\'re quotes or not).\
\n$result: unquote(\\\"c\\\");\
\na {\
\n result: $result;\
\n length: str-length($result);\
\n same: $result == \\\"c\\\";\
\n}\
\n"
)
.unwrap(),
"a {\
\n result: \\\"c\\\";\
\n length: 5;\
\n same: true;\
\n}\
\n"
);
}
}
#[test]
fn meaningful_css_characters() {
assert_eq!(
rsass(
"// Unquoted strings aren\'t required to be valid CSS identifiers, and the\
\n// `unquote()` function does *not* escape characters that aren\'t valid\
\n// identifier characters. This allows it to be used as an escape hatch to\
\n// produce CSS that Sass doesn\'t otherwise support.\
\n$result: unquote(\"b; c {d: e\");\
\na {\
\n result: $result;\
\n length: str-length($result);\
\n same: $result == \"b; c {d: e\";\
\n}\
\n"
)
.unwrap(),
"a {\
\n result: b; c {d: e;\
\n length: 10;\
\n same: true;\
\n}\
\n"
);
}
#[test]
fn named() {
assert_eq!(
rsass(
"a {b: unquote($string: c)}\
\n"
)
.unwrap(),
"a {\
\n b: c;\
\n}\
\n"
);
}
#[test]
fn quoted() {
assert_eq!(
rsass(
"a {b: unquote(\"c\")}\
\n"
)
.unwrap(),
"a {\
\n b: c;\
\n}\
\n"
);
}
#[test]
fn unquoted() {
assert_eq!(
rsass(
"a {b: unquote(c)}\
\n"
)
.unwrap(),
"a {\
\n b: c;\
\n}\
\n"
);
}
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use crate::VerifierError;
use air::{proof::StarkProof, Air, EvaluationFrame};
use crypto::{BatchMerkleProof, ElementHasher, MerkleTree};
use fri::VerifierChannel as FriVerifierChannel;
use math::{FieldElement, StarkField};
use utils::{collections::Vec, string::ToString};
// TYPES AND INTERFACES
// ================================================================================================
/// Holds all components parsed out of a [`StarkProof`] for consumption by
/// the verifier: trace and constraint query data, FRI layers, the
/// out-of-domain frame, and the proof-of-work nonce.
// NOTE(review): the `Option` fields look like take-once storage consumed
// during verification — confirm against the accessor methods.
pub struct VerifierChannel<B, E, H>
where
    B: StarkField,
    E: FieldElement<BaseField = B>,
    H: ElementHasher<BaseField = B>,
{
    // trace queries
    trace_root: H::Digest,
    trace_proof: BatchMerkleProof<H>,
    trace_states: Option<Vec<Vec<B>>>,
    // constraint queries
    constraint_root: H::Digest,
    constraint_proof: BatchMerkleProof<H>,
    constraint_evaluations: Option<Vec<Vec<E>>>,
    // FRI proof
    fri_roots: Option<Vec<H::Digest>>,
    fri_layer_proofs: Vec<BatchMerkleProof<H>>,
    fri_layer_queries: Vec<Vec<E>>,
    fri_remainder: Option<Vec<E>>,
    fri_num_partitions: usize,
    // out-of-domain evaluation
    ood_frame: Option<EvaluationFrame<E>>,
    ood_evaluations: Option<Vec<E>>,
    // query proof-of-work
    pow_nonce: u64,
}
// VERIFIER CHANNEL IMPLEMENTATION
// ================================================================================================
impl<B, E, H> VerifierChannel<B, E, H>
where
    B: StarkField,
    E: FieldElement<BaseField = B>,
    H: ElementHasher<BaseField = B>,
{
    // CONSTRUCTOR
    // --------------------------------------------------------------------------------------------
    /// Creates and returns a new verifier channel initialized from the specified `proof`.
    ///
    /// # Errors
    /// Returns [VerifierError::InconsistentBaseField] if the base field of the AIR does not
    /// match the field recorded in the proof context, or
    /// [VerifierError::ProofDeserializationError] if any proof component (commitments, trace
    /// or constraint queries, FRI layers/remainder, out-of-domain frame) fails to parse.
    pub fn new<A: Air<BaseElement = B>>(air: &A, proof: StarkProof) -> Result<Self, VerifierError> {
        // make sure AIR and proof base fields are the same
        if B::get_modulus_le_bytes() != proof.context.field_modulus_bytes() {
            return Err(VerifierError::InconsistentBaseField);
        }

        let lde_domain_size = air.lde_domain_size();
        let num_queries = air.options().num_queries();
        let fri_options = air.options().to_fri_options();

        // --- parse commitments ------------------------------------------------------------------
        let (trace_root, constraint_root, fri_roots) = proof
            .commitments
            .parse::<H>(fri_options.num_fri_layers(lde_domain_size))
            .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?;

        // --- parse trace queries ----------------------------------------------------------------
        let (trace_proof, trace_states) = proof
            .trace_queries
            .parse::<H, B>(lde_domain_size, num_queries, air.trace_width())
            .map_err(|err| {
                VerifierError::ProofDeserializationError(format!(
                    "trace query deserialization failed: {}",
                    err
                ))
            })?;

        // --- parse constraint evaluation queries ------------------------------------------------
        let (constraint_proof, constraint_evaluations) = proof
            .constraint_queries
            .parse::<H, E>(lde_domain_size, num_queries, air.ce_blowup_factor())
            .map_err(|err| {
                VerifierError::ProofDeserializationError(format!(
                    "constraint evaluation query deserialization failed: {}",
                    err
                ))
            })?;

        // --- parse FRI proofs -------------------------------------------------------------------
        let fri_num_partitions = proof.fri_proof.num_partitions();
        let fri_remainder = proof
            .fri_proof
            .parse_remainder()
            .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?;
        let (fri_layer_queries, fri_layer_proofs) = proof
            .fri_proof
            .parse_layers::<H, E>(lde_domain_size, fri_options.folding_factor())
            .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?;

        // --- parse out-of-domain evaluation frame -----------------------------------------------
        let (ood_frame, ood_evaluations) = proof
            .ood_frame
            .parse(air.trace_width(), air.ce_blowup_factor())
            .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?;

        Ok(VerifierChannel {
            // trace queries
            trace_root,
            trace_proof,
            trace_states: Some(trace_states),
            // constraint queries
            constraint_root,
            constraint_proof,
            constraint_evaluations: Some(constraint_evaluations),
            // FRI proof
            fri_roots: Some(fri_roots),
            fri_layer_proofs,
            fri_layer_queries,
            fri_remainder: Some(fri_remainder),
            fri_num_partitions,
            // out-of-domain evaluation
            ood_frame: Some(ood_frame),
            ood_evaluations: Some(ood_evaluations),
            // query proof-of-work
            pow_nonce: proof.pow_nonce,
        })
    }

    // DATA READERS
    // --------------------------------------------------------------------------------------------
    /// Returns execution trace commitment sent by the prover.
    pub fn read_trace_commitment(&self) -> H::Digest {
        self.trace_root
    }

    /// Returns constraint evaluation commitment sent by the prover.
    pub fn read_constraint_commitment(&self) -> H::Digest {
        self.constraint_root
    }

    /// Returns trace polynomial evaluations at out-of-domain points z and z * g, where g is the
    /// generator of the LDE domain.
    ///
    /// # Panics
    /// Panics if the frame has already been read from the channel.
    pub fn read_ood_evaluation_frame(&mut self) -> EvaluationFrame<E> {
        self.ood_frame.take().expect("already read")
    }

    /// Returns evaluations of composition polynomial columns at z^m, where z is the out-of-domain
    /// point, and m is the number of composition polynomial columns.
    ///
    /// # Panics
    /// Panics if the evaluations have already been read from the channel.
    pub fn read_ood_evaluations(&mut self) -> Vec<E> {
        self.ood_evaluations.take().expect("already read")
    }

    /// Returns query proof-of-work nonce sent by the prover.
    pub fn read_pow_nonce(&self) -> u64 {
        self.pow_nonce
    }

    /// Returns trace states at the specified positions of the LDE domain. This also checks if
    /// the trace states are valid against the trace commitment sent by the prover.
    ///
    /// # Errors
    /// Returns [VerifierError::TraceQueryDoesNotMatchCommitment] if the batch Merkle proof
    /// does not verify against `commitment` at `positions`.
    ///
    /// # Panics
    /// Panics if the trace states have already been read from the channel.
    pub fn read_trace_states(
        &mut self,
        positions: &[usize],
        commitment: &H::Digest,
    ) -> Result<Vec<Vec<B>>, VerifierError> {
        // make sure the states included in the proof correspond to the trace commitment
        MerkleTree::verify_batch(commitment, positions, &self.trace_proof)
            .map_err(|_| VerifierError::TraceQueryDoesNotMatchCommitment)?;
        Ok(self.trace_states.take().expect("already read"))
    }

    /// Returns constraint evaluations at the specified positions of the LDE domain. This also
    /// checks if the constraint evaluations are valid against the constraint commitment sent by
    /// the prover.
    ///
    /// # Errors
    /// Returns [VerifierError::ConstraintQueryDoesNotMatchCommitment] if the batch Merkle proof
    /// does not verify against `commitment` at `positions`.
    ///
    /// # Panics
    /// Panics if the constraint evaluations have already been read from the channel.
    pub fn read_constraint_evaluations(
        &mut self,
        positions: &[usize],
        commitment: &H::Digest,
    ) -> Result<Vec<Vec<E>>, VerifierError> {
        MerkleTree::verify_batch(commitment, positions, &self.constraint_proof)
            .map_err(|_| VerifierError::ConstraintQueryDoesNotMatchCommitment)?;
        Ok(self.constraint_evaluations.take().expect("already read"))
    }
}
// FRI VERIFIER CHANNEL IMPLEMENTATION
// ================================================================================================
impl<B, E, H> FriVerifierChannel<E> for VerifierChannel<B, E, H>
where
    B: StarkField,
    E: FieldElement<BaseField = B>,
    H: ElementHasher<BaseField = B>,
{
    type Hasher = H;

    // Returns the number of partitions used when generating the FRI proof.
    fn read_fri_num_partitions(&self) -> usize {
        self.fri_num_partitions
    }

    // Read-once: panics if the commitments were already taken.
    fn read_fri_layer_commitments(&mut self) -> Vec<H::Digest> {
        self.fri_roots.take().expect("already read")
    }

    // Pops the proof for the next (front-most) FRI layer.
    // NOTE(review): remove(0) is O(n) in remaining layers; presumably the
    // layer count is small enough that this doesn't matter — confirm.
    fn take_next_fri_layer_proof(&mut self) -> BatchMerkleProof<H> {
        self.fri_layer_proofs.remove(0)
    }

    // Pops the queried values for the next (front-most) FRI layer.
    fn take_next_fri_layer_queries(&mut self) -> Vec<E> {
        self.fri_layer_queries.remove(0)
    }

    // Read-once: panics if the remainder was already taken.
    fn take_fri_remainder(&mut self) -> Vec<E> {
        self.fri_remainder.take().expect("already read")
    }
}
|
v1_imports!();
use std::sync::Arc;
use rocket::{Route, State};
use authn::AuthnHolder;
use db::staff;
use session::SessionManager;
/// Returns the Rocket routes exposed by this module (staff listing,
/// deletion, and batch creation).
pub fn get_routes() -> Vec<Route> {
    routes![get_staff, rm_staff, new_staff]
}
#[allow(needless_pass_by_value)]
#[get("/staff")]
fn get_staff(_usr: staff::Admin, conn: DatabaseConnection) -> V1Response<StaffList> {
match staff::get_all(&conn) {
Ok(v) => Ok(Json(StaffList { staff: v })),
Err(e) => {
error!("Unable to fetch staff: {:?}", e);
Err(internal_server_error!("database error"))
}
}
}
/// Deletes the staff member with the given `id`. Requires an authenticated
/// admin (`_usr` guard). After a successful delete, any active session for
/// the removed account's email is also revoked.
#[allow(needless_pass_by_value)]
#[delete("/staff/<id>")]
fn rm_staff(
    id: i32,
    _usr: staff::Admin,
    conn: DatabaseConnection,
    auth: State<AuthnHolder>,
    manager: State<Arc<SessionManager>>,
) -> V1Response<GenericMessage> {
    // Look up the target first so we have its email for session revocation
    // (and so a missing id yields a "no such staff member" error).
    let target = staff::get(&conn, id).map_err(select_error_handler!("no such staff member"))?;
    staff::delete(&conn, &target).map_err(|e| diesel_error_handler!(e))?;
    // Revoke only after the delete succeeded, keyed by the target's email.
    manager.remove_session(&target.email, &auth);
    Ok(generic_message!("ok"))
}
/// Creates staff members in bulk. Entries with an empty email or empty full
/// name are silently dropped rather than failing the whole request.
#[allow(needless_pass_by_value)]
#[post("/staff", data = "<body>")]
fn new_staff(
    mut body: Json<NewStaffList>,
    _usr: staff::Admin,
    conn: DatabaseConnection,
) -> V1Response<GenericMessage> {
    // Filter out incomplete entries before inserting the batch.
    // (is_empty() is the idiomatic form of `!= ""` — clippy::comparison_to_empty)
    body.staff.retain(|s| !s.email.is_empty() && !s.full_name.is_empty());
    staff::create_batch(&conn, &body.staff).map_err(|e| diesel_error_handler!(e))?;
    Ok(generic_message!("ok"))
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.