text stringlengths 8 4.13M |
|---|
#[cfg(test)]
mod test;
use bson::RawDocumentBuf;
use serde::Deserialize;
use crate::{
bson::{doc, Document},
cmap::{Command, RawCommandResponse, StreamDescription},
error::Result,
operation::{append_options, OperationWithDefaults, Retryability},
options::ListDatabasesOptions,
selection_criteria::{ReadPreference, SelectionCriteria},
};
/// Operation backing the `listDatabases` server command: lists the databases
/// on the server, optionally filtered, optionally returning names only.
#[derive(Debug)]
pub(crate) struct ListDatabases {
    /// Optional server-side filter applied to the returned database documents.
    filter: Option<Document>,
    /// When true, asks the server to return only database names.
    name_only: bool,
    /// Extra options appended verbatim to the command document.
    options: Option<ListDatabasesOptions>,
}
impl ListDatabases {
pub fn new(
filter: Option<Document>,
name_only: bool,
options: Option<ListDatabasesOptions>,
) -> Self {
ListDatabases {
filter,
name_only,
options,
}
}
#[cfg(test)]
pub(crate) fn empty() -> Self {
ListDatabases {
filter: None,
name_only: false,
options: None,
}
}
}
impl OperationWithDefaults for ListDatabases {
    /// Each database is returned to the caller as a raw BSON document.
    type O = Vec<RawDocumentBuf>;
    type Command = Document;

    /// Name of the server command this operation issues.
    const NAME: &'static str = "listDatabases";

    /// Builds the `listDatabases` command body, forwarding the optional
    /// filter and any extra options. The command always targets `admin`.
    fn build(&mut self, _description: &StreamDescription) -> Result<Command> {
        let mut body: Document = doc! {
            Self::NAME: 1,
            "nameOnly": self.name_only
        };
        if let Some(ref filter) = self.filter {
            body.insert("filter", filter.clone());
        }
        // Merge user-supplied options (may error on invalid serialization).
        append_options(&mut body, self.options.as_ref())?;
        Ok(Command::new(
            Self::NAME.to_string(),
            "admin".to_string(),
            body,
        ))
    }

    /// Deserializes the reply and extracts the `databases` array.
    fn handle_response(
        &self,
        raw_response: RawCommandResponse,
        _description: &StreamDescription,
    ) -> Result<Self::O> {
        let response: Response = raw_response.body()?;
        Ok(response.databases)
    }

    /// Always selects the primary for this command.
    ///
    /// NOTE(review): `Some(...).as_ref()` borrows a temporary `Option` that
    /// is dropped at the end of the expression, which looks like E0515
    /// ("cannot return reference to temporary"); consider caching the
    /// criteria in a const or struct field — confirm against the trait's
    /// intended signature.
    fn selection_criteria(&self) -> Option<&SelectionCriteria> {
        Some(SelectionCriteria::ReadPreference(ReadPreference::Primary)).as_ref()
    }

    /// `listDatabases` is a read command and may be retried as such.
    fn retryability(&self) -> Retryability {
        Retryability::Read
    }
}
/// Wire shape of a successful `listDatabases` reply; only the `databases`
/// array is retained — other reply fields are ignored by serde.
#[derive(Debug, Deserialize)]
pub(crate) struct Response {
    /// One raw BSON document per database on the server.
    databases: Vec<RawDocumentBuf>,
}
|
/// Greets for n = 1..=9 and reports each number's parity.
fn main() {
    for n in 1..10 {
        println!("Hello, world! {}", n);
        // Pick the parity label as an expression rather than branching twice.
        let label = if n % 2 == 0 { " even number" } else { " odd number" };
        println!("{}", label);
    }
}
|
use super::spot::Spot;
use super::bonuses::{WordBonus, LetterBonus};
use super::{Move, Direction, Tile};
/// The board we're playing on
///
/// It contains an array of `Spot` and provides shortcuts to interact with them.
/// All position ranges start at 0.
/// The ordinate position 0 is considered at the top.
pub struct Board {
    // Row-major 15x15 grid: the spot at (x, y) lives at index y * 15 + x.
    spots: Vec<Spot>,
}
impl Board {
/// Create a new board
///
/// The board contains the most used positions for its bonuses.
/// It is the one found on [the scrabble wikipedia page](https://en.wikipedia.org/wiki/Scrabble).
///
/// # Return Value
/// A board of 15*15
pub fn new() -> Board {
    // Start from a bonus-free 15x15 board.
    let mut spots = vec![Spot::new(); 225];

    // Flat (y * 15 + x) indices of each bonus kind.
    let triple_word = [0usize, 7, 14, 105, 119, 210, 217, 224];
    let double_word = [16usize, 28, 32, 42, 48, 56, 64, 70, 112, 154, 160,
                       168, 176, 182, 192, 196, 208];
    let triple_letter = [20usize, 24, 76, 80, 84, 88, 136, 140, 144, 148,
                         200, 204];
    let double_letter = [3usize, 11, 36, 38, 45, 52, 59, 92, 96, 98, 102, 108,
                         116, 122, 126, 128, 132, 165, 172, 179, 186, 188,
                         213, 221];

    for &i in triple_word.iter() {
        spots[i].bonus_word = WordBonus::Triple;
    }
    for &i in double_word.iter() {
        spots[i].bonus_word = WordBonus::Double;
    }
    for &i in triple_letter.iter() {
        spots[i].bonus_letter = LetterBonus::Triple;
    }
    for &i in double_letter.iter() {
        spots[i].bonus_letter = LetterBonus::Double;
    }

    Board { spots }
}
/// Get a reference to a spot
///
/// # Arguments
/// * `x` - the abscissa position on the board (from left (0) to right (width - 1)).
/// * `y` - the ordinate position on the board (from top (0) to bottom (height - 1)).
///
/// # Panic
/// If `x` or `y` are greater or equal to 15.
fn get_spot(&self, x: u8, y: u8) -> &Spot {
    assert!(x < 15, "x is out of the board");
    assert!(y < 15, "y is out of the board");
    // Row-major layout: one row is 15 spots wide.
    &self.spots[(y * 15 + x) as usize]
}
/// Get a mutable reference to a spot
///
/// # Arguments
/// * `x` - the abscissa position on the board.
/// * `y` - the ordinate position on the board.
///
/// # Panic
/// If `x` or `y` are greater or equal to 15.
fn get_spot_mut(&mut self, x: u8, y: u8) -> &mut Spot {
    assert!(x < 15, "x is out of the board");
    assert!(y < 15, "y is out of the board");
    &mut self.spots[(y * 15 + x) as usize]
}
/// Get a char at a position
///
/// # Arguments
/// * `x` - the abscissa position on the board.
/// * `y` - the ordinate position on the board.
///
/// # Return Value
/// An `Option<char>`; `None` when the position is off the board or the spot
/// holds no tile.
pub fn get_letter(&self, x: u8, y: u8) -> Option<char> {
    // Off-board positions are simply empty rather than a panic, which lets
    // callers probe neighbours without bounds checks of their own.
    if x >= 15 || y >= 15 {
        return None;
    }
    // `match Some/None` replaced with the idiomatic Option::map.
    self.get_spot(x, y).tile.as_ref().map(|tile| tile.letter())
}
/// Get a clone of the optional tile at position (`x`, `y`)
///
/// # Return Value
/// An `Option<Tile>`; `None` when the position is off the board or there is
/// no tile on this spot.
pub fn get_tile(&self, x: u8, y: u8) -> Option<Tile> {
    if x >= 15 || y >= 15 {
        None
    } else {
        self.get_spot(x, y).tile.clone()
    }
}
/// Whether it is possible to play this move
///
/// Warning : The move should not be added to the board before calling this
/// function. If you do so, you'll get an undefined behavior
///
/// # Argument
/// * `mv` - The move we're trying to add
pub fn can_place(&self, mv: &Move) -> bool {
    // Make sure mv starts on the board.
    assert!(mv.x() < 15);
    assert!(mv.y() < 15);
    let mut pos_x = mv.x();
    let mut pos_y = mv.y();
    let len = mv.word().chars().count();
    // The offsets let one loop serve both directions.
    let (offset_x, offset_y) = match mv.direction() {
        Direction::Horizontal => {
            // The word occupies columns pos_x .. pos_x + len - 1; the last
            // valid column is 14, so the move fits iff pos_x + len <= 15.
            // (The previous `>= 15` test wrongly rejected words ending
            // exactly on the last column.)
            if pos_x as usize + len > 15 {
                return false;
            }
            (1, 0)
        }
        Direction::Vertical => {
            if pos_y as usize + len > 15 {
                return false;
            }
            (0, 1)
        }
    };
    for c in mv.word().chars() {
        // A letter already on the board must match the word at this spot.
        if let Some(letter) = self.get_letter(pos_x, pos_y) {
            if letter != c {
                return false;
            }
        }
        pos_x += offset_x;
        pos_y += offset_y;
    }
    // No conflict found, the move is playable.
    true
}
/// Get the needed letters to make the word
///
/// Returns the letters not present on the board to place `mv`
///
/// # Arguments
/// * `mv` - The move the player wants to make
///
/// # Return Value
/// A vector of chars
pub fn needed_letters(&self, mv: &Move) -> Vec<char> {
    let word_len = mv.word().chars().count();
    // At most every letter of the word is needed. (The previous
    // `count() - 1` capacity would underflow and panic on an empty word.)
    let mut letters: Vec<char> = Vec::with_capacity(word_len);
    // The offsets let one loop serve both directions.
    let (offset_x, offset_y) = match mv.direction() {
        Direction::Horizontal => (1u8, 0u8),
        Direction::Vertical => (0, 1),
    };
    let mut pos_x = mv.x();
    let mut pos_y = mv.y();
    for c in mv.word().chars() {
        // The spot is free: the player must supply this letter.
        if self.get_letter(pos_x, pos_y).is_none() {
            letters.push(c);
        }
        pos_x += offset_x;
        pos_y += offset_y;
    }
    letters
}
/// Add a move to the board
///
/// Warning : No check will be made in this function, use with care,
/// you should consider calling `can_place()` and `needed_letters`
/// before calling this function.
///
/// # Arguments
/// * `mv` - The valid move you want to place
/// * `tiles` - The needed tiles. You can get it by using `needed_letters()`
/// and `Hand::remove()`
pub fn add_move(&mut self, mv: Move, tiles: Vec<Tile>) {
    // Direction of travel across the board.
    let (dx, dy) = match mv.direction() {
        Direction::Horizontal => (1u8, 0u8),
        Direction::Vertical => (0, 1),
    };
    let mut x = mv.x();
    let mut y = mv.y();
    let mut remaining_tiles = tiles.into_iter();
    for c in mv.word().chars() {
        // Only fill spots that are still empty; letters already on the
        // board are reused as-is.
        if self.get_letter(x, y).is_none() {
            let mut tile: Tile = remaining_tiles.next().unwrap();
            if tile.wildcard() {
                // Fix the wildcard to the letter it stands for in this word.
                tile.set_wildcard(c);
            }
            self.get_spot_mut(x, y).tile = Some(tile);
        }
        x += dx;
        y += dy;
    }
}
/// Get the score of a word made perpendicularly
///
/// It should be called whenever a tile is being set with at least one
/// perpendicular neighbor around.
///
/// # Arguments
/// * `initial_x` - Abscissa position somewhere in the perpendicular word
/// * `initial_y` - Ordinate position somewhere in the perpendicular word
/// * `direction` - The direction the original word was
/// * `added` - The added tile at this position
///
/// # Return Value
/// The score the perpendicular word made including the added letter
fn perp_score(&self, initial_x : u8, initial_y : u8, direction : Direction,
    added : &Tile) -> u32 {
    assert!(initial_x < 15);
    assert!(initial_y < 15);
    let mut score = 0;
    // i16 coordinates so walking off the top/left edge goes negative
    // instead of wrapping a u8.
    let mut pos_x = initial_x as i16;
    let mut pos_y = initial_y as i16;
    let offset_x : i16;
    let offset_y : i16;
    match direction {
        Direction::Horizontal => {
            offset_x = 1;
            offset_y = 0;
        }
        Direction::Vertical => {
            offset_x = 0;
            offset_y = 1;
        }
    }
    // Initial step, count the number of points of the added letter.
    score += added.points() as u32;
    // Walk backwards along the PERPENDICULAR axis: the offsets are swapped
    // on purpose (x moves by offset_y, y by offset_x).
    pos_x -= offset_y;
    pos_y -= offset_x;
    while pos_x >= 0 && pos_y >= 0 && pos_y < 15 &&
        self.get_letter(pos_x as u8, pos_y as u8) != None {
        score += self.get_tile(pos_x as u8, pos_y as u8)
            .unwrap().points() as u32;
        pos_x -= offset_y;
        pos_y -= offset_x;
    }
    // Then walk forwards from the initial spot along the same
    // perpendicular axis.
    pos_x = initial_x as i16 + offset_y;
    pos_y = initial_y as i16 + offset_x;
    while pos_x < 15 && pos_y < 15 &&
        self.get_letter(pos_x as u8, pos_y as u8) != None {
        score += self.get_tile(pos_x as u8, pos_y as u8).unwrap().points() as u32;
        pos_x += offset_y;
        pos_y += offset_x;
    }
    score
}
/// Get the score of a move
///
/// This function needs sources for the way of calculating the points.
/// It seems almost standard but it's good to have a rule somewhere.
///
/// # Arguments
/// * `mv` - The move the player wants to make
/// * `removed` - The tiles the player removed from its hand to play.
/// You can get it by using `needed_letters()` and `Hand::remove()`
pub fn score(&self, mv: &Move, removed: &Vec<Tile>) -> u32 {
    let mut score = 0;
    let mut word_bonus = 1;
    let mut tile_score: u32;
    let mut pos_x = mv.x();
    let mut pos_y = mv.y();
    let (offset_x, offset_y) = match mv.direction() {
        Direction::Horizontal => (1u8, 0u8),
        Direction::Vertical => (0, 1),
    };
    let mut remove_it = removed.iter();
    for _ in mv.word().chars() {
        // (Restored `&current_spot` — the source contained the mangled
        // token `¤t_spot`, an HTML-entity corruption of `&curren`.)
        let current_spot = self.get_spot(pos_x, pos_y);
        match &current_spot.tile {
            None => {
                // This spot is filled by the player: bonuses apply.
                let removed_tile = remove_it.next().unwrap();
                let (lb, wb) = current_spot.get_bonuses_value();
                word_bonus *= wb;
                // Perpendicular neighbours of this spot (above/below for a
                // horizontal word, left/right for a vertical one). The
                // previous `pos - offset` arithmetic underflowed u8 and
                // panicked on row/column 0; `checked_sub` treats off-board
                // as a free spot, matching how `get_letter` handles >= 15.
                let next_free =
                    self.get_letter(pos_x + offset_y, pos_y + offset_x).is_none();
                let prev_free =
                    match (pos_x.checked_sub(offset_y), pos_y.checked_sub(offset_x)) {
                        (Some(px), Some(py)) => self.get_letter(px, py).is_none(),
                        _ => true,
                    };
                // NOTE(review): this branch fires when at least one
                // perpendicular neighbour is *free*, so it also fires for
                // isolated tiles; `perp_score`'s doc says it should run when
                // a neighbour is *occupied* — confirm the intended rule.
                if next_free || prev_free {
                    tile_score =
                        self.perp_score(pos_x, pos_y, mv.direction(), removed_tile);
                } else {
                    tile_score = lb * removed_tile.points() as u32;
                }
            }
            Some(tile) => {
                // Reused board letter: face value only, no bonuses.
                tile_score = tile.points() as u32;
            }
        }
        score += tile_score;
        pos_x += offset_x;
        pos_y += offset_y;
    }
    score * word_bonus
}
/// Get the bonuses for a given tile
///
/// # Argument
/// * The positions
///
/// # Return Value
/// A tuple made by:
/// * an enum LetterBonus
/// * an enum WordBonus
pub fn get_bonuses(&self, x: u8, y: u8) -> (LetterBonus, WordBonus) {
    assert!(x < 15);
    assert!(y < 15);
    self.get_spot(x, y).get_bonuses()
}
}
|
// Copyright (c) 2016, <daggerbot@gmail.com>
// This software is available under the terms of the zlib license.
// See COPYING.md for more information.
use std::cell::RefCell;
use std::mem;
use std::ptr;
use std::rc::Rc;
use std::sync::mpsc::Sender;
use aurum::winutil;
use winapi;
use device::DeviceBridge;
use error::Result;
use event::Event;
use imp::windows::display::DisplayProvider;
use imp::windows::pixel_format::PixelFormatProvider;
use imp::windows::window::WindowManager;
use imp::windows::worker::Worker;
use util::GetRef;
/// Holds an `HDC` for the display device.
pub struct DeviceDc {
    // Raw Win32 device-context handle; released in `Drop` via `DeleteDC`.
    hdc: winapi::HDC,
}
impl DeviceDc {
pub fn default () -> Result<DeviceDc> {
let driver: Vec<u16> = "DISPLAY\0".encode_utf16().collect();
let hdc;
unsafe {
hdc = winapi::CreateDCW(driver.as_ptr(), ptr::null(), ptr::null(), ptr::null());
}
if hdc.is_null() {
return Err(err!(SystemError("CreateDC"): winutil::Error::last().unwrap()));
}
Ok(DeviceDc {
hdc: hdc,
})
}
}
impl Drop for DeviceDc {
    /// Releases the device context; a failed `DeleteDC` is only logged
    /// because destructors cannot propagate errors.
    fn drop (&mut self) {
        unsafe {
            if winapi::DeleteDC(self.hdc) == 0 {
                error!("DeleteDC failed: {}", winutil::Error::last().unwrap());
            }
        }
    }
}
/// Windows implementation for `Device`.
pub struct DeviceProvider {
    // Channel used to deliver events back to the display's consumer.
    event_sender: Rc<Sender<Event>>,
    // Lazily-spawned shared worker; `None` until `spawn_worker` runs.
    worker: Rc<RefCell<Option<Rc<Worker>>>>,
    // Window manager shared with the owning display.
    window_manager: Rc<WindowManager>,
    // Display DC shared with the pixel-format iterators this device creates.
    device_dc: Rc<DeviceDc>,
}
impl DeviceProvider {
    /// Builds a device provider that shares the display's event channel,
    /// worker slot and window manager, and opens its own display DC.
    pub fn default (display: &DisplayProvider) -> Result<DeviceProvider> {
        Ok(DeviceProvider {
            event_sender: display.event_sender(),
            worker: display.worker(),
            window_manager: display.window_manager(),
            device_dc: Rc::new(try!(DeviceDc::default())),
        })
    }

    /// Returns the shared worker, spawning it on first use.
    pub fn spawn_worker (&self) -> Result<Rc<Worker>> {
        let mut worker_ref = self.worker.borrow_mut();
        // Reuse an already-spawned worker when present.
        if let Some(ref worker) = *worker_ref {
            return Ok(worker.clone());
        }
        let worker = Rc::new(try!(Worker::spawn(self)));
        *worker_ref = Some(worker.clone());
        Ok(worker)
    }

    /// Borrows the shared window manager.
    pub fn window_manager (&self) -> &WindowManager {
        self.window_manager.as_ref()
    }
}
impl DeviceBridge for DeviceProvider {
    type PixelFormat = PixelFormatProvider;
    type PixelFormats = PixelFormatsProvider;

    /// The default (unspecified) pixel format.
    fn default_pixel_format (&self) -> Result<PixelFormatProvider> {
        Ok(PixelFormatProvider::default())
    }

    /// Builds an iterator over the device's pixel formats.
    fn pixel_formats (&self) -> Result<PixelFormatsProvider> {
        let max;
        unsafe {
            // Clear the thread error state first so the check below only
            // sees errors raised by this call.
            winapi::SetLastError(0);
            // Called with index 0 and no descriptor; the return value is
            // used as the highest pixel-format index to iterate up to.
            max = winapi::DescribePixelFormat(self.device_dc.hdc, 0, 0, ptr::null_mut());
        }
        if let Some(err) = winutil::Error::last() {
            return Err(err!(SystemError("DescribePixelFormat"): err));
        }
        Ok(PixelFormatsProvider {
            device_dc: self.device_dc.clone(),
            cur: 0,
            max: max,
        })
    }
}
impl GetRef<Sender<Event>> for DeviceProvider {
    /// Exposes the event channel so generic code can send events
    /// through this device.
    fn get_ref (&self) -> &Sender<Event> {
        self.event_sender.as_ref()
    }
}
/// Windows implementation for `PixelFormats`.
pub struct PixelFormatsProvider {
    // DC the formats are described against; kept alive by the Rc.
    device_dc: Rc<DeviceDc>,
    // Next index to yield: 0 is the default format, then 1..=max.
    cur: i32,
    // Highest valid pixel-format index reported by DescribePixelFormat.
    max: i32,
}
impl Iterator for PixelFormatsProvider {
    type Item = Result<PixelFormatProvider>;

    /// Yields the default format first (index 0), then each numbered format
    /// described by the system, stopping after `max`.
    fn next (&mut self) -> Option<Result<PixelFormatProvider>> {
        if self.cur > self.max {
            return None;
        }
        let pf;
        if self.cur == 0 {
            // Index 0 stands for "no specific format".
            pf = PixelFormatProvider::default();
            self.cur = 1;
        } else {
            let mut pfd;
            unsafe {
                // SAFETY: PIXELFORMATDESCRIPTOR is a plain C struct for
                // which the all-zero bit pattern is valid, so `mem::zeroed`
                // is sound here. (Replaces the deprecated
                // `mem::uninitialized`, which is undefined behavior to use
                // this way.)
                pfd = mem::zeroed();
                winapi::SetLastError(0);
                winapi::DescribePixelFormat(self.device_dc.hdc,
                                            self.cur,
                                            mem::size_of::<winapi::PIXELFORMATDESCRIPTOR>() as u32,
                                            &mut pfd);
            }
            self.cur += 1;
            if let Some(err) = winutil::Error::last() {
                return Some(Err(err!(SystemError("DescribePixelFormat"): err)));
            }
            pf = PixelFormatProvider::from(pfd);
        }
        Some(Ok(pf))
    }
}
|
use std::ops::AddAssign;
/// An ordered list of callbacks sharing one argument and return type.
///
/// Callbacks are registered with `+=` (via `AddAssign`) and invoked in
/// registration order by [`Callbacks::notify_all`]; return values are
/// discarded. (Type parameters renamed from the non-idiomatic
/// `_Arguments`/`_Return` — callers pass them positionally, so this is
/// source-compatible.)
pub struct Callbacks<'a, Args, Ret>
{
    /// The registered callbacks, in registration order.
    callbacks: Vec<Box<dyn 'a + Fn(&Args) -> Ret>>,
}

impl<'a, Args, Ret> Callbacks<'a, Args, Ret>
{
    /// Creates an empty callback list.
    pub fn new() -> Self {
        Self {
            callbacks: Vec::new(),
        }
    }

    /// Invokes every registered callback with `arguments`, in registration
    /// order. Return values are ignored.
    pub fn notify_all(&self, arguments: &Args)
    {
        for callback in &self.callbacks {
            callback(arguments);
        }
    }
}

impl<'a, Args, Ret, T> AddAssign<T> for Callbacks<'a, Args, Ret>
    where T: 'a + Fn(&Args) -> Ret
{
    /// Registers `other` as an additional callback.
    fn add_assign(&mut self, other: T) {
        self.callbacks.push(Box::new(other));
    }
}
#[cfg(test)]
mod test;
|
// Implementation of the Bisection method algorithm as found on
// CENGAGE Learning's Numerical Analysis, Tenth Edition,
// by Richard L. Burden, J. Douglas Faires, Annete M. Burden.
// Copyright © 2019 Andre Rossi Korol <anrobits@yahoo.com.br>
// This work is free. You can redistribute it and/or modify it under the
// terms of the Do What The Fuck You Want To Public License, Version 2,
// as published by Sam Hocevar. See the COPYING file for more details.
#[macro_use]
extern crate text_io;
extern crate meval;
/// Interactive bisection method: reads a function expression, an interval
/// [a, b], a tolerance and an iteration cap from stdin, then prints the
/// approximated root (or a failure message).
fn main() {
    println!("Enter the function expression (e.g., x - 2 * sin(x)): ");
    let expression_str: String = read!("{}\n");
    println!("Enter the first interval endpoint (a): ");
    let mut a: f64 = read!();
    println!("Enter the second interval endpoint (b): ");
    let mut b: f64 = read!();
    println!("Enter the tolerance (ε): ");
    let tol: f64 = read!();
    println!("Enter the maximum number of iterations (N₀): ");
    let max_iter: usize = read!();
    let expr: meval::Expr = expression_str.parse().unwrap();
    let func = expr.bind("x").unwrap();
    let mut p: f64;
    let mut fp: f64;
    // Used to round f(p) to the tolerance's precision.
    let dim: f64 = 1.0 / tol;
    let mut rounded_fp: f64;
    // Midpoint of the previous iteration, for the relative-error test.
    let mut prev_p_opt: Option<f64> = None;
    // Step 1
    // NOTE(review): the textbook algorithm also requires f(a)·f(b) < 0
    // before starting; no such validation is performed here — confirm the
    // inputs are expected to satisfy it.
    let mut i: usize = 1;
    let mut fa: f64 = func(a);
    // Step 2
    while i <= max_iter {
        // Step 3: midpoint of the current bracket.
        p = a + (b - a) / 2.0;
        fp = func(p);
        // Step 4: stop if f(p) rounds to zero (first iteration) or the
        // relative change between midpoints is below the tolerance.
        rounded_fp = (fp * dim).round() / dim;
        match prev_p_opt {
            None => {
                if rounded_fp == 0.0 {
                    println!("{}", p);
                    return;
                }
            }
            Some(prev_p) => {
                // NOTE(review): divides by p.abs(); this is a division by
                // zero if the midpoint is exactly 0 — confirm acceptable.
                if (p - prev_p).abs() / p.abs() < tol {
                    println!("{}", p);
                    return;
                }
            }
        }
        // Step 5
        i += 1;
        // Step 6: keep the half-interval where the sign change remains.
        if fa * fp > 0.0 {
            a = p;
            fa = fp;
        } else {
            b = p;
        }
        prev_p_opt = Some(p);
    }
    // Step 7: iteration cap reached without convergence.
    println!("Method failed after {} iterations", max_iter);
}
|
use cgmath::Vector2;
use cgmath::Matrix4;
use math::Rot;
/// Builds a view matrix from a camera position, a zoom factor `size`, and a
/// rotation given by its precomputed sine/cosine.
///
/// The upper-left 2x2 block is the rotation scaled by `1 / size`; the fourth
/// column carries the rotated, scaled translation.
pub fn view(pos: Vector2<f32>, size: f32, rot: &Rot) -> Matrix4<f32> {
    let cos = rot.cos;
    let sin = rot.sin;
    let Vector2 { x, y } = pos;
    let m00 = cos / size;
    let m01 = -sin / size;
    let m10 = sin / size;
    // m11 equals m00 (cos / size), so the value is reused.
    let m11 = m00;
    let m03 = y * m10 - x * m00;
    // NOTE(review): `m13` uses `m00` where the symmetric form would use
    // `m11`; the two are equal here, but confirm this is the intended
    // formula.
    let m13 = x * m01 - y * m00;
    [
        [m00, m01, 0.0, m03],
        [m10, m11, 0.0, m13],
        [0.0, 0.0, 1.0, 0.0],
        [0.0, 0.0, 0.0, 1.0],
    ].into()
}
/// Builds a projection matrix from an aspect ratio and near/far planes.
///
/// NOTE(review): despite the name, the third row mixes `1 / (far - near)`
/// with `1 / near` in the last column, which is not the standard
/// orthographic form — confirm the intended projection convention.
pub fn ortho(aspect: f32, near: f32, far: f32) -> Matrix4<f32> {
    // Inverse depth range.
    let n2f = 1.0 / (far - near);
    // Horizontal scale: 2 / aspect.
    let toa = 2.0 / aspect;
    let oof = 1.0 / near;
    [
        [toa, 0.0, 0.0, 0.0],
        [0.0, 2.0, 0.0, 0.0],
        [0.0, 0.0, n2f, oof],
        [0.0, 0.0, 0.0, 1.0],
    ].into()
}
|
use serde_derive::Deserialize;
use std::path::PathBuf;
/// Application configuration as deserialized from a TOML file.
#[derive(Debug, Deserialize, Clone)]
pub struct TomlConfig {
    /// Directory where build artifacts are stored.
    pub artifacts_dir: PathBuf,
    /// Prefix prepended to generated URLs.
    pub url_prefix: String,
    /// Optional path to the database; `None` when absent from the file.
    pub db_path: Option<String>,
}
|
use std::path::PathBuf;
use log::error;
use serde::{Serialize, Deserialize};
use crate::io::ReaderWriter;
use super::{config::Config, note::Note};
/// One pending copy task: a temp note file waiting to be written to its
/// final destination within a context's notes directory.
#[derive(Debug, Serialize, Deserialize)]
pub struct BucketItem {
    // Context whose notes directory the file belongs to.
    context: String,
    // Name of the temp file inside the temp directory.
    file: String,
    // Final file name; derived from the note's content when `None`.
    file_name: Option<String>,
    // Destination path, relative to the context's notes directory.
    dest_path: PathBuf
}
/// The Bucket is a file with the pending tasks
/// for copying note files from the temp directory
/// in case an editor crashed but left the temp file
#[derive(Debug, Serialize, Deserialize)]
pub struct Bucket {
    // Pending items, processed in order by `Bucket::process`.
    items: Vec<BucketItem>
}
impl Bucket {
    /// Replays every pending item: reads the temp note file, writes it to
    /// its destination, then removes the temp file. Failures are logged and
    /// do not stop processing of the remaining items.
    pub fn process(&self, config: Config, temp_dir: PathBuf) {
        for item in &self.items {
            match config.get_notes_dir_from(&item.context) {
                Ok(base_path) => {
                    let temp_note_path = temp_dir.join(&item.file);
                    let written = match std::fs::read_to_string(&temp_note_path) {
                        Ok(text) => {
                            let writer = ReaderWriter::new(base_path);
                            let note = Note::from_text(text);
                            // Prefer the stored file name; otherwise derive
                            // one from the note itself. (Restored `&note` —
                            // the source contained the mangled token `¬e`,
                            // an HTML-entity corruption of `&not`.)
                            let file_name = item
                                .file_name
                                .clone()
                                .unwrap_or_else(|| writer.get_file_name_from_note(&note));
                            // Only delete the temp file once the note was
                            // saved successfully.
                            writer
                                .save_note_at(note, &item.dest_path, file_name, false)
                                .and_then(|_| Ok(std::fs::remove_file(&temp_note_path)?))
                        }
                        Err(e) => Err(e.into()),
                    };
                    if let Err(e) = written {
                        error!("{}", e);
                    }
                }
                Err(e) => {
                    error!("{}", e);
                }
            }
        }
    }
}
|
use std::path::{Path, PathBuf};
use nu_engine::{current_dir, CallExt};
use nu_path::expand_path_with;
use nu_protocol::{engine::Command, Example, Signature, Span, SyntaxShape, Value};
use super::PathSubcommandArguments;
/// Flags and context collected from the `path exists` invocation.
struct Arguments {
    // Optional column paths to operate on (the `-c`/`--columns` flag).
    columns: Option<Vec<String>>,
    // Current working directory, used to expand relative paths.
    pwd: PathBuf,
}
impl PathSubcommandArguments for Arguments {
    /// Columns the shared `operate` helper should apply the check to.
    fn get_columns(&self) -> Option<Vec<String>> {
        self.columns.clone()
    }
}
/// The `path exists` subcommand.
#[derive(Clone)]
pub struct SubCommand;
impl Command for SubCommand {
    /// The command's invocation name.
    fn name(&self) -> &str {
        "path exists"
    }

    /// Signature: no positional arguments, one optional `--columns` flag.
    fn signature(&self) -> Signature {
        Signature::build("path exists").named(
            "columns",
            SyntaxShape::Table,
            "Optionally operate by column path",
            Some('c'),
        )
    }

    fn usage(&self) -> &str {
        "Check whether a path exists"
    }

    /// Maps each input value through `exists`, expanding relative paths
    /// against the current working directory.
    fn run(
        &self,
        engine_state: &nu_protocol::engine::EngineState,
        stack: &mut nu_protocol::engine::Stack,
        call: &nu_protocol::ast::Call,
        input: nu_protocol::PipelineData,
    ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
        let head = call.head;
        let args = Arguments {
            columns: call.get_flag(engine_state, stack, "columns")?,
            pwd: current_dir(engine_state, stack)?,
        };
        input.map(
            move |value| super::operate(&exists, &args, value, head),
            engine_state.ctrlc.clone(),
        )
    }

    // Windows-flavoured examples; path syntax differs per platform.
    #[cfg(windows)]
    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Check if a file exists",
                example: "'C:\\Users\\joe\\todo.txt' | path exists",
                result: Some(Value::Bool {
                    val: false,
                    span: Span::test_data(),
                }),
            },
            Example {
                description: "Check if a file exists in a column",
                example: "ls | path exists -c [ name ]",
                result: None,
            },
        ]
    }

    // Unix-flavoured examples.
    #[cfg(not(windows))]
    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Check if a file exists",
                example: "'/home/joe/todo.txt' | path exists",
                result: Some(Value::Bool {
                    val: false,
                    span: Span::test_data(),
                }),
            },
            Example {
                description: "Check if a file exists in a column",
                example: "ls | path exists -c [ name ]",
                result: None,
            },
        ]
    }
}
/// Expands `path` against the working directory and reports whether the
/// resulting path exists on disk.
fn exists(path: &Path, span: Span, args: &Arguments) -> Value {
    let expanded = expand_path_with(path, &args.pwd);
    let val = expanded.exists();
    Value::Bool { val, span }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Runs the in-signature examples through the standard example harness.
    #[test]
    fn test_examples() {
        use crate::test_examples;
        test_examples(SubCommand {})
    }
}
|
/// This module contains classifiers for openings.
use crate::{const_tile::pos, DenseBoard, PacoAction, PacoBoard, PacoError, PieceType};
/// Returns all the openings that can be detected on the given replay.
///
/// Produces a comma-separated list; empty when the game did not start from
/// the default position or no opening matched.
pub(crate) fn classify_opening(
    initial_board: &DenseBoard,
    actions: &[PacoAction],
) -> Result<String, PacoError> {
    // Openings are only classified from the standard starting position.
    if !is_default_starting_position(initial_board) {
        return Ok(String::new());
    }
    let mut openings: Vec<String> = Vec::new();
    if is_swedish_knights(initial_board, actions)? {
        openings.push("Swedish Knights".to_string());
    }
    // Double Rai subsumes Rai, so at most one of the two is reported.
    if is_double_rai(initial_board, actions)? {
        openings.push("Double Rai".to_string());
    } else if is_rai(initial_board, actions)? {
        openings.push("Rai".to_string());
    }
    Ok(openings.join(", "))
}
/// Swedish Knights: Anything that moves Nc3 as well as Nh3, Nf4 during
/// the first 5 moves of your color.
pub(crate) fn is_swedish_knights(
    initial_board: &DenseBoard,
    actions: &[PacoAction],
) -> Result<bool, PacoError> {
    // Replay the game action by action on a scratch board.
    let mut board = initial_board.clone();
    let mut lifts = 0;
    for action in actions {
        board.execute(*action)?;
        // Knights on both c3 and f4 at any point → Swedish Knights.
        let knight_on_c3 = board.white[pos("c3").0 as usize] == Some(PieceType::Knight);
        let knight_on_f4 = board.white[pos("f4").0 as usize] == Some(PieceType::Knight);
        if knight_on_c3 && knight_on_f4 {
            return Ok(true);
        }
        // Each Lift starts a move; 10 lifts = 5 moves per player.
        if matches!(action, PacoAction::Lift(_)) {
            lifts += 1;
            if lifts > 10 {
                return Ok(false);
            }
        }
    }
    Ok(false)
}
/// Rai: h Rook goes to 3rd row by move 3, then to e3 or f3 within the next two
/// moves, without a pawn in front of it.
pub fn is_rai(initial_board: &DenseBoard, actions: &[PacoAction]) -> Result<bool, PacoError> {
    // Apply the actions one by one to the initial board
    let mut board = initial_board.clone();
    let mut lift_counter = 0;
    // In this first phase, we search until the h-rook is on the 3rd row.
    let mut action_pointer = 0;
    loop {
        // Ran out of actions without finding the rook on h3: not a Rai.
        if actions.len() <= action_pointer {
            return Ok(false);
        }
        let action = actions[action_pointer];
        board.execute(action)?;
        action_pointer += 1;
        // Is there a rook on h3
        if board.white[pos("h3").0 as usize] == Some(PieceType::Rook) {
            break;
        }
        // Is this a lift action? If so, increment the counter
        // (6 lifts = 3 moves per player).
        if let PacoAction::Lift(_) = action {
            lift_counter += 1;
            if lift_counter > 6 {
                return Ok(false);
            }
        }
    }
    // In this second phase, we search until the h-rook is on e3 or f3.
    // Reset the lift counter because you now have 2 more moves to do this.
    lift_counter = 0;
    loop {
        if actions.len() <= action_pointer {
            return Ok(false);
        }
        let action = actions[action_pointer];
        board.execute(action)?;
        action_pointer += 1;
        // Is there a rook on e3
        if board.white[pos("e3").0 as usize] == Some(PieceType::Rook) {
            // Check if there is a pawn in front of it (disqualifies the Rai)
            if board.white[pos("e4").0 as usize] == Some(PieceType::Pawn) {
                return Ok(false);
            }
            return Ok(true);
        }
        // Is there a rook on f3
        if board.white[pos("f3").0 as usize] == Some(PieceType::Rook) {
            // Check if there is a pawn in front of it
            if board.white[pos("f4").0 as usize] == Some(PieceType::Pawn) {
                return Ok(false);
            }
            return Ok(true);
        }
        // Is this a lift action? If so, increment the counter
        // (4 lifts = 2 moves per player).
        if let PacoAction::Lift(_) = action {
            lift_counter += 1;
            if lift_counter > 4 {
                return Ok(false);
            }
        }
    }
}
/// Double Rai: Both rooks on row 3 by move 6.
fn is_double_rai(initial_board: &DenseBoard, actions: &[PacoAction]) -> Result<bool, PacoError> {
    // Apply the actions one by one to the initial board.
    let mut board = initial_board.clone();
    let mut lift_counter = 0;
    // Search until both rooks are on the 3rd row.
    for action in actions {
        board.execute(*action)?;
        // Count the white rooks currently on the 3rd row (a3..=h3);
        // the manual index loop is replaced by an iterator count.
        let rooks_on_3rd_row = (pos("a3").0 as usize..=pos("h3").0 as usize)
            .filter(|&index| board.white[index] == Some(PieceType::Rook))
            .count();
        if rooks_on_3rd_row == 2 {
            return Ok(true);
        }
        // Each Lift starts a move; 12 lifts = 6 moves per player.
        if let PacoAction::Lift(_) = action {
            lift_counter += 1;
            if lift_counter > 12 {
                return Ok(false);
            }
        }
    }
    Ok(false)
}
/// Check if the given DenseBoard is the default starting position.
pub fn is_default_starting_position(initial_board: &DenseBoard) -> bool {
    *initial_board == DenseBoard::new()
}
/// Tests module
#[cfg(test)]
mod tests {
    use crate::{analysis::history_to_replay_notation, const_tile::pos, DenseBoard, PacoAction::*};

    // A full replay whose white h-rook reaches h3 and then e3 within the
    // allowed move budget — must classify as "Rai".
    #[test]
    fn test_rai() {
        let replay = history_to_replay_notation(
            DenseBoard::new(),
            &[
                Lift(pos("d2")),
                Place(pos("d4")),
                Lift(pos("d7")),
                Place(pos("d5")),
                Lift(pos("h2")),
                Place(pos("h4")),
                Lift(pos("b8")),
                Place(pos("c6")),
                // Rook reaches the 3rd row here (h1 -> h3).
                Lift(pos("h1")),
                Place(pos("h3")),
                Lift(pos("d8")),
                Place(pos("d6")),
                Lift(pos("b1")),
                Place(pos("c3")),
                Lift(pos("c8")),
                Place(pos("f5")),
                // Rook then moves to e3, completing the Rai pattern.
                Lift(pos("h3")),
                Place(pos("e3")),
                Lift(pos("d6")),
                Place(pos("b4")),
                Lift(pos("c1")),
                Place(pos("d2")),
                Lift(pos("g8")),
                Place(pos("f6")),
            ],
        )
        .expect("Error in input data");
        assert_eq!(replay.opening, "Rai");
    }
}
|
// -------------------------------------------------------------------------------//
// Module to write tests for the challenge helper functions.
// Impl by Frodo45127
// -------------------------------------------------------------------------------//
use crate::utils::*;
#[test]
/// Test to ensure our function to calculate the hamming distance is correct.
fn test_detect_hamming_distance() {
    // Classic cryptopals fixture: these two strings differ in 37 bits.
    let string_1 = b"this is a test";
    let string_2 = b"wokka wokka!!!";
    let hamming_distance = detect_hamming_distance(string_1, string_2);
    assert_eq!(hamming_distance, 37);
}
#[test]
/// Test to ensure our custom ecb encryption implementation works correctly.
fn test_encrypt_decrypt_aes_128_ecb() {
    let key: &[u8; 16] = b"YELLOW SUBMARINE";
    // Deliberately not block-aligned (19 bytes) to exercise padding.
    let plaintext = b"1234567890qwertyuio".to_vec();
    let encrypted = encrypt_aes_128_ecb(&plaintext, key);
    let decrypted = decrypt_aes_128_ecb(&encrypted, key);
    // Round-trip must restore the original plaintext.
    assert_eq!(plaintext, decrypted);
}
#[test]
/// Test to ensure our custom cbc encryption implementation works correctly.
fn test_encrypt_decrypt_aes_128_cbc() {
    let key: &[u8; 16] = b"YELLOW SUBMARINE";
    // All-zero IV keeps the fixture deterministic.
    let iv: &[u8; 16] = b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00";
    let plaintext = b"1234567890qwertyuio".to_vec();
    let encrypted = encrypt_aes_128_cbc(&plaintext, iv, key);
    let decrypted = decrypt_aes_128_cbc(&encrypted, iv, key);
    // Round-trip must restore the original plaintext.
    assert_eq!(plaintext, decrypted);
}
|
//! TODO: how to sort struct fields with serde?
//! within this mod all the struct fields should be "sorted" statically to generate the correct
//! object hash, this is annoying but we have no way to find out how to do that with serde
// use std::collections::{BTreeMap, HashMap};
// use serde::{Serialize, Serializer};
use serde::{Deserialize, Serialize};
use serde_json::Value;
// this is used to sort a HashMap struct
// #[allow(dead_code)]
// fn ordered_map<S>(value: &HashMap<String, String>, serializer: S) -> Result<S::Ok, S::Error>
// where
// S: Serializer,
// {
// let ordered: BTreeMap<_, _> = value.iter().collect();
// ordered.serialize(serializer)
// }
// #[serde(serialize_with = "ordered_map")]
// Data model of a joint/unit as exchanged on the wire. Field order matters
// for the object hash (see the module doc above), so fields are kept
// statically sorted.

/// Signature material authorizing an author.
#[derive(Debug, Serialize, Deserialize)]
pub struct Authentifiers {
    pub r: String,
}

/// One author of a unit.
#[derive(Debug, Serialize, Deserialize)]
pub struct Authors {
    pub address: String,
    pub authentifiers: Authentifiers,
    // Arbitrary JSON definition; shape not fixed here.
    pub definition: Vec<Value>,
}

/// A payment input referencing an earlier output.
#[derive(Debug, Serialize, Deserialize)]
pub struct Inputs {
    pub message_index: u64,
    pub output_index: u64,
    pub unit: String,
}

/// One message carried by a unit.
#[derive(Debug, Serialize, Deserialize)]
pub struct Messages {
    pub app: String,
    pub payload: Payload,
    pub payload_hash: String,
    pub payload_location: String,
}

/// A payment output: amount sent to an address.
#[derive(Debug, Serialize, Deserialize)]
pub struct Outputs {
    pub address: String,
    pub amount: u64,
}

/// Payment payload: inputs consumed and outputs produced.
#[derive(Debug, Serialize, Deserialize)]
pub struct Payload {
    pub inputs: Vec<Inputs>,
    pub outputs: Vec<Outputs>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Ball {
    // TODO: need a real definition
    pub unit: String,
}

/// A unit of the DAG, with its authors, messages and parent links.
#[derive(Debug, Serialize, Deserialize)]
pub struct Unit {
    pub alt: String,
    pub authors: Vec<Authors>,
    pub content_hash: Option<String>, // this may not exist
    pub headers_commission: u64,
    pub last_ball: String,
    pub last_ball_unit: String,
    pub messages: Vec<Messages>,
    pub parent_units: Vec<String>,
    pub payload_commission: u64,
    pub unit: Option<String>, // this may not exist
    pub version: String,
    pub witness_list_unit: String,
}

/// A unit together with its (optional) confirming ball.
#[derive(Debug, Serialize, Deserialize)]
pub struct Joint {
    pub ball: Option<Ball>,
    pub unit: Unit,
}
|
#[doc = "Reader of register FIFO_LEVELS"]
pub type R = crate::R<u32, super::FIFO_LEVELS>;
#[doc = "Reader of field `RAF_LVL`"]
pub type RAF_LVL_R = crate::R<u8, u8>;
#[doc = "Reader of field `WAF_LVL`"]
pub type WAF_LVL_R = crate::R<u8, u8>;
#[doc = "Reader of field `TDF_LVL`"]
pub type TDF_LVL_R = crate::R<u8, u8>;
impl R {
#[doc = "Bits 16:23 - Current Read-Address-FIFO fill level"]
#[inline(always)]
pub fn raf_lvl(&self) -> RAF_LVL_R {
RAF_LVL_R::new(((self.bits >> 16) & 0xff) as u8)
}
#[doc = "Bits 8:15 - Current Write-Address-FIFO fill level"]
#[inline(always)]
pub fn waf_lvl(&self) -> WAF_LVL_R {
WAF_LVL_R::new(((self.bits >> 8) & 0xff) as u8)
}
#[doc = "Bits 0:7 - Current Transfer-Data-FIFO fill level"]
#[inline(always)]
pub fn tdf_lvl(&self) -> TDF_LVL_R {
TDF_LVL_R::new((self.bits & 0xff) as u8)
}
}
|
use tokio::prelude::*;
use futures::sync::mpsc;
/// Cloneable handle that feeds messages to the background broadcast task.
#[derive(Clone)]
pub struct Broadcaster {
    // Unbounded channel into the broadcast loop spawned by `make_broadcaster`.
    sender: mpsc::UnboundedSender<Msg>
}
/// Internal protocol between `Broadcaster` handles and the broadcast task.
enum Msg {
    /// Register a new sink to receive broadcast bytes.
    Subscribe(Box<dyn Sink<SinkItem=u8, SinkError=()> + Send + Sync + 'static>),
    /// Send one byte to every subscribed sink.
    Broadcast(u8),
    /// Ask the broadcast loop to shut down.
    Close
}
/// This structure adds a convenient interface with which you can
/// subscribe and send messages to the broadcaster.
// NOTE: uses the pre-stable `await!` macro / futures 0.1 API throughout.
impl Broadcaster {
    /// Spawns the broadcast task and returns a handle plus a channel that
    /// signals when the task has closed.
    pub fn new() -> (Broadcaster, mpsc::Receiver<()>) {
        make_broadcaster()
    }

    /// Registers `sink` to receive every subsequently broadcast byte.
    /// Send errors are deliberately ignored (best-effort).
    pub async fn subscribe(&mut self, sink: impl Sink<SinkItem=u8, SinkError=()> + Send + Sync + 'static) -> () {
        let msg = Msg::Subscribe(Box::new(sink));
        let _ = await!(self.sender.send_async(msg));
    }

    /// Requests shutdown of the broadcast loop (best-effort).
    pub async fn close(&mut self) -> () {
        let _ = await!(self.sender.send_async(Msg::Close));
    }
}
/// Broadcaster is also a valid Sink, to avoid needing to consume the inner sink
/// on every attempt to send a byte into it, and allow us to use `.forward` to
/// stream bytes into it.
impl Sink for Broadcaster {
    type SinkItem = u8;
    type SinkError = ();
    fn start_send(&mut self, byte: u8) -> Result<AsyncSink<u8>, Self::SinkError> {
        // Wrap the byte in the internal message type; if the channel reports
        // NotReady, map the returned message back to the original byte so the
        // caller can retry with it.
        match self.sender.start_send(Msg::Broadcast(byte)) {
            Err(_) => Err(()),
            Ok(inner) => Ok(inner.map(|_| byte))
        }
    }
    fn poll_complete(&mut self) -> Poll<(), Self::SinkError> {
        // Delegate flushing to the underlying channel, erasing its error type.
        self.sender
            .poll_complete()
            .map_err(|_| ())
    }
}
/// Create a new byte broadcaster (this will panic if it does not execute in the context
/// of a tokio runtime). You can subscribe new Sinks and broadcast bytes to them. If a sink
/// errors (eg it is no longer possible to send to it) it is no longer broadcasted to.
fn make_broadcaster() -> (Broadcaster, mpsc::Receiver<()>) {
    let (send_broadcaster, mut recv_broadcaster) = mpsc::unbounded();
    // Used solely to signal the caller that the task has fully shut down.
    let (mut send_closed, recv_closed) = mpsc::channel::<()>(0);
    tokio::spawn_async(async move {
        let mut outputters: Vec<BoxedSink<u8,()>> = vec![];
        while let Some(res) = await!(recv_broadcaster.next()) {
            let msg = match res {
                Ok(byte) => byte,
                Err(e) => { return eprintln!("Error receiving msg to broadcast: {:?}", e); }
            };
            match msg {
                // Subscribe a Sink to being sent output:
                Msg::Subscribe(sink) => {
                    // We have to newtype the sink into our own struct since
                    // Sink isn't implemented on Box<dyn Sink> for some reason:
                    outputters.push(BoxedSink(sink));
                },
                // Get given some output to send:
                Msg::Broadcast(byte) => {
                    // Send a message to each sink, recording any that failed:
                    let mut errored = vec![];
                    for (i, sink) in outputters.iter_mut().enumerate() {
                        if let Err(_) = await!(sink.send_async(byte)) {
                            errored.push(i);
                        }
                    }
                    // If sending to a sink failed, drop that sink from the vec.
                    // (Idiom fixes: `!is_empty()` over `len() > 0`, and
                    // `contains` over `iter().find(..).is_some()`.)
                    if !errored.is_empty() {
                        outputters = outputters.into_iter().enumerate().filter_map(|(i,sink)| {
                            if errored.contains(&i) {
                                None
                            } else {
                                Some(sink)
                            }
                        }).collect();
                    }
                },
                // Close the broadcaster so it can receive no more output
                Msg::Close => {
                    recv_broadcaster.close();
                }
            }
        }
        // Best-effort shutdown notification; the caller may have gone away.
        let _ = await!(send_closed.send_async(()));
    });
    // return our interface:
    (Broadcaster {
        sender: send_broadcaster,
    }, recv_closed)
}
// This is necessary to make Boxed Sinks actually impl the Sink trait,
// as for some reason they do not appear to at the moment:
struct BoxedSink<I,E>(Box<dyn Sink<SinkItem=I, SinkError=E> + Send + Sync + 'static>);
impl <I,E> Sink for BoxedSink<I,E> {
    type SinkItem = I;
    type SinkError = E;
    // Both methods forward verbatim to the boxed inner sink.
    fn start_send(&mut self, input: Self::SinkItem) -> Result<AsyncSink<Self::SinkItem>, Self::SinkError> {
        self.0.start_send(input)
    }
    fn poll_complete(&mut self) -> Poll<(), Self::SinkError> {
        self.0.poll_complete()
    }
}
use super::POS;
/// Minimal `open(2)` syscall shim.
///
/// Decodes `pathname` (a userspace pointer to a NUL-terminated string) and:
/// - returns `0xFFFFFFFF_FFFFFFFF` (i.e. -1 as u64) for `/dev/tty`;
/// - for any other path, points the global `POS` at the address of the
///   embedded `__busybox_start` symbol and returns 3.
///
/// NOTE(review): `flags` and `mode` are only logged, never honoured, and the
/// returned fd is always 3 — presumably a single-file toy implementation;
/// confirm callers expect this.
pub fn open(pathname: u64, flags: u64, mode: u64) -> u64 {
    let pathname_ptr = pathname as usize;
    // SAFETY-review: trusts that `pathname` is a valid NUL-terminated pointer;
    // TODO confirm callers validate it before reaching this shim.
    let pathname = unsafe { ::mem::c_to_str(pathname_ptr) };
    println!("Syscall: open pathname={} flags={:x} mode={:x}", pathname, flags, mode);
    if pathname == "/dev/tty" {
        return 0xFFFFFFFF_FFFFFFFF;
    } else {
        unsafe {
            POS = &::__busybox_start as *const _ as usize;
        }
    }
    3
}
|
use crate::commands::utils::handle_io;
use http::Uri;
use serde_json::Value;
use serenity::async_trait;
use songbird::{
input::{cached::Compressed, Metadata},
Event, EventContext, EventHandler,
};
use sqlx::{any::AnyConnection, Connection, Executor, Row};
use std::{fs, path::Path, sync::Arc, time::Duration};
use tokio::{fs::File, io::AsyncWriteExt, sync::Mutex};
use tracing::{info, warn};
// 128 kbit/s; consumers of this constant sit outside this view.
pub const BITRATE: u64 = 128_000;
mod metadata;
/// Shorthand for fallible sqlx operations.
type DbResult<T> = Result<T, sqlx::Error>;
/// Shared handle to the cache database connection.
#[derive(Debug, Clone)]
pub struct TrackCache {
    pub connection: Arc<Mutex<AnyConnection>>,
}
/// One row of the `Cache` table: source uri -> stored file path.
#[derive(Debug)]
struct CacheRow {
    uri: String,
    path: String,
}
/// Songbird event handler that persists a finished track to disk and
/// records it in the cache database.
#[derive(Debug)]
pub struct TrackEndEvent {
    pub cache: TrackCache,
    pub compressed: Compressed,
}
impl TrackCache {
    /// Opens a database connection for the cache and starts a transaction.
    pub async fn new(uri: &str) -> DbResult<TrackCache> {
        let mut conn = AnyConnection::connect(uri).await?;
        conn.execute("BEGIN").await?;
        Ok(TrackCache {
            connection: Arc::new(Mutex::new(conn)),
        })
    }
    /// Looks up the cached file path for `uri`, if one was recorded.
    pub async fn get(&self, uri: &str) -> DbResult<Option<String>> {
        let mut conn = self.connection.lock().await;
        // Security fix: bind `uri` as a query parameter instead of
        // interpolating it into the SQL text with `format!`, which allowed
        // SQL injection via crafted source URLs. (`?` placeholder — matches
        // the sqlite/mysql syntax the Any driver proxies here; confirm if a
        // postgres backend is ever configured.)
        let row = sqlx::query("select Path from Cache where Uri = ?")
            .bind(uri)
            .fetch_optional(&mut *conn)
            .await?;
        Ok(row.and_then(|r| r.get("Path")))
    }
    /// Records a uri -> path mapping; returns the backend's last-insert id.
    async fn insert(&self, row: CacheRow) -> DbResult<Option<i64>> {
        let mut conn = self.connection.lock().await;
        // Security fix: parameterized insert (was `format!` interpolation).
        let res = sqlx::query("insert into Cache values(?, ?)")
            .bind(row.uri)
            .bind(row.path)
            .execute(&mut *conn)
            .await?;
        // Use with Any*
        Ok(res.last_insert_id())
        //Ok(Some(res.last_insert_rowid()))
    }
}
#[async_trait]
impl EventHandler for TrackEndEvent {
    /// On track end, writes the compressed audio to
    /// `audio_cache/<host>/<query>` in DCA1 form and records it in the DB.
    async fn act(&self, ctx: &EventContext<'_>) -> Option<Event> {
        if let EventContext::Track(_) = ctx {
            let meta = self.compressed.metadata.clone();
            // only cache if shorter than 20min
            if let Some(d) = meta.duration {
                if d <= Duration::from_secs(1200) {
                    info!("Starting cache write");
                    // saves file as audio_cache/host/query
                    // NOTE(review): the unwraps below assume `source_url` is
                    // always present and always has a host and a query
                    // component — a track without them panics this handler;
                    // confirm upstream guarantees.
                    let sauce = meta.source_url.clone().unwrap();
                    let (query, host) = {
                        let uri = sauce.parse::<Uri>().unwrap();
                        (
                            uri.query().unwrap().to_owned(),
                            uri.host().unwrap().to_owned(),
                        )
                    };
                    // songbird doesn't output dca1, so I'll do it myself
                    let dcameta = metadata::DcaMetadata::from(meta.clone());
                    let path = format!("audio_cache/{}", host);
                    if !Path::new(&path).exists() {
                        handle_io(fs::create_dir_all(&path));
                    };
                    let path = format!("{}/{}", path, query);
                    let mut file = handle_io(File::create(&path).await);
                    // Header first, raw compressed audio after it.
                    // NOTE(review): `write` may perform a partial write of
                    // the header; `write_all` would be safer — flagged only.
                    let mut size = handle_io(file.write(&dcameta.header()).await) as u64;
                    let mut send_file = file.into_std().await;
                    let mut comp_send = self.compressed.raw.new_handle();
                    // AsyncRead is a mess I dont' really want to deal with ATM.
                    // Take a look at the traits for TxCatcher and feel my pain
                    size += handle_io(
                        tokio::task::spawn_blocking(move || {
                            std::io::copy(&mut comp_send, &mut send_file)
                        })
                        .await
                        .unwrap(),
                    );
                    info!("Wrote {}KiB", size / 1024);
                    // DB failures are logged but never fail the handler.
                    if let Err(e) = self
                        .cache
                        .insert(CacheRow {
                            uri: sauce,
                            path: format!("{}/{}", host, query),
                        })
                        .await
                    {
                        warn!("Error adding entry to cache: {}", e)
                    }
                }
            }
        }
        None
    }
}
/// Builds a `Metadata` from the `"extra"` object of a JSON value, reading
/// the `date`, `duration` (milliseconds) and `thumbnail` keys. Anything
/// missing or of the wrong shape falls back to the default.
pub fn extra_meta(val: &Value) -> Metadata {
    let extra = match val.as_object().and_then(|o| o.get("extra")) {
        Some(e) => e,
        None => return Metadata::default(),
    };
    let date = extra.get("date").and_then(Value::as_str).map(str::to_owned);
    let duration = extra
        .get("duration")
        .and_then(Value::as_u64)
        .map(Duration::from_millis);
    let thumbnail = extra
        .get("thumbnail")
        .and_then(Value::as_str)
        .map(str::to_owned);
    Metadata {
        date,
        duration,
        thumbnail,
        ..Default::default()
    }
}
|
// svd2rust-generated reader/writer plumbing for the CTRL register.
#[doc = "Reader of register CTRL"]
pub type R = crate::R<u32, super::CTRL>;
#[doc = "Writer for register CTRL"]
pub type W = crate::W<u32, super::CTRL>;
#[doc = "Register CTRL `reset()`'s with value 0"]
impl crate::ResetValue for super::CTRL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
// Generated accessors for the 12-bit ENABLE pass-code field (bits 12:23).
// The two magic values act as a guard against accidental register writes.
#[doc = "On power-up this field is initialised to DISABLE and the chip runs from the ROSC.\\n If the chip has subsequently been programmed to run from the XOSC then setting this field to DISABLE may lock-up the chip. If this is a concern then run the clk_ref from the ROSC and enable the clk_sys RESUS feature.\\n The 12-bit code is intended to give some protection against accidental writes. An invalid setting will enable the oscillator.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u16)]
pub enum ENABLE_A {
    #[doc = "3358: `110100011110`"]
    DISABLE = 3358,
    #[doc = "4011: `111110101011`"]
    ENABLE = 4011,
}
impl From<ENABLE_A> for u16 {
    #[inline(always)]
    fn from(variant: ENABLE_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `ENABLE`"]
pub type ENABLE_R = crate::R<u16, ENABLE_A>;
impl ENABLE_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u16, ENABLE_A> {
        use crate::Variant::*;
        // Any bit pattern that is not a known code is surfaced as `Res(raw)`.
        match self.bits {
            3358 => Val(ENABLE_A::DISABLE),
            4011 => Val(ENABLE_A::ENABLE),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `DISABLE`"]
    #[inline(always)]
    pub fn is_disable(&self) -> bool {
        *self == ENABLE_A::DISABLE
    }
    #[doc = "Checks if the value of the field is `ENABLE`"]
    #[inline(always)]
    pub fn is_enable(&self) -> bool {
        *self == ENABLE_A::ENABLE
    }
}
#[doc = "Write proxy for field `ENABLE`"]
pub struct ENABLE_W<'a> {
    w: &'a mut W,
}
impl<'a> ENABLE_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: ENABLE_A) -> &'a mut W {
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "`110100011110`"]
    #[inline(always)]
    pub fn disable(self) -> &'a mut W {
        self.variant(ENABLE_A::DISABLE)
    }
    #[doc = "`111110101011`"]
    #[inline(always)]
    pub fn enable(self) -> &'a mut W {
        self.variant(ENABLE_A::ENABLE)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Clear bits 12:23, then splice in the masked 12-bit value.
        self.w.bits = (self.w.bits & !(0x0fff << 12)) | (((value as u32) & 0x0fff) << 12);
        self.w
    }
}
// Generated accessors for the 12-bit FREQ_RANGE field (bits 0:11).
#[doc = "Frequency range. This resets to 0xAA0 and cannot be changed.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u16)]
pub enum FREQ_RANGE_A {
    #[doc = "2720: `101010100000`"]
    _1_15MHZ = 2720,
    #[doc = "2721: `101010100001`"]
    RESERVED_1 = 2721,
    #[doc = "2722: `101010100010`"]
    RESERVED_2 = 2722,
    #[doc = "2723: `101010100011`"]
    RESERVED_3 = 2723,
}
impl From<FREQ_RANGE_A> for u16 {
    #[inline(always)]
    fn from(variant: FREQ_RANGE_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `FREQ_RANGE`"]
pub type FREQ_RANGE_R = crate::R<u16, FREQ_RANGE_A>;
impl FREQ_RANGE_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u16, FREQ_RANGE_A> {
        use crate::Variant::*;
        // Unknown bit patterns are surfaced as `Res(raw)`.
        match self.bits {
            2720 => Val(FREQ_RANGE_A::_1_15MHZ),
            2721 => Val(FREQ_RANGE_A::RESERVED_1),
            2722 => Val(FREQ_RANGE_A::RESERVED_2),
            2723 => Val(FREQ_RANGE_A::RESERVED_3),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `_1_15MHZ`"]
    #[inline(always)]
    pub fn is_1_15mhz(&self) -> bool {
        *self == FREQ_RANGE_A::_1_15MHZ
    }
    #[doc = "Checks if the value of the field is `RESERVED_1`"]
    #[inline(always)]
    pub fn is_reserved_1(&self) -> bool {
        *self == FREQ_RANGE_A::RESERVED_1
    }
    #[doc = "Checks if the value of the field is `RESERVED_2`"]
    #[inline(always)]
    pub fn is_reserved_2(&self) -> bool {
        *self == FREQ_RANGE_A::RESERVED_2
    }
    #[doc = "Checks if the value of the field is `RESERVED_3`"]
    #[inline(always)]
    pub fn is_reserved_3(&self) -> bool {
        *self == FREQ_RANGE_A::RESERVED_3
    }
}
#[doc = "Write proxy for field `FREQ_RANGE`"]
pub struct FREQ_RANGE_W<'a> {
    w: &'a mut W,
}
impl<'a> FREQ_RANGE_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: FREQ_RANGE_A) -> &'a mut W {
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "`101010100000`"]
    #[inline(always)]
    pub fn _1_15mhz(self) -> &'a mut W {
        self.variant(FREQ_RANGE_A::_1_15MHZ)
    }
    #[doc = "`101010100001`"]
    #[inline(always)]
    pub fn reserved_1(self) -> &'a mut W {
        self.variant(FREQ_RANGE_A::RESERVED_1)
    }
    #[doc = "`101010100010`"]
    #[inline(always)]
    pub fn reserved_2(self) -> &'a mut W {
        self.variant(FREQ_RANGE_A::RESERVED_2)
    }
    #[doc = "`101010100011`"]
    #[inline(always)]
    pub fn reserved_3(self) -> &'a mut W {
        self.variant(FREQ_RANGE_A::RESERVED_3)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Clear bits 0:11, then splice in the masked 12-bit value.
        self.w.bits = (self.w.bits & !0x0fff) | ((value as u32) & 0x0fff);
        self.w
    }
}
// Field-level entry points on the whole-register reader.
impl R {
    #[doc = "Bits 12:23 - On power-up this field is initialised to DISABLE and the chip runs from the ROSC.\\n If the chip has subsequently been programmed to run from the XOSC then setting this field to DISABLE may lock-up the chip. If this is a concern then run the clk_ref from the ROSC and enable the clk_sys RESUS feature.\\n The 12-bit code is intended to give some protection against accidental writes. An invalid setting will enable the oscillator."]
    #[inline(always)]
    pub fn enable(&self) -> ENABLE_R {
        ENABLE_R::new(((self.bits >> 12) & 0x0fff) as u16)
    }
    #[doc = "Bits 0:11 - Frequency range. This resets to 0xAA0 and cannot be changed."]
    #[inline(always)]
    pub fn freq_range(&self) -> FREQ_RANGE_R {
        FREQ_RANGE_R::new((self.bits & 0x0fff) as u16)
    }
}
// Field-level entry points on the whole-register writer.
impl W {
    #[doc = "Bits 12:23 - On power-up this field is initialised to DISABLE and the chip runs from the ROSC.\\n If the chip has subsequently been programmed to run from the XOSC then setting this field to DISABLE may lock-up the chip. If this is a concern then run the clk_ref from the ROSC and enable the clk_sys RESUS feature.\\n The 12-bit code is intended to give some protection against accidental writes. An invalid setting will enable the oscillator."]
    #[inline(always)]
    pub fn enable(&mut self) -> ENABLE_W {
        ENABLE_W { w: self }
    }
    #[doc = "Bits 0:11 - Frequency range. This resets to 0xAA0 and cannot be changed."]
    #[inline(always)]
    pub fn freq_range(&mut self) -> FREQ_RANGE_W {
        FREQ_RANGE_W { w: self }
    }
}
|
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use] extern crate rocket;
mod routes;
/// Builds the Rocket instance, attaches the route handlers, then blocks on launch.
fn main() {
    let app = rocket::ignite().mount("/", routes![routes::index, routes::hello]);
    app.launch();
}
|
/// Runs a Paintfuck-style program on a `height` x `width` grid of '0'/'1'
/// cells and returns the final grid, rows joined with "\r\n".
///
/// Commands: `n`/`e`/`s`/`w` move the cursor (wrapping at the edges),
/// `*` toggles the current cell, `[`/`]` jump past / back to the *matching*
/// bracket depending on the current cell. Every command consumes one of
/// `iterations`; any other character is skipped for free.
///
/// Bug fix: the previous version "matched" brackets with
/// `old_code.find(']')` / `find('[')`, i.e. it always jumped to the FIRST
/// bracket in the whole program — wrong for any program with more than one
/// bracket pair or with nesting. Brackets are now matched by depth counting.
fn interpreter(code: &str, iterations: usize,
               width: usize, height: usize) -> String
{
    // Grid of character cells, all initially '0'.
    let mut grid: Vec<Vec<char>> = vec![vec!['0'; width]; height];
    let code: Vec<char> = code.chars().collect();
    let mut pos_x = 0; // row
    let mut pos_y = 0; // column
    let mut i = 0; // instruction pointer
    let mut counter = 0; // commands executed so far
    while i < code.len() && counter < iterations {
        match code[i] {
            'n' => {
                pos_x = if pos_x == 0 { height - 1 } else { pos_x - 1 };
                counter += 1;
            }
            's' => {
                pos_x = if pos_x == height - 1 { 0 } else { pos_x + 1 };
                counter += 1;
            }
            'e' => {
                pos_y = if pos_y == width - 1 { 0 } else { pos_y + 1 };
                counter += 1;
            }
            'w' => {
                pos_y = if pos_y == 0 { width - 1 } else { pos_y - 1 };
                counter += 1;
            }
            '*' => {
                grid[pos_x][pos_y] = if grid[pos_x][pos_y] == '0' { '1' } else { '0' };
                counter += 1;
            }
            '[' => {
                // Cell is 0: skip forward past the matching ']'.
                if grid[pos_x][pos_y] == '0' {
                    let mut depth = 1;
                    while depth > 0 {
                        i += 1;
                        match code[i] {
                            '[' => depth += 1,
                            ']' => depth -= 1,
                            _ => {}
                        }
                    }
                }
                counter += 1;
            }
            ']' => {
                // Cell is 1: jump back to the matching '['.
                if grid[pos_x][pos_y] == '1' {
                    let mut depth = 1;
                    while depth > 0 {
                        i -= 1;
                        match code[i] {
                            ']' => depth += 1,
                            '[' => depth -= 1,
                            _ => {}
                        }
                    }
                }
                counter += 1;
            }
            // Non-command characters do not consume an iteration.
            _ => {}
        }
        i += 1;
    }
    grid.into_iter()
        .map(|row| row.into_iter().collect::<String>())
        .collect::<Vec<String>>()
        .join("\r\n")
}
// Fixed-input regression tests for `interpreter`; expected grids are rows
// joined with "\r\n". None of these programs use brackets.
#[test]
fn test0() {
    assert_eq!(interpreter("*e*e*e*es*es*ws*ws*w*w*w*n*n*n*ssss*s*s*s*", 1, 6, 2),
               "100000\r\n000000");
}
#[test]
fn test1() {
    assert_eq!(interpreter("*e*e*e*es*es*ws*ws*w*w*w*n*n*n*ssss*s*s*s*", 1, 6, 9),
               "100000\r\n000000\r\n000000\r\n000000\r\n000000\r\n000000\r\n000000\r\n000000\r\n000000");
}
#[test]
fn test2() {
    assert_eq!(interpreter("*e*e*e*e*e*e*e*e*", 6, 6, 1),
               "111000");
}
// iterations == 0 must leave the grid untouched.
#[test]
fn test3() {
    assert_eq!(interpreter("*e*e*e*e*e*e*e*", 0, 6, 1),
               "000000");
}
#[test]
fn test4() {
    assert_eq!(interpreter("e*e*e*es*es*ws*ws*w*w*w*n*n*n*ssss*s*s*s*", 0, 1, 1),
               "0");
}
#[test]
fn test5() {
    assert_eq!(interpreter("*e*e*e*es*es*ws*ws*w*w*w*n*n*n*ssss*s*s*s*", 0, 1, 1),
               "0");
}
#[test]
fn test6() {
    assert_eq!(interpreter("*e*e*e*es*es*ws*ws*w*w*w*n*n*n*ssss*s*s*s*", 0, 6, 9),
               "000000\r\n000000\r\n000000\r\n000000\r\n000000\r\n000000\r\n000000\r\n000000\r\n000000");
}
// Partial executions of the same program at increasing iteration budgets.
#[test]
fn test7() {
    assert_eq!(interpreter("*e*e*e*es*es*ws*ws*w*w*w*n*n*n*ssss*s*s*s*", 7, 6, 9),
               "111100\r\n000000\r\n000000\r\n000000\r\n000000\r\n000000\r\n000000\r\n000000\r\n000000");
}
#[test]
fn test8() {
    assert_eq!(interpreter("*e*e*e*es*es*ws*ws*w*w*w*n*n*n*ssss*s*s*s*", 19, 6, 9),
               "111100\r\n000010\r\n000001\r\n000010\r\n000100\r\n000000\r\n000000\r\n000000\r\n000000");
}
#[test]
fn test9() {
    assert_eq!(interpreter("*e*e*e*es*es*ws*ws*w*w*w*n*n*n*ssss*s*s*s*", 42, 6, 9),
               "111100\r\n100010\r\n100001\r\n100010\r\n111100\r\n100000\r\n100000\r\n100000\r\n100000");
}
/// Smoke-runs the interpreter. The result is intentionally discarded;
/// binding it to `_` (instead of an unused `x`) silences the
/// unused-variable warning the previous version produced.
fn main() {
    let _ = interpreter("*e*e*e*es*es*ws*ws*w*w*w*n*n*n*ssss*s*s*s*", 100, 6, 9);
}
|
use crate::prelude::*;
pub use crate::{Error, Result};
// Capacity of the broadcast channel used to fan out received messages.
const MSGBOX: usize = 1024;
//#[derive(Message)]
//#[rtype(result = "()")]
//struct ActiveTick(());
/// Message read from a socket, fanned out to all subscribers.
#[derive(Debug, Clone, Message)]
#[rtype(result = "()")]
pub enum IncomingMsg {
    // Payload is Arc'd so every broadcast subscriber shares one allocation.
    Msg { msg: Arc<Vec<u8>> },
}
/// Actor owning the broadcast sender for one socket's read side.
struct SocketActor {
    //r: ReadHalf<TcpStream>,
    receive_tx: broadcast::Sender<IncomingMsg>,
}
impl Actor for SocketActor {
    type Context = Context<Self>;
    /// No setup is required when the actor starts.
    fn started(&mut self, _ctx: &mut Context<Self>) {}
    /// No teardown is required when the actor stops.
    fn stopped(&mut self, _ctx: &mut Context<Self>) {}
}
impl Handler<IncomingMsg> for SocketActor {
    type Result = ();
    /// Fans the incoming message out to every broadcast subscriber.
    fn handle(&mut self, msg: IncomingMsg, _ctx: &mut Context<Self>) -> Self::Result {
        // Bug fix: a broadcast send typically fails only when no receivers
        // are currently subscribed — a normal situation here, not a fault.
        // The previous `unwrap()` (marked FIXME) panicked the actor in that
        // case; the error is now deliberately ignored.
        let _ = self.receive_tx.send(msg);
    }
}
/// Client-facing handle: subscribe to received messages, write outgoing bytes.
pub struct Socket {
    receive_tx: broadcast::Sender<IncomingMsg>,
    w: WriteHalf<TcpStream>,
}
impl Socket {
    /// Connects to `addr` and wires the stream into the actor machinery.
    pub async fn connect(addr: SocketAddr) -> Result<Socket> {
        let stream = TcpStream::connect(addr).await?;
        start_stream(stream)
    }
    /// Returns a fresh receiver of messages read from this socket.
    pub fn subscribe(&self) -> broadcast::Receiver<IncomingMsg> {
        self.receive_tx.subscribe()
    }
    /// Writes the whole of `msg` to the socket.
    pub async fn send(&mut self, msg: &[u8]) -> Result<()> {
        // Bug fix: `write` may perform a partial write and silently drop the
        // tail of `msg`; `write_all` retries until every byte is written.
        // (Also drops the redundant `&msg` double-reference.)
        self.w.write_all(msg).await?;
        Ok(())
    }
}
/// Accepting side: wraps a bound `TcpListener`.
pub struct Listener {
    listener: TcpListener,
}
impl Listener {
    /// Binds a listener to `addr`.
    pub async fn bind(addr: SocketAddr) -> Result<Listener> {
        let listener = TcpListener::bind(addr).await?;
        Ok(Listener { listener })
    }
    /// Accepts one connection and wires it up as a `Socket`.
    pub async fn accept(&self) -> Result<Socket> {
        let (stream, _) = self.listener.accept().await?;
        start_stream(stream)
    }
}
/// Splits `stream`, spawns an actor plus a read task on a new Arbiter, and
/// returns the write-side `Socket` handle.
///
/// NOTE(review): the spawned future performs exactly ONE `read_buf` — only
/// the first chunk (up to 1024 bytes) is ever forwarded, then the task ends.
/// If continuous reading is intended this needs a loop; confirm.
fn start_stream(stream: TcpStream) -> Result<Socket> {
    let (mut r, w) = tokio::io::split(stream);
    let (receive_tx, _receive_rx) = broadcast::channel(MSGBOX);
    let socket_receive_tx = receive_tx.clone();
    let execution = async move {
        // `Actor::start` spawns the `Actor` on the *current* `Arbiter`, which
        // in this case is the System arbiter
        let addr = SocketActor { receive_tx }.start();
        let mut buf = BytesMut::with_capacity(1024);
        match r.read_buf(&mut buf).await {
            // Ok(0): peer closed before sending anything — nothing to forward.
            Ok(0) => (),
            Ok(_) => {
                let payload = buf.to_vec();
                addr.send(IncomingMsg::Msg {
                    msg: Arc::new(payload),
                })
                .await
                .unwrap(); // FIXME: handle error?
            }
            Err(_) => return (),
        }
    };
    let arb = Arbiter::new();
    // Spawn the future onto the current Arbiter/event loop
    arb.spawn(execution);
    Ok(Socket {
        receive_tx: socket_receive_tx,
        w,
    })
}
|
use imgui_sys;
use libc::size_t;
use std::marker::PhantomData;
use std::ptr;
use super::{ImGuiInputTextFlags,
ImGuiInputTextFlags_AllowTabInput /* ImGuiInputTextFlags_CtrlEnterForNewLine, */,
ImGuiInputTextFlags_AlwaysInsertMode, ImGuiInputTextFlags_AutoSelectAll,
ImGuiInputTextFlags_CallbackAlways, ImGuiInputTextFlags_CallbackCharFilter,
ImGuiInputTextFlags_CallbackCompletion, ImGuiInputTextFlags_CallbackHistory,
ImGuiInputTextFlags_CharsDecimal, ImGuiInputTextFlags_CharsHexadecimal,
ImGuiInputTextFlags_CharsNoBlank, ImGuiInputTextFlags_CharsUppercase,
ImGuiInputTextFlags_EnterReturnsTrue, ImGuiInputTextFlags_NoHorizontalScroll, ImStr,
Ui};
/// Generates the builder methods shared by every input widget that carries
/// `ImGuiInputTextFlags`. `$InputType` must have a `flags` field and be
/// constructible with struct-update (`.. self`) syntax; each method toggles
/// one flag bit via `flags.with(..)` and returns the builder by value.
macro_rules! impl_text_flags {
    ($InputType:ident) => {
        /// Replaces the whole flag set at once.
        #[inline]
        pub fn flags(self, flags: ImGuiInputTextFlags) -> Self {
            $InputType {
                flags: flags,
                .. self
            }
        }
        #[inline]
        pub fn chars_decimal(self, value: bool) -> Self {
            $InputType {
                flags: self.flags.with(ImGuiInputTextFlags_CharsDecimal, value),
                .. self
            }
        }
        #[inline]
        pub fn chars_hexadecimal(self, value: bool) -> Self {
            $InputType {
                flags: self.flags.with(ImGuiInputTextFlags_CharsHexadecimal, value),
                .. self
            }
        }
        #[inline]
        pub fn chars_uppercase(self, value: bool) -> Self {
            $InputType {
                flags: self.flags.with(ImGuiInputTextFlags_CharsUppercase, value),
                .. self
            }
        }
        #[inline]
        pub fn chars_noblank(self, value: bool) -> Self {
            $InputType {
                flags: self.flags.with(ImGuiInputTextFlags_CharsNoBlank, value),
                .. self
            }
        }
        #[inline]
        pub fn auto_select_all(self, value: bool) -> Self {
            $InputType {
                flags: self.flags.with(ImGuiInputTextFlags_AutoSelectAll, value),
                .. self
            }
        }
        #[inline]
        pub fn enter_returns_true(self, value: bool) -> Self {
            $InputType {
                flags: self.flags.with(ImGuiInputTextFlags_EnterReturnsTrue, value),
                .. self
            }
        }
        #[inline]
        pub fn callback_completion(self, value: bool) -> Self {
            $InputType {
                flags: self.flags.with(ImGuiInputTextFlags_CallbackCompletion, value),
                .. self
            }
        }
        #[inline]
        pub fn callback_history(self, value: bool) -> Self {
            $InputType {
                flags: self.flags.with(ImGuiInputTextFlags_CallbackHistory, value),
                .. self
            }
        }
        #[inline]
        pub fn callback_always(self, value: bool) -> Self {
            $InputType {
                flags: self.flags.with(ImGuiInputTextFlags_CallbackAlways, value),
                .. self
            }
        }
        #[inline]
        pub fn callback_char_filter(self, value: bool) -> Self {
            $InputType {
                flags: self.flags.with(ImGuiInputTextFlags_CallbackCharFilter, value),
                .. self
            }
        }
        #[inline]
        pub fn allow_tab_input(self, value: bool) -> Self {
            $InputType {
                flags: self.flags.with(ImGuiInputTextFlags_AllowTabInput, value),
                .. self
            }
        }
        #[inline]
        pub fn no_horizontal_scroll(self, value: bool) -> Self {
            $InputType {
                flags: self.flags.with(ImGuiInputTextFlags_NoHorizontalScroll, value),
                .. self
            }
        }
        #[inline]
        pub fn always_insert_mode(self, value: bool) -> Self {
            $InputType {
                flags: self.flags.with(ImGuiInputTextFlags_AlwaysInsertMode, value),
                .. self
            }
        }
    }
}
/// Generates `step`/`step_fast` builder setters for widgets that have those
/// fields of type `$Value`.
macro_rules! impl_step_params {
    ($InputType:ident, $Value:ty) => {
        #[inline]
        pub fn step(self, value: $Value) -> Self {
            $InputType {
                step: value,
                .. self
            }
        }
        #[inline]
        pub fn step_fast(self, value: $Value) -> Self {
            $InputType {
                step_fast: value,
                .. self
            }
        }
    }
}
/// Generates a `decimal_precision` builder setter for float widgets.
macro_rules! impl_precision_params {
    ($InputType:ident) => {
        #[inline]
        pub fn decimal_precision(self, value: i32) -> Self {
            $InputType {
                decimal_precision: value,
                .. self
            }
        }
    }
}
/// Builder for a text input widget editing `buf` in place.
#[must_use]
pub struct InputText<'ui, 'p> {
    label: ImStr<'p>,
    buf: &'p mut str,
    flags: ImGuiInputTextFlags,
    _phantom: PhantomData<&'ui Ui<'ui>>,
}
impl<'ui, 'p> InputText<'ui, 'p> {
    pub fn new(label: ImStr<'p>, buf: &'p mut str) -> Self {
        InputText {
            label: label,
            buf: buf,
            flags: ImGuiInputTextFlags::empty(),
            _phantom: PhantomData,
        }
    }
    impl_text_flags!(InputText);
    // TODO: boxed closure...?
    // pub fn callback(self) -> Self { }
    /// Draws the widget; returns true when the text was edited.
    pub fn build(self) -> bool {
        unsafe {
            // SAFETY-review: casting the `&mut str` data pointer to `*mut i8`
            // lets the C side write arbitrary bytes into the buffer, which can
            // leave invalid UTF-8 behind in safe Rust — flagged, not changed,
            // since fixing it would change the public `buf` type.
            imgui_sys::igInputText(self.label.as_ptr(),
                                   // TODO: this is evil.
                                   // Perhaps something else than &mut str is better
                                   self.buf.as_ptr() as *mut i8,
                                   self.buf.len() as size_t,
                                   self.flags,
                                   None,
                                   ptr::null_mut())
        }
    }
}
/// Builder for an integer input widget editing `value` in place.
#[must_use]
pub struct InputInt<'ui, 'p> {
    label: ImStr<'p>,
    value: &'p mut i32,
    step: i32,      // increment for the +/- buttons
    step_fast: i32, // increment when the fast-step modifier is held
    flags: ImGuiInputTextFlags,
    _phantom: PhantomData<&'ui Ui<'ui>>,
}
impl<'ui, 'p> InputInt<'ui, 'p> {
    impl_step_params!(InputInt, i32);
    impl_text_flags!(InputInt);
    /// Starts building an integer input; defaults are step 1, fast step 100
    /// and an empty flag set.
    pub fn new(label: ImStr<'p>, value: &'p mut i32) -> Self {
        InputInt {
            _phantom: PhantomData,
            flags: ImGuiInputTextFlags::empty(),
            step_fast: 100,
            step: 1,
            value: value,
            label: label,
        }
    }
    /// Draws the widget; returns true when the user edited the value.
    pub fn build(self) -> bool {
        unsafe {
            imgui_sys::igInputInt(
                self.label.as_ptr(),
                self.value as *mut i32,
                self.step,
                self.step_fast,
                self.flags)
        }
    }
}
/// Builder for a float input widget editing `value` in place.
#[must_use]
pub struct InputFloat<'ui, 'p> {
    label: ImStr<'p>,
    value: &'p mut f32,
    step: f32,              // increment for the +/- buttons (0.0 = hidden)
    step_fast: f32,         // increment when the fast-step modifier is held
    decimal_precision: i32, // -1 lets ImGui pick the display precision
    flags: ImGuiInputTextFlags,
    _phantom: PhantomData<&'ui Ui<'ui>>,
}
impl<'ui, 'p> InputFloat<'ui, 'p> {
    impl_step_params!(InputFloat, f32);
    impl_precision_params!(InputFloat);
    impl_text_flags!(InputFloat);
    /// Starts building a float input; defaults are no step buttons,
    /// automatic display precision and an empty flag set.
    pub fn new(label: ImStr<'p>, value: &'p mut f32) -> Self {
        InputFloat {
            _phantom: PhantomData,
            flags: ImGuiInputTextFlags::empty(),
            decimal_precision: -1,
            step_fast: 0.0,
            step: 0.0,
            value: value,
            label: label,
        }
    }
    /// Draws the widget; returns true when the user edited the value.
    pub fn build(self) -> bool {
        unsafe {
            imgui_sys::igInputFloat(
                self.label.as_ptr(),
                self.value as *mut f32,
                self.step,
                self.step_fast,
                self.decimal_precision,
                self.flags)
        }
    }
}
/// Generates a builder type for an N-component float input widget backed by
/// the given `imgui_sys` function (`igInputFloat2/3/4`).
macro_rules! impl_input_floatn {
    ($InputFloatN:ident, $N:expr, $igInputFloatN:ident) => {
        #[must_use]
        pub struct $InputFloatN<'ui, 'p> {
            label: ImStr<'p>,
            value: &'p mut [f32;$N],
            decimal_precision: i32,
            flags: ImGuiInputTextFlags,
            _phantom: PhantomData<&'ui Ui<'ui>>
        }
        impl<'ui, 'p> $InputFloatN<'ui, 'p> {
            pub fn new(label: ImStr<'p>, value: &'p mut [f32;$N]) -> Self {
                $InputFloatN {
                    label: label,
                    value: value,
                    decimal_precision: -1,
                    flags: ImGuiInputTextFlags::empty(),
                    _phantom: PhantomData
                }
            }
            pub fn build(self) -> bool {
                unsafe {
                    imgui_sys::$igInputFloatN(
                        self.label.as_ptr(),
                        self.value.as_mut_ptr(),
                        self.decimal_precision,
                        self.flags)
                }
            }
            impl_precision_params!($InputFloatN);
            impl_text_flags!($InputFloatN);
        }
    }
}
// Concrete 2-, 3- and 4-component float widgets.
impl_input_floatn!(InputFloat2, 2, igInputFloat2);
impl_input_floatn!(InputFloat3, 3, igInputFloat3);
impl_input_floatn!(InputFloat4, 4, igInputFloat4);
/// Generates a builder type for an N-component integer input widget backed
/// by the given `imgui_sys` function (`igInputInt2/3/4`).
macro_rules! impl_input_intn {
    ($InputIntN:ident, $N:expr, $igInputIntN:ident) => {
        #[must_use]
        pub struct $InputIntN<'ui, 'p> {
            label: ImStr<'p>,
            value: &'p mut [i32;$N],
            flags: ImGuiInputTextFlags,
            _phantom: PhantomData<&'ui Ui<'ui>>
        }
        impl<'ui, 'p> $InputIntN<'ui, 'p> {
            pub fn new(label: ImStr<'p>, value: &'p mut [i32;$N]) -> Self {
                $InputIntN {
                    label: label,
                    value: value,
                    flags: ImGuiInputTextFlags::empty(),
                    _phantom: PhantomData
                }
            }
            pub fn build(self) -> bool {
                unsafe {
                    imgui_sys::$igInputIntN(
                        self.label.as_ptr(),
                        self.value.as_mut_ptr(),
                        self.flags)
                }
            }
            impl_text_flags!($InputIntN);
        }
    }
}
// Concrete 2-, 3- and 4-component integer widgets.
impl_input_intn!(InputInt2, 2, igInputInt2);
impl_input_intn!(InputInt3, 3, igInputInt3);
impl_input_intn!(InputInt4, 4, igInputInt4);
/// Builder for an RGB color editor mutating `value` in place.
#[must_use]
pub struct ColorEdit3<'ui, 'p> {
    label: ImStr<'p>,
    value: &'p mut [f32; 3],
    _phantom: PhantomData<&'ui Ui<'ui>>,
}
impl<'ui, 'p> ColorEdit3<'ui, 'p> {
    /// Starts building a color editor for the RGB triple `value`.
    pub fn new(label: ImStr<'p>, value: &'p mut [f32; 3]) -> Self {
        ColorEdit3 {
            _phantom: PhantomData,
            value: value,
            label: label,
        }
    }
    /// Draws the editor; returns true when the color was changed.
    pub fn build(self) -> bool {
        let label_ptr = self.label.as_ptr();
        unsafe { imgui_sys::igColorEdit3(label_ptr, self.value.as_mut_ptr()) }
    }
}
/// Builder for an RGBA color editor mutating `value` in place.
#[must_use]
pub struct ColorEdit4<'ui, 'p> {
    label: ImStr<'p>,
    value: &'p mut [f32; 4],
    show_alpha: bool, // whether the alpha component is shown/editable
    _phantom: PhantomData<&'ui Ui<'ui>>,
}
impl<'ui, 'p> ColorEdit4<'ui, 'p> {
    /// Starts building a color editor for the RGBA quadruple `value`;
    /// the alpha component is shown by default.
    pub fn new(label: ImStr<'p>, value: &'p mut [f32; 4]) -> Self {
        ColorEdit4 {
            _phantom: PhantomData,
            show_alpha: true,
            value: value,
            label: label,
        }
    }
    /// Draws the editor; returns true when the color was changed.
    pub fn build(self) -> bool {
        let label_ptr = self.label.as_ptr();
        unsafe {
            imgui_sys::igColorEdit4(label_ptr,
                                    self.value.as_mut_ptr(),
                                    self.show_alpha)
        }
    }
}
|
use arkecosystem_client::{Connection, Manager};
// Integration tests for the connection `Manager`.
#[test]
fn test_create_connection() {
    let conn = Connection::new("test");
    let mut manager = Manager::new();
    assert!(manager.connect(conn).is_ok());
    assert_eq!(manager.connections().count(), 1);
}
// A second `connect` while one is registered must be rejected.
#[test]
fn test_create_existing_connection() {
    let conn1 = Connection::new("test1");
    let conn2 = Connection::new("test2");
    let mut manager = Manager::new();
    assert!(manager.connect(conn1).is_ok());
    assert!(manager.connect(conn2).is_err());
}
#[test]
fn test_remove_connection() {
    let conn = Connection::new("test1");
    let mut manager = Manager::new();
    assert!(manager.connect(conn).is_ok());
    // NOTE(review): disconnecting by the empty string apparently removes the
    // connection registered above — confirm that is Manager's intended
    // name-matching behavior and not an accident of the test.
    manager.disconnect("");
    assert_eq!(manager.connections().count(), 0);
}
#[test]
fn test_get_connection() {
    let conn = Connection::new("test1");
    let mut manager = Manager::new();
    assert!(manager.connect(conn).is_ok());
    let default_conn = manager.connection();
    assert!(default_conn.is_some());
}
#[test]
fn test_get_non_existing_connection() {
    let manager = Manager::new();
    let default_conn = manager.connection();
    assert!(default_conn.is_none());
}
// The default connection name is "main" until changed.
#[test]
fn test_get_default_connection() {
    let manager = Manager::new();
    let default = manager.get_default_connection();
    assert_eq!(default, "main");
}
#[test]
fn test_set_default_connection() {
    let mut manager = Manager::new();
    manager.set_default_connection("test");
    let default = manager.get_default_connection();
    assert_eq!(default, "test");
}
// `connect_as` registers under explicit names, so two can coexist.
#[test]
fn test_get_all_connections() {
    let conn1 = Connection::new("test1");
    let conn2 = Connection::new("test3");
    let mut manager = Manager::new();
    assert!(manager.connect_as(conn1, "test1").is_ok());
    assert!(manager.connect_as(conn2, "test3").is_ok());
    let connections = manager.connections();
    assert_eq!(connections.count(), 2);
}
|
use rustc_serialize::{Encodable, Encoder, Decodable, Decoder};
/// Generates pre-1.0 `rustc_serialize` `Encodable`/`Decodable` impls for
/// `$struct_name`, (de)serializing each listed `$field` under its own name.
/// NOTE(review): the index passed to `emit_struct_field`/`read_struct_field`
/// is always 0, as is the struct length — presumably ignored by this old
/// API; confirm against the rustc_serialize version in use.
macro_rules! custom_encodable {
    ($struct_name:ident, $($field:ident),*) => {
        impl <S: Encoder<E>, E> Encodable<S, E> for $struct_name {
            fn encode(&self, encoder: &mut S) -> Result<(), E> {
                encoder.emit_struct(stringify!($struct_name), 0, |encoder| {
                    $(
                        try!(encoder.emit_struct_field(stringify!($field), 0, |encoder|self.$field.encode(encoder)));
                    )*
                    Ok(())
                })
            }
        }
        impl<S: Decoder<E>, E> Decodable<S, E> for $struct_name {
            fn decode(decoder: &mut S) -> Result<$struct_name, E> {
                // Note the struct is read under the name "root", not
                // stringify!($struct_name), unlike the encoder above.
                decoder.read_struct("root", 0, |decoder| {
                    let d = $struct_name {
                        $(
                            $field: try!(decoder.read_struct_field(stringify!($field), 0, |decoder| Decodable::decode(decoder))),
                        )*
                    };
                    Ok(d)
                })
            }
        }
    }
}
// Helper macros mirroring the bodies inside `custom_encodable`.
// NOTE(review): they use-but-don't-declare `encoder`/`decoder`, which
// macro_rules! hygiene normally prevents from resolving to call-site
// locals — these helpers look vestigial/unused; confirm before relying
// on them.
macro_rules! encode_field {
    ($field:ident) => {
        try!(encoder.emit_struct_field(stringify!($field), 0, |encoder|self.$field.encode(encoder)))
    }
}
macro_rules! encode_fields {
    ($($field:ident),+) => {
        $(
            encode_field!($field);
        )+
    }
}
macro_rules! decode_fields {
    ($($field:ident),+) => {
        $(
            $field: decode_field!($field),
        )+
    }
}
macro_rules! decode_field {
    ($field:ident) => {
        try!(decoder.read_struct_field(stringify!($field), 0, |decoder| Decodable::decode(decoder)))
    }
}
|
use apllodb_sql_parser::{
apllodb_ast::{Command, Expression, InsertCommand, InsertValue},
ApllodbAst, ApllodbSqlParser,
};
use pretty_assertions::assert_eq;
use apllodb_test_support::setup::setup_test_logger;
// Runs once at binary start (via ctor) so every test sees a configured logger.
#[ctor::ctor]
fn test_setup() {
    setup_test_logger();
}
// Table-driven test: each SQL string must parse to exactly the paired
// InsertCommand AST.
#[test]
fn test_insert_accepted() {
    let sql_vs_expected_ast: Vec<(&str, InsertCommand)> = vec![
        (
            "INSERT INTO t (id, c1) VALUES (1, 123)",
            InsertCommand::factory(
                "t",
                None,
                vec!["id", "c1"],
                vec![InsertValue::factory(vec![
                    Expression::factory_integer("1"),
                    Expression::factory_integer("123"),
                ])],
            ),
        ),
        (
            r#"INSERT INTO t (c) VALUES ("abc")"#,
            InsertCommand::factory(
                "t",
                None,
                vec!["c"],
                vec![InsertValue::factory(vec![Expression::factory_text(
                    r#"abc"#,
                )])],
            ),
        ),
        // Text values may contain multi-byte chars, quotes and backslashes.
        (
            r#"INSERT INTO t (c) VALUES ("abc🍣 '@\")"#,
            InsertCommand::factory(
                "t",
                None,
                vec!["c"],
                vec![InsertValue::factory(vec![Expression::factory_text(
                    r#"abc🍣 '@\"#,
                )])],
            ),
        ),
        // Table alias via AS.
        (
            "INSERT INTO long_table_name AS t (id, c1) VALUES (1, 123)",
            InsertCommand::factory(
                "long_table_name",
                Some("t"),
                vec!["id", "c1"],
                vec![InsertValue::factory(vec![
                    Expression::factory_integer("1"),
                    Expression::factory_integer("123"),
                ])],
            ),
        ),
        (
            // acceptable syntactically though the number of columns and expressions are different.
            "INSERT INTO t (id, c1) VALUES (1, 123, 456)",
            InsertCommand::factory(
                "t",
                None,
                vec!["id", "c1"],
                vec![InsertValue::factory(vec![
                    Expression::factory_integer("1"),
                    Expression::factory_integer("123"),
                    Expression::factory_integer("456"),
                ])],
            ),
        ),
        // Multi-row VALUES list.
        (
            "INSERT INTO t (id, c1) VALUES (1, 123), (2, 456)",
            InsertCommand::factory(
                "t",
                None,
                vec!["id", "c1"],
                vec![
                    InsertValue::factory(vec![
                        Expression::factory_integer("1"),
                        Expression::factory_integer("123"),
                    ]),
                    InsertValue::factory(vec![
                        Expression::factory_integer("2"),
                        Expression::factory_integer("456"),
                    ]),
                ],
            ),
        ),
    ];
    let parser = ApllodbSqlParser::default();
    for (sql, expected_ast) in sql_vs_expected_ast {
        match parser.parse(sql) {
            Ok(ApllodbAst(Command::InsertCommandVariant(insert_command))) => {
                assert_eq!(insert_command, expected_ast);
            }
            Ok(ast) => panic!(
                "'{}' should be parsed as INSERT but is parsed like: {:?}",
                sql, ast
            ),
            Err(e) => panic!("{}", e),
        }
    }
}
#[test]
fn test_insert_rejected() {
    // Statements that must fail to parse.
    let parser = ApllodbSqlParser::default();
    let sqls: Vec<&str> = vec![
        // Lack parentheses.
        "INSERT INTO t (id) VALUES 1",
    ];
    for sql in sqls.into_iter() {
        assert!(parser.parse(sql).is_err());
    }
}
|
use libp2p::core::Negotiated;
use libp2p::swarm::protocols_handler::{KeepAlive, ProtocolsHandlerUpgrErr, ProtocolsHandlerEvent, SubstreamProtocol};
use libp2p::swarm::ProtocolsHandler;
use crate::message::{Message, PrePrepare, Prepare, Commit};
use tokio::prelude::{AsyncRead, AsyncWrite, Async, AsyncSink};
use crate::behavior::PbftFailure;
use futures::Poll;
use futures::sink::Sink;
use futures::stream::Stream;
use crate::protocol_config::{PbftProtocolConfig, PbftOutStreamSink, PbftInStreamSink};
use libp2p::{OutboundUpgrade, InboundUpgrade};
use std::collections::VecDeque;
/// Event to send to the handler.
///
/// `*Request` variants ask the handler to open an outbound substream and send
/// the contained message; `*Response` variants carry the raw answer bytes for
/// an inbound substream identified by its `ConnectionId`.
#[derive(Debug)]
pub enum PbftHandlerIn {
    /// Send a PrePrepare message to the remote peer.
    PrePrepareRequest(PrePrepare),
    /// Answer bytes for the inbound substream waiting on this `ConnectionId`.
    PrePrepareResponse(Vec<u8>, ConnectionId),
    /// Send a Prepare message to the remote peer.
    PrepareRequest(Prepare),
    /// Answer bytes for the inbound substream waiting on this `ConnectionId`.
    PrepareResponse(Vec<u8>, ConnectionId),
    /// Send a Commit message to the remote peer.
    CommitRequest(Commit),
    /// Answer bytes for the inbound substream waiting on this `ConnectionId`.
    CommitResponse(Vec<u8>, ConnectionId),
}
/// Per-connection protocol handler driving PBFT request/response substreams.
pub struct PbftHandler<TSubstream>
where
    TSubstream: AsyncRead + AsyncWrite
{
    // Upgrade configuration applied to every new substream.
    config: PbftProtocolConfig,
    // State machines of the currently live inbound/outbound substreams.
    substreams: VecDeque<SubstreamState<Negotiated<TSubstream>>>,
    // Source of ids handed to newly negotiated inbound substreams.
    next_connection_id: ConnectionId,
    _marker: std::marker::PhantomData<TSubstream>,
}
/// Unique identifier for a connection.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ConnectionId(u64);
impl ConnectionId {
    /// Start the counter at zero.
    fn new() -> Self {
        ConnectionId(0)
    }
    /// Hand out the current id, then advance the counter by one.
    fn next_id(&mut self) -> Self {
        let current = self.0;
        self.0 += 1;
        println!("[ConnectionId::next_id] issued the connection_id: {:?}", current);
        ConnectionId(current)
    }
}
/// State machine of a single inbound (`In*`) or outbound (`Out*`) substream.
enum SubstreamState<TSubstream>
where
    TSubstream: AsyncRead + AsyncWrite
{
    /// We haven't started opening the outgoing substream yet.
    /// Contains the request we want to send once it is negotiated.
    OutPendingOpen(Message),
    /// Waiting to send a message to the remote.
    OutPendingSend(PbftOutStreamSink<TSubstream>, Message),
    /// Waiting to flush the substream so that the data arrives to the remote.
    OutPendingFlush(PbftOutStreamSink<TSubstream>),
    /// Waiting for the remote's answer on this substream.
    // TODO: add timeout
    OutWaitingAnswer(PbftOutStreamSink<TSubstream>),
    /// The substream is being closed.
    OutClosing(PbftOutStreamSink<TSubstream>),
    /// Waiting for a request from the remote.
    InWaitingMessage(ConnectionId, PbftInStreamSink<TSubstream>),
    /// Waiting to send a `PbftHandlerIn` event containing the response.
    InWaitingToProcessMessage(ConnectionId, PbftInStreamSink<TSubstream>),
    /// Waiting to send an answer back to the remote.
    InPendingSend(PbftInStreamSink<TSubstream>, Vec<u8>),
    /// Waiting to flush an answer back to the remote.
    InPendingFlush(PbftInStreamSink<TSubstream>),
    /// The substream is being closed.
    InClosing(PbftInStreamSink<TSubstream>),
}
/// Event surfaced from the handler to the behaviour.
#[derive(Debug)]
pub enum PbftHandlerEvent {
    /// An inbound PrePrepare arrived; the behaviour must process it and reply
    /// via `PbftHandlerIn::PrePrepareResponse` with the same `connection_id`.
    ProcessPrePrepareRequest {
        request: PrePrepare,
        connection_id: ConnectionId,
    },
    /// Raw answer bytes received on an outbound substream.
    Response {
        response: Vec<u8>,
    },
    /// An inbound Prepare arrived; reply via `PbftHandlerIn::PrepareResponse`.
    ProcessPrepareRequest {
        request: Prepare,
        connection_id: ConnectionId,
    },
    /// An inbound Commit arrived; reply via `PbftHandlerIn::CommitResponse`.
    ProcessCommitRequest {
        request: Commit,
        connection_id: ConnectionId,
    },
}
impl<TSubstream> PbftHandler<TSubstream>
where
TSubstream: AsyncRead + AsyncWrite
{
pub fn new() -> Self {
Self {
config: PbftProtocolConfig {},
substreams: VecDeque::with_capacity(100), // FIXME
next_connection_id: ConnectionId::new(),
_marker: std::marker::PhantomData,
}
}
fn find_waiting_substream_state_pos(&self, connection_id: &ConnectionId) -> Option<usize> {
self.substreams.iter().position(|state| {
match state {
SubstreamState::InWaitingToProcessMessage(substream_connection_id, _) => {
substream_connection_id == connection_id
}
_ => false
}
})
}
}
impl<TSubstream> ProtocolsHandler for PbftHandler<TSubstream>
where
    TSubstream: AsyncRead + AsyncWrite,
{
    type InEvent = PbftHandlerIn;
    type OutEvent = PbftHandlerEvent;
    type Error = PbftFailure;
    type Substream = TSubstream;
    type InboundProtocol = PbftProtocolConfig;
    type OutboundProtocol = PbftProtocolConfig;
    type OutboundOpenInfo = Message;
    /// Protocol configuration offered to remote peers for inbound substreams.
    fn listen_protocol(&self) -> SubstreamProtocol<PbftProtocolConfig> {
        println!("PbftHandler::listen_protocol()");
        SubstreamProtocol::new(self.config.clone())
    }
    /// An inbound substream finished negotiation: tag it with a fresh
    /// `ConnectionId` and start waiting for the remote's request.
    fn inject_fully_negotiated_inbound(
        &mut self,
        protocol: <Self::InboundProtocol as InboundUpgrade<TSubstream>>::Output,
    ) {
        println!("PbftHandler::inject_fully_negotiated_inbound()");
        self.substreams.push_back(SubstreamState::InWaitingMessage(self.next_connection_id.next_id(), protocol));
    }
    /// An outbound substream finished negotiation: queue the pending message
    /// for sending.
    fn inject_fully_negotiated_outbound(
        &mut self,
        protocol: <Self::OutboundProtocol as OutboundUpgrade<TSubstream>>::Output,
        message: Self::OutboundOpenInfo,
    ) {
        println!("PbftHandler::inject_fully_negotiated_outbound()");
        self.substreams.push_back(SubstreamState::OutPendingSend(protocol, message));
    }
    /// Handle a command from the behaviour. Requests queue a new outbound
    /// substream; responses are routed to the inbound substream waiting on
    /// the matching `ConnectionId` (panics if none is found).
    fn inject_event(&mut self, handler_in: PbftHandlerIn) {
        println!("[PbftHandler::inject_event] handler_in: {:?}", handler_in);
        match handler_in {
            PbftHandlerIn::PrePrepareRequest(request) => {
                self.substreams.push_back(
                    SubstreamState::OutPendingOpen(Message::PrePrepare(request))
                );
            }
            PbftHandlerIn::PrePrepareResponse(response, connection_id) => {
                println!("[PbftHandler::inject_event] [PbftHandlerIn::PrePrepareResponse] response: {:?}, connection_id: {:?}", response, connection_id);
                // Consistency fix: use the shared lookup helper instead of the
                // duplicated inline search this branch previously carried
                // (identical predicate to find_waiting_substream_state_pos).
                if let Some(pos) = self.find_waiting_substream_state_pos(&connection_id) {
                    let (_connection_id, substream) = match self.substreams.remove(pos) {
                        Some(SubstreamState::InWaitingToProcessMessage(connection_id, substream)) => (connection_id, substream),
                        _ => unreachable!(),
                    };
                    self.substreams.push_back(SubstreamState::InPendingSend(substream, response));
                } else {
                    panic!("[PbftHandler::inject_event] [PbftHandlerIn::PrePrepareResponse] substream state is not found, connection_id: {:?}", connection_id);
                }
            }
            PbftHandlerIn::PrepareRequest(request) => {
                println!("[PbftHandler::inject_event] [PbftHandlerIn::PrepareRequest] request: {:?}", request);
                self.substreams.push_back(
                    SubstreamState::OutPendingOpen(Message::Prepare(request))
                );
            }
            PbftHandlerIn::PrepareResponse(response, connection_id) => {
                println!("[PbftHandler::inject_event] [PbftHandlerIn::PrepareResponse] response: {:?}, connection_id: {:?}", response, connection_id);
                if let Some(pos) = self.find_waiting_substream_state_pos(&connection_id) {
                    let (_connection_id, substream) = match self.substreams.remove(pos) {
                        Some(SubstreamState::InWaitingToProcessMessage(connection_id, substream)) => (connection_id, substream),
                        _ => unreachable!(),
                    };
                    self.substreams.push_back(SubstreamState::InPendingSend(substream, response));
                } else {
                    panic!("[PbftHandler::inject_event] [PbftHandlerIn::PrepareResponse] substream state is not found, connection_id: {:?}", connection_id);
                }
            }
            PbftHandlerIn::CommitRequest(request) => {
                println!("[PbftHandler::inject_event] [PbftHandlerIn::CommitRequest] request: {:?}", request);
                self.substreams.push_back(
                    SubstreamState::OutPendingOpen(Message::Commit(request))
                );
            }
            PbftHandlerIn::CommitResponse(response, connection_id) => {
                println!("[PbftHandler::inject_event] [PbftHandlerIn::CommitResponse] response: {:?}, connection_id: {:?}", response, connection_id);
                if let Some(pos) = self.find_waiting_substream_state_pos(&connection_id) {
                    let (_connection_id, substream) = match self.substreams.remove(pos) {
                        Some(SubstreamState::InWaitingToProcessMessage(connection_id, substream)) => (connection_id, substream),
                        _ => unreachable!(),
                    };
                    self.substreams.push_back(SubstreamState::InPendingSend(substream, response));
                } else {
                    panic!("[PbftHandler::inject_event] [PbftHandlerIn::CommitResponse] substream state is not found, connection_id: {:?}", connection_id);
                }
            }
        }
    }
    /// Dial upgrade failures are only logged; the message is dropped.
    fn inject_dial_upgrade_error(&mut self, info: Message, error: ProtocolsHandlerUpgrErr<std::io::Error>) {
        println!("PbftHandler::inject_dial_upgrade_error(), info: {:?}, error: {:?}", info, error);
    }
    fn connection_keep_alive(&self) -> KeepAlive {
        // println!("PbftHandler::connection_keep_alive()");
        KeepAlive::Yes
    }
    /// Drive every substream one step via `handle_substream`, re-queueing
    /// surviving states and surfacing at most one event per call.
    fn poll(&mut self) -> Poll<ProtocolsHandlerEvent<PbftProtocolConfig, Message, Self::OutEvent>, Self::Error> {
        println!("[PbftHandler::poll]");
        for _ in 0..self.substreams.len() {
            if let Some(mut substream_state) = self.substreams.pop_front() {
                println!("[PbftHandler::poll] [substream_state]");
                loop {
                    match handle_substream(substream_state, self.config.clone()) {
                        (Some(new_substream_state), None, true) => {
                            // Bug fix: this log previously read "(Some, None false)"
                            // even though it is the `true` (poll-again) branch.
                            println!("[PbftHandler::poll] (Some, None, true)");
                            substream_state = new_substream_state;
                            continue;
                        },
                        (Some(new_substream_state), None, false) => {
                            println!("[PbftHandler::poll] (Some, None, false)");
                            self.substreams.push_back(new_substream_state);
                            break;
                        },
                        (None, Some(protocol_handler_event), _) => {
                            println!("[PbftHandler::poll] (None, Some, _) protocol_handler_event : {:?}", protocol_handler_event);
                            return Ok(Async::Ready(protocol_handler_event));
                        },
                        (Some(new_substream_state), Some(protocol_handler_event), _) => {
                            println!("[PbftHandler::poll] (Some, Some, _)");
                            self.substreams.push_back(new_substream_state);
                            return Ok(Async::Ready(protocol_handler_event));
                        }
                        (None, None, _) => {
                            // TODO
                            println!("[PbftHandler::poll] (None, None, _)");
                            break;
                        }
                    }
                }
            }
        }
        Ok(Async::NotReady)
    }
}
/// Advance a single substream state machine by one step.
///
/// Returns `(next_state, event, poll_again)`:
/// - `next_state`: the state to keep (`None` drops the substream),
/// - `event`: a `ProtocolsHandlerEvent` to surface from `poll`, if any,
/// - `poll_again`: `true` when the returned state should be driven again
///   immediately within the same `poll` iteration.
fn handle_substream<TSubstream>(
    substream_state: SubstreamState<TSubstream>,
    config: PbftProtocolConfig,
) -> (
    Option<SubstreamState<TSubstream>>,
    Option<
        ProtocolsHandlerEvent<
            PbftProtocolConfig,
            Message,
            PbftHandlerEvent,
        >,
    >,
    bool, // whether the substream should be polled again
)
where
    TSubstream: AsyncRead + AsyncWrite
{
    match substream_state {
        // Outbound: ask the swarm to negotiate a substream carrying `message`.
        SubstreamState::OutPendingOpen(message) => {
            println!("[PbftHandler::handle_substream()] [SubstreamState::OutPendingOpen] message: {:?}", message);
            let event = ProtocolsHandlerEvent::OutboundSubstreamRequest {
                protocol: SubstreamProtocol::new(config),
                info: message,
            };
            return (None, Some(event), false);
        }
        // Outbound: try to hand `message` to the sink; NotReady hands it back.
        SubstreamState::OutPendingSend(mut substream, message) => {
            println!("[PbftHandler::handle_substream()] [SubstreamState::OutPendingSend] message: {:?}", message);
            match substream.start_send(message) {
                Ok(AsyncSink::Ready) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::OutPendingSend] [start_send::Ready]");
                    (
                        Some(SubstreamState::OutPendingFlush(substream)),
                        None,
                        true,
                    )
                },
                Ok(AsyncSink::NotReady(msg)) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::OutPendingSend] [start_send::NotReady] msg: {:?}", msg);
                    (
                        Some(SubstreamState::OutPendingSend(substream, msg)),
                        None,
                        false,
                    )
                },
                Err(e) => {
                    // Errors currently drop the substream silently.
                    println!("[PbftHandler::handle_substream()] [SubstreamState::OutPendingSend] [start_send::Err] Err: {:?}", e);
                    (None, None, false) // TODO
                }
            }
        }
        // Outbound: flush buffered bytes, then wait for the remote's answer.
        SubstreamState::OutPendingFlush(mut substream) => {
            match substream.poll_complete() {
                Ok(Async::Ready(())) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::OutPendingFlush] [Ready]");
                    (
                        Some(SubstreamState::OutWaitingAnswer(substream)),
                        None,
                        true,
                    )
                }
                Ok(Async::NotReady) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::OutPendingFlush] [NotReady]");
                    (
                        Some(SubstreamState::OutPendingFlush(substream)),
                        None,
                        false,
                    )
                }
                Err(e) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::OutPendingFlush] [Err] Err: {:?}", e);
                    (None, None, false) // TODO
                }
            }
        }
        // Outbound: an answer surfaces as a `Response` event and the
        // substream moves on to closing.
        SubstreamState::OutWaitingAnswer(mut substream) => {
            println!("[PbftHandler::handle_substream()] [SubstreamState::OutWaitingAnswer]");
            match substream.poll() {
                Ok(Async::Ready(Some(response))) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::OutWaitingAnswer] [Ready::Some] response: {:?}", response);
                    (
                        Some(SubstreamState::OutClosing(substream)),
                        Some(ProtocolsHandlerEvent::Custom(PbftHandlerEvent::Response { response })),
                        true,
                    )
                }
                Ok(Async::NotReady) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::OutWaitingAnswer] [NotReady]");
                    (
                        Some(SubstreamState::OutWaitingAnswer(substream)),
                        None,
                        false,
                    )
                }
                Err(e) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::OutWaitingAnswer] [Err] Err: {:?}", e);
                    (None, None, false) // TODO
                }
                // EOF before an answer arrived: substream is dropped.
                Ok(Async::Ready(None)) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::OutWaitingAnswer] [Ready::None]");
                    (None, None, false) // TODO
                }
            }
        }
        // Outbound: graceful close; state is kept only while NotReady.
        SubstreamState::OutClosing(mut substream) => {
            match substream.close() {
                Ok(Async::Ready(())) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::OutClosing] [Ready]");
                    (None, None, false)
                }
                Ok(Async::NotReady) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::OutClosing] [NotReady]");
                    (
                        Some(SubstreamState::OutClosing(substream)),
                        None,
                        false,
                    )
                }
                Err(e) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::OutClosing] [Err] Err: {:?}", e);
                    (None, None, false) // TODO
                }
            }
        }
        // Inbound: a request arrived; surface it to the behaviour and wait
        // (InWaitingToProcessMessage) for the matching *Response event.
        SubstreamState::InWaitingMessage(connection_id, mut substream) => {
            match substream.poll() {
                Ok(Async::Ready(Some(msg))) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::InWaitingMessage] [Ready(Some)] msg: {:?}", msg);
                    (
                        Some(SubstreamState::InWaitingToProcessMessage(connection_id.clone(), substream)),
                        Some(ProtocolsHandlerEvent::Custom(message_to_handler_event(msg, connection_id))),
                        false,
                    )
                },
                Ok(Async::NotReady) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::InWaitingMessage] [NotReady]");
                    (
                        Some(SubstreamState::InWaitingMessage(connection_id, substream)),
                        None,
                        false,
                    )
                },
                Ok(Async::Ready(None)) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::InWaitingMessage] [Ready(None)] Inbound substream EOF");
                    (None, None, false)
                },
                Err(e) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::InWaitingMessage] [Err] Err: {:?}", e);
                    (None, None, false) // TODO
                }
            }
        }
        // Inbound: parked until inject_event swaps this into InPendingSend.
        SubstreamState::InWaitingToProcessMessage(connection_id, substream) => {
            println!("[PbftHandler::handle_substream()] [SubstreamState::InWaitingUser]");
            (
                Some(SubstreamState::InWaitingToProcessMessage(connection_id, substream)),
                None,
                false,
            )
        }
        // Inbound: push the answer bytes into the sink.
        SubstreamState::InPendingSend(mut substream, response) => {
            match substream.start_send(response) {
                Ok(AsyncSink::Ready) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::InPendingSend] [AsyncSink::Ready]");
                    (
                        Some(SubstreamState::InPendingFlush(substream)),
                        None,
                        true,
                    )
                },
                Ok(AsyncSink::NotReady(response)) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::InPendingSend] [AsyncSink::NotReady]");
                    (
                        Some(SubstreamState::InPendingSend(substream, response)),
                        None,
                        false,
                    )
                },
                Err(e) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::InPendingSend] [Err]: {:?}", e);
                    (None, None, false) // TODO
                }
            }
        }
        // Inbound: flush the answer, then close.
        SubstreamState::InPendingFlush(mut substream) => {
            match substream.poll_complete() {
                Ok(Async::Ready(())) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::InPendingFlush] [Async::Ready]");
                    (
                        Some(SubstreamState::InClosing(substream)),
                        None,
                        true,
                    )
                },
                Ok(Async::NotReady) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::InPendingFlush] [Async::NotReady]");
                    (
                        Some(SubstreamState::InPendingFlush(substream)),
                        None,
                        false,
                    )
                },
                Err(e) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::InPendingFlush] [Err]: {:?}", e);
                    (None, None, false)
                }
            }
        }
        // Inbound: graceful close; state is kept only while NotReady.
        SubstreamState::InClosing(mut substream) => {
            match substream.close() {
                Ok(Async::Ready(())) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::InClosing] [Async::Ready]");
                    (None, None, false)
                },
                Ok(Async::NotReady) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::InClosing] [Async::NotReady]");
                    (Some(SubstreamState::InClosing(substream)), None, false)
                },
                Err(e) => {
                    println!("[PbftHandler::handle_substream()] [SubstreamState::InClosing] [Err]: {:?}", e);
                    (None, None, false) // TODO
                }
            }
        }
    }
}
fn message_to_handler_event(
message: Message,
connection_id: ConnectionId,
) -> PbftHandlerEvent {
match message {
Message::PrePrepare(pre_prepare) => {
PbftHandlerEvent::ProcessPrePrepareRequest { request: pre_prepare, connection_id }
}
Message::Prepare(prepare) => {
PbftHandlerEvent::ProcessPrepareRequest { request: prepare, connection_id }
}
Message::Commit(commit) => {
PbftHandlerEvent::ProcessCommitRequest { request: commit, connection_id }
}
Message::ClientRequest(_) => unreachable!()
}
} |
use std::collections::HashMap;
use std::time::Duration;
use serde::{Deserialize, Serialize};
// See line 38+120
//use isolanguage_1::LanguageCode;
use chrono::{DateTime, NaiveDate, Utc};
use crate::model::{Copyright, DatePrecision, Image, Page, TypeEpisode, TypeShow};
use crate::util;
// Expands to a struct (via `to_struct!`) containing the caller-supplied
// fields followed by every field shared between the simplified and full
// show objects. Keeps the two structs' common fields in one place.
macro_rules! inherit_show_simplified {
    ($(#[$attr:meta])* $name:ident { $($(#[$f_attr:meta])* $f_name:ident : $f_ty:ty,)* }) => {
        to_struct!($(#[$attr])* $name {
            $(
                $(#[$f_attr])*
                $f_name: $f_ty,
            )*
            /// A list of countries in which the show can be played. These are ISO 3166 2-letter
            /// country codes.
            available_markets: Vec<String>,
            /// The copyright statements of the show.
            copyrights: Vec<Copyright>,
            /// A description of the show.
            description: String,
            /// Whether the show is explicit.
            explicit: bool,
            /// Known externals URLs for this show.
            external_urls: HashMap<String, String>,
            /// The [Spotify ID](https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids)
            /// for this show.
            id: String,
            /// The cover art for the show in various sizes, widest first.
            images: Vec<Image>,
            /// Whether the episode is hosted outside of Spotify's CDN. Can be [`None`].
            is_externally_hosted: Option<bool>,
            /// The list of languages used in the show. These are ISO 639 codes.
            // TODO: it can be en-US/en-GB
            languages: Vec<String>,
            /// The media type of the show.
            media_type: String,
            /// The name of the show.
            name: String,
            /// The publisher of the show.
            publisher: String,
            /// The item type; `show`.
            #[serde(rename = "type")]
            item_type: TypeShow,
        });
    }
}
// Generate the two show structs from the shared field list above.
inherit_show_simplified!(
    /// A simplified show object.
    ShowSimplified {}
);
inherit_show_simplified!(
    /// A show object.
    Show {
        /// A list of the show's episodes.
        episodes: Page<EpisodeSimplified>,
    }
);
impl Show {
/// Convert to a `ShowSimplified`.
#[must_use]
pub fn simplify(self) -> ShowSimplified {
ShowSimplified {
available_markets: self.available_markets,
copyrights: self.copyrights,
description: self.description,
explicit: self.explicit,
external_urls: self.external_urls,
id: self.id,
images: self.images,
is_externally_hosted: self.is_externally_hosted,
languages: self.languages,
media_type: self.media_type,
name: self.name,
publisher: self.publisher,
item_type: TypeShow,
}
}
}
impl From<Show> for ShowSimplified {
fn from(show: Show) -> Self {
show.simplify()
}
}
/// Information about a show that has been saved.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct SavedShow {
    /// When the show was saved (UTC).
    pub added_at: DateTime<Utc>,
    /// Information about the show.
    pub show: ShowSimplified,
}
// Expands to a struct (via `to_struct!`) containing the caller-supplied
// fields followed by every field shared between the simplified and full
// episode objects.
macro_rules! inherit_episode_simplified {
    ($(#[$attr:meta])* $name:ident { $($(#[$f_attr:meta])* $f_name:ident : $f_ty:ty,)* }) => {
        to_struct!($(#[$attr])* $name {
            $(
                $(#[$f_attr])*
                $f_name: $f_ty,
            )*
            /// The URL of a 30 second MP3 preview of the episode, or None.
            audio_preview_url: Option<String>,
            /// A description of the episode.
            description: String,
            /// The length of the episode; serialized as integer milliseconds.
            #[serde(rename = "duration_ms", with = "serde_millis")]
            duration: Duration,
            /// Whether the episode is explicit.
            explicit: bool,
            /// Externals URLs for this episode.
            external_urls: HashMap<String, String>,
            /// The [Spotify ID](https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids)
            /// for this episode.
            id: String,
            /// The cover art for this episode in sizes, widest first.
            images: Vec<Image>,
            /// Whether the episode is hosted outside of Spotify's CDN.
            is_externally_hosted: bool,
            /// Whether the episode is playable in the given market.
            is_playable: bool,
            /// The list of languages used in this episode.
            // TODO: it can be en-US/en-GB
            languages: Vec<String>,
            /// The name of the episode.
            name: String,
            /// When the episode was released; accepts year/month/day precision.
            #[serde(deserialize_with = "util::de_date_any_precision")]
            release_date: NaiveDate,
            /// How precise the release date is: precise to the year, month or day.
            release_date_precision: DatePrecision,
            /// The user's most recent position in the episode. [`None`] if there is no user.
            resume_point: Option<ResumePoint>,
            /// The item type; `episode`.
            #[serde(rename = "type")]
            item_type: TypeEpisode,
        });
    }
}
// Generate the two episode structs from the shared field list above.
inherit_episode_simplified!(
    /// A simplified episode object.
    EpisodeSimplified {}
);
inherit_episode_simplified!(
    /// An episode object.
    Episode {
        /// The show on which the episode belongs.
        show: ShowSimplified,
    }
);
impl Episode {
/// Convert to an [`EpisodeSimplified`].
#[must_use]
pub fn simplify(self) -> EpisodeSimplified {
EpisodeSimplified {
audio_preview_url: self.audio_preview_url,
description: self.description,
duration: self.duration,
explicit: self.explicit,
external_urls: self.external_urls,
id: self.id,
images: self.images,
is_externally_hosted: self.is_externally_hosted,
is_playable: self.is_playable,
languages: self.languages,
name: self.name,
release_date: self.release_date,
release_date_precision: self.release_date_precision,
resume_point: self.resume_point,
item_type: TypeEpisode,
}
}
}
impl From<Episode> for EpisodeSimplified {
fn from(episode: Episode) -> Self {
episode.simplify()
}
}
/// A position to resume from in an object.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
pub struct ResumePoint {
    /// Whether the user has fully played the object.
    pub fully_played: bool,
    /// The user's most recent position in the object; serialized as integer
    /// milliseconds (`resume_position_ms`).
    #[serde(rename = "resume_position_ms", with = "serde_millis")]
    pub resume_position: Duration,
}
|
extern crate hyper;
extern crate futures;
extern crate tokio_proto;
#[macro_use]
extern crate serde_json;
extern crate wordcut_engine;
extern crate config;
use futures::future;
use futures::Stream;
use futures::Future;
use hyper::header::{ContentLength, ContentType};
use hyper::server::{Http, Request, Response, Service};
use hyper::{StatusCode, Post};
use tokio_proto::TcpServer;
use serde_json::Value;
use wordcut_engine::{load_dict, Wordcut};
#[macro_use]
extern crate lazy_static;
use std::error;
use std::fmt;
use std::result;
use std::path::Path;
use std::collections::HashMap;
/// Errors produced while decoding a segmentation request.
#[derive(Debug)]
pub enum ServerError {
    /// The request body could not be read from the connection.
    CannotReadBody,
    /// The body was not valid JSON.
    CannotParseJsonRequest,
    /// The JSON root was not an object.
    CannotGetJsonObject,
    /// The object has no "text" attribute.
    CannotGetTextAttr,
    /// The "text" attribute is not a JSON string.
    TextAttrIsNotString
}
lazy_static! {
    /// Application settings, loaded once from a `config.*` file in the
    /// working directory. Panics at first use if the file is missing or
    /// cannot be converted to a string map.
    static ref CONFIG: HashMap<String, String> = {
        let mut settings = config::Config::default();
        // Fixed: a stray double semicolon (`;;`) followed the expect() call.
        settings.merge(config::File::with_name("config"))
            .expect("Can't get config file");
        settings.try_into().expect("Can't turn settings to map")
    };
    /// The word segmenter, built once from the dictionary file named by the
    /// `dict_path` config key. Panics at first use if the key or file is bad.
    static ref WORDCUT: Wordcut = {
        let path_str = CONFIG.get("dict_path")
            .expect("Can't get dict_path");
        let path = Path::new(path_str);
        let dict = load_dict(path)
            .expect("Cannot load dict");
        Wordcut::new(dict)
    };
}
impl fmt::Display for ServerError {
    /// Human-readable rendering; reuses the `Debug` form of the variant.
    fn fmt(&self, f: &mut fmt::Formatter) -> result::Result<(), fmt::Error> {
        let rendered = format!("Error: {:?}", self);
        f.write_str(&rendered)
    }
}
impl error::Error for ServerError {
    // NOTE(review): `description` and `cause` are the pre-1.33 Error API;
    // Display carries the useful detail, so these stay minimal.
    fn description(&self) -> &str {
        "Server error"
    }
    fn cause(&self) -> Option<&error::Error> {
        None
    }
}
/// Zero-sized hyper `Service` implementing the segmentation endpoints.
struct WordcutServer;
/// Body returned for unknown routes.
const NOT_FOUND_MSG: &'static str = "Not found";
/// Boxed response future (futures-0.1 style, `Item`/`Error` trait object).
type WebFuture = Box<future::Future<Item=Response, Error=hyper::Error>>;
/// Boxed future resolving to the accumulated request-body bytes.
type BodyFuture = Box<future::Future<Item=Vec<u8>, Error=Box<ServerError>>>;
/// Build a JSON response with the given body and status code.
fn resp_with_msg(msg: &str, status: StatusCode) -> Response {
    let body = String::from(msg);
    Response::new()
        .with_status(status)
        .with_header(ContentLength(body.len() as u64))
        .with_header(ContentType::json())
        .with_body(body)
}
/// Parse the raw request body as a JSON value.
fn read_val(body: Vec<u8>) -> Result<Value, Box<ServerError>> {
    serde_json::from_slice::<Value>(&body)
        .map_err(|_| Box::new(ServerError::CannotParseJsonRequest))
}
/// Extract the "text" attribute from the request's JSON object.
///
/// Errors distinguish a non-object root (`CannotGetJsonObject`), a missing
/// "text" attribute (`CannotGetTextAttr`) and a non-string "text" value
/// (`TextAttrIsNotString`).
fn get_text(val: Value) -> Result<String, Box<ServerError>> {
    val.as_object()
        .ok_or_else(|| Box::new(ServerError::CannotGetJsonObject))
        .and_then(|obj| obj.get("text").ok_or_else(|| Box::new(ServerError::CannotGetTextAttr)))
        // Bug fix: a non-string "text" previously reported CannotGetTextAttr,
        // leaving the dedicated TextAttrIsNotString variant unused.
        // Also: `ok_or_else` avoids allocating the boxed error on success.
        .and_then(|text| text.as_str().ok_or_else(|| Box::new(ServerError::TextAttrIsNotString)))
        .map(String::from)
}
/// Segment `text` and wrap the resulting tokens as `{"words": [...]}`.
fn wordseg(text: String) -> Result<Value, Box<ServerError>> {
    let words = WORDCUT.segment_into_strings(&text);
    Ok(json!({"words": words}))
}
/// Collect the full request body into a single byte buffer.
fn read_body(req: Request) -> BodyFuture {
    Box::new(
        req.body()
            .map_err(|_| Box::new(ServerError::CannotReadBody))
            .fold(Vec::new(), |mut acc, chunk| {
                acc.extend_from_slice(&chunk);
                Ok::<_, Box<ServerError>>(acc)
            }),
    )
}
/// Serialize the handler outcome into an HTTP response. Never fails: every
/// error path is turned into a 500 response instead.
fn make_resp(val: Result<Value, Box<ServerError>>) -> Result<Response, hyper::Error> {
    let resp = match val {
        Ok(val) => match serde_json::to_string(&val) {
            Ok(s) => resp_with_msg(&s, StatusCode::Ok),
            Err(e) => resp_with_msg(
                &format!("Err {} cannot convert output value to string", e),
                StatusCode::InternalServerError,
            ),
        },
        Err(e) => resp_with_msg(&format!("Err {}", e), StatusCode::InternalServerError),
    };
    Ok(resp)
}
/// POST /wordseg: read the body, pull "text", segment it, reply with JSON.
fn wordseg_handler(req: Request) -> WebFuture {
    Box::new(
        read_body(req)
            .and_then(read_val)
            .and_then(get_text)
            .and_then(wordseg)
            .then(make_resp),
    )
}
/// Build the segmentation DAG for `text` and wrap it as `{"dag": ...}`.
fn build_dag(text: String) -> Result<Value, Box<ServerError>> {
    let dag = WORDCUT.build_dag(&text);
    Ok(json!({"dag": dag}))
}
/// POST /dag: read the body, pull "text", return its segmentation DAG.
fn dag_handler(req: Request) -> WebFuture {
    Box::new(
        read_body(req)
            .and_then(read_val)
            .and_then(get_text)
            .and_then(build_dag)
            .then(make_resp),
    )
}
/// Catch-all handler: a plain 404 with a fixed body.
fn not_found(_req: Request) -> WebFuture {
    Box::new(futures::future::ok(
        Response::new()
            .with_status(hyper::StatusCode::NotFound)
            .with_header(ContentLength(NOT_FOUND_MSG.len() as u64))
            .with_body(NOT_FOUND_MSG),
    ))
}
impl Service for WordcutServer {
    type Request = Request;
    type Response = Response;
    type Error = hyper::Error;
    type Future = WebFuture;
    /// Route by method and path: POST /wordseg and POST /dag are handled,
    /// everything else gets a 404.
    fn call(&self, req: Request) -> Self::Future {
        match (req.method(), req.path()) {
            (&Post, "/wordseg") => wordseg_handler(req),
            (&Post, "/dag") => dag_handler(req),
            _ => not_found(req)
        }
    }
}
/// Read `num_threads` and `bind_addr` from CONFIG, then serve forever.
/// Panics on any missing or malformed configuration value.
fn main() {
    let num_threads = CONFIG
        .get("num_threads")
        .expect("Can't get num_threads")
        .parse()
        .expect("Can't parse num_threads");
    let addr = CONFIG
        .get("bind_addr")
        .expect("Can't get bind_addr")
        .parse()
        .expect("Can't parse URL");
    let mut tcp_server = TcpServer::new(Http::new(), addr);
    tcp_server.threads(num_threads);
    println!("Listening {:?} ...", addr);
    tcp_server.serve(|| Ok(WordcutServer));
}
|
use lazy_static::lazy_static;
use std::time;
use humantime;
use ansi_term::Color;
use std::env;
// TODO: Maybe we want to have some global static singleton that does
// different things for these depending on some settings?
//
lazy_static! {
    /// Log verbosity from `FRIDAY_LOG_LEVEL`; defaults to 5 (DEBUG) when the
    /// variable is unset or holds an unrecognized value.
    pub static ref LEVEL: u32 = env::var("FRIDAY_LOG_LEVEL")
        .map(|s| match s.as_str() {
            "DEBUG" => 5,
            "INFO" => 4,
            "WARNING" => 3,
            "ERROR" => 2,
            "FATAL" => 1,
            _ => 5,
        })
        .unwrap_or(5);
}
/// Current wall-clock time as an RFC 3339 string with second precision.
pub fn get_timestamp() -> String {
    humantime::format_rfc3339_seconds(time::SystemTime::now()).to_string()
}
/// Return the final `/`-separated component of `file_path` (used to shorten
/// `std::file!()` paths in log lines). `str::rsplit` always yields at least
/// one item, so the "unknown" fallback is purely defensive.
pub fn extract_filename(file_path: &str) -> String {
    file_path
        .rsplit('/')
        .next()
        .unwrap_or("unknown")
        .to_owned()
}
/// Wrap `s` in ANSI red escape codes.
pub fn red<S: AsRef<str>>(s: S) -> String {
    Color::Red.paint(s.as_ref()).to_string()
}
/// Wrap `s` in ANSI green escape codes.
pub fn green<S: AsRef<str>>(s: S) -> String {
    Color::Green.paint(s.as_ref()).to_string()
}
/// Wrap `s` in ANSI yellow escape codes.
pub fn yellow<S: AsRef<str>>(s: S) -> String {
    Color::Yellow.paint(s.as_ref()).to_string()
}
/// Wrap `s` in ANSI cyan escape codes.
pub fn cyan<S: AsRef<str>>(s: S) -> String {
    Color::Cyan.paint(s.as_ref()).to_string()
}
/// Wrap `s` in ANSI purple escape codes.
pub fn purple<S: AsRef<str>>(s: S) -> String {
    Color::Purple.paint(s.as_ref()).to_string()
}
/// Wrap `s` in ANSI blue escape codes.
pub fn blue<S: AsRef<str>>(s: S) -> String {
    Color::Blue.paint(s.as_ref()).to_string()
}
/// Wrap `s` in ANSI white escape codes.
/// Bug fix: this previously painted the text blue (copy-paste from `blue`).
pub fn white<S: AsRef<str>>(s: S) -> String {
    Color::White.paint(s.as_ref()).to_string()
}
/// Log a yellow DEBUG line (timestamp, file:line, message) when LEVEL > 4.
#[macro_export]
macro_rules! debug {
    ($str:expr $(,$arg: expr)*) => {
        if *$crate::LEVEL > 4 {
            println!("{}",
                $crate::yellow(
                    format!(
                        "{} {}:{} - {}",
                        $crate::get_timestamp(),
                        $crate::extract_filename(std::file!()),
                        std::line!(),
                        format!($str $(,$arg)*)
                    )
                )
            );
        }
    }
}
/// Log a blue INFO line (timestamp, file:line, message) when LEVEL > 3.
#[macro_export]
macro_rules! info {
    ($str:expr $(,$arg: expr)*) => {
        if *$crate::LEVEL > 3 {
            println!("{}",
                $crate::blue(
                    format!(
                        "{} {}:{} - {}",
                        $crate::get_timestamp(),
                        $crate::extract_filename(std::file!()),
                        std::line!(),
                        format!($str $(,$arg)*)
                    )
                )
            );
        }
    }
}
/// Log a purple WARNING line (timestamp, file:line, message) when LEVEL > 2.
#[macro_export]
macro_rules! warning {
    ($str:expr $(,$arg: expr)*) => {
        if *$crate::LEVEL > 2 {
            println!("{}",
                $crate::purple(
                    format!(
                        "{} {}:{} - {}",
                        $crate::get_timestamp(),
                        $crate::extract_filename(std::file!()),
                        std::line!(),
                        format!($str $(,$arg)*)
                    )
                )
            );
        }
    }
}
/// Log a red ERROR line (timestamp, file:line, message) when LEVEL > 1.
#[macro_export]
macro_rules! error {
    ($str:expr $(,$arg: expr)*) => {
        if *$crate::LEVEL > 1 {
            println!("{}",
                $crate::red(
                    format!(
                        "{} {}:{} - {}",
                        $crate::get_timestamp(),
                        $crate::extract_filename(std::file!()),
                        std::line!(),
                        format!($str $(,$arg)*)
                    )
                )
            );
        }
    }
}
/// Log a red FATAL line (timestamp, file:line, message) when LEVEL > 0.
/// Note: prints only; it does not terminate the process.
#[macro_export]
macro_rules! fatal {
    ($str:expr $(,$arg: expr)*) => {
        if *$crate::LEVEL > 0 {
            println!("{}",
                $crate::red(
                    format!(
                        "{} {}:{} - {}",
                        $crate::get_timestamp(),
                        $crate::extract_filename(std::file!()),
                        std::line!(),
                        format!($str $(,$arg)*)
                    )
                )
            );
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Smoke tests: each macro must expand, format its arguments and print
    // without panicking. Output is not captured or asserted.
    #[test]
    fn log_debug() {
        debug!("Hello {}", "debug")
    }
    #[test]
    fn log_info() {
        info!("Hello {}", "info")
    }
    #[test]
    fn log_warning() {
        warning!("Hello {}", "warning")
    }
    #[test]
    fn log_error() {
        error!("Hello {}", "error")
    }
    #[test]
    fn log_fatal() {
        fatal!("Hello {}", "fatal")
    }
}
|
// Demonstrates that a binding must be declared `mut` before it can be
// mutated or mutably borrowed.
fn main () {
    /*
    let v = vec! [1,2,3];
    v.push(4);
    //Error: Cannot borrow immutable local variable `v` as mutable.
    */
    // With `mut`, taking a mutable borrow compiles fine.
    let mut v = vec! [1,2,3];
    let v1 = &mut v;
}
|
use crate::error::Error;
use crate::error::Result;
use crate::service::CONTEXT;
use chrono::NaiveDateTime;
use rbatis::core::value::DateTimeNow;
use rbatis::crud::CRUD;
use rbatis::plugin::page::{Page, PageRequest};
use crate::domain::domain::{LoginCheck, SysRes, SysUser};
use crate::domain::dto::{IdDTO, SignInDTO, UserAddDTO, UserEditDTO, UserPageDTO, UserRoleAddDTO};
use crate::domain::vo::user::SysUserVO;
use crate::domain::vo::{JWTToken, SignInVO};
use crate::util::password_encoder::PasswordEncoder;
use rbatis::plugin::snowflake::new_snowflake_id;
use std::collections::BTreeMap;
use std::time::Duration;
use crate::service::cache_service::ICacheService;
// Redis key for login retry tracking — presumably used by the sign-in
// throttling logic; confirm against the cache-service callers.
const REDIS_KEY_RETRY: &'static str = "login:login_retry";
/// Back-office (admin) user service.
pub struct SysUserService {}
impl SysUserService {
/// 后台用户分页
pub async fn page(&self, arg: &UserPageDTO) -> Result<Page<SysUserVO>> {
let wrapper = CONTEXT
.rbatis
.new_wrapper()
.eq("del", 0)
.do_if(arg.name.is_some(), |w| w.like("name", &arg.name))
.do_if(arg.account.is_some(), |w| w.like("account", &arg.account));
let sys_user_page: Page<SysUser> = CONTEXT
.rbatis
.fetch_page_by_wrapper(
&wrapper,
&PageRequest::new(arg.page_no.unwrap_or(1), arg.page_size.unwrap_or(10)),
)
.await?;
let mut vos = vec![];
for x in sys_user_page.records {
vos.push(SysUserVO::from(x));
}
return Ok(Page::<SysUserVO> {
records: vos,
total: sys_user_page.total,
pages: sys_user_page.pages,
page_no: sys_user_page.page_no,
page_size: sys_user_page.page_size,
search_count: sys_user_page.search_count,
});
}
///用户详情
pub async fn detail(&self, arg: &IdDTO) -> Result<SysUserVO> {
let user_id = arg.id.clone().unwrap_or_default();
let user = self
.find(&user_id)
.await?
.ok_or_else(|| Error::from(format!("用户:{:?} 不存在!", user_id)))?;
let mut user_vo = SysUserVO::from(user);
let all_res = CONTEXT.sys_res_service.finds_all_map().await?;
let role = CONTEXT
.sys_user_role_service
.find_user_role(&user_id, &all_res)
.await?;
user_vo.role = role;
return Ok(user_vo);
}
///后台用户根据id查找
pub async fn find(&self, id: &str) -> Result<Option<SysUser>> {
let wrapper = CONTEXT.rbatis.new_wrapper().eq("id", id);
return Ok(CONTEXT.rbatis.fetch_by_wrapper( &wrapper).await?);
}
///根据账户名查找
pub async fn find_by_account(&self, account: &str) -> Result<Option<SysUser>> {
let wrapper = CONTEXT.rbatis.new_wrapper().eq("account", account);
return Ok(CONTEXT.rbatis.fetch_by_wrapper( &wrapper).await?);
}
///添加后台账号
pub async fn add(&self, arg: &UserAddDTO) -> Result<u64> {
if arg.account.is_none()
|| arg.account.as_ref().unwrap().is_empty()
|| arg.name.is_none()
|| arg.name.as_ref().unwrap().is_empty()
{
return Err(Error::from("用户名和姓名不能为空!"));
}
let old_user = self
.find_by_account(arg.account.as_ref().unwrap_or(&"".to_string()))
.await?;
if old_user.is_some() {
return Err(Error::from(format!(
"用户账户:{}已存在!",
arg.account.as_ref().unwrap()
)));
}
let mut password = arg.password.clone().unwrap_or_default();
if password.is_empty() {
//默认密码
password = "123456".to_string();
}
let id = new_snowflake_id().to_string();
let user = SysUser {
id: id.to_string().into(),
account: arg.account.clone(),
password: PasswordEncoder::encode(&password).into(),
name: arg.name.clone(),
login_check: arg.login_check.clone(),
state: 0.into(),
del: 0.into(),
create_date: NaiveDateTime::now().into(),
};
match &arg.role_id {
Some(role_id) => {
CONTEXT
.sys_user_role_service
.add(&UserRoleAddDTO {
id: None,
user_id: user.id.clone(),
role_id: arg.role_id.clone(),
})
.await?;
}
_ => {}
}
return Ok(CONTEXT.rbatis.save( &user,&[]).await?.rows_affected);
}
/// Sign in to the admin console.
///
/// Applies the login-retry throttle, loads the account, and verifies the
/// credential according to the user's configured `LoginCheck` mode. Any
/// failed verification is counted towards the throttle before returning.
pub async fn sign_in(&self, arg: &SignInDTO) -> Result<SignInVO> {
    self.is_need_wait_login_ex().await?;
    let user: Option<SysUser> = CONTEXT
        .rbatis
        .fetch_by_wrapper(&CONTEXT.rbatis.new_wrapper().eq("account", &arg.account))
        .await?;
    let user = user.ok_or_else(|| Error::from(format!("账号:{} 不存在!", arg.account)))?;
    if user.state.eq(&Some(0)) {
        return Err(Error::from("账户被禁用!"));
    }
    let mut error = None;
    match user
        .login_check
        .as_ref()
        .unwrap_or(&LoginCheck::PasswordCheck)
    {
        LoginCheck::NoCheck => {
            // No verification — debug/development convenience only.
        }
        LoginCheck::PasswordCheck => {
            if !PasswordEncoder::verify(
                user.password
                    .as_ref()
                    .ok_or_else(|| Error::from("错误的用户数据,密码为空!"))?,
                &arg.password,
            ) {
                error = Some(Error::from("密码不正确!"));
            }
        }
        LoginCheck::PasswordImgCodeCheck => {
            // Check the image captcha first, then the password.
            let cache_code = CONTEXT
                .cache_service
                .get_string(&format!("captch:account_{}", &arg.account))
                .await?;
            // BUGFIX: the comparison was inverted — a *matching* code was
            // rejected. The error must fire when the cached code does NOT
            // equal the submitted one.
            if !cache_code.eq(&arg.vcode) {
                error = Some(Error::from("验证码不正确!"))
            }
            if !PasswordEncoder::verify(
                user.password
                    .as_ref()
                    .ok_or_else(|| Error::from("错误的用户数据,密码为空!"))?,
                &arg.password,
            ) {
                error = Some(Error::from("密码不正确!"));
            }
        }
        LoginCheck::PhoneCodeCheck => {
            // SMS one-time-code login.
            let sms_code = CONTEXT
                .cache_service
                .get_string(&format!(
                    "{}{}",
                    CONTEXT.config.sms_redis_send_key_prefix, &arg.account
                ))
                .await?;
            // BUGFIX: same inverted comparison as the captcha path above.
            if !sms_code.eq(&arg.vcode) {
                error = Some(Error::from("验证码不正确!"));
            }
        }
    }
    if let Some(e) = error {
        // Count the failed attempt towards the throttle window.
        self.add_retry_login_limit_num().await?;
        return Err(e);
    }
    self.get_user_info(&user).await
}
///是否需要等待
pub async fn is_need_wait_login_ex(&self) -> Result<()> {
if CONTEXT.config.login_fail_retry > 0 {
let num: Option<i64> = CONTEXT.cache_service.get_json(REDIS_KEY_RETRY).await?;
if num.unwrap_or(0) >= CONTEXT.config.login_fail_retry {
let wait_sec: i64 = CONTEXT.cache_service.ttl(REDIS_KEY_RETRY).await?;
if wait_sec > 0 {
return Err(Error::from(format!("操作过于频繁,请等待{}秒后重试!", wait_sec)));
}
}
}
return Ok(());
}
/// Record one failed login attempt in redis.
///
/// The counter is clamped at the configured maximum before incrementing,
/// and the throttle-window TTL is refreshed on every write.
pub async fn add_retry_login_limit_num(&self) -> Result<()> {
    if CONTEXT.config.login_fail_retry <= 0 {
        return Ok(());
    }
    let current: Option<i64> = CONTEXT.cache_service.get_json(REDIS_KEY_RETRY).await?;
    // Clamp before incrementing so the stored value never drifts far past
    // the configured cap.
    let next = current.unwrap_or(0).min(CONTEXT.config.login_fail_retry) + 1;
    CONTEXT
        .cache_service
        .set_string_ex(
            REDIS_KEY_RETRY,
            &next.to_string(),
            Some(Duration::from_secs(
                CONTEXT.config.login_fail_retry_wait_sec as u64,
            )),
        )
        .await?;
    Ok(())
}
pub async fn get_user_info_by_token(&self, token: &JWTToken) -> Result<SignInVO> {
let user: Option<SysUser> = CONTEXT
.rbatis
.fetch_by_wrapper( &CONTEXT.rbatis.new_wrapper().eq("id", &token.id))
.await?;
let user = user.ok_or_else(|| Error::from(format!("账号:{} 不存在!", token.account)))?;
return self.get_user_info(&user).await;
}
/// Build the sign-in view (permissions, JWT, role) for an authenticated user.
///
/// The password is stripped from the copied entity before it is embedded in
/// the response, so it never leaves the server.
pub async fn get_user_info(&self, user: &SysUser) -> Result<SignInVO> {
    // Work on a copy with the password removed.
    let mut user = user.clone();
    user.password = None;
    let user_id = user
        .id
        .clone()
        .ok_or_else(|| Error::from("错误的用户数据,id为空!"))?;
    let mut sign_vo = SignInVO {
        user: user.clone().into(),
        permissions: vec![],
        access_token: String::new(),
        role: None,
    };
    // Load the full permission map once and reuse it for both lookups below.
    let all_res = CONTEXT.sys_res_service.finds_all_map().await?;
    sign_vo.permissions = self.loop_load_level_permission(&user_id, &all_res).await?;
    let jwt_token = JWTToken {
        // `user_id` was already validated above — no need to re-unwrap
        // `user.id` (the old code unwrapped it twice with ad-hoc defaults).
        id: user_id.clone(),
        account: user.account.clone().unwrap_or_default(),
        permissions: sign_vo.permissions.clone(),
        role_ids: vec![],
        exp: chrono::NaiveDateTime::now().timestamp() as usize,
    };
    sign_vo.access_token = jwt_token.create_token(&CONTEXT.config.jwt_secret)?;
    sign_vo.role = CONTEXT
        .sys_user_role_service
        .find_user_role(&user_id, &all_res)
        .await?;
    Ok(sign_vo)
}
/// Sign out of the admin console. Currently a no-op — presumably the client
/// simply discards its JWT; confirm whether server-side token invalidation
/// is required before relying on this.
pub async fn sign_out(&self) {}
/// Update an existing back-office account; only the supplied fields change
/// (fields left as `None` are not written by `update_by_column`'s semantics
/// for this entity — TODO confirm against the rbatis update strategy).
pub async fn edit(&self, arg: &UserEditDTO) -> Result<u64> {
    // Encode the plaintext password before it is ever persisted.
    let pwd = arg.password.as_ref().map(|p| PasswordEncoder::encode(p));
    let mut user = SysUser {
        id: arg.id.clone(),
        account: None,
        password: pwd,
        name: arg.name.clone(),
        login_check: arg.login_check.clone(),
        state: arg.state.clone(),
        del: None,
        create_date: None,
    };
    // Re-bind the role when one is supplied.
    if arg.role_id.is_some() {
        CONTEXT
            .sys_user_role_service
            .add(&UserRoleAddDTO {
                id: None,
                user_id: user.id.clone(),
                role_id: arg.role_id.clone(),
            })
            .await?;
    }
    Ok(CONTEXT.rbatis.update_by_column("id", &mut user).await?)
}
/// Delete a user and its role bindings by id.
pub async fn remove(&self, id: &str) -> Result<u64> {
    if id.is_empty() {
        return Err(Error::from("id 不能为空!"));
    }
    // Delete the user row first; the role links are cleaned up regardless
    // of whether that delete succeeded, matching the original ordering.
    let removed = CONTEXT
        .rbatis
        .remove_by_column::<SysUser, _>("id", &id)
        .await;
    CONTEXT.sys_user_role_service.remove_by_user_id(id).await?;
    Ok(removed?)
}
/// Recursively collect the hierarchical permission codes for a user,
/// delegating to the role service with the pre-fetched resource map.
pub async fn loop_load_level_permission(
    &self,
    user_id: &str,
    all_res: &BTreeMap<String, SysRes>,
) -> Result<Vec<String>> {
    CONTEXT
        .sys_role_service
        .find_user_permission(user_id, all_res)
        .await
}
}
|
pub mod check;
pub mod rt;
pub mod syntax;
mod error;
#[cfg(test)]
mod tests;
#[cfg(test)]
mod ui_tests;
pub use crate::error::Error;
pub(crate) use crate::error::ErrorData;
use std::path::Path;
use crate::check::check;
use crate::syntax::{SourceCacheResult, SourceMap};
/// Parse `input` as an in-memory source named "root" and evaluate it.
pub fn run_input(input: String, opts: rt::Opts) -> Result<rt::Output, Error> {
    run(opts, move |source| source.parse_input("root", input))
}
/// Parse the file at `path` and evaluate it.
pub fn run_file(path: &Path, opts: rt::Opts) -> Result<rt::Output, Error> {
    run(opts, move |source| source.parse_file(path))
}
fn run<F>(opts: rt::Opts, parse: F) -> Result<rt::Output, Error>
where
F: FnOnce(&mut SourceMap) -> Result<SourceCacheResult, ErrorData>,
{
let mut source = SourceMap::new();
let (file, expr) = match parse(&mut source) {
Ok(result) => result.unwrap_miss(),
Err(err) => return Err(Error::new(source, err)),
};
let func = match check(&mut source, file, &expr) {
Ok(func) => func,
Err(err) => return Err(Error::diagnostics(source, err)),
};
match rt::run(func, opts) {
Ok(result) => Ok(result),
Err(err) => Err(Error::basic(Box::new(err))),
}
}
|
extern crate clap;
extern crate reproto;
#[macro_use]
extern crate log;
use reproto::backend::models as m;
use reproto::backend;
use reproto::commands;
use reproto::errors::*;
use reproto::logger;
use reproto::parser;
/// Crate version string, injected by Cargo at compile time.
const VERSION: &'static str = env!("CARGO_PKG_VERSION");
/// Build the top-level clap application (subcommands are attached by the
/// caller in `entry`).
fn setup_opts<'a, 'b>() -> clap::App<'a, 'b> {
    let debug_flag = clap::Arg::with_name("debug")
        .long("debug")
        .short("D")
        .help("Enable debug output");
    clap::App::new("reproto").version(VERSION).arg(debug_flag)
}
/// Configure logging.
///
/// If `--debug` is present, logging is configured with
/// `LogLevelFilter::Debug`; otherwise `Info`.
fn setup_logger(matches: &clap::ArgMatches) -> Result<()> {
    // `if`/`else` instead of the original `match` on a bool (anti-pattern).
    let level = if matches.is_present("debug") {
        log::LogLevelFilter::Debug
    } else {
        log::LogLevelFilter::Info
    };
    logger::init(level)?;
    Ok(())
}
/// Print a source-position-annotated error: the offending line, a caret
/// underline spanning the reported range, and the message.
fn print_error(m: &str, p: &m::Pos) -> Result<()> {
    let (line, lines, range) = parser::find_line(&p.0, (p.1, p.2))?;
    println!("{}:{}:{}-{}:", p.0.display(), lines + 1, range.0, range.1);
    let line_no = format!("{:>3}", lines + 1);
    // Underline width: always at least one caret, even for empty ranges.
    let diff = (range.1 - range.0).max(1);
    // `str::repeat` replaces the verbose iter::repeat().take().collect()
    // chains of the original.
    let mut line_indicator = String::new();
    line_indicator.push_str(&" ".repeat(line_no.len() + range.0 + 1));
    line_indicator.push_str(&"^".repeat(diff));
    println!("{}: {}", line_no, line);
    println!("{} - {}", line_indicator, m);
    Ok(())
}
/// Pretty-print backend errors that carry source positions.
///
/// Each recognized kind is rendered through `print_error`; two-position
/// kinds print the conflicting location followed by the original one.
/// Unrecognized kinds fall through silently — `main` still logs the error.
fn handle_backend_error(e: &backend::errors::ErrorKind) -> Result<()> {
    match *e {
        backend::errors::ErrorKind::Pos(ref m, ref p) => {
            print_error(m, p)?;
        }
        backend::errors::ErrorKind::DeclMerge(ref m, ref source, ref target) => {
            print_error(m, source)?;
            print_error("previous declaration here", target)?;
        }
        backend::errors::ErrorKind::FieldConflict(ref name, ref source, ref target) => {
            print_error(&format!("conflict in field `{}`", name), source)?;
            print_error("previous declaration here", target)?;
        }
        backend::errors::ErrorKind::ExtendEnum(ref m, ref source, ref enum_target) => {
            print_error(m, source)?;
            print_error("previous declaration here", enum_target)?;
        }
        backend::errors::ErrorKind::ReservedField(ref field_pos, ref reserved_pos) => {
            print_error("field reserved", field_pos)?;
            print_error("field reserved here", reserved_pos)?;
        }
        backend::errors::ErrorKind::MatchConflict(ref source, ref target) => {
            print_error("conflicts with existing clause", source)?;
            print_error("existing clause here", target)?;
        }
        // Parser errors get their dedicated, more detailed handler.
        backend::errors::ErrorKind::Parser(ref e) => {
            handle_parser_error(e)?;
        }
        _ => {}
    }
    Ok(())
}
/// Pretty-print a parser error.
///
/// For syntax errors, prints the position plus a human-readable list of the
/// token kinds the parser would have accepted; common kinds get an example,
/// everything else is listed by its raw rule name.
fn handle_parser_error(e: &parser::errors::ErrorKind) -> Result<()> {
    match *e {
        parser::errors::ErrorKind::Syntax(ref p, ref expected) => {
            print_error("syntax error", p)?;
            println!("Expected one of:");
            // Rules without a friendly description are collected and shown
            // in a single trailing line.
            let mut expected_list = Vec::new();
            for e in expected {
                match *e {
                    parser::parser::Rule::type_identifier => {
                        println!(" A type identifier, like: `DateRange`");
                    }
                    parser::parser::Rule::string => {
                        println!(" A string, like: `\"foo bar\"`");
                    }
                    parser::parser::Rule::number => {
                        println!(" A number number, like: `3.14`");
                    }
                    parser::parser::Rule::boolean => {
                        println!(" A boolean: `true` or `false`");
                    }
                    token => {
                        expected_list.push(format!("{:?}", token));
                    }
                }
            }
            if !expected_list.is_empty() {
                println!(" A token: {}", expected_list.join(", "));
            }
        }
        _ => {}
    }
    Ok(())
}
/// Parse CLI arguments, configure logging, and dispatch to the chosen
/// subcommand.
fn entry() -> Result<()> {
    let mut opts = setup_opts();
    // Register every available subcommand on the base app.
    for subcommand in commands::commands() {
        opts = opts.subcommand(subcommand);
    }
    let matches = opts.get_matches();
    setup_logger(&matches)?;
    match matches.subcommand() {
        (name, Some(matches)) => match name {
            "compile" => commands::compile(matches),
            "verify" => commands::verify(matches),
            _ => Err(format!("No such command: {}", name).into()),
        },
        _ => Err("No matching subcommand".into()),
    }
}
fn compiler_entry() -> Result<()> {
match entry() {
Err(e) => {
match *e.kind() {
ErrorKind::BackendErrors(ref errors) => {
for e in errors {
handle_backend_error(e)?;
}
}
ErrorKind::BackendError(ref e) => {
handle_backend_error(e)?;
}
ErrorKind::Parser(ref e) => {
handle_parser_error(e)?;
}
_ => {}
}
Err(e)
}
ok => ok,
}
}
fn main() {
match compiler_entry() {
Err(e) => {
error!("{}", e);
for e in e.iter().skip(1) {
error!(" caused by: {}", e);
}
if let Some(backtrace) = e.backtrace() {
error!(" backtrace: {:?}", backtrace);
}
::std::process::exit(1);
}
_ => {}
};
::std::process::exit(0);
}
|
#[macro_use] extern crate lazy_static;
extern crate regex;
use std::fs::File;
use std::io::prelude::*;
use std::collections::{HashMap, HashSet};
use regex::Regex;
lazy_static! {
    // Captures "<count> <colour> bag" fragments, e.g. "3 shiny gold bags"
    // → group 1 = count, group 2 = colour. "no other bags" does not match.
    static ref EXTRACT_BAG: Regex = Regex::new(r"(\d+) (.*) bag").unwrap();
}
/// Read the whole file at `filepath` and return its contents with leading
/// and trailing whitespace trimmed.
fn read_data(filepath: &str) -> std::io::Result<String> {
    // `fs::read_to_string` replaces the manual File::open + read_to_string
    // dance and sizes the buffer from the file metadata.
    let contents = std::fs::read_to_string(filepath)?;
    Ok(contents.trim().to_string())
}
/// Part 1: count how many bag colours can (transitively) contain "shiny gold".
fn sol1(data: &str) {
    // Reversed containment graph: colour -> set of colours that can
    // directly contain it.
    let mut possibilities: HashMap<String, HashSet<&str>> = HashMap::new();
    for line in data.split('\n') {
        let linesplit = line.split(" bags contain ").collect::<Vec<&str>>();
        let container_colour = linesplit[0];
        let contained = linesplit[1];
        for one_contained in contained.split(", ") {
            // "no other bags" fragments produce no capture and are skipped.
            let captures = match EXTRACT_BAG.captures(one_contained) {
                Some(c) => c,
                None => continue,
            };
            possibilities
                .entry(captures[2].to_string())
                .or_insert_with(HashSet::new)
                .insert(container_colour);
        }
    }
    // Walk outward from "shiny gold", visiting each colour at most once.
    // Fixes in this traversal vs. the original:
    //  - already-counted colours are no longer re-enqueued (the original
    //    re-pushed them, which is exponential on dense graphs);
    //  - `pop()` (O(1)) replaces `remove(0)` (O(n)) — order is irrelevant
    //    for computing the reachable set;
    //  - no throwaway `HashSet::new()` allocated per iteration.
    let mut poss_stack: Vec<&str> = vec!["shiny gold"];
    let mut countedbags: HashSet<&str> = HashSet::new();
    while let Some(colour) = poss_stack.pop() {
        if let Some(containers) = possibilities.get(colour) {
            for &bag in containers {
                if countedbags.insert(bag) {
                    poss_stack.push(bag);
                }
            }
        }
    }
    println!("{:?}", countedbags.len());
}
/// Part 2: count the total number of bags required inside one "shiny gold"
/// bag (the outer bag itself is excluded from the printed total).
fn sol2(data: &str) {
    // Forward containment graph: colour -> set of (count, inner colour).
    let mut possibilities: HashMap<&str, HashSet<(usize, &str)>> = HashMap::new();
    for line in data.split('\n') {
        let linesplit = line.split(" bags contain ").collect::<Vec<&str>>();
        let container = linesplit[0];
        let contents = linesplit[1]
            .split(", ")
            // `filter_map` folds the original captures/is_some/unwrap trio.
            .filter_map(|x| EXTRACT_BAG.captures(x))
            .map(|caps| {
                let count = caps[1].parse::<usize>().unwrap();
                (count, caps.get(2).unwrap().as_str())
            })
            .collect::<HashSet<(usize, &str)>>();
        possibilities.insert(container, contents);
    }
    // Multiply counts down every containment path. `pop()` is O(1); the
    // original `remove(0)` shifted the whole vector on each step, and the
    // visit order does not affect the accumulated total.
    let mut poss_stack: Vec<(usize, &str)> = vec![(1, "shiny gold")];
    let mut totalbags: usize = 0;
    while let Some((count_so_far, colour)) = poss_stack.pop() {
        totalbags += count_so_far;
        // Every colour appears as a key (even "no other bags" lines insert
        // an empty set), so this lookup cannot miss on well-formed input.
        for &(count, bag) in possibilities.get(colour).unwrap() {
            poss_stack.push((count_so_far * count, bag));
        }
    }
    // Subtract the outer shiny gold bag itself.
    println!("{:?}", totalbags - 1);
}
/// Load the puzzle input, strip trailing periods, and run both parts.
fn main() {
    let data = match read_data("input") {
        Ok(raw) => raw.replace(".", ""),
        Err(_) => panic!("*sigh*"),
    };
    sol1(&data);
    sol2(&data);
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
// Aggregated error type for this client: one transparent variant per
// confidential-ledger operation. Generated by AutoRust — do not edit by hand.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    ConfidentialLedger_GetConstitution(#[from] confidential_ledger::get_constitution::Error),
    #[error(transparent)]
    ConfidentialLedger_GetConsortiumMembers(#[from] confidential_ledger::get_consortium_members::Error),
    #[error(transparent)]
    ConfidentialLedger_GetEnclaveQuotes(#[from] confidential_ledger::get_enclave_quotes::Error),
    #[error(transparent)]
    ConfidentialLedger_GetLedgerEntries(#[from] confidential_ledger::get_ledger_entries::Error),
    #[error(transparent)]
    ConfidentialLedger_PostLedgerEntry(#[from] confidential_ledger::post_ledger_entry::Error),
    #[error(transparent)]
    ConfidentialLedger_GetLedgerEntry(#[from] confidential_ledger::get_ledger_entry::Error),
    #[error(transparent)]
    ConfidentialLedger_GetReceipt(#[from] confidential_ledger::get_receipt::Error),
    #[error(transparent)]
    ConfidentialLedger_GetTransactionStatus(#[from] confidential_ledger::get_transaction_status::Error),
    #[error(transparent)]
    ConfidentialLedger_GetCurrentLedgerEntry(#[from] confidential_ledger::get_current_ledger_entry::Error),
    #[error(transparent)]
    ConfidentialLedger_GetUser(#[from] confidential_ledger::get_user::Error),
    #[error(transparent)]
    ConfidentialLedger_CreateOrUpdateUser(#[from] confidential_ledger::create_or_update_user::Error),
    #[error(transparent)]
    ConfidentialLedger_DeleteUser(#[from] confidential_ledger::delete_user::Error),
}
pub mod confidential_ledger {
use super::{models, API_VERSION};
/// GET `/app/governance/constitution` — fetch the ledger's constitution.
///
/// Returns the deserialized [`models::Constitution`] on HTTP 200; any other
/// status is returned as `DefaultResponse` carrying the parsed service error.
/// Generated by AutoRust — do not edit by hand.
pub async fn get_constitution(
    operation_config: &crate::OperationConfig,
) -> std::result::Result<models::Constitution, get_constitution::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/app/governance/constitution", operation_config.base_path(),);
    let mut url = url::Url::parse(url_str).map_err(get_constitution::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_constitution::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get_constitution::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_constitution::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::Constitution = serde_json::from_slice(rsp_body)
                .map_err(|source| get_constitution::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Non-200: the body is expected to be a service-defined error object.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ConfidentialLedgerError = serde_json::from_slice(rsp_body)
                .map_err(|source| get_constitution::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_constitution::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod get_constitution {
    use super::{models, API_VERSION};
    /// Errors produced by [`super::get_constitution`]. Generated by AutoRust.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service returned a non-200 status with a parseable error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ConfidentialLedgerError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// GET `/app/governance/members` — list the consortium members.
///
/// Returns [`models::Consortium`] on HTTP 200; any other status is returned
/// as `DefaultResponse` with the parsed service error body.
/// Generated by AutoRust — do not edit by hand.
pub async fn get_consortium_members(
    operation_config: &crate::OperationConfig,
) -> std::result::Result<models::Consortium, get_consortium_members::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/app/governance/members", operation_config.base_path(),);
    let mut url = url::Url::parse(url_str).map_err(get_consortium_members::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_consortium_members::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(get_consortium_members::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_consortium_members::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::Consortium = serde_json::from_slice(rsp_body)
                .map_err(|source| get_consortium_members::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ConfidentialLedgerError = serde_json::from_slice(rsp_body)
                .map_err(|source| get_consortium_members::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_consortium_members::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod get_consortium_members {
    use super::{models, API_VERSION};
    /// Errors produced by [`super::get_consortium_members`]. Generated by AutoRust.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service returned a non-200 status with a parseable error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ConfidentialLedgerError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// GET `/app/enclaveQuotes` — fetch attestation quotes for the ledger's enclaves.
///
/// Returns [`models::ConfidentialLedgerEnclaves`] on HTTP 200; any other
/// status is returned as `DefaultResponse` with the parsed service error body.
/// Generated by AutoRust — do not edit by hand.
pub async fn get_enclave_quotes(
    operation_config: &crate::OperationConfig,
) -> std::result::Result<models::ConfidentialLedgerEnclaves, get_enclave_quotes::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/app/enclaveQuotes", operation_config.base_path(),);
    let mut url = url::Url::parse(url_str).map_err(get_enclave_quotes::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_enclave_quotes::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get_enclave_quotes::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_enclave_quotes::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::ConfidentialLedgerEnclaves = serde_json::from_slice(rsp_body)
                .map_err(|source| get_enclave_quotes::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ConfidentialLedgerError = serde_json::from_slice(rsp_body)
                .map_err(|source| get_enclave_quotes::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_enclave_quotes::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod get_enclave_quotes {
    use super::{models, API_VERSION};
    /// Errors produced by [`super::get_enclave_quotes`]. Generated by AutoRust.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service returned a non-200 status with a parseable error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ConfidentialLedgerError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// GET `/app/transactions` — page through ledger entries.
///
/// Optional filters: `sub_ledger_id` restricts to one sub-ledger;
/// `from_transaction_id` / `to_transaction_id` bound the transaction range.
/// Returns [`models::PagedLedgerEntries`] on HTTP 200; any other status is
/// returned as `DefaultResponse`. Generated by AutoRust — do not edit by hand.
pub async fn get_ledger_entries(
    operation_config: &crate::OperationConfig,
    sub_ledger_id: Option<&str>,
    from_transaction_id: Option<&str>,
    to_transaction_id: Option<&str>,
) -> std::result::Result<models::PagedLedgerEntries, get_ledger_entries::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/app/transactions", operation_config.base_path(),);
    let mut url = url::Url::parse(url_str).map_err(get_ledger_entries::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_ledger_entries::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // Each optional filter becomes a query parameter only when supplied.
    if let Some(sub_ledger_id) = sub_ledger_id {
        url.query_pairs_mut().append_pair("subLedgerId", sub_ledger_id);
    }
    if let Some(from_transaction_id) = from_transaction_id {
        url.query_pairs_mut().append_pair("fromTransactionId", from_transaction_id);
    }
    if let Some(to_transaction_id) = to_transaction_id {
        url.query_pairs_mut().append_pair("toTransactionId", to_transaction_id);
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get_ledger_entries::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_ledger_entries::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::PagedLedgerEntries = serde_json::from_slice(rsp_body)
                .map_err(|source| get_ledger_entries::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ConfidentialLedgerError = serde_json::from_slice(rsp_body)
                .map_err(|source| get_ledger_entries::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_ledger_entries::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod get_ledger_entries {
    use super::{models, API_VERSION};
    /// Errors produced by [`super::get_ledger_entries`]. Generated by AutoRust.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service returned a non-200 status with a parseable error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ConfidentialLedgerError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// POST `/app/transactions` — append an entry to the ledger.
///
/// `entry` is serialized as JSON when provided; otherwise an empty body is
/// sent. `sub_ledger_id` optionally targets a specific sub-ledger. Returns
/// [`models::LedgerWriteResult`] on HTTP 200; any other status is returned
/// as `DefaultResponse`. Generated by AutoRust — do not edit by hand.
pub async fn post_ledger_entry(
    operation_config: &crate::OperationConfig,
    sub_ledger_id: Option<&str>,
    entry: Option<&models::LedgerEntry>,
) -> std::result::Result<models::LedgerWriteResult, post_ledger_entry::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/app/transactions", operation_config.base_path(),);
    let mut url = url::Url::parse(url_str).map_err(post_ledger_entry::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::POST);
    // Attach a bearer token only when the config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(post_ledger_entry::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    if let Some(sub_ledger_id) = sub_ledger_id {
        url.query_pairs_mut().append_pair("subLedgerId", sub_ledger_id);
    }
    // JSON body (with content-type) when an entry is given; empty otherwise.
    let req_body = if let Some(entry) = entry {
        req_builder = req_builder.header("content-type", "application/json");
        azure_core::to_json(entry).map_err(post_ledger_entry::Error::SerializeError)?
    } else {
        bytes::Bytes::from_static(azure_core::EMPTY_BODY)
    };
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(post_ledger_entry::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(post_ledger_entry::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::LedgerWriteResult = serde_json::from_slice(rsp_body)
                .map_err(|source| post_ledger_entry::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ConfidentialLedgerError = serde_json::from_slice(rsp_body)
                .map_err(|source| post_ledger_entry::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(post_ledger_entry::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod post_ledger_entry {
    use super::{models, API_VERSION};
    /// Errors produced by [`super::post_ledger_entry`]. Generated by AutoRust.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service returned a non-200 status with a parseable error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ConfidentialLedgerError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// GET `/app/transactions/{transaction_id}` — fetch one ledger entry.
///
/// `sub_ledger_id` optionally scopes the lookup to a sub-ledger. Returns
/// [`models::LedgerQueryResult`] on HTTP 200; any other status is returned
/// as `DefaultResponse`. Generated by AutoRust — do not edit by hand.
pub async fn get_ledger_entry(
    operation_config: &crate::OperationConfig,
    sub_ledger_id: Option<&str>,
    transaction_id: &str,
) -> std::result::Result<models::LedgerQueryResult, get_ledger_entry::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/app/transactions/{}", operation_config.base_path(), transaction_id);
    let mut url = url::Url::parse(url_str).map_err(get_ledger_entry::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_ledger_entry::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    if let Some(sub_ledger_id) = sub_ledger_id {
        url.query_pairs_mut().append_pair("subLedgerId", sub_ledger_id);
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get_ledger_entry::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_ledger_entry::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::LedgerQueryResult = serde_json::from_slice(rsp_body)
                .map_err(|source| get_ledger_entry::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ConfidentialLedgerError = serde_json::from_slice(rsp_body)
                .map_err(|source| get_ledger_entry::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_ledger_entry::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod get_ledger_entry {
    use super::{models, API_VERSION};
    /// Errors produced by [`super::get_ledger_entry`]. Generated by AutoRust.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service returned a non-200 status with a parseable error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ConfidentialLedgerError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// GET `/app/transactions/{transaction_id}/receipt` — fetch the write receipt
/// for a transaction.
///
/// Returns [`models::TransactionReceipt`] on HTTP 200; any other status is
/// returned as `DefaultResponse`. Generated by AutoRust — do not edit by hand.
pub async fn get_receipt(
    operation_config: &crate::OperationConfig,
    transaction_id: &str,
) -> std::result::Result<models::TransactionReceipt, get_receipt::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/app/transactions/{}/receipt", operation_config.base_path(), transaction_id);
    let mut url = url::Url::parse(url_str).map_err(get_receipt::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_receipt::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get_receipt::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_receipt::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::TransactionReceipt =
                serde_json::from_slice(rsp_body).map_err(|source| get_receipt::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ConfidentialLedgerError =
                serde_json::from_slice(rsp_body).map_err(|source| get_receipt::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_receipt::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod get_receipt {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ConfidentialLedgerError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET `/app/transactions/{transaction_id}/status`: queries the commit
/// status of a transaction.
///
/// Any status other than 200 is decoded as `ConfidentialLedgerError`
/// and returned through `Error::DefaultResponse`.
pub async fn get_transaction_status(
operation_config: &crate::OperationConfig,
transaction_id: &str,
) -> std::result::Result<models::TransactionStatus, get_transaction_status::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/app/transactions/{}/status", operation_config.base_path(), transaction_id);
let mut url = url::Url::parse(url_str).map_err(get_transaction_status::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is only attached when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_transaction_status::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_transaction_status::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_transaction_status::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TransactionStatus = serde_json::from_slice(rsp_body)
.map_err(|source| get_transaction_status::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 reply carries a typed service error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ConfidentialLedgerError = serde_json::from_slice(rsp_body)
.map_err(|source| get_transaction_status::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_transaction_status::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`get_transaction_status`].
pub mod get_transaction_status {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ConfidentialLedgerError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET `/app/transactions/current`: fetches the most recent ledger
/// entry, optionally scoped by `subLedgerId`.
///
/// Any status other than 200 is decoded as `ConfidentialLedgerError`
/// and returned through `Error::DefaultResponse`.
pub async fn get_current_ledger_entry(
operation_config: &crate::OperationConfig,
sub_ledger_id: Option<&str>,
) -> std::result::Result<models::LedgerEntry, get_current_ledger_entry::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/app/transactions/current", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(get_current_ledger_entry::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is only attached when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_current_ledger_entry::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// Optional query parameter: omitted entirely when the caller passes None.
if let Some(sub_ledger_id) = sub_ledger_id {
url.query_pairs_mut().append_pair("subLedgerId", sub_ledger_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_current_ledger_entry::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_current_ledger_entry::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::LedgerEntry = serde_json::from_slice(rsp_body)
.map_err(|source| get_current_ledger_entry::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 reply carries a typed service error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ConfidentialLedgerError = serde_json::from_slice(rsp_body)
.map_err(|source| get_current_ledger_entry::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_current_ledger_entry::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`get_current_ledger_entry`].
pub mod get_current_ledger_entry {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ConfidentialLedgerError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET `/app/users/{user_id}`: fetches a ledger user.
///
/// Any status other than 200 is decoded as `ConfidentialLedgerError`
/// and returned through `Error::DefaultResponse`.
pub async fn get_user(
operation_config: &crate::OperationConfig,
user_id: &str,
) -> std::result::Result<models::LedgerUser, get_user::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/app/users/{}", operation_config.base_path(), user_id);
let mut url = url::Url::parse(url_str).map_err(get_user::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is only attached when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_user::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_user::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_user::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::LedgerUser =
serde_json::from_slice(rsp_body).map_err(|source| get_user::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 reply carries a typed service error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ConfidentialLedgerError =
serde_json::from_slice(rsp_body).map_err(|source| get_user::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_user::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`get_user`].
pub mod get_user {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ConfidentialLedgerError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// PATCH `/app/users/{user_id}`: creates or updates a ledger user with
/// the JSON-serialized `user_details` body.
///
/// Any status other than 200 is decoded as `ConfidentialLedgerError`
/// and returned through `Error::DefaultResponse`.
pub async fn create_or_update_user(
operation_config: &crate::OperationConfig,
user_id: &str,
user_details: &models::LedgerUser,
) -> std::result::Result<models::LedgerUser, create_or_update_user::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/app/users/{}", operation_config.base_path(), user_id);
let mut url = url::Url::parse(url_str).map_err(create_or_update_user::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
// Bearer auth is only attached when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update_user::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// Unlike the GET operations, this request carries a JSON payload.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(user_details).map_err(create_or_update_user::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(create_or_update_user::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update_user::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::LedgerUser = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update_user::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 reply carries a typed service error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ConfidentialLedgerError = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update_user::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update_user::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`create_or_update_user`].
pub mod create_or_update_user {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ConfidentialLedgerError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// DELETE `/app/users/{user_id}`: removes a ledger user.
///
/// Success is 204 No Content (the service returns no body); any other
/// status is decoded as `ConfidentialLedgerError`.
pub async fn delete_user(operation_config: &crate::OperationConfig, user_id: &str) -> std::result::Result<(), delete_user::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/app/users/{}", operation_config.base_path(), user_id);
let mut url = url::Url::parse(url_str).map_err(delete_user::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
// Bearer auth is only attached when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete_user::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete_user::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(delete_user::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
// Any non-204 reply carries a typed service error body.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ConfidentialLedgerError =
serde_json::from_slice(rsp_body).map_err(|source| delete_user::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete_user::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`delete_user`].
pub mod delete_user {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ConfidentialLedgerError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
|
use avocado::prelude::*;
use config::{ConfigError, Config, Environment};
use failure::Error;
use failure::ResultExt;
/// MongoDB connection settings, deserialized from the environment.
#[derive(Clone, Debug, Deserialize)]
pub struct Mongodb {
/// Connection URI passed to `Client::with_uri` in `create_db`.
pub uri: String,
/// Name of the database opened on that client.
pub db: String,
}
/// Google integration settings.
///
/// NOTE(review): the field names suggest an OAuth client (id, secret,
/// redirect callback URL) — confirm against the code that consumes
/// `Settings.google`; the consumer is not visible in this file.
#[derive(Clone, Debug, Deserialize)]
pub struct Google {
pub id: String,
pub secret: String,
pub callback: String,
}
/// Top-level application configuration, built by `create_settings`
/// from `APP_`-prefixed environment variables.
#[derive(Clone, Debug, Deserialize)]
pub struct Settings {
pub google: Google,
pub mongodb: Mongodb,
/// Optional debug flag; `None` when the variable is not set.
pub debug: Option<bool>,
}
/// Shared application state: the loaded configuration plus an open
/// database handle.
#[derive(Clone, Debug)]
pub struct Context {
pub settings: Settings,
pub db: Database,
}
impl Context {
    /// Builds the application context: loads `Settings` from the
    /// environment, then opens the MongoDB database they describe.
    ///
    /// # Errors
    /// Fails if configuration is missing/invalid or the database
    /// connection cannot be established; each step is annotated with
    /// a short context string for diagnosis.
    pub fn new() -> Result<Self, Error> {
        let settings = create_settings().context("Create settings")?;
        // Bind as `db` so field-init shorthand applies (the original
        // `settings: settings` / `db: database` was redundant).
        let db = create_db(&settings).context("Create db")?;
        Ok(Context { settings, db })
    }
}
/// Loads `Settings` from environment variables prefixed with `APP`
/// and separated by `_` (e.g. `APP_MONGODB_URI`).
fn create_settings() -> Result<Settings, ConfigError> {
let mut settings = Config::default();
let env = Environment::with_prefix("app").separator("_");
settings.merge(env)?;
// Deserialize the merged configuration into the typed struct.
settings.try_into()
}
/// Connects a MongoDB client to `settings.mongodb.uri` and returns a
/// handle to the configured database.
fn create_db(settings: &Settings) -> Result<Database, Error> {
let client = Client::with_uri(&settings.mongodb.uri).context("Connect to Mongo")?;
// TODO: setup indexes
Ok(client.db(&settings.mongodb.db))
}
|
use core::position::{HasSize, HasPosition};
use ui::core::attributes::{HorizontalAlign, VerticalAlign};
/// Positioning helpers for widgets that know their size and origin.
/// The parent is only read through `HasSize`, never mutated.
pub trait Alignable: HasSize + HasPosition {
/// Horizontally aligns `self` inside `parent`; `margin` columns of
/// padding apply to the `Left`/`Right` cases. Only x changes.
///
/// NOTE(review): `parent_cols - cols - margin` underflows `usize`
/// (panics in debug builds) when the widget plus margin is wider
/// than the parent — confirm callers guarantee the widget fits.
fn halign(&mut self, parent: &HasSize, halign: HorizontalAlign, margin: usize) {
let (cols, _) = self.size();
let (_, y) = self.origin();
let (parent_cols, _) = parent.size();
let newx = match halign {
HorizontalAlign::Left => margin,
HorizontalAlign::Right => parent_cols - cols - margin,
HorizontalAlign::Middle => (parent_cols - cols) / 2,
};
self.set_origin((newx, y));
}
/// Vertically aligns `self` inside `parent`; `margin` rows of
/// padding apply to the `Top`/`Bottom` cases. Only y changes.
/// Same underflow caveat as `halign`.
fn valign(&mut self, parent: &HasSize, valign: VerticalAlign, margin: usize) {
let (_, rows) = self.size();
let (x, _) = self.origin();
let (_, parent_rows) = parent.size();
let newy = match valign {
VerticalAlign::Top => margin,
VerticalAlign::Bottom => parent_rows - rows - margin,
VerticalAlign::Middle => (parent_rows - rows) / 2,
};
self.set_origin((x, newy));
}
/// Convenience wrapper: applies `halign` with `margin.0` and then
/// `valign` with `margin.1`.
fn align(&mut self, parent: &HasSize, halign: HorizontalAlign, valign: VerticalAlign,
margin: (usize, usize)) {
self.halign(parent, halign, margin.0);
self.valign(parent, valign, margin.1);
}
}
|
/// A distribution of n distinct elements in m possible cells
#[derive(Debug)]
pub struct Distribution {
    // Number of distinct elements to place.
    pub n: u16,
    // Number of cells each element may occupy.
    pub m: u16
}
/// Iterator over every placement in counting order: the i-th vector
/// holds the base-`m` digits of `i`, least-significant first, one
/// digit (cell index) per element.
pub struct DistributionIter<'a> {
    distr: &'a Distribution,
    index: u64
}
impl<'a> IntoIterator for &'a Distribution {
    type Item = Vec<u16>;
    type IntoIter = DistributionIter<'a>;
    fn into_iter(self) -> Self::IntoIter {
        DistributionIter {
            // `self` is already `&Distribution`; the old `&self` took
            // a needless extra reference that auto-deref papered over.
            distr: self,
            index: 0
        }
    }
}
impl<'a> Iterator for DistributionIter<'a> {
    type Item = Vec<u16>;
    fn next(&mut self) -> Option<Self::Item> {
        if self.index >= self.distr.how_many() {
            return None;
        }
        // Decompose `index` into base-m digits by repeated divmod,
        // keeping the running quotient in u64. The previous version
        // computed `m.pow(i)` in u16, which overflows (panic in debug,
        // wrap in release) as soon as m^i exceeds 65535 — e.g. m = 256
        // with three or more elements.
        let m = self.distr.m as u64;
        let mut digits = vec![0u16; self.distr.n as usize];
        let mut rest = self.index;
        for slot in digits.iter_mut() {
            *slot = (rest % m) as u16;
            rest /= m;
        }
        self.index += 1;
        Some(digits)
    }
}
impl Distribution {
    /// Creates a distribution of `n` elements over `m` cells.
    pub fn new(n: u16, m: u16) -> Self {
        Distribution { n, m }
    }
    /// Total number of placements: m^n.
    pub fn how_many(&self) -> u64 {
        (self.m as u64).pow(self.n as u32)
    }
}
mod tests;
|
#[macro_use] extern crate c_str_macro;
use glutin::{
self,
dpi,
event_loop::ControlFlow,
event::{
Event,
DeviceEvent,
WindowEvent,
},
};
use gl::types::*;
use std::{
mem,
};
mod shaders;
/// Creates a window + GL context, draws a single triangle once, then
/// parks in the event loop until the window is closed.
fn main() {
let el = glutin::event_loop::EventLoop::new();
let wb = glutin::window::WindowBuilder::new()
.with_title("Hello GL world!")
.with_resizable(false)
.with_inner_size(dpi::LogicalSize::new(800.0, 450.0));
let windowed_context = glutin::ContextBuilder::new()
.with_gl_robustness(glutin::Robustness::TryRobustLoseContextOnReset)
.build_windowed(wb, &el)
.unwrap();
// Ordering matters: the context must be current and `load_with` must
// have run before `setup_gl` issues any raw gl:: call.
let windowed_context = unsafe {
windowed_context.make_current().expect("failed to make context current")
};
gl::load_with(|s| windowed_context.get_proc_address(s));
setup_gl();
// The scene is drawn exactly once (inside `setup_gl`); a single swap
// presents it — there is no per-frame redraw loop.
windowed_context.swap_buffers().expect("failed to swap buffers");
el.run(move |ev, _, flow| {
*flow = glutin::event_loop::ControlFlow::Wait;
match ev {
// High-frequency / bookkeeping events we deliberately ignore.
| Event::MainEventsCleared
| Event::RedrawEventsCleared
| Event::NewEvents { .. }
| Event::DeviceEvent { event: DeviceEvent::Motion { .. }, .. }
| Event::WindowEvent { event: WindowEvent::AxisMotion { .. }, .. }
=> {}
| Event::WindowEvent { event: WindowEvent::CloseRequested, .. }
| Event::WindowEvent { event: WindowEvent::Destroyed, .. }
=> *flow = ControlFlow::Exit,
// Anything else is logged to stderr for debugging.
e => eprintln!("{:?}", e),
}
});
}
/// Loads the shader program, uploads one triangle, and issues a single
/// clear + draw call.
///
/// Caller contract (see `main`): a GL context must already be current
/// and `gl::load_with` must have run before this is called; the raw
/// `gl::` calls below assume loaded function pointers.
fn setup_gl() {
// Triangle in normalized device coordinates, one (x, y) pair per vertex.
const VERTICES: [[f32; 2]; 3] = [
[-0.8, -0.8],
[0.8, -0.8],
[0.0, 0.8],
];
let program = shaders::load_shaders();
unsafe {
gl::UseProgram(program);
// Look up the "position" attribute declared by the vertex shader.
let loc_pos = gl::GetAttribLocation(
program, c_str!("position").as_ptr()) as GLuint;
// Create and bind a VAO before configuring vertex attributes.
let mut vao = 0;
gl::GenVertexArrays(1, &mut vao);
gl::BindVertexArray(vao);
let mut buffer = 0;
gl::GenBuffers(1, &mut buffer);
gl::BindBuffer(gl::ARRAY_BUFFER, buffer);
gl::BufferData(
gl::ARRAY_BUFFER,
mem::size_of_val(&VERTICES) as GLsizeiptr,
&VERTICES as *const _ as *const GLvoid,
gl::STATIC_DRAW
);
gl::EnableVertexAttribArray(loc_pos);
// stride 0 = tightly packed pairs; offset 0 into the bound buffer.
gl::VertexAttribPointer(
loc_pos, 2, gl::FLOAT, gl::FALSE, 0, 0 as *const _);
gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT);
gl::DrawArrays(gl::TRIANGLES, 0, VERTICES.len() as GLsizei);
}
}
|
// svd2rust-generated reader/writer aliases for the OPAMP2_TCMR register.
// VMS_SEL is a plain bit with no enumerated values, hence the bare
// BitReader/BitWriter (unlike the enumerated fields below).
#[doc = "Register `OPAMP2_TCMR` reader"]
pub type R = crate::R<OPAMP2_TCMR_SPEC>;
#[doc = "Register `OPAMP2_TCMR` writer"]
pub type W = crate::W<OPAMP2_TCMR_SPEC>;
#[doc = "Field `VMS_SEL` reader - VMS_SEL"]
pub type VMS_SEL_R = crate::BitReader;
#[doc = "Field `VMS_SEL` writer - VMS_SEL"]
pub type VMS_SEL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Enumerated-value support for the 2-bit VPS_SEL field (VINP input mux).
// Generated by svd2rust; the variant discriminants mirror the hardware
// field encoding, so the `variant()` match over 0..=3 is exhaustive.
#[doc = "Field `VPS_SEL` reader - VPS_SEL"]
pub type VPS_SEL_R = crate::FieldReader<VPS_SEL_A>;
#[doc = "VPS_SEL\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum VPS_SEL_A {
#[doc = "0: VINP0 connected to VINP input"]
Vinp0 = 0,
#[doc = "1: VINP1 connected to VINP input"]
Vinp1 = 1,
#[doc = "2: VINP2 connected to VINP input"]
Vinp2 = 2,
#[doc = "3: VINP3 connected to VINP input"]
Vinp3 = 3,
}
impl From<VPS_SEL_A> for u8 {
#[inline(always)]
fn from(variant: VPS_SEL_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for VPS_SEL_A {
type Ux = u8;
}
impl VPS_SEL_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> VPS_SEL_A {
match self.bits {
0 => VPS_SEL_A::Vinp0,
1 => VPS_SEL_A::Vinp1,
2 => VPS_SEL_A::Vinp2,
3 => VPS_SEL_A::Vinp3,
// The 2-bit field can only hold 0..=3.
_ => unreachable!(),
}
}
#[doc = "VINP0 connected to VINP input"]
#[inline(always)]
pub fn is_vinp0(&self) -> bool {
*self == VPS_SEL_A::Vinp0
}
#[doc = "VINP1 connected to VINP input"]
#[inline(always)]
pub fn is_vinp1(&self) -> bool {
*self == VPS_SEL_A::Vinp1
}
#[doc = "VINP2 connected to VINP input"]
#[inline(always)]
pub fn is_vinp2(&self) -> bool {
*self == VPS_SEL_A::Vinp2
}
#[doc = "VINP3 connected to VINP input"]
#[inline(always)]
pub fn is_vinp3(&self) -> bool {
*self == VPS_SEL_A::Vinp3
}
}
#[doc = "Field `VPS_SEL` writer - VPS_SEL"]
pub type VPS_SEL_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, VPS_SEL_A>;
impl<'a, REG, const O: u8> VPS_SEL_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "VINP0 connected to VINP input"]
#[inline(always)]
pub fn vinp0(self) -> &'a mut crate::W<REG> {
self.variant(VPS_SEL_A::Vinp0)
}
#[doc = "VINP1 connected to VINP input"]
#[inline(always)]
pub fn vinp1(self) -> &'a mut crate::W<REG> {
self.variant(VPS_SEL_A::Vinp1)
}
#[doc = "VINP2 connected to VINP input"]
#[inline(always)]
pub fn vinp2(self) -> &'a mut crate::W<REG> {
self.variant(VPS_SEL_A::Vinp2)
}
#[doc = "VINP3 connected to VINP input"]
#[inline(always)]
pub fn vinp3(self) -> &'a mut crate::W<REG> {
self.variant(VPS_SEL_A::Vinp3)
}
}
// Enumerated-value support for the T1CM_EN bit (TIM1-triggered input
// switch enable). Generated by svd2rust.
#[doc = "Field `T1CM_EN` reader - T1CM_EN"]
pub type T1CM_EN_R = crate::BitReader<T1CM_EN_A>;
#[doc = "T1CM_EN\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum T1CM_EN_A {
#[doc = "0: Automatic input switch triggered by TIM1 disabled"]
Disabled = 0,
#[doc = "1: Automatic input switch triggered by TIM1 enabled"]
Enabled = 1,
}
impl From<T1CM_EN_A> for bool {
#[inline(always)]
fn from(variant: T1CM_EN_A) -> Self {
variant as u8 != 0
}
}
impl T1CM_EN_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> T1CM_EN_A {
match self.bits {
false => T1CM_EN_A::Disabled,
true => T1CM_EN_A::Enabled,
}
}
#[doc = "Automatic input switch triggered by TIM1 disabled"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == T1CM_EN_A::Disabled
}
#[doc = "Automatic input switch triggered by TIM1 enabled"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == T1CM_EN_A::Enabled
}
}
#[doc = "Field `T1CM_EN` writer - T1CM_EN"]
pub type T1CM_EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, T1CM_EN_A>;
impl<'a, REG, const O: u8> T1CM_EN_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Automatic input switch triggered by TIM1 disabled"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(T1CM_EN_A::Disabled)
}
#[doc = "Automatic input switch triggered by TIM1 enabled"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(T1CM_EN_A::Enabled)
}
}
// Enumerated-value support for the T8CM_EN bit (TIM8-triggered input
// switch enable). Same generated shape as T1CM_EN above.
#[doc = "Field `T8CM_EN` reader - T8CM_EN"]
pub type T8CM_EN_R = crate::BitReader<T8CM_EN_A>;
#[doc = "T8CM_EN\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum T8CM_EN_A {
#[doc = "0: Automatic input switch triggered by TIM8 disabled"]
Disabled = 0,
#[doc = "1: Automatic input switch triggered by TIM8 enabled"]
Enabled = 1,
}
impl From<T8CM_EN_A> for bool {
#[inline(always)]
fn from(variant: T8CM_EN_A) -> Self {
variant as u8 != 0
}
}
impl T8CM_EN_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> T8CM_EN_A {
match self.bits {
false => T8CM_EN_A::Disabled,
true => T8CM_EN_A::Enabled,
}
}
#[doc = "Automatic input switch triggered by TIM8 disabled"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == T8CM_EN_A::Disabled
}
#[doc = "Automatic input switch triggered by TIM8 enabled"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == T8CM_EN_A::Enabled
}
}
#[doc = "Field `T8CM_EN` writer - T8CM_EN"]
pub type T8CM_EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, T8CM_EN_A>;
impl<'a, REG, const O: u8> T8CM_EN_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Automatic input switch triggered by TIM8 disabled"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(T8CM_EN_A::Disabled)
}
#[doc = "Automatic input switch triggered by TIM8 enabled"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(T8CM_EN_A::Enabled)
}
}
// Enumerated-value support for the T20CM_EN bit (TIM20-triggered input
// switch enable). Same generated shape as T1CM_EN above.
#[doc = "Field `T20CM_EN` reader - T20CM_EN"]
pub type T20CM_EN_R = crate::BitReader<T20CM_EN_A>;
#[doc = "T20CM_EN\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum T20CM_EN_A {
#[doc = "0: Automatic input switch triggered by TIM20 disabled"]
Disabled = 0,
#[doc = "1: Automatic input switch triggered by TIM20 enabled"]
Enabled = 1,
}
impl From<T20CM_EN_A> for bool {
#[inline(always)]
fn from(variant: T20CM_EN_A) -> Self {
variant as u8 != 0
}
}
impl T20CM_EN_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> T20CM_EN_A {
match self.bits {
false => T20CM_EN_A::Disabled,
true => T20CM_EN_A::Enabled,
}
}
#[doc = "Automatic input switch triggered by TIM20 disabled"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == T20CM_EN_A::Disabled
}
#[doc = "Automatic input switch triggered by TIM20 enabled"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == T20CM_EN_A::Enabled
}
}
#[doc = "Field `T20CM_EN` writer - T20CM_EN"]
pub type T20CM_EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, T20CM_EN_A>;
impl<'a, REG, const O: u8> T20CM_EN_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Automatic input switch triggered by TIM20 disabled"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(T20CM_EN_A::Disabled)
}
#[doc = "Automatic input switch triggered by TIM20 enabled"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(T20CM_EN_A::Enabled)
}
}
// Enumerated-value support for the LOCK bit: once written to 1 the
// register becomes read-only until system reset (per the variant docs).
#[doc = "Field `LOCK` reader - LOCK"]
pub type LOCK_R = crate::BitReader<LOCK_A>;
#[doc = "LOCK\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LOCK_A {
#[doc = "0: TCMR is read-write"]
ReadWrite = 0,
#[doc = "1: TCMR is read-only, can only be cleared by system reset"]
ReadOnly = 1,
}
impl From<LOCK_A> for bool {
#[inline(always)]
fn from(variant: LOCK_A) -> Self {
variant as u8 != 0
}
}
impl LOCK_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LOCK_A {
match self.bits {
false => LOCK_A::ReadWrite,
true => LOCK_A::ReadOnly,
}
}
#[doc = "TCMR is read-write"]
#[inline(always)]
pub fn is_read_write(&self) -> bool {
*self == LOCK_A::ReadWrite
}
#[doc = "TCMR is read-only, can only be cleared by system reset"]
#[inline(always)]
pub fn is_read_only(&self) -> bool {
*self == LOCK_A::ReadOnly
}
}
#[doc = "Field `LOCK` writer - LOCK"]
pub type LOCK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, LOCK_A>;
impl<'a, REG, const O: u8> LOCK_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "TCMR is read-write"]
#[inline(always)]
pub fn read_write(self) -> &'a mut crate::W<REG> {
self.variant(LOCK_A::ReadWrite)
}
#[doc = "TCMR is read-only, can only be cleared by system reset"]
#[inline(always)]
pub fn read_only(self) -> &'a mut crate::W<REG> {
self.variant(LOCK_A::ReadOnly)
}
}
// Field accessors on the register reader. Each shift/mask matches the
// bit position stated in the accessor's #[doc] line.
impl R {
#[doc = "Bit 0 - VMS_SEL"]
#[inline(always)]
pub fn vms_sel(&self) -> VMS_SEL_R {
VMS_SEL_R::new((self.bits & 1) != 0)
}
#[doc = "Bits 1:2 - VPS_SEL"]
#[inline(always)]
pub fn vps_sel(&self) -> VPS_SEL_R {
VPS_SEL_R::new(((self.bits >> 1) & 3) as u8)
}
#[doc = "Bit 3 - T1CM_EN"]
#[inline(always)]
pub fn t1cm_en(&self) -> T1CM_EN_R {
T1CM_EN_R::new(((self.bits >> 3) & 1) != 0)
}
#[doc = "Bit 4 - T8CM_EN"]
#[inline(always)]
pub fn t8cm_en(&self) -> T8CM_EN_R {
T8CM_EN_R::new(((self.bits >> 4) & 1) != 0)
}
#[doc = "Bit 5 - T20CM_EN"]
#[inline(always)]
pub fn t20cm_en(&self) -> T20CM_EN_R {
T20CM_EN_R::new(((self.bits >> 5) & 1) != 0)
}
#[doc = "Bit 31 - LOCK"]
#[inline(always)]
pub fn lock(&self) -> LOCK_R {
LOCK_R::new(((self.bits >> 31) & 1) != 0)
}
}
// Field writers on the register writer; the const generic offset in
// each return type encodes the field's bit position.
impl W {
#[doc = "Bit 0 - VMS_SEL"]
#[inline(always)]
#[must_use]
pub fn vms_sel(&mut self) -> VMS_SEL_W<OPAMP2_TCMR_SPEC, 0> {
VMS_SEL_W::new(self)
}
#[doc = "Bits 1:2 - VPS_SEL"]
#[inline(always)]
#[must_use]
pub fn vps_sel(&mut self) -> VPS_SEL_W<OPAMP2_TCMR_SPEC, 1> {
VPS_SEL_W::new(self)
}
#[doc = "Bit 3 - T1CM_EN"]
#[inline(always)]
#[must_use]
pub fn t1cm_en(&mut self) -> T1CM_EN_W<OPAMP2_TCMR_SPEC, 3> {
T1CM_EN_W::new(self)
}
#[doc = "Bit 4 - T8CM_EN"]
#[inline(always)]
#[must_use]
pub fn t8cm_en(&mut self) -> T8CM_EN_W<OPAMP2_TCMR_SPEC, 4> {
T8CM_EN_W::new(self)
}
#[doc = "Bit 5 - T20CM_EN"]
#[inline(always)]
#[must_use]
pub fn t20cm_en(&mut self) -> T20CM_EN_W<OPAMP2_TCMR_SPEC, 5> {
T20CM_EN_W::new(self)
}
#[doc = "Bit 31 - LOCK"]
#[inline(always)]
#[must_use]
pub fn lock(&mut self) -> LOCK_W<OPAMP2_TCMR_SPEC, 31> {
LOCK_W::new(self)
}
#[doc = "Writes raw bits to the register."]
// unsafe per svd2rust convention: a raw value bypasses the typed
// field writers above, so the caller vouches for its validity.
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "OPAMP2 control/status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`opamp2_tcmr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`opamp2_tcmr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct OPAMP2_TCMR_SPEC;
// 32-bit register, readable and writable, resetting to all-zero; no
// bits require a fixed 0/1 to be written during modify.
impl crate::RegisterSpec for OPAMP2_TCMR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`opamp2_tcmr::R`](R) reader structure"]
impl crate::Readable for OPAMP2_TCMR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`opamp2_tcmr::W`](W) writer structure"]
impl crate::Writable for OPAMP2_TCMR_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets OPAMP2_TCMR to value 0"]
impl crate::Resettable for OPAMP2_TCMR_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
// Generated (older svd2rust style) read-only accessors for the
// STGENR_CIDR1 register: two 4-bit fields packed into the low byte.
#[doc = "Reader of register STGENR_CIDR1"]
pub type R = crate::R<u32, super::STGENR_CIDR1>;
#[doc = "Reader of field `PRMBL_1`"]
pub type PRMBL_1_R = crate::R<u8, u8>;
#[doc = "Reader of field `CLASS`"]
pub type CLASS_R = crate::R<u8, u8>;
impl R {
#[doc = "Bits 0:3 - PRMBL_1"]
#[inline(always)]
pub fn prmbl_1(&self) -> PRMBL_1_R {
PRMBL_1_R::new((self.bits & 0x0f) as u8)
}
#[doc = "Bits 4:7 - CLASS"]
#[inline(always)]
pub fn class(&self) -> CLASS_R {
CLASS_R::new(((self.bits >> 4) & 0x0f) as u8)
}
}
|
use std::io::{self};
/// Reads the element count and the space-separated numbers from stdin, then
/// prints how many times the whole list can be halved before any element
/// becomes odd.
fn main() {
    let mut n = String::new();
    io::stdin().read_line(&mut n).unwrap();
    let mut numbers = String::new();
    io::stdin().read_line(&mut numbers).unwrap();
    print!("{}", count(n.as_str(), numbers.as_str()));
}
/// Returns how many times every number in `numbers` can simultaneously be
/// halved — i.e. the minimum number of factors of two over all numbers.
///
/// `n` is the declared element count from the first input line; it is parsed
/// (panicking on malformed input, as before) but not otherwise used.
fn count(n: &str, numbers: &str) -> i32 {
    // `trim()` also strips a trailing "\r" from Windows-style "\r\n" input,
    // where the old `trim_end_matches('\n')` left a stray carriage return.
    let m = n.trim().parse::<i32>().unwrap();
    // `split_whitespace` tolerates repeated/leading spaces, where the old
    // `split(" ")` produced empty tokens that made `parse` panic.
    // The previous `.rev()` was dropped: element order has no effect on the
    // halving count.
    let mut iv: Vec<i32> = numbers
        .trim()
        .split_whitespace()
        .map(|x| x.parse::<i32>().unwrap())
        .collect();
    let mut count = 0;
    // Halve the whole list until some element turns out odd.
    while let Some(halved) = check(m, iv) {
        count += 1;
        iv = halved;
    }
    count
}
/// Halves every number, returning `None` as soon as any number is odd.
///
/// `m` (the declared list length) is unused by the algorithm; the parameter
/// is kept to preserve the existing call signature.
fn check(m: i32, numbers: Vec<i32>) -> Option<Vec<i32>> {
    let _ = m;
    // Collecting an iterator of `Option`s into `Option<Vec<_>>`
    // short-circuits at the first `None`, matching the old early return.
    numbers
        .into_iter()
        .map(|n| if n % 2 == 0 { Some(n / 2) } else { None })
        .collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_count_1() {
        assert_eq!(count("3", "8 12 40"), 2);
    }
    // BUG FIX: the two tests below were missing `#[test]`, so the harness
    // never ran them (they only produced dead-code warnings).
    #[test]
    fn test_count2() {
        assert_eq!(count("4", "5 6 8 10"), 0);
    }
    #[test]
    fn test_count3() {
        assert_eq!(
            count(
                "6",
                "382253568 723152896 37802240 379425024 404894720 471526144"
            ),
            8
        );
    }
}
|
#[doc = "Register `SR` reader"]
pub type R = crate::R<SR_SPEC>;
#[doc = "Register `SR` writer"]
pub type W = crate::W<SR_SPEC>;
#[doc = "Field `RXNE` reader - Receive buffer not empty"]
pub type RXNE_R = crate::BitReader<RXNE_A>;
#[doc = "Receive buffer not empty\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RXNE_A {
#[doc = "0: Rx buffer empty"]
Empty = 0,
#[doc = "1: Rx buffer not empty"]
NotEmpty = 1,
}
impl From<RXNE_A> for bool {
#[inline(always)]
fn from(variant: RXNE_A) -> Self {
variant as u8 != 0
}
}
impl RXNE_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RXNE_A {
match self.bits {
false => RXNE_A::Empty,
true => RXNE_A::NotEmpty,
}
}
#[doc = "Rx buffer empty"]
#[inline(always)]
pub fn is_empty(&self) -> bool {
*self == RXNE_A::Empty
}
#[doc = "Rx buffer not empty"]
#[inline(always)]
pub fn is_not_empty(&self) -> bool {
*self == RXNE_A::NotEmpty
}
}
#[doc = "Field `TXE` reader - Transmit buffer empty"]
pub type TXE_R = crate::BitReader<TXE_A>;
#[doc = "Transmit buffer empty\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TXE_A {
#[doc = "0: Tx buffer not empty"]
NotEmpty = 0,
#[doc = "1: Tx buffer empty"]
Empty = 1,
}
impl From<TXE_A> for bool {
#[inline(always)]
fn from(variant: TXE_A) -> Self {
variant as u8 != 0
}
}
impl TXE_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> TXE_A {
match self.bits {
false => TXE_A::NotEmpty,
true => TXE_A::Empty,
}
}
#[doc = "Tx buffer not empty"]
#[inline(always)]
pub fn is_not_empty(&self) -> bool {
*self == TXE_A::NotEmpty
}
#[doc = "Tx buffer empty"]
#[inline(always)]
pub fn is_empty(&self) -> bool {
*self == TXE_A::Empty
}
}
#[doc = "Field `CHSIDE` reader - Channel side"]
pub type CHSIDE_R = crate::BitReader<CHSIDE_A>;
#[doc = "Channel side\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CHSIDE_A {
#[doc = "0: Channel left has to be transmitted or has been received"]
Left = 0,
#[doc = "1: Channel right has to be transmitted or has been received"]
Right = 1,
}
impl From<CHSIDE_A> for bool {
#[inline(always)]
fn from(variant: CHSIDE_A) -> Self {
variant as u8 != 0
}
}
impl CHSIDE_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CHSIDE_A {
match self.bits {
false => CHSIDE_A::Left,
true => CHSIDE_A::Right,
}
}
#[doc = "Channel left has to be transmitted or has been received"]
#[inline(always)]
pub fn is_left(&self) -> bool {
*self == CHSIDE_A::Left
}
#[doc = "Channel right has to be transmitted or has been received"]
#[inline(always)]
pub fn is_right(&self) -> bool {
*self == CHSIDE_A::Right
}
}
#[doc = "Field `UDR` reader - Underrun flag"]
pub type UDR_R = crate::BitReader<UDRR_A>;
#[doc = "Underrun flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum UDRR_A {
#[doc = "0: No underrun occurred"]
NoUnderrun = 0,
#[doc = "1: Underrun occurred"]
Underrun = 1,
}
impl From<UDRR_A> for bool {
#[inline(always)]
fn from(variant: UDRR_A) -> Self {
variant as u8 != 0
}
}
impl UDR_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> UDRR_A {
match self.bits {
false => UDRR_A::NoUnderrun,
true => UDRR_A::Underrun,
}
}
#[doc = "No underrun occurred"]
#[inline(always)]
pub fn is_no_underrun(&self) -> bool {
*self == UDRR_A::NoUnderrun
}
#[doc = "Underrun occurred"]
#[inline(always)]
pub fn is_underrun(&self) -> bool {
*self == UDRR_A::Underrun
}
}
#[doc = "Field `CRCERR` reader - CRC error flag"]
pub type CRCERR_R = crate::BitReader<CRCERRR_A>;
#[doc = "CRC error flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CRCERRR_A {
#[doc = "0: CRC value received matches the SPIx_RXCRCR value"]
Match = 0,
#[doc = "1: CRC value received does not match the SPIx_RXCRCR value"]
NoMatch = 1,
}
impl From<CRCERRR_A> for bool {
#[inline(always)]
fn from(variant: CRCERRR_A) -> Self {
variant as u8 != 0
}
}
impl CRCERR_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CRCERRR_A {
match self.bits {
false => CRCERRR_A::Match,
true => CRCERRR_A::NoMatch,
}
}
#[doc = "CRC value received matches the SPIx_RXCRCR value"]
#[inline(always)]
pub fn is_match(&self) -> bool {
*self == CRCERRR_A::Match
}
#[doc = "CRC value received does not match the SPIx_RXCRCR value"]
#[inline(always)]
pub fn is_no_match(&self) -> bool {
*self == CRCERRR_A::NoMatch
}
}
#[doc = "CRC error flag\n\nValue on reset: 0"]
// Write-side enumeration: the only meaningful write value is 0 ("clear").
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CRCERRW_AW {
    #[doc = "0: Clear flag"]
    Clear = 0,
}
impl From<CRCERRW_AW> for bool {
    #[inline(always)]
    fn from(variant: CRCERRW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `CRCERR` writer - CRC error flag"]
// `BitWriter0C` gives write-zero-to-clear semantics: software clears the
// flag by writing 0; hardware is the only thing that sets it.
pub type CRCERR_W<'a, REG, const O: u8> = crate::BitWriter0C<'a, REG, O, CRCERRW_AW>;
impl<'a, REG, const O: u8> CRCERR_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(CRCERRW_AW::Clear)
    }
}
#[doc = "Field `MODF` reader - Mode fault"]
pub type MODF_R = crate::BitReader<MODFR_A>;
#[doc = "Mode fault\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MODFR_A {
#[doc = "0: No mode fault occurred"]
NoFault = 0,
#[doc = "1: Mode fault occurred"]
Fault = 1,
}
impl From<MODFR_A> for bool {
#[inline(always)]
fn from(variant: MODFR_A) -> Self {
variant as u8 != 0
}
}
impl MODF_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> MODFR_A {
match self.bits {
false => MODFR_A::NoFault,
true => MODFR_A::Fault,
}
}
#[doc = "No mode fault occurred"]
#[inline(always)]
pub fn is_no_fault(&self) -> bool {
*self == MODFR_A::NoFault
}
#[doc = "Mode fault occurred"]
#[inline(always)]
pub fn is_fault(&self) -> bool {
*self == MODFR_A::Fault
}
}
#[doc = "Field `OVR` reader - Overrun flag"]
pub type OVR_R = crate::BitReader<OVRR_A>;
#[doc = "Overrun flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OVRR_A {
#[doc = "0: No overrun occurred"]
NoOverrun = 0,
#[doc = "1: Overrun occurred"]
Overrun = 1,
}
impl From<OVRR_A> for bool {
#[inline(always)]
fn from(variant: OVRR_A) -> Self {
variant as u8 != 0
}
}
impl OVR_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> OVRR_A {
match self.bits {
false => OVRR_A::NoOverrun,
true => OVRR_A::Overrun,
}
}
#[doc = "No overrun occurred"]
#[inline(always)]
pub fn is_no_overrun(&self) -> bool {
*self == OVRR_A::NoOverrun
}
#[doc = "Overrun occurred"]
#[inline(always)]
pub fn is_overrun(&self) -> bool {
*self == OVRR_A::Overrun
}
}
#[doc = "Field `BSY` reader - Busy flag"]
pub type BSY_R = crate::BitReader<BSYR_A>;
#[doc = "Busy flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BSYR_A {
#[doc = "0: SPI not busy"]
NotBusy = 0,
#[doc = "1: SPI busy"]
Busy = 1,
}
impl From<BSYR_A> for bool {
#[inline(always)]
fn from(variant: BSYR_A) -> Self {
variant as u8 != 0
}
}
impl BSY_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BSYR_A {
match self.bits {
false => BSYR_A::NotBusy,
true => BSYR_A::Busy,
}
}
#[doc = "SPI not busy"]
#[inline(always)]
pub fn is_not_busy(&self) -> bool {
*self == BSYR_A::NotBusy
}
#[doc = "SPI busy"]
#[inline(always)]
pub fn is_busy(&self) -> bool {
*self == BSYR_A::Busy
}
}
#[doc = "Field `FRE` reader - Frame Error"]
pub type FRE_R = crate::BitReader<FRER_A>;
#[doc = "Frame Error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum FRER_A {
#[doc = "0: No frame format error"]
NoError = 0,
#[doc = "1: A frame format error occurred"]
Error = 1,
}
impl From<FRER_A> for bool {
#[inline(always)]
fn from(variant: FRER_A) -> Self {
variant as u8 != 0
}
}
impl FRE_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> FRER_A {
match self.bits {
false => FRER_A::NoError,
true => FRER_A::Error,
}
}
#[doc = "No frame format error"]
#[inline(always)]
pub fn is_no_error(&self) -> bool {
*self == FRER_A::NoError
}
#[doc = "A frame format error occurred"]
#[inline(always)]
pub fn is_error(&self) -> bool {
*self == FRER_A::Error
}
}
// Read accessors for each SPI status bit; bit offsets follow the reference
// manual layout (RXNE at bit 0 up to FRE at bit 8).
impl R {
    #[doc = "Bit 0 - Receive buffer not empty"]
    #[inline(always)]
    pub fn rxne(&self) -> RXNE_R {
        RXNE_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Transmit buffer empty"]
    #[inline(always)]
    pub fn txe(&self) -> TXE_R {
        TXE_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Channel side"]
    #[inline(always)]
    pub fn chside(&self) -> CHSIDE_R {
        CHSIDE_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Underrun flag"]
    #[inline(always)]
    pub fn udr(&self) -> UDR_R {
        UDR_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - CRC error flag"]
    #[inline(always)]
    pub fn crcerr(&self) -> CRCERR_R {
        CRCERR_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - Mode fault"]
    #[inline(always)]
    pub fn modf(&self) -> MODF_R {
        MODF_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - Overrun flag"]
    #[inline(always)]
    pub fn ovr(&self) -> OVR_R {
        OVR_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - Busy flag"]
    #[inline(always)]
    pub fn bsy(&self) -> BSY_R {
        BSY_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - Frame Error"]
    #[inline(always)]
    pub fn fre(&self) -> FRE_R {
        FRE_R::new(((self.bits >> 8) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 4 - CRC error flag"]
    #[inline(always)]
    #[must_use]
    // CRCERR is the only software-writable bit in SR (write 0 to clear).
    pub fn crcerr(&mut self) -> CRCERR_W<SR_SPEC, 4> {
        CRCERR_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // SAFETY: the caller must supply a bit pattern valid for this register.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`sr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SR_SPEC;
impl crate::RegisterSpec for SR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`sr::R`](R) reader structure"]
impl crate::Readable for SR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`sr::W`](W) writer structure"]
// 0x10 = bit 4 (CRCERR): during `modify()` this flag is cleared by writing
// zero, matching the `BitWriter0C` writer above.
impl crate::Writable for SR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0x10;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SR to value 0x02"]
// Reset value 0x02: TXE (bit 1) starts set — transmit buffer empty.
impl crate::Resettable for SR_SPEC {
    const RESET_VALUE: Self::Ux = 0x02;
}
|
//! Module defining the `ConfigSettings` struct, which allows to save and reload
//! the application default configuration.
use serde::{Deserialize, Serialize};
use crate::gui::styles::types::gradient_type::GradientType;
use crate::notifications::types::notifications::Notifications;
use crate::{Language, StyleType};
/// Application-wide default settings persisted to (and reloaded from) the
/// configuration file.
#[derive(Serialize, Deserialize, Default)]
pub struct ConfigSettings {
    /// Color gradient used by the GUI styles.
    pub color_gradient: GradientType,
    /// UI language.
    pub language: Language,
    /// Notification preferences.
    pub notifications: Notifications,
    // StyleType should be last in order to deserialize as a table properly
    pub style: StyleType,
}
|
mod fixtures;
use fixtures::{server, server_no_stderr, Error, FILES};
use pretty_assertions::assert_eq;
use reqwest::blocking::Client;
use reqwest::StatusCode;
use rstest::rstest;
use select::document::Document;
use select::predicate::Text;
#[rstest(
cli_auth_arg, client_username, client_password,
case("testuser:testpassword", "testuser", "testpassword"),
case(
"testuser:sha256:9f735e0df9a1ddc702bf0a1a7b83033f9f7153a00c29de82cedadc9957289b05",
"testuser",
"testpassword"
),
case(
"testuser:sha512:e9e633097ab9ceb3e48ec3f70ee2beba41d05d5420efee5da85f97d97005727587fda33ef4ff2322088f4c79e8133cc9cd9f3512f4d3a303cbdb5bc585415a00",
"testuser",
"testpassword"
),
)]
/// Correct credentials — with the password given on the CLI as plain text,
/// sha256 or sha512 — must be accepted and serve the directory listing.
fn auth_accepts(
    cli_auth_arg: &str,
    client_username: &str,
    client_password: &str,
) -> Result<(), Error> {
    let server = server(&["-a", cli_auth_arg]);
    let client = Client::new();
    let response = client
        .get(server.url())
        .basic_auth(client_username, Some(client_password))
        .send()?;
    let status_code = response.status();
    assert_eq!(status_code, StatusCode::OK);
    let body = response.error_for_status()?;
    let parsed = Document::from_read(body)?;
    // Every fixture file name must appear as text in the listing page.
    for &file in FILES {
        assert!(parsed.find(Text).any(|x| x.text() == file));
    }
    Ok(())
}
#[rstest(
cli_auth_arg, client_username, client_password,
case("rightuser:rightpassword", "wronguser", "rightpassword"),
case(
"rightuser:sha256:314eee236177a721d0e58d3ca4ff01795cdcad1e8478ba8183a2e58d69c648c0",
"wronguser",
"rightpassword"
),
case(
"rightuser:sha512:84ec4056571afeec9f5b59453305877e9a66c3f9a1d91733fde759b370c1d540b9dc58bfc88c5980ad2d020c3a8ee84f21314a180856f5a82ba29ecba29e2cab",
"wronguser",
"rightpassword"
),
case("rightuser:rightpassword", "rightuser", "wrongpassword"),
case(
"rightuser:sha256:314eee236177a721d0e58d3ca4ff01795cdcad1e8478ba8183a2e58d69c648c0",
"rightuser",
"wrongpassword"
),
case(
"rightuser:sha512:84ec4056571afeec9f5b59453305877e9a66c3f9a1d91733fde759b370c1d540b9dc58bfc88c5980ad2d020c3a8ee84f21314a180856f5a82ba29ecba29e2cab",
"rightuser",
"wrongpassword"
),
)]
/// A wrong username or wrong password (against plain, sha256 and sha512
/// CLI credentials) must yield 401 Unauthorized.
fn auth_rejects(
    cli_auth_arg: &str,
    client_username: &str,
    client_password: &str,
) -> Result<(), Error> {
    let server = server_no_stderr(&["-a", cli_auth_arg]);
    let client = Client::new();
    let status = client
        .get(server.url())
        .basic_auth(client_username, Some(client_password))
        .send()?
        .status();
    assert_eq!(status, StatusCode::UNAUTHORIZED);
    Ok(())
}
/// Command line arguments that register multiple accounts
static ACCOUNTS: &[&str] = &[
"--auth",
"usr0:pwd0",
"--auth",
"usr1:pwd1",
"--auth",
"usr2:sha256:149d2937d1bce53fa683ae652291bd54cc8754444216a9e278b45776b76375af", // pwd2
"--auth",
"usr3:sha256:ffc169417b4146cebe09a3e9ffbca33db82e3e593b4d04c0959a89c05b87e15d", // pwd3
"--auth",
"usr4:sha512:68050a967d061ac480b414bc8f9a6d368ad0082203edcd23860e94c36178aad1a038e061716707d5479e23081a6d920dc6e9f88e5eb789cdd23e211d718d161a", // pwd4
"--auth",
"usr5:sha512:be82a7dccd06122f9e232e9730e67e69e30ec61b268fd9b21a5e5d42db770d45586a1ce47816649a0107e9fadf079d9cf0104f0a3aaa0f67bad80289c3ba25a8",
// pwd5
];
#[rstest(
username,
password,
case("usr0", "pwd0"),
case("usr1", "pwd1"),
case("usr2", "pwd2"),
case("usr3", "pwd3"),
case("usr4", "pwd4"),
case("usr5", "pwd5")
)]
/// With several `--auth` accounts registered, each account's own password
/// must be accepted and serve the listing.
fn auth_multiple_accounts_pass(username: &str, password: &str) -> Result<(), Error> {
    let server = server(ACCOUNTS);
    let client = Client::new();
    let response = client
        .get(server.url())
        .basic_auth(username, Some(password))
        .send()?;
    let status = response.status();
    assert_eq!(status, StatusCode::OK);
    let body = response.error_for_status()?;
    let parsed = Document::from_read(body)?;
    for &file in FILES {
        assert!(parsed.find(Text).any(|x| x.text() == file));
    }
    Ok(())
}
#[rstest]
/// An unregistered username must be rejected even when the password matches
/// one of the registered accounts.
fn auth_multiple_accounts_wrong_username() -> Result<(), Error> {
    let server = server_no_stderr(ACCOUNTS);
    let client = Client::new();
    let status = client
        .get(server.url())
        .basic_auth("unregistered user", Some("pwd0"))
        .send()?
        .status();
    assert_eq!(status, StatusCode::UNAUTHORIZED);
    Ok(())
}
#[rstest(
username,
password,
case("usr0", "pwd5"),
case("usr1", "pwd4"),
case("usr2", "pwd3"),
case("usr3", "pwd2"),
case("usr4", "pwd1"),
case("usr5", "pwd0")
)]
/// A registered username combined with another account's password must be
/// rejected — credentials are checked per account, not pooled.
fn auth_multiple_accounts_wrong_password(username: &str, password: &str) -> Result<(), Error> {
    let server = server_no_stderr(ACCOUNTS);
    let client = Client::new();
    let status = client
        .get(server.url())
        .basic_auth(username, Some(password))
        .send()?
        .status();
    assert_eq!(status, StatusCode::UNAUTHORIZED);
    Ok(())
}
|
use crate::engine_interaction::MAX_LAYERS;
use crate::gui::{ImCgVec2, ImDragf, ImEntity, ImVec};
use crate::rendering::colors::*;
use cgmath::num_traits::zero;
use cgmath::Vector2;
use imgui::Ui;
use imgui_inspect::InspectArgsDefault;
use imgui_inspect::InspectRenderDefault;
use imgui_inspect_derive::*;
use serde::{Deserialize, Serialize};
use specs::{Component, Entity, FlaggedStorage, VecStorage, World};
/// One renderable primitive in a `MeshRender` order list.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum MeshRenderEnum {
    Circle(CircleRender),
    Rect(RectRender),
    // LineToRender does not derive Serialize/Deserialize (it holds a
    // runtime-only `Entity` handle), so this variant is skipped.
    #[serde(skip)]
    LineTo(LineToRender),
    Line(LineRender),
}
impl InspectRenderDefault<MeshRenderEnum> for MeshRenderEnum {
fn render(
_: &[&MeshRenderEnum],
_: &'static str,
_: &mut World,
_: &Ui,
_: &InspectArgsDefault,
) {
unimplemented!()
}
fn render_mut(
data: &mut [&mut MeshRenderEnum],
label: &'static str,
world: &mut World,
ui: &Ui,
args: &InspectArgsDefault,
) -> bool {
if data.len() != 1 {
return false;
}
let mre = &mut data[0];
match mre {
MeshRenderEnum::Circle(x) => {
<CircleRender as InspectRenderDefault<CircleRender>>::render_mut(
&mut [x],
label,
world,
ui,
args,
)
}
MeshRenderEnum::Rect(x) => {
<RectRender as InspectRenderDefault<RectRender>>::render_mut(
&mut [x],
label,
world,
ui,
args,
)
}
MeshRenderEnum::LineTo(x) => {
<LineToRender as InspectRenderDefault<LineToRender>>::render_mut(
&mut [x],
label,
world,
ui,
args,
)
}
MeshRenderEnum::Line(x) => {
<LineRender as InspectRenderDefault<LineRender>>::render_mut(
&mut [x],
label,
world,
ui,
args,
)
}
}
}
}
// Ergonomic conversions so any primitive can be passed directly to
// `MeshRender::add` / `MeshRender::simple` via `Into<MeshRenderEnum>`.
impl From<CircleRender> for MeshRenderEnum {
    fn from(x: CircleRender) -> Self {
        MeshRenderEnum::Circle(x)
    }
}
impl From<RectRender> for MeshRenderEnum {
    fn from(x: RectRender) -> Self {
        MeshRenderEnum::Rect(x)
    }
}
impl From<LineToRender> for MeshRenderEnum {
    fn from(x: LineToRender) -> Self {
        MeshRenderEnum::LineTo(x)
    }
}
impl From<LineRender> for MeshRenderEnum {
    fn from(x: LineRender) -> Self {
        MeshRenderEnum::Line(x)
    }
}
/// ECS component listing the primitives drawn for an entity.
#[derive(Clone, Serialize, Deserialize)]
pub struct MeshRender {
    pub orders: Vec<MeshRenderEnum>,
    pub hide: bool,
    // Private: construction goes through `empty`/`simple`, which validate
    // the layer against MAX_LAYERS.
    layer: u32,
}
#[allow(dead_code)]
impl MeshRender {
    /// Creates a renderer with no draw orders on `layer`.
    ///
    /// # Panics
    /// Panics if `layer >= MAX_LAYERS` (same message as before; the
    /// panic-guarded `if` was folded into an idiomatic `assert!`).
    pub fn empty(layer: u32) -> Self {
        assert!(layer < MAX_LAYERS, "Invalid layer: {}", layer);
        MeshRender {
            orders: vec![],
            hide: false,
            layer,
        }
    }
    /// The render layer this component draws on.
    pub fn layer(&self) -> u32 {
        self.layer
    }
    /// Appends a draw order; chainable.
    pub fn add<T: Into<MeshRenderEnum>>(&mut self, x: T) -> &mut Self {
        self.orders.push(x.into());
        self
    }
    /// Creates a renderer with a single draw order on `layer`.
    ///
    /// # Panics
    /// Panics if `layer >= MAX_LAYERS`.
    pub fn simple<T: Into<MeshRenderEnum>>(x: T, layer: u32) -> Self {
        assert!(layer < MAX_LAYERS, "Invalid layer: {}", layer);
        MeshRender {
            orders: vec![x.into()],
            hide: false,
            layer,
        }
    }
    /// No-op finisher kept for builder-style call sites.
    pub fn build(self) -> Self {
        self
    }
}
impl Component for MeshRender {
type Storage = FlaggedStorage<Self, VecStorage<Self>>;
}
impl InspectRenderDefault<MeshRender> for MeshRender {
fn render(
data: &[&MeshRender],
label: &'static str,
world: &mut World,
ui: &Ui,
args: &InspectArgsDefault,
) {
let mapped: Vec<&Vec<MeshRenderEnum>> = data.iter().map(|x| &x.orders).collect();
<ImVec<MeshRenderEnum> as InspectRenderDefault<Vec<MeshRenderEnum>>>::render(
&mapped, label, world, ui, args,
);
}
fn render_mut(
data: &mut [&mut MeshRender],
label: &'static str,
world: &mut World,
ui: &Ui,
args: &InspectArgsDefault,
) -> bool {
let mut mapped: Vec<&mut Vec<MeshRenderEnum>> =
data.iter_mut().map(|x| &mut x.orders).collect();
<ImVec<MeshRenderEnum> as InspectRenderDefault<Vec<MeshRenderEnum>>>::render_mut(
&mut mapped,
label,
world,
ui,
args,
)
}
}
#[derive(Debug, Inspect, Clone, Serialize, Deserialize)]
pub struct CircleRender {
#[inspect(proxy_type = "ImCgVec2")]
pub offset: Vector2<f32>,
#[inspect(proxy_type = "ImDragf")]
pub radius: f32,
pub color: Color,
pub filled: bool,
}
impl Default for CircleRender {
fn default() -> Self {
CircleRender {
offset: zero(),
radius: 0.0,
color: WHITE,
filled: true,
}
}
}
#[derive(Debug, Inspect, Clone, Serialize, Deserialize)]
pub struct RectRender {
#[inspect(proxy_type = "ImCgVec2")]
pub offset: Vector2<f32>,
#[inspect(proxy_type = "ImDragf")]
pub width: f32,
#[inspect(proxy_type = "ImDragf")]
pub height: f32,
pub color: Color,
pub filled: bool,
}
impl Default for RectRender {
fn default() -> Self {
RectRender {
offset: [0.0, 0.0].into(),
width: 0.0,
height: 0.0,
color: WHITE,
filled: true,
}
}
}
/// Line drawn from this entity toward another entity's position.
// Intentionally not Serialize/Deserialize: `Entity` handles are
// runtime-only, so this primitive cannot be persisted.
#[derive(Debug, Inspect, Clone)]
pub struct LineToRender {
    #[inspect(proxy_type = "ImEntity")]
    pub to: Entity,
    pub color: Color,
    #[inspect(proxy_type = "ImDragf")]
    pub thickness: f32,
}
#[derive(Debug, Inspect, Clone, Serialize, Deserialize)]
pub struct LineRender {
#[inspect(proxy_type = "ImCgVec2")]
pub offset: Vector2<f32>,
pub color: Color,
#[inspect(proxy_type = "ImDragf")]
pub thickness: f32,
}
|
use std::collections::HashMap;
use crate::read_lines::read_day;
type BagMap = HashMap<String, Vec<(usize, String)>>;
/// Parses the day-7 puzzle input into a map from bag colour to its direct
/// contents as (count, colour) pairs.
fn read_bags() -> BagMap {
    let lines = read_day(7).map(|l| l.unwrap());
    let mut bag_map = HashMap::new();
    for line in lines {
        read_bag(line, &mut bag_map);
    }
    bag_map
}
/// Parses one rule line ("<colour> bags contain N <colour> bag(s), ...")
/// and records the bag's direct contents in `bag_map`.
fn read_bag(line: String, bag_map: &mut BagMap) {
    let words: Vec<&str> = line.split(' ').collect();
    // The first two words name the outer bag.
    let bag_name = words[0..2].join(" ");
    // After "... bags contain", the contents come in groups of four words:
    // "<count> <adjective> <colour> bag(s)[,.]".
    let contents = words[4..]
        .chunks(4)
        .filter_map(|group| {
            let sub_bag = group[1..3].join(" ");
            // "contain no other bags." produces the sentinel "other bags.".
            (sub_bag != "other bags.").then(|| (group[0].parse().unwrap(), sub_bag))
        })
        .collect();
    bag_map.insert(bag_name, contents);
}
/// Number of bag colours that transitively contain `bag`.
fn count_parents(bag: &str, bag_map: &BagMap) -> usize {
    bag_map
        .keys()
        .filter(|target| is_bag_in_target(target, bag, bag_map))
        .count()
}
/// True if `bag` appears anywhere inside `target` (depth-first search).
///
/// Replaces the previous `if let Some(_) = iter().find(..)` + second loop
/// with a single `any` pass: direct hits and recursive hits are checked in
/// one traversal instead of scanning the children list twice.
///
/// NOTE(review): shared sub-bags are revisited on every path; memoization
/// would help on adversarial inputs, but results are unchanged.
fn is_bag_in_target(target: &String, bag: &str, bag_map: &BagMap) -> bool {
    bag_map[target]
        .iter()
        .any(|(_, sub)| sub == bag || is_bag_in_target(sub, bag, bag_map))
}
/// Total number of bags contained, transitively, inside `this_bag`.
fn count_children(this_bag: &str, bag_map: &BagMap) -> usize {
    // Each entry contributes its own `n` bags plus `n` copies of its
    // subtree: n + n*children == n * (1 + children).
    bag_map[this_bag]
        .iter()
        .map(|(n, sub)| n * (1 + count_children(sub, bag_map)))
        .sum()
}
/// Day-7 entry point: part 1 (how many bag colours can eventually hold a
/// shiny gold bag) and part 2 (how many bags a shiny gold bag contains).
pub fn run() {
    let bag_map = read_bags();
    let count = count_parents("shiny gold", &bag_map);
    println!("Number of bags that can hold yours:{}", count);
    let count = count_children("shiny gold", &bag_map);
    println!("Number of bags in your bag:{}", count);
}
// SPDX-License-Identifier: GPL-2.0
#[cfg(test)]
mod tests {
    use std::collections::HashMap;
    /// Table-driven check that `HashMap::get` returns the value stored under
    /// each key.
    #[test]
    fn get() {
        let mut map = HashMap::new();
        map.insert(1, String::from("one"));
        map.insert(2, String::from("two"));
        map.insert(3, String::from("three"));
        map.insert(4, String::from("four"));
        map.insert(5, String::from("five"));
        // FIX: removed the stray `;` after the struct definition, which
        // trips the `redundant_semicolons` lint.
        struct Test {
            name: &'static str,
            key: usize,
            want: &'static str,
        }
        let tests = [
            Test {
                name: "value for 1",
                key: 1,
                want: "one",
            },
            Test {
                name: "value for 2",
                key: 2,
                want: "two",
            },
            Test {
                name: "value for 3",
                key: 3,
                want: "three",
            },
            Test {
                name: "value for 4",
                key: 4,
                want: "four",
            },
            Test {
                name: "value for 5",
                key: 5,
                want: "five",
            },
        ];
        for t in &tests {
            let got = map.get(&t.key);
            assert_eq!(t.want, got.unwrap(), "{}", t.name);
        }
    }
}
|
use crate::objects::{MovingRect, Rect};
/// A collision pair recorded for one frame.
pub struct Contact(ContactID, ContactID);
impl Contact {
    /// Returns both participant IDs (`ContactID` is `Copy`).
    pub fn get_ids(&self) -> (ContactID, ContactID) {
        (self.0, self.1)
    }
}
/// Identifies which kind of object participated in a contact.
#[derive(Copy, Clone)]
pub enum ContactID {
    Obstacle,
    Player,
}
/// Collects one player/obstacle contact for every obstacle whose
/// axis-aligned bounding box overlaps the player's (edge-touching counts).
pub fn gather_contacts(player: &MovingRect, obstacles: &[Rect]) -> Vec<Contact> {
    let overlaps = |o: &Rect| {
        player.x <= o.x + o.w
            && o.x <= player.x + player.w
            && player.y <= o.y + o.h
            && o.y <= player.y + player.h
    };
    obstacles
        .iter()
        .filter(|o| overlaps(o))
        .map(|_| Contact(ContactID::Player, ContactID::Obstacle))
        .collect()
}
|
// ref: Index of the HTML 4 Attributes https://www.w3.org/TR/html401/index/attributes.html
/// Arbitrary attribute: (name, value), both owned strings.
pub struct AdhocAttr(pub String, pub String);
/// Attribute with a static (preset) name and a runtime value.
pub struct PresetAttr(pub &'static str, pub String);
/// Integer-valued attribute, rendered as a plain number (e.g. height=100).
pub struct IntAttr(pub &'static str, pub usize);
/// Integer percentage attribute, rendered with a trailing `%`.
pub struct IntPerAttr(pub &'static str, pub usize);
/// Value-less boolean flag attribute (e.g. `disabled`).
pub struct FlagAttr(pub &'static str);
/// `class` attribute built by space-joining the given names.
pub struct Class<'a>(pub &'a [&'a str]);
#[cfg(test)]
mod tests {
    use modifier::{Modifier};
    use crate::treeer::state_attr::*;
    use crate::treeer::state::State;
    /// Applies every attribute modifier to a fresh `State` and checks the
    /// stored attribute/flag values (note `IntPerAttr` appends `%` and
    /// `Class` space-joins its entries).
    #[test]
    fn it_works() {
        let mut state = State::default();
        AdhocAttr("any".into(), "some".into()).modify(&mut state);
        PresetAttr("preset", "value".into()).modify(&mut state);
        IntAttr("height", 100).modify(&mut state);
        IntPerAttr("width", 200).modify(&mut state);
        Class(&["a", "b"]).modify(&mut state);
        FlagAttr("disable").modify(&mut state);
        assert_eq!(state.attr().get("any").unwrap(), "some");
        assert_eq!(state.attr().get("preset").unwrap(), "value");
        assert_eq!(state.attr().get("height").unwrap(), "100");
        assert_eq!(state.attr().get("width").unwrap(), "200%");
        assert_eq!(state.attr().get("class").unwrap(), "a b");
        assert_eq!(state.flags().get(0).unwrap(), "disable");
    }
}
|
#![allow(dead_code)]
#![allow(unused_imports)]
use std::{mem::size_of, sync::atomic::AtomicU8};
/// Debug-only logging: prints `LOG: <args>` to stdout when the global
/// `LOG` flag is set. The whole body is compiled out in release builds.
#[macro_export]
macro_rules! log {
    ($($arg: tt)*) => {
        #[cfg(debug_assertions)]
        if $crate::LOG.load(std::sync::atomic::Ordering::Relaxed) {
            // Hold the stdout lock so the prefix and message are not
            // interleaved with output from other threads (std's stdout lock
            // is reentrant, so the inner print!/println! are fine).
            let lock = std::io::stdout();
            let lock = lock.lock();
            print!("LOG: ");
            println!($($arg)*);
            drop(lock);
        }
    };
}
/// Like `log!`, but additionally gated on a caller-supplied condition:
/// `clog!(cond; "fmt", args...)`.
#[macro_export]
macro_rules! clog {
    ($cond: expr; $($arg:tt)*) => {
        #[cfg(debug_assertions)]{
            if $cond && $crate::LOG.load(std::sync::atomic::Ordering::Relaxed) {
                let lock = std::io::stdout();
                let lock = lock.lock();
                print!("LOG: ");
                println!($($arg)*);
                drop(lock);
            }
        }
    };
}
// Byte offset of `$field` within `$ty`, computed by projecting through a
// null pointer.
// NOTE(review): dereferencing a null pointer like this is undefined
// behavior under current rules; `core::mem::offset_of!` (stable since
// Rust 1.77) is the sound replacement — confirm the toolchain before
// switching.
macro_rules! offset_of {
    ($ty: ty, $field: ident) => {
        unsafe { &(*(0 as *const $ty)).$field as *const _ as usize }
    };
}
/// Returns a mutable reference to the given VM's current (top) call frame.
#[macro_export]
macro_rules! declare_call_frame {
    ($vm: expr) => {
        // BUG FIX: the body previously referenced a literal identifier `vm`
        // instead of the `$vm` metavariable, so the macro only compiled when
        // the caller's argument happened to be a variable named `vm`.
        unsafe { &mut *$vm.top_call_frame }
    };
}
// NOTE(review): presumably polled/poisoned to signal a GC safepoint
// (stop-the-world) — confirm against the heap/interpreter modules.
pub(crate) static mut SAFEPOINT_PAGE: AtomicU8 = AtomicU8::new(0);
pub mod bigint;
pub mod builtins;
pub mod bytecode;
pub mod bytecompiler;
pub mod frontend;
pub mod function;
pub mod gc;
pub mod heap;
pub mod interpreter;
pub mod jit;
pub mod mir;
pub mod object;
pub mod pure_nan;
pub mod runtime;
pub mod table;
pub mod utils;
pub mod value;
pub mod vtable;
/// Index-based iterator over raw `*mut T` pointers into a `Vec` that also
/// permits pushing/popping while iterating; the length is re-read on every
/// `next`, so elements pushed mid-iteration are yielded too.
///
/// NOTE(review): a `push` that reallocates invalidates every pointer
/// yielded earlier — callers must not hold them across mutation.
pub struct MutatingVecIter<'a, T>(&'a mut Vec<T>, usize);
impl<'a, T> MutatingVecIter<'a, T> {
    // Appends to the underlying vector (visible to subsequent `next` calls).
    pub fn push(&mut self, item: T) {
        self.0.push(item);
    }
    // Removes and returns the last element, if any.
    pub fn pop(&mut self) -> Option<T> {
        self.0.pop()
    }
}
impl<'a, T> std::iter::Iterator for MutatingVecIter<'a, T> {
    type Item = *mut T;
    fn next(&mut self) -> Option<Self::Item> {
        // `self.1` is the next index to yield.
        if self.1 < self.0.len() {
            self.1 += 1;
            let ix = self.1 - 1;
            // SAFETY: `ix < self.0.len()` was just checked above.
            return Some(unsafe { self.0.get_unchecked_mut(ix) });
        }
        None
    }
}
/// Global variable table mapping names to VM values.
#[derive(Default)]
pub struct Globals {
    map: std::collections::HashMap<String, value::Value>,
}
impl Globals {
    /// Returns the value bound to `name`, if any (`Value` is `Copy`).
    pub fn lookup(&self, name: &str) -> Option<value::Value> {
        self.map.get(name).copied()
    }
    /// True if `name` has a binding.
    pub fn has(&self, name: &str) -> bool {
        self.map.contains_key(name)
    }
    /// Binds `name` to `val`, replacing any previous binding.
    pub fn insert(&mut self, name: &str, val: value::Value) {
        self.map.insert(name.to_owned(), val);
    }
}
/// Interpreter/JIT virtual machine state.
pub struct VM {
    // Head of the intrusive linked list of call frames (see
    // `push_frame`/`pop_frame`); null when no call is in progress.
    pub top_call_frame: *mut interpreter::callframe::CallFrame,
    // Pending exception value (set by `throw_exception_str`).
    pub exception: value::Value,
    // Strings interned once in `VM::new`.
    pub empty_string: value::Value,
    pub constructor: value::Value,
    pub length: value::Value,
    pub not_a_func_exc: value::Value,
    pub prototype: value::Value,
    pub stop_world: bool,
    // Debug/diagnostic switches.
    pub dump_bc: bool,
    pub disasm: bool,
    // JIT configuration; `opt_jit` tracks the "opt-jit" cargo feature.
    pub opt_jit: bool,
    pub template_jit: bool,
    pub jit_threshold: u32,
    pub log: bool,
    pub heap: heap::Heap,
    pub stubs: JITStubs,
    pub globals: Globals,
    pub verbose_alloc: bool,
}
/// Cache of lazily generated JIT stub entry points, keyed by the generator
/// function that produces them.
pub struct JITStubs {
    thunks: parking_lot::Mutex<std::collections::HashMap<fn() -> *const u8, *const u8>>,
}
impl JITStubs {
    /// Creates an empty stub cache.
    pub fn new() -> Self {
        Self {
            thunks: parking_lot::Mutex::new(Default::default()),
        }
    }
    /// Returns the cached stub address for `f`, generating and caching it on
    /// first use. The lock is held across generation, so each stub is built
    /// at most once.
    pub fn get_stub(&self, f: fn() -> *const u8) -> *const u8 {
        // Entry API: a single hash lookup replaces the previous
        // get-then-insert pair.
        *self.thunks.lock().entry(f).or_insert_with(f)
    }
}
pub static LOG: std::sync::atomic::AtomicBool = std::sync::atomic::AtomicBool::new(false);
impl VM {
pub fn new(_stack_start: *const bool) -> Box<Self> {
let mut this = Self {
top_call_frame: std::ptr::null_mut(),
exception: value::Value::undefined(),
globals: Default::default(),
jit_threshold: 25000,
template_jit: true,
verbose_alloc: false,
disasm: false,
stubs: JITStubs::new(),
dump_bc: false,
stop_world: false,
log: true,
#[cfg(feature = "opt-jit")]
opt_jit: true,
#[cfg(not(feature = "opt-jit"))]
opt_jit: false,
empty_string: value::Value::undefined(),
heap: heap::Heap::new(&_stack_start as *const *const bool as *const bool),
length: value::Value::undefined(),
constructor: value::Value::undefined(),
prototype: value::Value::undefined(),
not_a_func_exc: value::Value::undefined(),
};
this.length =
value::Value::from(object::WaffleString::new(&mut this.heap, "length").cast());
this.constructor =
value::Value::from(object::WaffleString::new(&mut this.heap, "constructor").cast());
this.empty_string =
value::Value::from(object::WaffleString::new(&mut this.heap, "").cast());
this.prototype =
value::Value::from(object::WaffleString::new(&mut this.heap, "prototype").cast());
this.not_a_func_exc = value::Value::from(
object::WaffleString::new(&mut this.heap, "function value expected").cast(),
);
Box::new(this)
}
pub fn top_call_frame(&self) -> Option<&mut interpreter::callframe::CallFrame> {
if self.top_call_frame.is_null() {
return None;
} else {
return Some(unsafe { &mut *self.top_call_frame });
}
}
pub fn exception_addr(&self) -> *const value::Value {
&self.exception
}
pub fn push_frame(
&mut self,
args: &[value::Value],
regc: u32,
) -> &mut interpreter::callframe::CallFrame {
let mut cf = Box::new(interpreter::callframe::CallFrame::new(args, regc));
unsafe {
let top = &mut *self.top_call_frame;
cf.caller = top as *mut _;
self.top_call_frame = Box::into_raw(cf);
&mut *self.top_call_frame
}
}
pub fn pop_frame(&mut self) -> Box<interpreter::callframe::CallFrame> {
unsafe {
let top = &mut *self.top_call_frame;
let caller = top.caller;
self.top_call_frame = caller;
Box::from_raw(top)
}
}
pub fn throw_exception_str(&mut self, s: impl AsRef<str>) -> WaffleResult {
let val = object::WaffleString::new(&mut self.heap, s);
self.exception = value::Value::from(val.cast());
WaffleResult::error(self.exception)
}
pub fn allocate<T>(&mut self, val: T) -> object::Ref<T> {
unsafe {
let mem = libc::malloc(size_of::<T>());
mem.cast::<T>().write(val);
std::mem::transmute(mem)
}
}
}
// Process-wide VM pointer, installed once at startup via `set_vm`.
pub static mut VM_PTR: *mut VM = std::ptr::null_mut();
/// Installs the global VM pointer (casts away const).
pub fn set_vm(vm: *const VM) {
    unsafe {
        VM_PTR = vm as *mut _;
    }
}
/// Returns the global VM.
///
/// NOTE(review): dereferences `VM_PTR` unchecked — undefined behavior if
/// `set_vm` has not been called first.
pub fn get_vm() -> &'static mut VM {
    unsafe { &mut *VM_PTR }
}
/// FFI-friendly tagged result returned from interpreter/JIT calls:
/// `a` is the tag (0 = ok, 1 = error) and `b` holds the raw bit pattern of
/// the `value::Value` payload.
#[repr(C)]
pub struct WaffleResult {
    pub a: u64,
    pub b: u64,
}
impl WaffleResult {
    pub fn is_error(&self) -> bool {
        self.a == 1
    }
    pub fn is_okay(&self) -> bool {
        self.a == 0
    }
    /// Reinterprets the payload bits as a `Value` (valid for both ok and
    /// error results — errors carry the exception value).
    pub fn value(&self) -> value::Value {
        unsafe { std::mem::transmute(self.b) }
    }
    /// Wraps a success value.
    pub fn okay(v: value::Value) -> Self {
        Self {
            a: 0,
            b: unsafe { std::mem::transmute(v) },
        }
    }
    /// Wraps an error (exception) value.
    pub fn error(v: value::Value) -> Self {
        Self {
            a: 1,
            b: unsafe { std::mem::transmute(v) },
        }
    }
}
pub type WaffleInternalFn = extern "C" fn(&mut interpreter::callframe::CallFrame) -> WaffleResult;
|
#[macro_use]
extern crate rbatis;
pub mod model;
use model::*;
use rbatis::rbdc::datetime::FastDateTime;
use rbatis::sql::PageRequest;
htmlsql_select_page!(select_page_data(name: &str, dt: &FastDateTime) -> BizActivity => "example/example.html");
#[tokio::main]
pub async fn main() {
    // Install the console logger first so rbatis SQL logging is visible.
    fast_log::init(fast_log::Config::new().console()).expect("rbatis init fail");
    //use static ref
    let rb = init_db().await;
    // Run the html-template paged query declared by `htmlsql_select_page!`
    // above: page 1, 10 rows, filtering by name and datetime.
    let a = select_page_data(
        &mut rb.clone(),
        &PageRequest::new(1, 10),
        "test",
        // set_micro(0) strips sub-second precision from the timestamp arg.
        &FastDateTime::now().set_micro(0),
    )
    .await
    .unwrap();
    println!("{:?}", a);
}
|
use std::net::SocketAddr;
use futures_util::future::try_join;
use hyper::upgrade::Upgraded;
use tokio::net::{TcpStream};
//// Create a TCP connection to host:port, build a tunnel between the connection and
//// the upgraded connection
//pub async fn tunnel(upgraded: Upgraded, uri: String, acceptor: tokio_rustls::TlsAcceptor, addr: SocketAddr) -> std::io::Result<()> {
//
// let upgraded = acceptor.accept(upgraded).await?;
//
// use tokio_rustls::{rustls::ClientConfig, webpki::DNSNameRef, TlsConnector};
// let mut config = ClientConfig::new();
//
// let pem = std::process::Command::new("./certs/gen_cert.sh")
// .args(&[uri.clone(), "5436723487".to_string()])
// .output()
// .expect("failed to execute process");
// let mut pem = std::io::BufReader::new(&pem.stdout[..]);
////////////////////////////////DEFAULT CERTS////////////////////////////////////
//// config
//// .root_store
//// .add_server_trust_anchors(&webpki_roots::TLS_SERVER_ROOTS);
/////////////////////////////////////////////////////////////////////////////////
// config
// .root_store
// .add_pem_file(&mut pem)
// .map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "invalid cert"))?;
//
// let connector = TlsConnector::from(std::sync::Arc::new(config));
// let stream = tokio::net::TcpStream::connect(addr).await?;
// let domain = DNSNameRef::try_from_ascii_str(&uri)
// .map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "invalid dnsname"))?;
// let stream = connector.connect(domain, stream).await?;
//
// // Proxying data
// {
// let (mut server_rd, mut server_wr) = tokio::io::split(stream);
// let (mut client_rd, mut client_wr) = tokio::io::split(upgraded);
//
// let client_to_server = tokio::io::copy(&mut client_rd, &mut server_wr);
// let server_to_client = tokio::io::copy(&mut server_rd, &mut client_wr);
// try_join(client_to_server, server_to_client).await?;
// }
//
// Ok(())
//}
// Create a TCP connection to host:port, build a tunnel between the connection and
// the upgraded connection (without reading it);
pub async fn tunnel(upgraded: Upgraded, _uri: String, _acceptor: tokio_rustls::TlsAcceptor, addr: SocketAddr) -> std::io::Result<()> {
let stream = TcpStream::connect(&addr).await.unwrap();
// Proxying data
{
let (mut server_rd, mut server_wr) = tokio::io::split(stream);
let (mut client_rd, mut client_wr) = tokio::io::split(upgraded);
let client_to_server = tokio::io::copy(&mut client_rd, &mut server_wr);
let server_to_client = tokio::io::copy(&mut server_rd, &mut client_wr);
try_join(client_to_server, server_to_client).await;
}
Ok(())
} |
use std::collections::HashMap;
/// Employees grouped by department name.
pub struct Company {
    // Department name -> employee names in insertion order.
    departments: HashMap<String, Vec<String>>
}
impl Company {
    /// Creates a company with no departments.
    pub fn new() -> Company {
        Company {
            departments: HashMap::new()
        }
    }
    /// Adds `name` to `dept`, creating the department on first use.
    pub fn add_employee(&mut self, dept: &str, name: &str){
        // entry().or_default() does a single lookup and avoids eagerly
        // allocating a Vec when the department already exists.
        self.departments
            .entry(dept.to_string())
            .or_default()
            .push(name.to_string());
        println!("Added {} to department {}\n", name, dept);
    }
    /// Prints every employee across all departments, alphabetically.
    pub fn list_all(&self) {
        // Collect borrowed names; no need to clone the strings to sort.
        let mut all_emps: Vec<&String> = self.departments.values().flatten().collect();
        all_emps.sort();
        println!("All employees:");
        for employee in &all_emps {
            println!("- {}", employee);
        }
        println!();
    }
    /// Prints the employees of one department alphabetically, or a notice
    /// when the department does not exist.
    pub fn list_dept(&self, dept: &str) {
        match self.departments.get(dept) {
            Some(emps) => {
                // Sort borrowed references instead of cloning every String.
                let mut employees: Vec<&String> = emps.iter().collect();
                employees.sort();
                println!("Employees in department {}:", dept);
                for employee in &employees {
                    println!("- {}", employee);
                }
            },
            None => println!("Department does not exist!"),
        };
        println!();
    }
}
|
extern crate bit_vec;
extern crate rust_htslib;
use regex::Regex;
use rust_htslib::bcf::Read;
use std::path::Path;
#[derive(Serialize, Clone, Debug, PartialEq)]
/// A variant prepared for plotting; positions carry the +/- 0.5 alignment
/// offsets applied by `read_indexed_vcf`.
pub struct Variant {
    /// Currently always the literal string "Variant".
    pub(crate) marker_type: String,
    /// Reference allele sequence.
    pub(crate) reference: String,
    /// Alternative sequence; `None` for symbolic deletions (`<DEL>`).
    pub(crate) alternatives: Option<String>,
    /// Plot start coordinate (0-based position with half-unit shift).
    pub(crate) start_position: f64,
    /// Plot end coordinate.
    pub(crate) end_position: f64,
    pub(crate) var_type: VariantType,
}
#[derive(Serialize, Clone, Debug, PartialEq)]
/// Classification of a VCF alternative allele.
pub enum VariantType {
    Deletion,
    Insertion,
    Duplicate,
    Inversion,
    /// Same-length substitution (SNV/MNV).
    Variant,
}
#[derive(Serialize, Clone, Debug, PartialEq)]
/// A raw VCF record report: identifiers, position, and alleles.
pub struct Report {
    pub(crate) id: String,
    pub(crate) name: String,
    pub(crate) position: i64,
    pub(crate) reference: String,
    pub(crate) alternatives: String,
    // Presumably parsed `ANN` (annotation) INFO entries — TODO confirm.
    pub(crate) ann: Option<Vec<Vec<String>>>,
}
/// Reads all variants overlapping `[from, to]` on `chrom` from the indexed
/// VCF/BCF at `path`, converting every alternative allele into a plot-ready
/// [`Variant`] (positions carry +/- 0.5 alignment offsets).
///
/// * `<DEL>` / `<INV>` / `<DUP>` symbolic alleles take their end position
///   from the `END` INFO tag (panics if `END` is absent — unchanged).
/// * `<CN..>` copy-number alleles are skipped with a warning.
/// * Other alleles are classified by length: equal -> substitution,
///   longer -> insertion, shorter -> deletion.
///
/// Panics on I/O, header, or record errors (unchanged behavior).
pub fn read_indexed_vcf(path: &Path, chrom: String, from: u64, to: u64) -> Vec<Variant> {
    let mut vcf = rust_htslib::bcf::IndexedReader::from_path(&path).unwrap();
    let rid = vcf.header().name2rid(chrom.as_bytes()).unwrap();
    vcf.fetch(rid, from, to).unwrap();
    let mut variants: Vec<Variant> = Vec::new();
    // Compile the copy-number pattern once; it was previously rebuilt for
    // every alternative allele of every record.
    let cnv = Regex::new(r"^<CN\d>$").unwrap();
    for r in vcf.records() {
        let mut rec = r.unwrap();
        let pos = rec.pos();
        let end_pos = match rec.info(b"END").integer() {
            Ok(Some(end_pos)) => {
                // Subtraction of 0.5 because of the 0-based positioning in
                // the whole plot.
                // TODO: confirm whether the END INFO tag is 0-based.
                Some(end_pos[0] as f64 - 0.5)
            }
            _ => None,
        };
        let alleles = rec.alleles();
        let ref_vec = alleles[0].to_owned();
        let rfrce = String::from_utf8(ref_vec).unwrap();
        // BUGFIX: the length used to be narrowed through `as u8`, silently
        // truncating reference alleles longer than 255 bases and corrupting
        // the computed end positions. Keep full precision as f64.
        let len = rfrce.len() as f64;
        for i in 1..alleles.len() {
            let alt = alleles[i];
            let var_string = String::from("Variant");
            if alt == b"<DEL>" {
                variants.push(Variant {
                    marker_type: var_string,
                    reference: rfrce.clone(),
                    alternatives: None,
                    start_position: pos as f64 - 0.5,
                    end_position: end_pos.unwrap(),
                    var_type: VariantType::Deletion,
                });
            } else if alt == b"<INV>" {
                // An inversion is rendered as the reversed reference.
                let rev: String = rfrce.chars().rev().collect();
                variants.push(Variant {
                    marker_type: var_string,
                    reference: rfrce.clone(),
                    alternatives: Some(rev),
                    start_position: pos as f64 - 0.5,
                    end_position: end_pos.unwrap(),
                    var_type: VariantType::Inversion,
                });
            } else if alt == b"<DUP>" {
                // A duplication is rendered as the doubled reference.
                let dup: String = [rfrce.clone(), rfrce.clone()].concat();
                variants.push(Variant {
                    marker_type: var_string,
                    reference: rfrce.clone(),
                    alternatives: Some(dup),
                    start_position: pos as f64 - 0.5,
                    end_position: end_pos.unwrap(),
                    var_type: VariantType::Duplicate,
                });
            } else {
                // Literal allele: interpret each byte as a char, exactly as
                // before (latin-1-style, no UTF-8 validation).
                let allel: String = alt.iter().map(|c| *c as char).collect();
                if cnv.is_match(allel.as_ref()) {
                    // Copy-Number-Variation alleles are not supported.
                    warn!("Use of unsupported Copy-Number-Variation {}", allel)
                } else if allel.len() == rfrce.len() {
                    variants.push(Variant {
                        marker_type: var_string,
                        reference: rfrce.clone(),
                        alternatives: Some(allel),
                        start_position: pos as f64 - 0.5,
                        end_position: pos as f64 - 0.5 + len,
                        var_type: VariantType::Variant,
                    });
                } else if allel.len() > rfrce.len() {
                    variants.push(Variant {
                        marker_type: var_string,
                        reference: rfrce.clone(),
                        alternatives: Some(allel),
                        // start and end + 0.5 to align with insertions from
                        // the BAM.
                        start_position: pos as f64,
                        end_position: pos as f64 + len,
                        var_type: VariantType::Insertion,
                    });
                } else {
                    variants.push(Variant {
                        marker_type: var_string,
                        reference: rfrce.clone(),
                        alternatives: Some(allel),
                        // +0.5 to align with deletions from the BAM
                        // (example: ref ACTT, alt A -> the deletion is CTT).
                        start_position: pos as f64 + 0.5,
                        end_position: pos as f64 - 0.5 + len,
                        var_type: VariantType::Deletion,
                    });
                }
            }
        }
    }
    variants
}
|
#![allow(non_snake_case)]
//! The BIOS includes several System Call Functions which can be accessed by SWI
//! instructions.
//!
//! * All BIOS functions clobber `r0`, `r1`, and `r3`.
//! * Some functions also use `r2` as an input register.
//! * All other registers are unaffected.
// Note(Lokathor): This makes intra-doc links work.
#[allow(unused)]
use crate::prelude::*;
/// (`swi 0x00`) Performs a "soft reset" of the device.
///
/// Loads `r14` based on the `u8` value at `0x0300_7FFA`:
/// * zero: `0x0800_0000` (ROM)
/// * non-zero: `0x0200_0000` (EWRAM)
///
/// Then resets the following memory and registers:
/// * `0x300_7E00` ..= `0x300_7FFF`: zeroed
/// * `r0` ..= `r12`: zeroed
/// * `sp_usr`: `0x300_7F00`
/// * `sp_irq`: `0x300_7FA0`
/// * `sp_svc`: `0x300_7FE0`
/// * `lr_svc`, `lr_irq` : zeroed
/// * `spsr_svc`, `spsr_irq`: zeroed
///
/// Then jumps to the `r14` address. This never returns.
pub unsafe fn SoftReset() -> ! {
asm!("swi 0x00", options(noreturn))
}
/// (`swi 0x01`) Resets RAM and/or IO registers
///
/// * Note that if the IWRAM flag is used it doesn't reset the final `0x200`
/// bytes of IWRAM. Instead, those bytes are reset during a call to the
/// [`SoftReset`] function.
/// * BIOS Bug: Data in `SIODATA32` is always destroyed, even if the `sio` flag
/// is not set.
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn RegisterRamReset(flags: crate::mmio_types::ResetFlags) {
asm!("swi 0x01",
inlateout("r0") flags.0 => _,
out("r1") _,
out("r3") _,
options(nomem, nostack, preserves_flags)
)
}
/// (`swi 0x02`) Halts the CPU until an interrupt request occurs.
///
/// The CPU is placed into low-power mode, while other parts (video, sound,
/// timers, serial, keypad) continue to operate. This mode only terminates when
/// one of the enabled interrupts is requested.
///
/// This halt state uses [`IE`] to determine what interrupts to allow, but the
/// [`IME`] value is ignored (interrupts can occur even if `IME` is `false`).
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn Halt() {
asm!("swi 0x02",
out("r0") _,
out("r1") _,
out("r3") _,
options(nomem, nostack, preserves_flags)
)
}
/// (`swi 0x03`) Puts the CPU in a *very* low power state.
///
/// While stopped, the CPU, Sound, Video, SIO-shift-clock, DMA, and Timers are
/// all disabled.
///
/// The system can return from this state only if there is an interrupt from the
/// Keypad, Game Pak, or General-Purpose-SIO.
///
/// Before calling Stop you are advised to disable the Video to reduce battery
/// usage, otherwise it just freezes.
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn Stop() {
asm!("swi 0x03",
out("r0") _,
out("r1") _,
out("r3") _,
options(nomem, nostack, preserves_flags)
)
}
/// (`swi 0x04`) "Interrupt Wait".
///
/// This is similar to [`Halt`], but when an interrupt does occur this function
/// will automatically return the CPU to halt state unless the interrupt is one
/// of the interrupt types specified by `flags`.
///
/// If you set `discard_current_flags` then any pending interrupts are cleared
/// and this function will wait until a new flag is set. Otherwise the function
/// will return immediately if you request a wait for an interrupt that's
/// already pending.
///
/// When handling an interrupt through this function you must perform the normal
/// acknowledgement using [`IRQ_ACKNOWLEDGE`] and **also** acknowledge using
/// [`INTR_WAIT_ACKNOWLEDGE`].
///
/// **Caution:** This function automatically also sets [`IME`] to `true`.
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn IntrWait(discard_current_flags: bool, flags: crate::mmio_types::InterruptFlags) {
  // Note(Lokathor): we don't mark this preserves_flags because the user's IRQ
  // handler gets called which might end up trashing the flags.
  // BUGFIX: this previously issued `swi 0x03` (Stop) instead of `swi 0x04`
  // (IntrWait); the immediate now matches the documented BIOS call.
  asm!("swi 0x04",
    inlateout("r0") discard_current_flags as u8 => _,
    inlateout("r1") flags.0 => _,
    out("r3") _,
    options(nomem, nostack)
  )
}
/// (`swi 0x05`) "VBlank Interrupt Wait"
///
/// Waits for the next VBlank interrupt.
///
/// This function is just shorthand for the following:
/// ```no_run
/// # use crate::prelude::*;
/// const VBLANK_IRQ: InterruptFlags = InterruptFlags::new().with_vblank(true);
/// IntrWait(true, VBLANK_IRQ)
/// ```
/// See [`IntrWait`]
///
/// **Note:** Because this uses `IntrWait`, [`IME`] will be set to `true`
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn VBlankIntrWait() {
// Note(Lokathor): we don't mark this preserves_flags because the user's IRQ
// handler gets called which might end up trashing the flags.
asm!(
"swi 0x05",
out("r0") _,
out("r1") _,
out("r3") _,
options(nomem, nostack)
)
}
/// (`swi 0x06`) Performs `i32` division.
///
/// **Outputs:** `(n/d, n%d, (n/d).unsigned_abs())`
#[inline]
#[must_use]
#[instruction_set(arm::t32)]
pub fn Div(number: i32, denominator: core::num::NonZeroI32) -> (i32, i32, u32) {
let d: i32;
let m: i32;
let abs_d: u32;
unsafe {
asm!("swi 0x06",
inlateout("r0") number => d,
inlateout("r1") denominator.get() => m,
lateout("r3") abs_d,
options(pure, nomem, nostack, preserves_flags),
)
}
(d, m, abs_d)
}
/// (`swi 0x08`) Square root of an integer value.
///
/// To obtain as much fraction as possible, shift the input left by 2N bits to
/// get an output that is left shifted by N bits.
/// * sqrt(2) => 0
/// * sqrt(2 << 30) => 1.41421 << 15
#[inline]
#[instruction_set(arm::t32)]
pub fn Sqrt(number: u32) -> u16 {
let output: u32;
unsafe {
asm!("swi 0x08",
inlateout("r0") number => output,
out("r1") _,
out("r3") _,
options(pure, nomem, nostack, preserves_flags),
)
}
output as u16
}
/// (`swi 0x09`) Arc tangent
///
/// The input and output have 14 fractional bits.
#[inline]
#[instruction_set(arm::t32)]
pub fn ArcTan(tan: i16) -> i16 {
let output;
unsafe {
asm!("swi 0x09",
inlateout("r0") tan => output,
out("r1") _,
out("r3") _,
options(pure, nomem, nostack, preserves_flags),
)
}
output
}
/// (`swi 0x0A`) Arc tangent 2
///
/// * The inputs have 14 fractional bits.
/// * The output range is `0 ..= u16::MAX`, representing a portion of 2 PI.
#[inline]
#[instruction_set(arm::t32)]
pub fn ArcTan2(x: i16, y: i16) -> u16 {
let output;
unsafe {
asm!("swi 0x0A",
inlateout("r0") x => output,
in("r1") y,
out("r3") _,
options(pure, nomem, nostack, preserves_flags),
)
}
output
}
/// (`swi 0x0B`) Quickly copy/fill some memory.
///
/// * `src`: points to either `u16` or `u32` data.
/// * `dst`: points to the same type of data.
/// * `len_mode`: bitfield value:
/// * bits 0 ..= 20: the number of elements to copy/fill.
/// * bit 24: enable for fill, otherwise this is a copy.
/// * bit 26: enable for `u32` at a time, otherwise this uses `u16` at a time.
///
/// All pointers must be aligned to the appropriate type, and also valid for the
/// appropriate element count.
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn CpuSet(src: *const core::ffi::c_void, dst: *mut core::ffi::c_void, len_mode: u32) {
asm!("swi 0x0B",
in("r0") src,
in("r1") dst,
in("r2") len_mode,
out("r3") _,
options(nostack, preserves_flags),
)
}
/// (`swi 0x0C`) Quickly copy/fill some memory (faster than [`CpuSet`]).
///
/// * `src` points to the data source.
/// * `dst` points to the data destination.
/// * `len_mode`: bitfield value:
/// * bits 0 ..= 20: the number of `u32` to copy/fill.
/// * bit 24: enable for fill, otherwise this is a copy.
///
/// All pointers must be aligned. The length must be a multiple of 8.
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn CpuFastSet(src: *const u32, dst: *mut u32, len_mode: u32) {
asm!("swi 0x0C",
in("r0") src,
in("r1") dst,
in("r2") len_mode,
out("r3") _,
options(nostack, preserves_flags),
)
}
#[repr(C)]
pub struct BgAffineSetSrc {
/// 8-bit fraction
pub origin_center_x: i32,
/// 8-bit fraction
pub origin_center_y: i32,
pub display_center_x: i16,
pub display_center_y: i16,
/// 8-bit fraction
pub scale_ratio_x: i16,
/// 8-bit fraction
pub scale_ratio_y: i16,
/// 8-bit fraction, range 0 to u16::MAX
pub angle_of_rotation: u16,
}
#[repr(C)]
pub struct BgAffineSetDst {
pub pa: i16,
pub pb: i16,
pub pc: i16,
pub pd: i16,
pub start_x_coordinate: i32,
pub start_y_coordinate: i32,
}
/// (`swi 0x0E`) Calculates BG affine data.
///
/// * `src`: Points to the start of a slice of [`BgAffineSetSrc`]
/// * `dst`: Points to the start of a slice of [`BgAffineSetDst`]
/// * `count`: The number of elements to process from `src` to `dst`.
///
/// Both pointers must be aligned and valid for the length given.
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn BgAffineSet(src: *const BgAffineSetSrc, dst: *mut BgAffineSetDst, count: usize) {
asm!("swi 0x0E",
in("r0") src,
in("r1") dst,
in("r2") count,
out("r3") _,
options(nostack, preserves_flags),
)
}
#[repr(C)]
pub struct ObjAffineSetSrc {
/// 8-bit fraction
pub scale_ratio_x: i16,
/// 8-bit fraction
pub scale_ratio_y: i16,
/// 8-bit fraction, range 0 to u16::MAX
pub angle: u16,
}
/// (`swi 0x0F`) Calculates OBJ affine data.
///
/// Unlike with [`BgAffineSet`], this can optionally write the output data
/// directly into OAM (see below).
///
/// * `src`: points to the start of a slice of [`ObjAffineSetSrc`] values.
/// * `dst`: points to the start of the output location (`pa`).
/// * `count`: The number of `src` values to process to `dst`.
/// * `out_param_offset`: the number of bytes between *each field* of the output
/// data.
/// * Specify 2 if you want to output to an `[i16; 4]` or similar.
/// * Specify 8 if you want to output directly to OAM.
///
/// The pointers must be valid for the count given, and aligned.
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn ObjAffineSet(
src: *const ObjAffineSetSrc,
dst: *mut i16,
count: usize,
out_param_offset: usize,
) {
asm!("swi 0x0F",
in("r0") src,
in("r1") dst,
in("r2") count,
in("r3") out_param_offset,
options(nostack, preserves_flags),
)
}
#[repr(C)]
pub struct UnpackInfo {
pub source_data_len_bytes: u16,
/// Supports 1, 2, 4, or 8 bit source elements.
pub source_unit_bit_width: u8,
/// Supports 1, 2, 4, 8, 16, or 32 destination elements.
pub destination_unit_bit_width: u8,
/// This field combines two purposes:
/// * bits 0 ..= 30: this value is added to all non-zero source units.
/// * bit 31: if this is set, add the above to all zero source units.
pub data_offset: u32,
}
/// (`swi 0x10`) Used to undo bit packing.
///
/// * `src`: The start of the source bytes.
/// * `dst`: The start of the destination.
/// * `info`: Describes the unpacking to perform.
///
/// All pointers must be valid for the correct memory spans and aligned.
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn BitUnPack(src: *const u8, dst: *mut u32, info: &UnpackInfo) {
asm!("swi 0x10",
in("r0") src,
in("r1") dst,
in("r2") info,
out("r3") _,
options(nostack, preserves_flags),
)
}
/// (`swi 0x11`) LZ77 Decompression with 8-bit output.
///
/// Arguments
/// * `src`: pointer to the source region. The source region is prefixed with a
/// `u32` bitfield value that describes the decompression to perform. It's
/// then followed by the byte sequence to decompress.
/// * Prefix value: `output_data_size << 8 | (1 << 4) | (0)`
/// * Flags: 1 byte that specifies the types of the next 8 blocks (MSB to
/// LSB).
/// * Blocks:
/// * (0) Literal: Copy 1 byte from the source to the output.
/// * (1) Back Reference: Repeat `N+3` bytes from `BACK+1` bytes earlier in
/// the output. This uses the next two bytes from the source to describe
/// the back reference:
/// * first byte bits 0 ..= 3: most significant bits of `BACK`
/// * first byte bits 4 ..= 7: `N`
/// * second byte: least significant bits of `BACK`
/// * (So each `N` is 3 bits, and each `BACK` is 12 bits.)
/// * After 8 blocks there's another flag and then another 8 blocks.
/// * The overall size of the source data should be a multiple of 4 (pad with
/// 0 as necessary).
/// * `dst`: pointer to the destination region.
///
/// All pointers must be valid for the correct memory spans and aligned.
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn LZ77UnCompReadNormalWrite8bit(src: *const u32, dst: *mut u8) {
asm!("swi 0x11",
in("r0") src,
in("r1") dst,
out("r3") _,
options(nostack, preserves_flags),
)
}
/// (`swi 0x12`) LZ77 Decompression with 16-bit output.
///
/// This is largely as per [`LZ77UnCompReadNormalWrite8bit`], but each output is
/// 16-bits, which means that `BACK` values of 0 will corrupt the process. This
/// puts a small constraint on the data compressor, but doesn't really affect
/// you when you're using this function to decompress some already-compressed data.
///
/// All pointers must be valid for the correct memory spans and aligned.
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn LZ77UnCompReadNormalWrite16bit(src: *const u32, dst: *mut u16) {
asm!("swi 0x12",
in("r0") src,
in("r1") dst,
out("r3") _,
options(nostack, preserves_flags),
)
}
/// (`swi 0x13`) Decompresses Huffman-encoded data.
///
/// * `src`: The source buffer. There's a `u32` header, a huffman tree, and then
/// a compressed bitstream.
/// * header (4 bytes): `(output_byte_count << 8) | (2 << 4) |
/// data_unit_bit_size`, the output bit size per data unit can be 4 or 8.
/// * tree size (1 byte): the number of bytes in the tree table.
/// * tree table (up to 255 bytes): a list of 8-bit nodes, starting with the
/// root node.
/// * root node and non-data child nodes (1 byte):
/// * bits 0 ..= 5: offset to next child node.
/// * next_child0: (CurrentAddr AND NOT 1)+Offset*2+2
/// * next_child1: as above +1
/// * bit 6: node1 end flag (1 = next node is data)
/// * bit 7: node0 end flag (1 = next node is data)
/// * data nodes (1 byte):
/// * the literal value to output. If the output unit size is less than 8
/// bits at a time the upper bits of the literal should be 0.
/// * compressed bitstream (stored as a series of `u32` values). The node bits
/// are stored in each `u32` starting from the high bit.
/// * `dst`: The output buffer.
///
/// All pointers must be valid for the correct memory spans and aligned.
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn HuffUnCompReadNormal(src: *const u32, dst: *mut u32) {
asm!("swi 0x13",
in("r0") src,
in("r1") dst,
out("r3") _,
options(nostack, preserves_flags),
)
}
/// (`swi 0x14`) Expands run-length compressed data, outputting as 8-bit units.
///
/// * `src`: The source buffer. There's a `u32` header, and then a loop of
/// "flag" and then "data" bytes until the end of the stream.
/// * header (4 bytes): `(output_byte_count << 8) | (3 << 4) | 0`
/// * flag byte:
/// * bits 0 ..= 6: expanded data length, uncompressed N-1, compressed N-3.
/// * bit 7: 0=uncompressed, 1=compressed
/// * data byte: N uncompressed bytes or 1 compressed byte repeated N times.
/// * `dst`: The output buffer.
///
/// All pointers must be valid for the correct memory spans and aligned.
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn RLUnCompReadNormalWrite8bit(src: *const u32, dst: *mut u8) {
asm!("swi 0x14",
in("r0") src,
in("r1") dst,
out("r3") _,
options(nostack, preserves_flags),
)
}
/// (`swi 0x15`) Expands run-length compressed data, outputting as 16-bit units.
///
/// This is like [`RLUnCompReadNormalWrite8bit`] but outputs in 16-bit units, so
/// it's suitable for use with VRAM.
///
/// All pointers must be valid for the correct memory spans and aligned.
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn RLUnCompReadNormalWrite16bit(src: *const u32, dst: *mut u16) {
asm!("swi 0x15",
in("r0") src,
in("r1") dst,
out("r3") _,
options(nostack, preserves_flags),
)
}
/// (`swi 0x16`) Performs an "unfilter" on 8-bit data units.
///
/// An unfiltering converts a starting value and a series of delta values into
/// the appropriate totals.
/// * Filtered: 10, +1, +1, +1, +1, +5, +5, ...
/// * Unfiltered: 10, 11, 12, 13, 14, 19, 24, ...
///
/// This is not itself a compression technique, but it's far easier to compress
/// the filtered form of data in some cases, so this is often used in
/// *combination* with other compression techniques.
///
/// Arguments
/// * `src`: pointer to the source region. The source region is prefixed with a
/// `u32` bitfield value that describes the unfiltering to perform. It's then
/// followed by the bytes to unfilter.
/// * Prefix value: `element_count << 8 | (8 << 4) | (1)`
/// * `dst`: pointer to the destination region.
///
/// Note that, because this uses 8-bit writes, it cannot output correctly to
/// VRAM.
///
/// The source pointer must be aligned to 4 (the header is read as a `u32`), and
/// both pointers must be valid for the correct span:
/// * `src`: `element_count` + 4 bytes
/// * `dst`: `element_count` bytes
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn Diff8bitUnFilterWrite8bit(src: *const u8, dst: *mut u32) {
asm!("swi 0x16",
in("r0") src,
in("r1") dst,
out("r3") _,
options(nostack, preserves_flags),
)
}
/// (`swi 0x17`) Performs an "unfilter" on 8-bit data units, using 16-bit
/// output.
///
/// This is *very close* to [`Diff8bitUnFilterWrite8bit`] except that the output
/// is 16-bits per element.
///
/// Arguments
/// * `src`: pointer to the source region. The source region is prefixed with a
/// `u32` bitfield value that describes the unfiltering to perform. It's then
/// followed by the bytes to unfilter.
/// * Prefix value: `element_count << 8 | (8 << 4) | (1)`
/// * `dst`: pointer to the destination region.
///
/// Because this outputs with 16-bit writes, it is suitable for use with VRAM.
///
/// The source pointer must be aligned to 4 (the header is read as a `u32`), and
/// both pointers must be valid for the correct span:
/// * `src`: `element_count` + 4 bytes
/// * `dst`: `element_count` * 2 bytes
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn Diff8bitUnFilterWrite16bit(src: *const u8, dst: *mut u16) {
asm!("swi 0x17",
in("r0") src,
in("r1") dst,
out("r3") _,
options(nostack, preserves_flags),
)
}
/// (`swi 0x18`) Performs an "unfilter" on 16-bit data units.
///
/// This is *very close* to [`Diff8bitUnFilterWrite8bit`] except that the output
/// is 16-bits per element and the prefix is different.
///
/// Arguments
/// * `src`: pointer to the source region. The source region is prefixed with a
/// `u32` bitfield value that describes the unfiltering to perform. It's then
/// followed by the bytes to unfilter.
/// * Prefix value: `element_count << 8 | (8 << 4) | (2)`
/// * `dst`: pointer to the destination region.
///
/// Because this outputs with 16-bit writes, it is suitable for use with VRAM.
///
/// The source pointer must be aligned to 4 (the header is read as a `u32`), and
/// both pointers must be valid for the correct span:
/// * `src`: (`element_count` * 2) + 4 bytes
/// * `dst`: `element_count` * 2 bytes
#[inline]
#[instruction_set(arm::t32)]
pub unsafe fn Diff16bitUnFilter(src: *const u16, dst: *mut u16) {
asm!("swi 0x18",
in("r0") src,
in("r1") dst,
out("r3") _,
options(nostack, preserves_flags),
)
}
// TODO: MidiKey2Freq (1F)
// TODO: SoundBias (19)
// TODO: SoundChannelClear (1E)
// TODO: SoundDriverInit (1A)
// TODO: SoundDriverMain (1C)
// TODO: SoundDriverMode (1B)
// TODO: SoundDriverVSync (1D)
// TODO: MultiBoot (25)
// TODO: SoundDriverVSyncOff (28)
// TODO: SoundDriverVSyncOn (29)
|
use crate::functions::*;
use crate::types::{Arr1d, Arr2d, Arr3d};
use crate::util::*;
use itertools::izip;
use ndarray::{Array, Array1, Array2, Array3, Axis, Dimension};
pub trait LayerWithLoss {
    /// Runs the forward pass for inference only (no loss bookkeeping).
    fn predict(&self, input: Arr2d) -> Arr2d {
        unimplemented!()
    }
    /// For an input of shape (batch, input_dim) and a one-hot target of
    /// shape (batch, output_dim), computes and returns the scalar loss.
    fn forward(&mut self, input: Arr2d, one_hot_target: &Arr2d) -> f32 {
        self.forward2(input, reverse_one_hot(one_hot_target))
    }
    /// Same as `forward`, but takes class-index targets instead of one-hot.
    fn forward2(&mut self, input: Arr2d, target: Array1<usize>) -> f32 {
        unimplemented!()
    }
    /// Propagates the error of shape (batch, output_dim) backward, handing
    /// an error of shape (batch, input_dim) to the previous layer.
    fn backward(&mut self) -> Arr2d;
    // fn new(wvec: &[Arr2d]) -> Self;
    /// Mutable references to trainable parameters (empty by default).
    fn params(&mut self) -> Vec<&mut Arr2d> {
        vec![]
    }
    /// Gradients matching `params` (empty by default).
    fn grads(&self) -> Vec<Arr2d> {
        vec![]
    }
}
#[derive(Default)]
pub struct SoftMaxWithLoss {
    /// Output of softmax, i.e. the predicted probability per label.
    pred: Arr2d,
    // /// Output of the loss function
    // out: Arr1d,
    /// Teacher labels: the correct class index for each sample.
    target: Array1<usize>,
}
impl SoftMaxWithLoss {}
impl LayerWithLoss for SoftMaxWithLoss {
    fn predict(&self, input: Arr2d) -> Arr2d {
        softmax(input)
    }
    /// Softmax + cross-entropy forward pass; caches the prediction and
    /// target for `backward` and returns the mean loss.
    fn forward2(&mut self, input: Arr2d, target: Array1<usize>) -> f32 {
        self.pred = self.predict(input);
        self.target = target;
        cross_entropy_error_target(&self.pred, &self.target)
    }
    fn backward(&mut self) -> Arr2d {
        // let batch_size = dout.Arr1d; // averaged over the batch, so each sample contributes 1/batch_size?
        // let dout: Arr2d = Array::from_elem((batch_size, 1), 1.0 / batch_size as f32);
        let mut dx = self.pred.clone(); // start from the predicted probabilities
        for (i, t) in self.target.iter().enumerate() {
            dx[[i, *t]] -= 1.0; // error: the true label's probability is 1, so subtract it
        }
        let batch_size = dx.dim().0 as f32;
        dx / batch_size // divide by the batch size so the gradient is a per-sample average
    }
}
#[derive(Default)]
/// Element-wise sigmoid + binary cross-entropy loss.
/// NOTE(review): the name is presumably a typo for `SigmoidWithLoss` —
/// renaming would break callers, so it is kept.
pub struct SigmodWithLoss<D: Dimension> {
    // input: Arr2d,
    /// Sigmoid outputs cached by `forward` for use in `backward`.
    y: Array<f32, D>,
    /// Boolean target labels cached by `forward`.
    target: Array<bool, D>,
    /// Total element count of the batch (used to average the gradient).
    batch_sample_size: usize,
}
impl<D: Dimension> SigmodWithLoss<D> {
    /// Forward pass: applies the sigmoid to `input`, computes the binary
    /// cross-entropy against the boolean `target` labels, and returns the
    /// mean loss. Caches `y`, `target`, and the element count for `backward`.
    pub fn forward(&mut self, input: Array<f32, D>, target: Array<bool, D>) -> f32 {
        self.y = input.mapv(|x| 1.0 / (1.0 + (-x).exp()));
        let mut loss = Array::zeros(self.y.dim());
        for (l, y, b) in izip!(loss.iter_mut(), self.y.iter(), target.iter()) {
            // The 1e-7 epsilon guards ln(0). The previous code applied it
            // only to the positive branch, so y == 1.0 with a false label
            // produced -ln(0) = inf; apply it symmetrically.
            *l = if *b {
                -(y + 1e-7).ln()
            } else {
                -(1.0 - y + 1e-7).ln()
            }
        }
        self.target = target;
        self.batch_sample_size = self.y.len();
        loss.mean().unwrap()
    }
    /// Backward pass: gradient of the mean loss w.r.t. the pre-loss input,
    /// i.e. (y - t) averaged over all elements in the batch.
    pub fn backward(&mut self) -> Array<f32, D> {
        (&self.y - &self.target.mapv(|b| if b { 1.0 } else { 0.0 })) / self.batch_sample_size as f32
    }
}
|
/// A libusb implementation of the adapter hardware interface
#[cfg(feature = "libusb")]
mod libusb;
#[cfg(feature = "libusb")]
pub use libusb::*;
/// A trait representing a struct which provides access to a limited set of
/// USB operations
pub trait AdapterHardware {
    /// Writes `data` to the device via an interrupt transfer.
    /// NOTE(review): no error reporting — presumably implementations panic
    /// or retry on failure; confirm.
    fn write_interrupt(&mut self, data: &[u8]);
    /// Reads from the device via an interrupt transfer into `data`.
    fn read_interrupt(&mut self, data: &mut [u8]);
}
|
use crate::{Elevation, Location, Trace};
// Instead of taking ownership of a `Trace`, `Analyzer` borrows a
// pre-existing `Trace` instance for lifetime `'a`.
#[derive(Debug)]
pub struct Analyzer<'a> {
    /// The trace under analysis (borrowed, not owned).
    pub trace: &'a Trace,
    /// Per-location cumulative statistics, built once at construction.
    statistics: Vec<Stats>,
}
#[derive(Debug)]
/// Running totals accumulated up to a given location of the trace.
pub struct Stats {
    /// Cumulative distance from the start of the trace.
    distance: f64,
    /// Cumulative elevation gain (`positive`) and loss (`negative`).
    elevation: Elevation,
}
impl<'a> Analyzer<'a> {
    /// Builds an analyzer for `trace`, precomputing the per-location
    /// cumulative distance/elevation statistics.
    pub fn new(trace: &'a Trace) -> Self {
        let statistics = Analyzer::compute_analytics(trace);
        Analyzer { trace, statistics }
    }
    /// Computes one `Stats` entry per location: entry 0 is all zeros, and
    /// entry i holds the cumulative totals up to location i.
    fn compute_analytics(trace: &Trace) -> Vec<Stats> {
        let mut statistics = Vec::with_capacity(trace.locations.len().max(1));
        statistics.push(Stats {
            distance: 0.0,
            elevation: Elevation {
                positive: 0.0,
                negative: 0.0,
            },
        });
        let mut cumulative_distance = 0.0;
        let mut cumulative_elevation_gain = 0.0;
        let mut cumulative_elevation_loss = 0.0;
        // Walk consecutive location pairs. The previous version looked the
        // successor up with `iter().nth(index + 1)` inside a fold, making
        // the whole pass O(n^2); `windows(2)` is O(n).
        for pair in trace.locations.windows(2) {
            let (location, next_location) = (&pair[0], &pair[1]);
            cumulative_distance += location.calculate_distance_to(next_location);
            let elevation = location.calculate_elevation_to(next_location);
            cumulative_elevation_gain += elevation.positive;
            cumulative_elevation_loss += elevation.negative;
            statistics.push(Stats {
                distance: cumulative_distance,
                elevation: Elevation {
                    positive: cumulative_elevation_gain,
                    negative: cumulative_elevation_loss,
                },
            });
        }
        statistics
    }
    /// Returns the trace location closest to `current_location`
    /// (the earliest one on ties), or an error for an empty trace.
    pub fn compute_closest_location(&self, current_location: &Location) -> Result<&Location, &str> {
        let mut closest = match self.trace.locations.first() {
            Some(location) => location,
            None => return Err("empty trace"),
        };
        // Carry the best distance along with the best location so each
        // candidate distance is computed once (it was computed twice).
        let mut closest_distance = current_location.calculate_distance_to(closest);
        for location in &self.trace.locations {
            let distance = current_location.calculate_distance_to(location);
            if distance < closest_distance {
                closest_distance = distance;
                closest = location;
            }
        }
        Ok(closest)
    }
    /// Finds the index of the first location matching `current_location`'s
    /// longitude, latitude, and altitude exactly.
    pub fn find_location_index(&self, current_location: &Location) -> Result<usize, &str> {
        self.trace
            .locations
            .iter()
            .position(|location| {
                location.longitude == current_location.longitude
                    && location.latitude == current_location.latitude
                    && location.altitude == current_location.altitude
            })
            .ok_or("not found")
    }
    /// Returns the index of the location whose cumulative distance is
    /// closest to the `distance` mark (earliest index wins on ties).
    pub fn find_location_index_at(&self, distance: f64) -> Result<usize, &str> {
        if distance < 0.0 {
            return Err("negative mark");
        }
        if self.statistics.is_empty() {
            return Err("no statistics computed yet");
        }
        if self.trace.locations.is_empty() {
            return Err("could not compute statistics for empty trace");
        }
        let initial_distance = match self.statistics.first() {
            Some(stats) => stats.distance,
            None => return Err("could not retrieve first stats item"),
        };
        let mut delta = (distance - initial_distance).abs();
        let mut index = 0;
        for (candidate, statistic) in self.statistics.iter().enumerate() {
            let diff = (distance - statistic.distance).abs();
            // Strict `<` keeps the earliest index for equal deltas, matching
            // the original fold.
            if diff < delta {
                delta = diff;
                index = candidate;
            }
        }
        // `>=` (was `>`): an index equal to len() is already out of bounds
        // for the locations vector.
        if index >= self.trace.locations.len() {
            return Err("out of bounds");
        }
        Ok(index)
    }
    /// Returns the location closest to the given distance mark.
    pub fn get_location_at(&self, distance: f64) -> Result<&Location, &str> {
        let index = self.find_location_index_at(distance)?;
        // Checked indexed access; `iter().nth(i)` scanned from the front.
        self.trace.locations.get(index).ok_or("location not found")
    }
    /// Extracts the locations covering the `[start, end]` distance range.
    pub fn get_trace_section(&self, start: f64, end: f64) -> Result<Vec<Location>, &str> {
        let stats = match self.statistics.last() {
            Some(stats) => stats,
            None => return Err("no computed stats"),
        };
        if stats.distance < end {
            return Err("out of bounds");
        };
        if start < 0.0 {
            return Err("negative values");
        };
        let start_index = self.find_location_index_at(start)?;
        let end_index = self.find_location_index_at(end)?;
        self.trace.get_section(start_index, end_index)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{helper, Location};

    #[test]
    fn should_find_location_index() {
        let locations = helper::get_locations();
        let trace = Trace { locations };
        let analyzer = Analyzer::new(&trace);
        let index = analyzer.find_location_index_at(0.2);
        assert!(index.is_ok());
        assert_eq!(index.ok(), Some(4));
    }

    #[test]
    fn should_return_error_while_getting_location_for_wrong_mark() {
        let locations = helper::get_locations();
        let trace = Trace { locations };
        let analyzer = Analyzer::new(&trace);
        let location = analyzer.get_location_at(-100.2);
        assert!(location.is_err());
        assert_eq!(location.err(), Some("negative mark"));
    }

    #[test]
    fn should_get_location_for_200_mark() {
        let locations = helper::get_locations();
        let trace = Trace { locations };
        let matched_location = Location {
            longitude: 0.328684,
            latitude: 42.828782,
            altitude: 793.0,
        };
        let analyzer = Analyzer::new(&trace);
        let location = analyzer.get_location_at(0.2);
        assert!(location.is_ok());
        assert_eq!(location.ok(), Some(&matched_location));
    }

    #[test]
    fn should_return_error_while_getting_closest_location() {
        let paris = Location {
            longitude: 2.350987,
            latitude: 48.856667,
            altitude: 800.00,
        };
        let locations = vec![];
        let trace = Trace { locations };
        let analyzer = Analyzer::new(&trace);
        let location = analyzer.compute_closest_location(&paris);
        assert!(location.is_err());
        assert_eq!(location.err(), Some("empty trace"));
    }

    #[test]
    fn compute_closest_location() {
        let paris = Location {
            longitude: 2.350987,
            latitude: 48.856667,
            altitude: 800.00,
        };
        let moscow = Location {
            longitude: 37.617634,
            latitude: 55.755787,
            altitude: 200.00,
        };
        let locations = vec![paris, moscow];
        let trace = Trace { locations };
        let analyzer = Analyzer::new(&trace);
        let current_location = Location {
            longitude: 1.350987,
            latitude: 49.856667,
            altitude: 800.00,
        };
        // Fixed mojibake: `&current_location` had been corrupted to
        // `¤t_location` by an HTML-entity round trip.
        let closest_location = analyzer.compute_closest_location(&current_location);
        assert!(closest_location.is_ok());
        assert_eq!(closest_location.ok(), Some(&paris));
    }

    #[test]
    fn find_location_index() {
        let locations = helper::get_locations();
        let trace = Trace { locations };
        let analyzer = Analyzer::new(&trace);
        let current_location = Location {
            longitude: 5.77501,
            latitude: 45.07069,
            altitude: 281.516,
        };
        let index = analyzer.find_location_index(&current_location);
        assert!(index.is_err());
        assert_eq!(index.err(), Some("not found"));
    }

    #[test]
    fn should_not_get_any_location_index() {
        let locations = helper::get_locations();
        let trace = Trace { locations };
        let analyzer = Analyzer::new(&trace);
        let current_location = Location {
            longitude: 5.7750,
            latitude: 45.07069,
            altitude: 281.516,
        };
        let index = analyzer.find_location_index(&current_location);
        assert!(index.is_err());
        assert_eq!(index.err(), Some("not found"));
    }

    #[test]
    fn should_get_a_location_section() {
        let locations = helper::get_locations();
        let trace = Trace { locations };
        let section = vec![
            Location {
                longitude: 0.32802,
                latitude: 42.829719999999995,
                altitude: 792.0,
            },
            Location {
                longitude: 0.32839999999999997,
                latitude: 42.8296,
                altitude: 792.0,
            },
            Location {
                longitude: 0.32882,
                latitude: 42.829181,
                altitude: 792.0,
            },
        ];
        let analyzer = Analyzer::new(&trace);
        let computed_section = analyzer.get_trace_section(0.0, 0.2);
        assert!(computed_section.is_ok());
        assert_eq!(Some(section), computed_section.ok())
    }

    #[test]
    fn should_return_error_while_getting_trace_section() {
        let locations = helper::get_locations();
        let trace = Trace { locations };
        let analyzer = Analyzer::new(&trace);
        let section = analyzer.get_trace_section(0.0, 10000.2);
        assert!(section.is_err());
        assert_eq!(section.err(), Some("out of bounds"));
    }

    #[test]
    fn should_return_error_while_getting_trace_section_negative_value() {
        let locations = helper::get_locations();
        let trace = Trace { locations };
        let analyzer = Analyzer::new(&trace);
        let section = analyzer.get_trace_section(-20.0, 0.2);
        assert!(section.is_err());
        assert_eq!(section.err(), Some("negative values"));
    }
}
|
use redis::Commands;
use serde_json::{json, Result, Value};
use std::collections::HashMap;
use std::io::prelude::*;
use std::io::BufReader;
use std::net::TcpStream;
use std::str;
use std::sync::mpsc;
use std::thread;
use uuid::Uuid;
use crate::message::{Message, ServiceMessage, ServiceMsgType, ServiceType};
// Identifier this proxy attaches to every outgoing ServiceMessage.
// NOTE(review): despite the name, the value is the literal "proxy_uuid" —
// presumably a placeholder for a real per-instance UUID; confirm.
static HELLO_WORLD: &str = "proxy_uuid";
/// Answers a metadata query (currently only `"ls"`) about the user's files.
///
/// Fetches the user's file map from redis (keyed by `msg["id"]`) and, for
/// `"ls"`, strips the internal `chunk_id` entries before writing the map back
/// to the client as JSON. Unknown query names are silently ignored.
///
/// NOTE(review): the redis address/DB are hard-coded and every fallible call
/// `unwrap()`s — a missing user key panics this handler thread; confirm.
pub fn query_storage(stream: &mut TcpStream, msg: Value) {
    // TODO Fetch details of the file from the core_server
    let client = redis::Client::open("redis://172.28.5.3/7").unwrap();
    let mut con = client.get_connection().unwrap();
    // TODO calculate the hash of the file using the filename and the user UUID
    let user_uuid = msg["id"].as_str().unwrap().to_string();
    let query = msg["queryname"].as_str().unwrap();
    let files: String = con.get(&user_uuid).unwrap();
    println!("{}", files);
    // filename -> { "chunk_id": ..., "size": ..., "creation_date": ... }
    let mut filemap: HashMap<String, HashMap<String, Value>> =
        serde_json::from_str(&files).unwrap();
    match query {
        "ls" => {
            // Clients only need public metadata; drop the internal chunk ids.
            for entry in filemap.values_mut() {
                entry.remove("chunk_id");
            }
            stream
                .write_all(json!(filemap).to_string().as_bytes())
                .unwrap();
            stream.flush().unwrap();
        }
        _ => (),
    }
}
/// Streams a stored file back to the requesting client on `stream`.
///
/// Flow, as implemented below:
/// 1. Look up the user's file map in redis (keyed by `msg["id"]`), then the
///    file's chunk map via its `chunk_id`.
/// 2. Spawn one thread per chunk; each connects to the chunk's storage node,
///    sends a `read` request, and accumulates exactly `size` bytes.
/// 3. Join all fetch threads, then replay the chunks to the client: first a
///    `total_size` header, then per chunk a `meta` JSON (`size`/`index`)
///    followed by the raw bytes once the client acks "OK".
///
/// NOTE(review): every redis and socket call `unwrap()`s, so a missing key
/// or dropped connection panics this handler thread — confirm intended.
pub fn read_storage(stream: &mut TcpStream, msg: Value) {
    let proxy_server_uuid = HELLO_WORLD.to_string();
    // TODO Fetch details of the file from the core_server
    let client = redis::Client::open("redis://172.28.5.3/7").unwrap();
    let mut con = client.get_connection().unwrap();
    // TODO calculate the hash of the file using the filename and the user UUID
    let user_uuid = msg["id"].as_str().unwrap().to_string();
    let filename = msg["filename"].as_str().unwrap().to_string();
    let files: String = con.get(&user_uuid).unwrap();
    // filename -> { "chunk_id": ..., "size": ..., ... }
    let filemap: HashMap<String, HashMap<String, Value>> =
        serde_json::from_str(&files.as_str()).unwrap();
    /*
    println!("{:?}",filemap);
    println!("{}",filemap[&filename]["chunk_id"]);
    println!("{:?\n}",filename);
    */
    let filesize = &filemap[&filename]["size"];
    let chunks: String = con
        .get(&filemap[&filename]["chunk_id"].as_str().unwrap().to_string())
        .unwrap();
    // chunk hash -> [storage node ip, metadata JSON string]
    let chunkmap: HashMap<String, Vec<String>> = serde_json::from_str(&chunks.as_str()).unwrap();
    let (sct_tx, sct_rx) = mpsc::channel();
    //println!("{:?}",chunkmap);
    let mut t_handles: Vec<thread::JoinHandle<_>> = vec![];
    for (k, v) in &chunkmap {
        let nextserver_ip = v[0].clone();
        let metadata: Value = serde_json::from_str(v[1].as_str()).unwrap();
        let size = metadata["size"].as_u64().unwrap() as usize;
        let off = metadata["offset"].as_u64().unwrap() as usize;
        let index = metadata["index"].as_u64().unwrap() as usize;
        // Read request forwarded verbatim to the storage node.
        let content = json!({
            "msg_type" : "read",
            "metadata" : metadata,
        })
        .to_string();
        println!("{}", content);
        let data = ServiceMessage {
            uuid: proxy_server_uuid.clone(),
            msg_type: ServiceMsgType::SERVICEINIT,
            service_type: ServiceType::Storage,
            content: content,
        };
        let msg_data = serde_json::to_string(&data).unwrap();
        let dup_sct_tx = mpsc::Sender::clone(&sct_tx);
        // One fetch thread per chunk; results are funneled through sct_tx.
        let storage_client_thread = thread::spawn(move || {
            let mut cstream = TcpStream::connect(nextserver_ip.clone()).unwrap();
            cstream.write_all(msg_data.as_bytes()).unwrap();
            cstream.flush().unwrap();
            let mut destbuffer = [0 as u8; 2048];
            let mut total = 0 as usize;
            let mut bufvec: Vec<u8> = vec![];
            // Accumulate until exactly `size` bytes have arrived.
            // NOTE(review): if the peer ever sends more than `size` bytes,
            // `total == size` is never hit and this loops forever — a `>=`
            // check would be safer; confirm the node's framing guarantees.
            loop {
                let dno = cstream.read(&mut destbuffer).unwrap();
                total += dno;
                println!("Total {} - dno {} - size {}", total, dno, size);
                bufvec.append(&mut destbuffer[0..dno].to_vec());
                if total == size {
                    break;
                }
            }
            dup_sct_tx
                .send(((size, index), bufvec, "data".to_string()))
                .unwrap();
        });
        t_handles.push(storage_client_thread);
    }
    // All chunks are fully fetched before anything is replayed to the client;
    // the unbounded channel buffers them until the loop below drains it.
    for handle in t_handles {
        handle.join().unwrap();
    }
    // Sentinel telling the replay loop below that no more chunks follow.
    sct_tx
        .send(((0 as usize, 0 as usize), vec![], "End".to_string()))
        .unwrap();
    // TODO store the total_size in the redis db (this is a temp hack for a specific file)
    stream
        .write_all(
            json!({
                "total_size": filesize,
            })
            .to_string()
            .as_bytes(),
        )
        .unwrap();
    stream.flush().unwrap();
    for received in sct_rx.iter() {
        match received {
            ((size, index), chunk, dat) => {
                let end: &String = &dat;
                let mut resp = [0; 512];
                if end.trim() == String::from("End") {
                    println!("{:?}", end.trim());
                    stream
                        .write_all(
                            json!({
                                "msg_type": "End",
                            })
                            .to_string()
                            .as_bytes(),
                        )
                        .unwrap();
                    break;
                }
                println!("Senting to client chunk of Size :- {} ", size);
                stream
                    .write_all(
                        json!({
                            "msg_type": "meta",
                            "size" : size,
                            "index" : index,
                        })
                        .to_string()
                        .as_bytes(),
                    )
                    .unwrap();
                stream.flush().unwrap();
                // Wait for the client's "OK" ack before sending raw bytes.
                let no = stream.read(&mut resp).unwrap();
                if std::str::from_utf8(&resp[0..no]).unwrap() == "OK" {
                    stream.write_all(&chunk.as_slice()).unwrap();
                    stream.flush().unwrap();
                }
                // TODO
                // Combine the encrypted file chunks in correct order and decrypt it using some key
            }
            _ => (),
        };
    }
    //let ndata: String = rxthread.join().unwrap();
    // TODO Properly combine data
    // Send the file back to the user in json, also accessible from the website
    //respond_back(stream, String::from("OK").as_bytes());
}
/// Receives a file from the client on `stream`, splits it into chunks, and
/// distributes the chunks round-robin across the storage nodes listed in
/// `resp["response"]["node_ip"]`. Chunk metadata returned by the nodes is
/// collected and stored in redis under a fresh file UUID, which is also
/// recorded in the user's file map.
///
/// Pipeline: the reader loop below feeds `sct_tx`; `node_thread` uploads each
/// chunk to a node and forwards the node's metadata to `meta_tx`;
/// `meta_thread` assembles the final chunk map and returns it as JSON.
///
/// NOTE(review): `node_thread` is never joined — completion is inferred from
/// `meta_thread` receiving the "End" sentinel; confirm this is intentional.
pub fn write_storage(
    stream: &mut TcpStream,
    msg: Value,
    resp: Value,
) -> std::result::Result<String, ()> {
    let proxy_server_uuid = HELLO_WORLD.to_string();
    println!("Writing file");
    let filesize = msg["filesize"].as_u64().unwrap();
    let filename = msg["filename"].as_str().unwrap().to_string();
    let user_uuid = msg["id"].as_str().unwrap().to_string();
    // NOTE(review): this 512-byte buffer is shadowed by the 2048-byte one
    // declared below and is never used.
    let mut destbuffer = [0 as u8; 512];
    // TODO Encrypt the file and split it to chunks of equal size
    let alloc_nodes = resp["response"]["node_ip"].as_array().unwrap();
    let mut node_array: Vec<String> = Vec::new();
    let mut nodedata: Vec<Vec<String>> = Vec::new();
    // Store the node ips to a
    for i in alloc_nodes.iter() {
        println!("{}", i);
        // Keep only the host part; the port is re-added as :7777 later.
        node_array.push(i.as_str().unwrap().split(":").collect::<Vec<&str>>()[0].to_string());
    }
    let mut destbuffer = [0 as u8; 2048];
    // Ack the client so it starts sending file bytes.
    stream.write_all(String::from("OK").as_bytes()).unwrap();
    stream.flush().unwrap();
    // sct: reader loop -> node_thread (chunks); meta: node uploads -> meta_thread.
    let (sct_tx, sct_rx) = mpsc::channel();
    let (meta_tx, meta_rx) = mpsc::channel();
    // Collects per-chunk metadata until the "End" sentinel, then returns the
    // chunk map serialized as JSON.
    let meta_thread = thread::spawn(move || {
        let mut chunklist: HashMap<String, Vec<String>> = HashMap::new();
        for received in meta_rx.iter() {
            match received {
                (metadata, ip) => {
                    //println!("Received meadata {}", metadata);
                    let end: &String = &metadata;
                    if end.trim() == String::from("End") {
                        println!("{:?}", end.trim());
                        break;
                    }
                    /*
                    let dno = forward_to(
                        coreserver_ip,
                        chunk_metadata,
                        &mut destbuffer,
                        &data,
                    );*/
                    // TODO calculate real hash of the chunk
                    let hash = Uuid::new_v4().to_string();
                    chunklist.insert(hash, vec![ip, metadata]);
                }
                _ => (),
            };
        }
        json!(chunklist).to_string()
    });
    let dup_meta_tx = mpsc::Sender::clone(&meta_tx);
    // Uploads each received chunk to its node on a dedicated thread; on the
    // "End" sentinel, joins the upload threads and forwards "End" downstream.
    let node_thread = thread::spawn(move || {
        let mut t_handles: Vec<thread::JoinHandle<_>> = vec![];
        for recv in sct_rx.iter() {
            match recv {
                (index, ip, chunkbuf) => {
                    let end: &String = &ip;
                    println!("{:?}", end.trim());
                    if end.trim() == String::from("End") {
                        // Wait for in-flight uploads before signalling done.
                        for handle in t_handles {
                            handle.join().unwrap();
                        }
                        dup_meta_tx
                            .send((String::from("End"), String::from("")))
                            .unwrap();
                        break;
                    }
                    let nextserver_ip = format!("{}:7777", &ip);
                    let datachunk: Vec<u8> = chunkbuf;
                    println!(
                        "Index [{}] : Forwarding chunk {:?} ",
                        index,
                        datachunk.len()
                    );
                    let dup2_meta_tx = mpsc::Sender::clone(&dup_meta_tx);
                    let proxy_server_uuid = proxy_server_uuid.clone();
                    let storage_client_thread = thread::spawn(move || {
                        let mut destbuffer = [0 as u8; 512];
                        let content = json!({
                            "msg_type" : "write",
                            "size" : datachunk.len(),
                        })
                        .to_string();
                        let data = ServiceMessage {
                            uuid: proxy_server_uuid.clone(),
                            msg_type: ServiceMsgType::SERVICEINIT,
                            service_type: ServiceType::Storage,
                            content: content,
                        };
                        let msg_data = serde_json::to_string(&data).unwrap();
                        let mut resp = [0; 512];
                        let mut cstream = TcpStream::connect(nextserver_ip.clone()).unwrap();
                        cstream.write_all(msg_data.as_bytes()).unwrap();
                        cstream.flush().unwrap();
                        // Node acks "OK" before it will accept the bytes.
                        let no = cstream.read(&mut resp).unwrap();
                        if std::str::from_utf8(&resp[0..no]).unwrap() == "OK" {
                            cstream.write_all(datachunk.as_slice()).unwrap();
                            cstream.flush().unwrap();
                            //println!("Sent Chunk");
                        }
                        // Node replies with the stored chunk's metadata JSON.
                        let dno = cstream.read(&mut resp).unwrap();
                        let mut metadata: Value = serde_json::from_slice(&resp[0..dno]).unwrap();
                        metadata["index"] = json!(index);
                        dup2_meta_tx
                            .send((metadata.to_string(), nextserver_ip))
                            .unwrap();
                    });
                    t_handles.push(storage_client_thread);
                }
            }
        }
    });
    // TODO implement size limut
    // Read file content
    let mut total = 0;
    let mut index = 0;
    let mut tempbuffer: Vec<u8> = Vec::new();
    // NOTE(review): chunk boundaries rely on `total` landing exactly on a
    // 64 KiB multiple, which assumes reads never straddle that boundary —
    // confirm the client's write pattern guarantees this.
    loop {
        let dno = stream.read(&mut destbuffer).unwrap();
        total += dno;
        tempbuffer.append(&mut destbuffer[0..dno].to_vec());
        if index == 1 {
            println!("{:?}", tempbuffer);
        }
        if total % 65536 == 0 {
            // Round-robin chunk assignment over the allocated nodes.
            let ip = node_array[index % node_array.len()].to_owned();
            sct_tx.send((index, ip, tempbuffer.clone())).unwrap();
            index += 1;
            tempbuffer.clear();
        }
        if total == filesize as usize {
            // Flush the final (possibly short) chunk, then the sentinel.
            let ip = node_array[index % node_array.len()].to_owned();
            sct_tx.send((index, ip, tempbuffer.clone())).unwrap();
            tempbuffer.clear();
            sct_tx
                .send((index, String::from("End"), tempbuffer))
                .unwrap();
            break;
        }
    }
    let chunkdata: String = meta_thread.join().unwrap();
    let file_uuid = Uuid::new_v4().to_string();
    // TODO Get uuid from the user message
    let client = redis::Client::open("redis://172.28.5.3/7").unwrap();
    let mut con = client.get_connection().unwrap();
    // First upload for this user: initialize an empty file map.
    let filedata: String = match con.get(&user_uuid) {
        Ok(val) => val,
        _ => {
            let _: () = con.set(&user_uuid, json!({}).to_string()).unwrap();
            json!({}).to_string()
        }
    };
    let mut filemap: Value = serde_json::from_str(&filedata.as_str()).unwrap();
    // Inserting new file
    filemap[&filename] = json!({
        "chunk_id" : file_uuid,
        "size": filesize,
        "creation_date": "date",
    });
    println!("{:?} - {:?}", file_uuid, chunkdata);
    let _: () = con.set(&user_uuid, filemap.to_string()).unwrap();
    let _: () = con.set(&file_uuid, chunkdata).unwrap();
    // TODO
    // send the details of each chunk and it's respective node to the core_server
    Ok("Upload Complete".to_string())
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Sanity-checks serde_json `Value` index assignment: assigning to a
    /// missing key on an object inserts it.
    #[test]
    fn run() {
        // Start from a map holding a single file entry.
        let mut filemap = json!({
            "filename": {
                "id" : "file_uuid",
                "size": "filesize",
                "creation_date": "date",
            },
        });
        println!("{}", filemap);
        // Index-assignment on a Value object inserts the new key.
        filemap["newfile"] = json!({
            "id": "newid",
        });
        println!("{}", filemap);
    }
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::fmt;
use anyhow::Error;
use once_cell::sync::OnceCell;
use strum::EnumProperty as _;
use strum_macros::EnumIter;
use strum_macros::EnumProperty;
use crate::mangle::MangleWithClass as _;
use crate::textual;
use crate::textual::Sid;
/// Synthetic class used to namespace builtin functions when mangling their
/// names (see the `Display` impl on `Builtin`).
const BUILTINS_CLASS: ir::ClassName<'static> = ir::ClassName::new(ffi::Str::new(b"$builtins"));
/// Crate-local result alias; the error type defaults to `anyhow::Error`.
type Result<T = (), E = Error> = std::result::Result<T, E>;
/// These represent builtins for handling HHVM bytecode instructions. In general
/// the names should match the HHBC name except when they are compound bytecodes
/// (like Cmp with a parameter of Eq becoming CmpEq). Documentation can be found
/// in hphp/doc/bytecode.specification.
///
/// The `Function` strum property is the function name emitted for the builtin
/// (consumed via `get_str("Function")` in `Display for Builtin`).
#[derive(Copy, Clone, EnumIter, EnumProperty)]
pub(crate) enum Hhbc {
    #[strum(props(Function = "hhbc_add"))]
    Add,
    #[strum(props(Function = "hhbc_cmp_eq"))]
    CmpEq,
    #[strum(props(Function = "hhbc_cmp_gt"))]
    CmpGt,
    #[strum(props(Function = "hhbc_cmp_gte"))]
    CmpGte,
    #[strum(props(Function = "hhbc_cmp_lt"))]
    CmpLt,
    #[strum(props(Function = "hhbc_cmp_lte"))]
    CmpLte,
    #[strum(props(Function = "hhbc_cmp_nsame"))]
    CmpNSame,
    #[strum(props(Function = "hhbc_cmp_neq"))]
    CmpNeq,
    #[strum(props(Function = "hhbc_cmp_same"))]
    CmpSame,
    #[strum(props(Function = "hhbc_exit"))]
    Exit,
    #[strum(props(Function = "hhbc_is_type_int"))]
    IsTypeInt,
    #[strum(props(Function = "hhbc_is_type_null"))]
    IsTypeNull,
    #[strum(props(Function = "hhbc_is_type_str"))]
    IsTypeStr,
    #[strum(props(Function = "hhbc_modulo"))]
    Modulo,
    #[strum(props(Function = "hhbc_new_obj"))]
    NewObj,
    #[strum(props(Function = "hhbc_new_vec"))]
    NewVec,
    #[strum(props(Function = "hhbc_not"))]
    Not,
    #[strum(props(Function = "hhbc_print"))]
    Print,
    #[strum(props(Function = "hhbc_sub"))]
    Sub,
    #[strum(props(Function = "hhbc_verify_failed"))]
    VerifyFailed,
}
// Need Default for EnumIter on Builtin
impl std::default::Default for Hhbc {
fn default() -> Self {
Hhbc::Add
}
}
/// Non-HHBC runtime builtins (plus the [`Hhbc`] handlers via the `Hhbc`
/// variant). The `Function` strum property holds the emitted function name.
#[derive(EnumIter, EnumProperty)]
pub(crate) enum Builtin {
    /// Allocate an array with the given number of words (a word is a
    /// pointer-sized value).
    /// AllocWords(int) -> *void
    #[strum(props(Function = "alloc_words"))]
    AllocWords,
    /// Throws a BadMethodCall exception.
    /// BadMethodCall() -> noreturn
    #[strum(props(Function = "hack_bad_method_call"))]
    BadMethodCall,
    /// Throws a BadProperty exception.
    /// BadProperty() -> noreturn
    #[strum(props(Function = "hack_bad_property"))]
    BadProperty,
    /// Turns a raw boolean into a Mixed.
    /// Bool(n: bool) -> *Mixed
    #[strum(props(Function = "hack_bool"))]
    Bool,
    /// Returns the Class identifier for the given class.
    #[strum(props(Function = "hack_get_class"))]
    GetClass,
    /// Returns the Class identifier for the given class's static class.
    #[strum(props(Function = "hack_get_static_class"))]
    GetStaticClass,
    /// Hhbc handlers. See hphp/doc/bytecode.specification for docs.
    Hhbc(Hhbc),
    /// Turns a raw int into a Mixed.
    /// Int(n: int) -> *Mixed
    #[strum(props(Function = "hack_int"))]
    Int,
    /// Returns true if the given Mixed is truthy.
    /// IsTrue(p: *Mixed) -> bool
    #[strum(props(Function = "hack_is_true"))]
    IsTrue,
    /// Returns a Mixed containing a `null`.
    /// Null() -> *Mixed
    #[strum(props(Function = "hack_null"))]
    Null,
    /// Returns true if the given raw pointer is null.
    /// RawPtrIsNull(*void) -> bool
    #[strum(props(Function = "raw_ptr_is_null"))]
    RawPtrIsNull,
    /// Turns a raw string into a Mixed.
    /// String(s: *string) -> *Mixed
    #[strum(props(Function = "hack_string"))]
    String,
    /// Used to check param count on function entry.
    /// VerifyParamCount(params, min, max)
    #[strum(props(Function = "verify_param_count"))]
    VerifyParamCount,
}
impl fmt::Display for Builtin {
    /// Writes the mangled builtin name (its `Function` strum property,
    /// mangled against the `$builtins` class).
    fn fmt(&self, w: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Dummy string table - fine because builtins are never based on the
        // UnitBytesId.
        static DUMMY: OnceCell<ir::StringInterner> = OnceCell::new();
        let strings = DUMMY.get_or_init(Default::default);
        // Hhbc carries its own props; everything else reads its own.
        let name = if let Builtin::Hhbc(hhbc) = self {
            hhbc.get_str("Function").unwrap()
        } else {
            self.get_str("Function").unwrap()
        };
        let method = ir::MethodName::new(ffi::Str::new(name.as_bytes()));
        w.write_str(&method.mangle(&BUILTINS_CLASS, strings))
    }
}
/// Emits a call instruction to the mangled name of `target`.
pub(crate) fn call_builtin(
    w: &mut textual::FuncWriter<'_>,
    target: Builtin,
    params: impl textual::VarArgs,
) -> Result<Sid> {
    let name = target.to_string();
    w.call(&name, params)
}
/// Builds a call expression to the mangled name of `target`.
pub(crate) fn expr_builtin(target: Builtin, params: impl textual::VarArgs) -> textual::Expr {
    let name = target.to_string();
    textual::Expr::call(name, params)
}
|
//! The `window` module defines data structure for storing the tail of the ledger.
//!
use crate::packet::SharedBlob;
use solana_sdk::pubkey::Pubkey;
use std::cmp;
use std::sync::{Arc, RwLock};
#[derive(Default, Clone)]
pub struct WindowSlot {
    // Data blob occupying this slot, if any.
    pub data: Option<SharedBlob>,
    // Coding blob paired with this slot, if any.
    pub coding: Option<SharedBlob>,
    // Set when the blob's leader was unknown at insertion time —
    // NOTE(review): written by callers outside this file; confirm semantics.
    pub leader_unknown: bool,
}
impl WindowSlot {
fn blob_index(&self) -> Option<u64> {
match self.data {
Some(ref blob) => Some(blob.read().unwrap().index()),
None => None,
}
}
fn clear_data(&mut self) {
self.data.take();
}
}
// A window is a vector of slots used as a ring buffer keyed by blob index
// modulo the window length (see `WindowUtil for Window` below).
type Window = Vec<WindowSlot>;
/// Thread-shared, lock-protected window handle.
pub type SharedWindow = Arc<RwLock<Window>>;
/// Pair of cursors into the window; callers outside this file track the
/// data and coding positions separately.
#[derive(Debug)]
pub struct WindowIndex {
    pub data: u64,
    pub coding: u64,
}
pub trait WindowUtil {
    /// Finds available slots, clears them, and returns their indices.
    fn clear_slots(&mut self, consumed: u64, received: u64) -> Vec<u64>;
    /// Number of slots in the window.
    fn window_size(&self) -> u64;
    /// Renders a two-line ASCII summary of slot occupancy for debugging.
    fn print(&self, id: &Pubkey, consumed: u64) -> String;
    /// Returns true when blob index `pix` fits inside the current window;
    /// also bumps `*received` up to `pix` whenever `pix` is not stale.
    fn blob_idx_in_window(&self, id: &Pubkey, pix: u64, consumed: u64, received: &mut u64) -> bool;
}
impl WindowUtil for Window {
    /// Clears every slot in `consumed..received` whose stored blob index
    /// does not match the expected index, returning the cleared indices.
    fn clear_slots(&mut self, consumed: u64, received: u64) -> Vec<u64> {
        (consumed..received)
            .filter_map(|pix| {
                // The window is a ring buffer keyed by index mod window size.
                let i = (pix % self.window_size()) as usize;
                if let Some(blob_idx) = self[i].blob_index() {
                    if blob_idx == pix {
                        // Slot already holds the expected blob; keep it.
                        return None;
                    }
                }
                self[i].clear_data();
                Some(pix)
            })
            .collect()
    }
    fn blob_idx_in_window(&self, id: &Pubkey, pix: u64, consumed: u64, received: &mut u64) -> bool {
        // Prevent receive window from running over
        // Got a blob which has already been consumed, skip it
        // probably from a repair window request
        if pix < consumed {
            trace!(
                "{}: received: {} but older than consumed: {} skipping..",
                id,
                pix,
                consumed
            );
            false
        } else {
            // received always has to be updated even if we don't accept the packet into
            // the window. The worst case here is the server *starts* outside
            // the window, none of the packets it receives fits in the window
            // and repair requests (which are based on received) are never generated
            *received = cmp::max(pix, *received);
            if pix >= consumed + self.window_size() {
                trace!(
                    "{}: received: {} will overrun window: {} skipping..",
                    id,
                    pix,
                    consumed + self.window_size()
                );
                false
            } else {
                true
            }
        }
    }
    fn window_size(&self) -> u64 {
        self.len() as u64
    }
    /// Two lines: a "V" pointer marking the consumed slot, then one char per
    /// slot — "O" empty, "D" data+coding, "d" data only, "c" coding only.
    fn print(&self, id: &Pubkey, consumed: u64) -> String {
        let pointer: Vec<_> = self
            .iter()
            .enumerate()
            .map(|(i, _v)| {
                if i == (consumed % self.window_size()) as usize {
                    "V"
                } else {
                    " "
                }
            })
            .collect();
        let buf: Vec<_> = self
            .iter()
            .map(|v| {
                if v.data.is_none() && v.coding.is_none() {
                    "O"
                } else if v.data.is_some() && v.coding.is_some() {
                    "D"
                } else if v.data.is_some() {
                    // coding.is_none()
                    "d"
                } else {
                    // data.is_none()
                    "c"
                }
            })
            .collect();
        format!(
            "\n{}: WINDOW ({}): {}\n{}: WINDOW ({}): {}",
            id,
            consumed,
            pointer.join(""),
            id,
            consumed,
            buf.join("")
        )
    }
}
/// Calculate the highest blob index that this node should have already
/// received via avalanche. The avalanche splits the data stream across
/// nodes and each node retransmits to its peers, so a blob with an index
/// below the current received index may still be in flight from a peer.
fn calculate_max_repair(
    num_peers: u64,
    consumed: u64,
    received: u64,
    times: usize,
    is_next_leader: bool,
    window_size: u64,
) -> u64 {
    // With high repair backoff, or when we are about to lead, don't wait
    // for avalanche delivery — repair all the way up to `received`.
    let max_repair = if times >= 8 || is_next_leader {
        received.max(consumed)
    } else {
        received.saturating_sub(num_peers).max(consumed)
    };
    // Clamp so we never repair a blob that would roll the window over;
    // even if the highest lost blob really is missing, asking for it could
    // push the window past other missing blobs.
    max_repair.min(consumed + window_size - 1)
}
/// Allocates a window of `window_size` empty slots.
pub fn new_window(window_size: usize) -> Window {
    std::iter::repeat_with(WindowSlot::default)
        .take(window_size)
        .collect()
}
pub fn default_window() -> Window {
(0..2048).map(|_| WindowSlot::default()).collect()
}
#[cfg(test)]
mod test {
    use crate::packet::{Blob, Packet, Packets, SharedBlob, PACKET_DATA_SIZE};
    use crate::streamer::{receiver, responder, PacketReceiver};
    use crate::window::{calculate_max_repair, new_window, Window, WindowUtil};
    use solana_sdk::pubkey::Pubkey;
    use std::io;
    use std::io::Write;
    use std::net::UdpSocket;
    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::mpsc::channel;
    use std::sync::Arc;
    use std::time::Duration;
    // Drains `r` for up to 5 one-second timeouts, counting packets into
    // `num`; stops early once 10 packets have arrived.
    fn get_msgs(r: PacketReceiver, num: &mut usize) {
        for _t in 0..5 {
            let timer = Duration::new(1, 0);
            match r.recv_timeout(timer) {
                Ok(m) => *num += m.read().unwrap().packets.len(),
                // Timeouts/disconnects are logged and retried, not fatal.
                e => info!("error {:?}", e),
            }
            if *num == 10 {
                break;
            }
        }
    }
    // Smoke test: the packet types implement Debug without panicking.
    #[test]
    pub fn streamer_debug() {
        write!(io::sink(), "{:?}", Packet::default()).unwrap();
        write!(io::sink(), "{:?}", Packets::default()).unwrap();
        write!(io::sink(), "{:?}", Blob::default()).unwrap();
    }
    // End-to-end loopback: a responder sends 10 blobs over UDP and the
    // receiver thread must deliver all 10 packets.
    #[test]
    pub fn streamer_send_test() {
        let read = UdpSocket::bind("127.0.0.1:0").expect("bind");
        read.set_read_timeout(Some(Duration::new(1, 0))).unwrap();
        let addr = read.local_addr().unwrap();
        let send = UdpSocket::bind("127.0.0.1:0").expect("bind");
        let exit = Arc::new(AtomicBool::new(false));
        let (s_reader, r_reader) = channel();
        let t_receiver = receiver(Arc::new(read), &exit, s_reader, "window-streamer-test");
        let t_responder = {
            let (s_responder, r_responder) = channel();
            let t_responder = responder("streamer_send_test", Arc::new(send), r_responder);
            let mut msgs = Vec::new();
            for i in 0..10 {
                let b = SharedBlob::default();
                {
                    let mut w = b.write().unwrap();
                    w.data[0] = i as u8;
                    w.meta.size = PACKET_DATA_SIZE;
                    w.meta.set_addr(&addr);
                }
                msgs.push(b);
            }
            s_responder.send(msgs).expect("send");
            t_responder
        };
        let mut num = 0;
        get_msgs(r_reader, &mut num);
        assert_eq!(num, 10);
        exit.store(true, Ordering::Relaxed);
        t_receiver.join().expect("join");
        t_responder.join().expect("join");
    }
    #[test]
    pub fn test_calculate_max_repair() {
        const WINDOW_SIZE: u64 = 200;
        assert_eq!(calculate_max_repair(0, 10, 90, 0, false, WINDOW_SIZE), 90);
        assert_eq!(calculate_max_repair(15, 10, 90, 32, false, WINDOW_SIZE), 90);
        assert_eq!(calculate_max_repair(15, 10, 90, 0, false, WINDOW_SIZE), 75);
        assert_eq!(calculate_max_repair(90, 10, 90, 0, false, WINDOW_SIZE), 10);
        assert_eq!(calculate_max_repair(90, 10, 50, 0, false, WINDOW_SIZE), 10);
        assert_eq!(calculate_max_repair(90, 10, 99, 0, false, WINDOW_SIZE), 10);
        assert_eq!(calculate_max_repair(90, 10, 101, 0, false, WINDOW_SIZE), 11);
        assert_eq!(
            calculate_max_repair(90, 10, 95 + WINDOW_SIZE, 0, false, WINDOW_SIZE),
            WINDOW_SIZE + 5
        );
        assert_eq!(
            calculate_max_repair(90, 10, 99 + WINDOW_SIZE, 0, false, WINDOW_SIZE),
            WINDOW_SIZE + 9
        );
        // Beyond this point the result is clamped to the window edge.
        assert_eq!(
            calculate_max_repair(90, 10, 100 + WINDOW_SIZE, 0, false, WINDOW_SIZE),
            WINDOW_SIZE + 9
        );
        assert_eq!(
            calculate_max_repair(90, 10, 120 + WINDOW_SIZE, 0, false, WINDOW_SIZE),
            WINDOW_SIZE + 9
        );
        assert_eq!(
            calculate_max_repair(50, 100, 50 + WINDOW_SIZE, 0, false, WINDOW_SIZE),
            WINDOW_SIZE
        );
        assert_eq!(
            calculate_max_repair(50, 100, 50 + WINDOW_SIZE, 0, true, WINDOW_SIZE),
            50 + WINDOW_SIZE
        );
    }
    // Helper exposing blob_idx_in_window's in/out `received` parameter as a
    // plain return value for easier assertions.
    fn wrap_blob_idx_in_window(
        window: &Window,
        id: &Pubkey,
        pix: u64,
        consumed: u64,
        received: u64,
    ) -> (bool, u64) {
        let mut received = received;
        let is_in_window = window.blob_idx_in_window(&id, pix, consumed, &mut received);
        (is_in_window, received)
    }
    #[test]
    pub fn test_blob_idx_in_window() {
        let id = Pubkey::default();
        const WINDOW_SIZE: u64 = 200;
        let window = new_window(WINDOW_SIZE as usize);
        // Overruns still bump `received`, but are rejected.
        assert_eq!(
            wrap_blob_idx_in_window(&window, &id, 90 + WINDOW_SIZE, 90, 100),
            (false, 90 + WINDOW_SIZE)
        );
        assert_eq!(
            wrap_blob_idx_in_window(&window, &id, 91 + WINDOW_SIZE, 90, 100),
            (false, 91 + WINDOW_SIZE)
        );
        // Already-consumed index: rejected and `received` untouched.
        assert_eq!(
            wrap_blob_idx_in_window(&window, &id, 89, 90, 100),
            (false, 100)
        );
        assert_eq!(
            wrap_blob_idx_in_window(&window, &id, 91, 90, 100),
            (true, 100)
        );
        assert_eq!(
            wrap_blob_idx_in_window(&window, &id, 101, 90, 100),
            (true, 101)
        );
    }
}
|
use chrono::{Duration, NaiveDateTime};
use serde::{Serialize, Serializer};
pub(crate) mod duration_tuple_secs {
    use super::DurationWrapper;
    use chrono::Duration;
    use serde::{Serialize, Serializer};
    /// Serializes an optional pair of durations as a tuple of whole-second
    /// counts; `None` becomes a serde "none" value.
    pub fn serialize<S>(dur: &Option<(Duration, Duration)>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if let Some((a, b)) = dur {
            serializer.serialize_some(&(DurationWrapper(a), DurationWrapper(b)))
        } else {
            serializer.serialize_none()
        }
    }
}
// Borrow wrapper so a `Duration` can be serialized with the whole-seconds
// representation from `duration_secs` via the `Serialize` trait.
pub(crate) struct DurationWrapper<'a>(&'a Duration);
impl<'a> Serialize for DurationWrapper<'a> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Delegate to the shared seconds-based serializer.
        duration_secs::serialize(self.0, serializer)
    }
}
pub(crate) mod duration_secs {
    use chrono::Duration;
    use serde::{Serialize, Serializer};
    /// Serializes a `Duration` as its whole number of seconds
    /// (sub-second precision is discarded).
    pub fn serialize<S>(dur: &Duration, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_i64(dur.num_seconds())
    }
}
pub(crate) mod duration_option_secs {
    use super::DurationWrapper;
    use chrono::Duration;
    use serde::{Serialize, Serializer};
    /// Serializes `Some(duration)` as a whole-second count and `None`
    /// as a serde "none" value.
    pub fn serialize<S>(dur: &Option<Duration>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if let Some(dur) = dur {
            serializer.serialize_some(&DurationWrapper(dur))
        } else {
            serializer.serialize_none()
        }
    }
}
// Borrow wrapper so a `NaiveDateTime` can be serialized with the
// epoch-seconds representation from `time_secs` via the `Serialize` trait.
pub(crate) struct NaiveDateTimeWrapper<'a>(&'a NaiveDateTime);
impl<'a> Serialize for NaiveDateTimeWrapper<'a> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Delegate to the shared seconds-based serializer.
        time_secs::serialize(self.0, serializer)
    }
}
pub(crate) mod time_secs {
    use chrono::NaiveDateTime;
    use serde::{Serialize, Serializer};
    /// Serializes a timestamp as whole seconds since the Unix epoch
    /// (chrono's `timestamp()`).
    pub fn serialize<S>(time: &NaiveDateTime, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_i64(time.timestamp())
    }
}
pub(crate) mod time_option_secs {
    use super::NaiveDateTimeWrapper;
    use chrono::NaiveDateTime;
    use serde::{Serialize, Serializer};
    /// Serializes `Some(time)` as epoch seconds and `None` as a serde
    /// "none" value.
    pub fn serialize<S>(time: &Option<NaiveDateTime>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if let Some(time) = time {
            serializer.serialize_some(&NaiveDateTimeWrapper(time))
        } else {
            serializer.serialize_none()
        }
    }
}
|
#![no_std]
use volatile_cell::VolatileCell;
// Known to apply to:
// STM32L4x6
// Reset and clock control (RCC) register block. Offsets, bit positions and
// access modes below mirror the STM32L4x6 reference manual layout.
ioregs!(RCC = {
    // Clock control register.
    0x00 => reg32 cr {
        0 => msi_on : rw {
            0 => Off,
            1 => On,
        },
        1 => msi_rdy : ro {
            0 => NotReady,
            1 => Ready,
        },
        2 => msi_pll_en : rw {
            0 => Off,
            1 => On,
        },
        3 => msi_rgsel : rw,
        4..7 => msi_range : rw,
        8 => hsi_on : rw {
            0 => Off,
            1 => On,
        },
        9 => hsi_ker_on : rw {
            0 => Off,
            1 => On
        },
        10 => hsi_rdy : ro {
            0 => NotReady,
            1 => Ready,
        },
        11 => hsi_asfs : rw {
            0 => Disable,
            1 => Enable,
        },
        16 => hse_on : rw {
            0 => Off,
            1 => On,
        },
        17 => hse_rdy : ro {
            0 => NotReady,
            1 => Ready,
        },
        18 => hse_byp : rw {
            0 => Off,
            1 => On,
        },
        // NOTE(review): css_on carries no access annotation, unlike its
        // siblings; CSSON is software-set / hardware-cleared in the manual —
        // confirm the macro's default access mode is the intended one.
        19 => css_on {
            0 => Off,
            1 => On,
        },
        24 => pll_on : rw {
            0 => Off,
            1 => On,
        },
        25 => pll_rdy : ro {
            0 => Unlocked,
            1 => Locked,
        },
        26 => pll_sai1_on : rw {
            0 => Off,
            1 => On,
        },
        27 => pll_sai1_rdy : ro {
            0 => Unlocked,
            1 => Locked,
        },
        28 => pll_sai2_on : rw {
            0 => Off,
            1 => On,
        },
        29 => pll_sai2_rdy : ro {
            0 => Unlocked,
            1 => Locked,
        },
    },
    // Internal clock sources calibration register.
    0x04 => reg32 icscr {
        0..7 => msi_cal : ro,
        8..15 => msi_trim : rw,
        16..23 => hsi_cal : ro,
        24..28 => hsi_trim : rw,
    },
    // Clock configuration register: system clock switch, bus prescalers,
    // microcontroller clock output selection.
    0x08 => reg32 cfgr {
        0..1 => sw : rw {
            0 => MSI,
            1 => HSI16,
            2 => HSE,
            3 => PLL,
        },
        2..3 => sws : ro {
            0 => MSI,
            1 => HSI16,
            2 => HSE,
            3 => PLL,
        },
        4..7 => hpre : rw {
            0x0 => Div1,
            0x8 => Div2,
            0x9 => Div4,
            0xA => Div8,
            0xB => Div16,
            0xC => Div64,
            0xD => Div128,
            0xE => Div256,
            0xF => Div512,
        },
        8..10 => ppre1 : rw {
            0 => Div1,
            4 => Div2,
            5 => Div4,
            6 => Div8,
            7 => Div16,
        },
        11..13 => ppre2 : rw {
            0 => Div1,
            4 => Div2,
            5 => Div4,
            6 => Div8,
            7 => Div16,
        },
        15 => stop_wu_ck : rw {
            0 => MSI,
            1 => HSI16,
        },
        24..26 => mcosel : rw {
            0 => Disable,
            1 => SysClk,
            2 => MSI,
            3 => HSI16,
            4 => HSE,
            5 => MainPLL,
            6 => LSI,
            7 => LSE,
        },
        28..30 => mcopre : rw {
            0 => Div1,
            1 => Div2,
            2 => Div4,
            3 => Div8,
            4 => Div16,
        },
    },
    // Main PLL configuration register.
    0x0c => reg32 pll_cfgr {
        0..1 => pll_src : rw {
            0 => Disable,
            1 => MSI,
            2 => HSI16,
            3 => HSE,
        },
        4..6 => pll_m : rw {
            0 => Div1,
            1 => Div2,
            2 => Div3,
            3 => Div4,
            4 => Div5,
            5 => Div6,
            6 => Div7,
            7 => Div8,
        },
        8..14 => pll_n : rw,
        16 => pll_p_en : rw {
            0 => Disable,
            1 => Enable,
        },
        17 => pll_p : rw {
            0 => Div7,
            1 => Div17,
        },
        20 => pll_q_en : rw {
            0 => Disable,
            1 => Enable,
        },
        21..22 => pll_q : rw {
            0 => Div2,
            1 => Div4,
            2 => Div6,
            3 => Div8,
        },
        24 => pll_r_en : rw {
            0 => Disable,
            1 => Enable,
        },
        25..26 => pll_r : rw {
            0 => Div2,
            1 => Div4,
            2 => Div6,
            3 => Div8,
        },
    },
    // PLLSAI1 configuration register.
    0x10 => reg32 pll_sai1_cfgr {
        8..14 => pll_sai1_n : rw,
        16 => pll_sai1_p_en : rw {
            0 => Disable,
            1 => Enable,
        },
        17 => pll_sai1_p : rw {
            0 => Div7,
            1 => Div17,
        },
        20 => pll_sai1_q_en : rw {
            0 => Disable,
            1 => Enable,
        },
        21..22 => pll_sai1_q : rw {
            0 => Div2,
            1 => Div4,
            2 => Div6,
            3 => Div8,
        },
        24 => pll_sai1_r_en : rw {
            0 => Disable,
            1 => Enable,
        },
        25..26 => pll_sai1_r : rw {
            0 => Div2,
            1 => Div4,
            2 => Div6,
            3 => Div8,
        },
    },
    // PLLSAI2 configuration register (no Q output on this PLL).
    0x14 => reg32 pll_sai2_cfgr {
        8..14 => pll_sai2_n : rw,
        16 => pll_sai2_p_en : rw {
            0 => Disable,
            1 => Enable,
        },
        17 => pll_sai2_p : rw {
            0 => Div7,
            1 => Div17,
        },
        24 => pll_sai2_r_en : rw {
            0 => Disable,
            1 => Enable,
        },
        25..26 => pll_sai2_r : rw {
            0 => Div2,
            1 => Div4,
            2 => Div6,
            3 => Div8,
        },
    },
    // Clock interrupt enable register.
    0x18 => reg32 cier {
        0 => lsi_rdyie : rw,
        1 => lse_rdyie : rw,
        2 => msi_rdyie : rw,
        3 => hsi_rdyie : rw,
        4 => hse_rdyie : rw,
        5 => pll_rdyie : rw,
        6 => pll_sai1_rdyie : rw,
        7 => pll_sai2_rdyie : rw,
        9 => lse_cssie : rw,
    },
    // Clock interrupt flag register (read-only; clear via cicr).
    0x1c => reg32 cifr {
        0 => lsi_rdyf : ro,
        1 => lse_rdyf : ro,
        2 => msi_rdyf : ro,
        3 => hsi_rdyf : ro,
        4 => hse_rdyf : ro,
        5 => pll_rdyf : ro,
        6 => pll_sai1_rdyf : ro,
        7 => pll_sai2_rdyf : ro,
        8 => cssf : ro,
        9 => lse_cssf : ro,
    },
    // Clock interrupt clear register (write-only).
    0x20 => reg32 cicr {
        0 => lsi_rdyc : wo,
        1 => lse_rdyc : wo,
        2 => msi_rdyc : wo,
        3 => hsi_rdyc : wo,
        4 => hse_rdyc : wo,
        5 => pll_rdyc : wo,
        6 => pll_sai1_rdyc : wo,
        7 => pll_sai2_rdyc : wo,
        8 => cssc : wo,
        9 => lse_cssc : wo,
    },
    // Peripheral reset registers.
    0x28 => reg32 ahb1_rstr {
        0 => dma1_rst : rw,
        1 => dma2_rst : rw,
        8 => flash_rst : rw,
        12 => crc_rst : rw,
        16 => tsc_rst : rw,
    },
    0x2c => reg32 ahb2_rstr {
        0 => gpioa_rst : rw,
        1 => gpiob_rst : rw,
        2 => gpioc_rst : rw,
        3 => gpiod_rst : rw,
        4 => gpioe_rst : rw,
        5 => gpiof_rst : rw,
        6 => gpiog_rst : rw,
        7 => gpioh_rst : rw,
        12 => otgfs_rst : rw,
        13 => adc_rst : rw,
        16 => aes_rst : rw,
        18 => rng_rst : rw,
    },
    0x30 => reg32 ahb3_rstr {
        0 => fmc_rst : rw,
        8 => qspi_rst : rw,
    },
    0x38 => reg32 apb1_rstr1 {
        0 => tim2_rst : rw,
        1 => tim3_rst : rw,
        2 => tim4_rst : rw,
        3 => tim5_rst : rw,
        4 => tim6_rst : rw,
        5 => tim7_rst : rw,
        9 => lcd_rst : rw,
        14 => spi2_rst : rw,
        15 => spi3_rst : rw,
        17 => usart2_rst : rw,
        18 => usart3_rst : rw,
        19 => uart4_rst : rw,
        20 => uart5_rst : rw,
        21 => i2c1_rst : rw,
        22 => i2c2_rst : rw,
        23 => i2c3_rst : rw,
        25 => can1_rst : rw,
        28 => pwr_rst : rw,
        29 => dac1_rst : rw,
        30 => opamp_rst : rw,
        31 => lptim1_rst : rw,
    },
    0x3c => reg32 apb1_rstr2 {
        0 => lpuart1_rst : rw,
        2 => swpmi_rst : rw,
        5 => lptim2_rst : rw,
    },
    0x40 => reg32 apb2_rstr {
        0 => syscfg_rst : rw,
        10 => sdmmc1_rst : rw,
        11 => tim1_rst : rw,
        12 => spi1_rst : rw,
        13 => tim8_rst : rw,
        14 => usart1_rst : rw,
        16 => tim15_rst : rw,
        17 => tim16_rst : rw,
        18 => tim17_rst : rw,
        21 => sai1_rst : rw,
        22 => sai2_rst : rw,
        24 => dfsdm_rst : rw,
    },
    // Peripheral clock enable registers.
    0x48 => reg32 ahb1enr {
        0 => dma1_en : rw,
        1 => dma2_en : rw,
        8 => flash_en : rw,
        12 => crc_en : rw,
        16 => tsc_en : rw,
    },
    0x4c => reg32 ahb2enr {
        0 => gpioa_en : rw,
        1 => gpiob_en : rw,
        2 => gpioc_en : rw,
        3 => gpiod_en : rw,
        4 => gpioe_en : rw,
        5 => gpiof_en : rw,
        6 => gpiog_en : rw,
        7 => gpioh_en : rw,
        12 => otgfs_en : rw,
        13 => adc_en : rw,
        16 => aes_en : rw,
        18 => rng_en : rw,
    },
    0x50 => reg32 ahb3enr {
        0 => fmc_en : rw,
        8 => qspi_en : rw,
    },
    0x58 => reg32 apb1enr1 {
        0 => tim2_en : rw,
        1 => tim3_en : rw,
        2 => tim4_en : rw,
        3 => tim5_en : rw,
        4 => tim6_en : rw,
        5 => tim7_en : rw,
        9 => lcd_en : rw,
        11 => wwdg_en : rw,
        14 => spi2_en : rw,
        15 => spi3_en : rw,
        17 => usart2_en : rw,
        18 => usart3_en : rw,
        19 => uart4_en : rw,
        20 => uart5_en : rw,
        21 => i2c1_en : rw,
        22 => i2c2_en : rw,
        23 => i2c3_en : rw,
        25 => can1_en : rw,
        28 => pwr_en : rw,
        29 => dac1_en : rw,
        30 => opamp_en : rw,
        31 => lptim1_en : rw,
    },
    0x5c => reg32 apb1enr2 {
        0 => lpuart1_en : rw,
        2 => swpmi_en : rw,
        5 => lptim2_en : rw,
    },
    0x60 => reg32 apb2enr {
        0 => syscfg_en : rw,
        7 => fw_en : rw,
        10 => sdmmc1_en : rw,
        11 => tim1_en : rw,
        12 => spi1_en : rw,
        13 => tim8_en : rw,
        14 => usart1_en : rw,
        16 => tim15_en : rw,
        17 => tim16_en : rw,
        18 => tim17_en : rw,
        21 => sai1_en : rw,
        22 => sai2_en : rw,
        24 => dfsdm_en : rw,
    },
    // Peripheral clocks-enable-in-sleep/stop-mode registers.
    0x68 => reg32 ahb1smenr {
        0 => dma1_smen : rw,
        1 => dma2_smen : rw,
        8 => flash_smen : rw,
        9 => sram1_smen : rw,
        12 => crc_smen : rw,
        16 => tsc_smen : rw,
    },
    // NOTE(review): "ahm2smenr" looks like a typo for "ahb2smenr"; renaming
    // would change the generated accessor API, so it is only flagged here.
    0x6c => reg32 ahm2smenr {
        0 => gpioa_smen : rw,
        1 => gpiob_smen : rw,
        2 => gpioc_smen : rw,
        3 => gpiod_smen : rw,
        4 => gpioe_smen : rw,
        5 => gpiof_smen : rw,
        6 => gpiog_smen : rw,
        7 => gpioh_smen : rw,
        9 => sram2_smen : rw,
        12 => otgfs_smen : rw,
        13 => adc_smen : rw,
        16 => aes_smen : rw,
        18 => rng_smen : rw,
    },
    // NOTE(review): likewise "ahm3smenr" vs "ahb3smenr"; also these two
    // fields omit the ": rw" access mode their siblings carry — confirm the
    // macro default matches.
    0x70 => reg32 ahm3smenr {
        0 => fmc_smen,
        8 => qspi_smen,
    },
    0x78 => reg32 apb1smenr1 {
        0 => tim2_smen : rw,
        1 => tim3_smen : rw,
        2 => tim4_smen : rw,
        3 => tim5_smen : rw,
        4 => tim6_smen : rw,
        5 => tim7_smen : rw,
        9 => lcd_smen : rw,
        11 => wwdg_smen : rw,
        14 => spi2_smen : rw,
        15 => spi3_smen : rw,
        17 => usart2_smen : rw,
        18 => usart3_smen : rw,
        19 => uart4_smen : rw,
        20 => uart5_smen : rw,
        21 => i2c1_smen : rw,
        22 => i2c2_smen : rw,
        23 => i2c3_smen : rw,
        25 => can1_smen : rw,
        28 => pwr_smen : rw,
        29 => dac1_smen : rw,
        30 => opamp_smen : rw,
        31 => lptim1_smen : rw,
    },
    0x7c => reg32 apb1smenr2 {
        0 => lpuart1_smen : rw,
        2 => swpmi_smen : rw,
        5 => lptim2_smen : rw,
    },
    0x80 => reg32 apb2smenr {
        0 => syscfg_smen : rw,
        10 => sdmmc1_smen : rw,
        11 => tim1_smen : rw,
        12 => spi1_smen : rw,
        13 => tim8_smen : rw,
        14 => usart1_smen : rw,
        16 => tim15_smen : rw,
        17 => tim16_smen : rw,
        18 => tim17_smen : rw,
        21 => sai1_smen : rw,
        22 => sai2_smen : rw,
        24 => dfsdm_smen : rw,
    },
    // Peripherals independent clock configuration register.
    0x88 => reg32 ccipr {
        // TODO: lots of enums
        0..1 => usart1_sel : rw,
        2..3 => usart2_sel : rw,
        4..5 => usart3_sel : rw,
        6..7 => uart4_sel : rw,
        8..9 => uart5_sel : rw,
        10..11 => lpuart1_sel : rw,
        12..13 => i2c1_sel : rw,
        14..15 => i2c2_sel : rw,
        16..17 => i2c3_sel : rw,
        18..19 => lptim1_sel : rw,
        20..21 => lptim2_sel : rw,
        22..23 => sai1_sel : rw,
        24..25 => sai2_sel : rw,
        26..27 => clk48_sel : rw,
        28..29 => adc_sel : rw,
        30 => swpmi_sel : rw,
        31 => dfsdm_sel : rw,
    },
    // Backup domain control register (LSE oscillator, RTC clocking).
    0x90 => reg32 bdcr {
        0 => lse_on : rw {
            0 => Off,
            1 => On,
        },
        1 => lse_rdy : ro {
            0 => NotReady,
            1 => Ready,
        },
        2 => lse_byp : rw {
            0 => NotBypassed,
            1 => Bypassed,
        },
        3..4 => lse_drv : rw {
            0 => Lower,
            1 => MediumLow,
            2 => MediumHigh,
            3 => Higher,
        },
        5 => lse_css_on : rw {
            0 => Off,
            1 => On,
        },
        6 => lse_css_d : ro {
            0 => OK,
            1 => Failure,
        },
        8..9 => rtc_sel : rw {
            0 => NoClock,
            1 => LSE,
            2 => LSI,
            3 => HSE,
        },
        15 => rtc_en : rw {
            0 => Disable,
            1 => Enable,
        },
        16 => bd_rst : rw,
        24 => lsco_en : rw {
            0 => Disable,
            1 => Enable,
        },
        25 => lsco_sel : rw {
            0 => LSI,
            1 => LSE,
        },
    },
    // Control/status register (LSI oscillator, reset-cause flags).
    0x94 => reg32 csr {
        0 => lsi_on : rw {
            0 => Off,
            1 => On,
        },
        1 => lsi_rdy : ro {
            0 => NotReady,
            1 => Ready,
        },
        8..11 => msi_srange : rw,
        23 => rmvf : rw,
        24 => fw_rstf : ro,
        25 => obl_rstf : ro,
        26 => pin_rstf : ro,
        27 => bor_rstf : ro,
        28 => sft_rstf : ro,
        29 => iwdg_rstf : ro,
        30 => wwdg_rstf : ro,
        31 => lpwr_rstf : ro,
    }
});
use std::io;
use std::io::Write;
use std::io::prelude::*;
use std::fs;
use std::fs::File;
use std::num::NonZeroU32;
use aes::Aes256;
use block_modes::{BlockMode,Cbc};
use block_modes::block_padding::Pkcs7;
use ring::error::Unspecified;
use ring::rand::SecureRandom;
use ring::{digest, pbkdf2, rand};
use data_encoding::HEXUPPER;
/**
 * Implements AES-256-CBC with PBKDF2 for key derivation and
 * provides a CLI to receive instructions. Code has not
 * been audited and may contain bugs. Use at your own
 * risk.
 */
// TODO: swap the panics for re-directions,
// fix up noted comments, and write unit tests.
/// Entry point: run encrypt/decrypt sessions in a loop until the user enters
/// any non-empty text at the continue prompt.
fn main() {
    println!("Welcome to Free Enc. \n");
    loop {
        begin_encrypt_or_decrypt();
        println!("\nThanks for using Free Enc!");
        println!("\nIf you would like to go back to encrypt or decrypt selection press enter, otherwise press any btn then enter to quit.");
        if !get_user_input().is_empty() {
            break;
        }
    }
}
/// Run one interactive session: ask whether to encrypt or decrypt, collect
/// the file path and a twice-entered password, then dispatch to
/// `encrypt_document` / `decrypt_document`.
///
/// Panics on an unknown command or mismatched passwords (the CLI treats both
/// as fatal; see the file-level TODO about replacing panics).
fn begin_encrypt_or_decrypt(){
    println!("\nWould you like to encrypt or decrypt?");
    let encrypt_or_decrypt = get_user_input();
    if encrypt_or_decrypt == "decrypt"{
        println!("\nWhat is the relative location of the file for decryption? ");
        let mut location_of_file = get_user_input();
        println!("\nLocation of File to be decrypted: {}", location_of_file);
        println!("\nPlease enter a password for decryption: ");
        let mut password = get_user_input();
        println!("\nPlease enter your password one more time to confirm: ");
        let mut password_confirmation = get_user_input();
        if !verfiy_passwords_match(&mut password, &mut password_confirmation)
        {
            panic!("Passwords for decryption do not match! Exiting program");
        }
        decrypt_document(&mut password, &mut location_of_file);
        println!("\nDecryption Successful!");
    }
    else if encrypt_or_decrypt == "encrypt" {
        println!("\nWhat is the relative location of the file for encryption?: ");
        let mut location_of_file = get_user_input();
        // User-facing typo fixes: "encrpyted"/"encrpytion" -> "encrypted"/"encryption".
        println!("\nLocation of File to be encrypted: {}", location_of_file);
        println!("\nPlease enter a password for encryption: ");
        let mut password = get_user_input();
        println!("\nPlease enter your password one more time to confirm: ");
        let mut password_confirmation = get_user_input();
        if !verfiy_passwords_match(&mut password, &mut password_confirmation)
        {
            panic!("Passwords for encryption do not match! Exiting program");
        }
        encrypt_document(&mut password, &mut location_of_file);
        println!("\nEncryption Successful!");
    }
    else {
        panic!("\nBad Command! Exiting Program");
    }
}
/// Encrypt the file at `location_of_file` in place with AES-256-CBC.
///
/// The key is derived from `pwd` via PBKDF2-HMAC-SHA256 (100 000 iterations,
/// random 32-byte salt); a random 16-byte IV drives CBC. Output layout:
/// IV ++ b"===" ++ salt ++ b"===" ++ ciphertext.
///
/// NOTE(review): the b"===" spacer is ambiguous — random IV/salt bytes can
/// themselves contain b'=' runs, which would make `decrypt_document`'s
/// spacer search split the header at the wrong offset. The IV and salt are
/// fixed-size, so fixed offsets (or length prefixes) would be safer —
/// confirm before changing the on-disk format.
fn encrypt_document(pwd: &mut String, location_of_file: &mut String){
    const SALT_LEN: usize = digest::SHA256_OUTPUT_LEN;
    const IV_LEN: usize = 16;
    let mut salt = [0u8; SALT_LEN];
    let mut iv = [0u8; IV_LEN];
    // PBKDF2 iteration count.
    let n_iter = NonZeroU32::new(100_000).unwrap();
    let rng = rand::SystemRandom::new();
    rng.fill(&mut iv).expect("Error Filling IV");
    rng.fill(&mut salt).expect("Error Filling Salt");
    // Derive the 32-byte AES key from password + salt.
    let mut pbkdf2_hash = [0u8; SALT_LEN];
    pbkdf2::derive(
        pbkdf2::PBKDF2_HMAC_SHA256,
        n_iter,
        &salt,
        pwd.as_bytes(),
        &mut pbkdf2_hash,
    );
    // print_key_info(&salt, &iv, &pbkdf2_hash);
    let data = fs::read(&location_of_file).expect("Error Reading in File to Encrypt");
    type Aes256Cbc = Cbc<Aes256, Pkcs7>;
    let cipher = Aes256Cbc::new_var(&pbkdf2_hash, &iv).unwrap();
    let mut cipertext = cipher.encrypt_vec(&data);
    // Assemble the output buffer: IV, spacer, salt, spacer, ciphertext.
    let mut data_to_write : Vec<u8> = Vec::new();
    let spacer: u8 = b'=';
    for i in 0..IV_LEN {
        data_to_write.push(iv[i]);
    }
    data_to_write.push(spacer);
    data_to_write.push(spacer);
    data_to_write.push(spacer);
    for i in 0..SALT_LEN {
        data_to_write.push(salt[i]);
    }
    data_to_write.push(spacer);
    data_to_write.push(spacer);
    data_to_write.push(spacer);
    // append cipher
    data_to_write.append(&mut cipertext);
    let mut buffer_to_write = File::create(location_of_file).expect("Error Creating New File");// create file at location of file to be encrypted.
    buffer_to_write.write_all(&data_to_write).expect("Error Writing Ciper Text");
}
/// Decrypt a file produced by `encrypt_document`, writing the plaintext back
/// over the same path.
///
/// Re-derives the AES key from `pwd` using the salt stored in the file, then
/// runs AES-256-CBC with the stored IV. Panics on a malformed header or a
/// wrong password (PKCS7 unpadding failure).
///
/// NOTE(review): the header is split on the first b"===" occurrence; if the
/// random IV/salt bytes contain b'=' runs the split lands early, and if the
/// first spacer is found past index 16 the `iv[i]` loop indexes out of
/// bounds. Confirm the format guarantees neither can happen (it currently
/// does not — see `encrypt_document`).
fn decrypt_document(pwd: &mut String, location_of_file: &mut String){
    const SALT_LEN: usize = digest::SHA256_OUTPUT_LEN;
    const IV_LEN: usize = 16;
    let mut salt = [0u8; SALT_LEN];
    let mut iv = [0u8; IV_LEN];
    let n_iter = NonZeroU32::new(100_000).unwrap();
    let mut pbkdf2_hash = [0u8; SALT_LEN];
    let mut data = fs::read(&location_of_file).expect("Error Reading in File to Decrypt");
    //this can probably be initialized better..
    let mut space = [0u8; 3];
    space[0] = b'=';
    space[1] = b'=';
    space[2] = b'=';
    // Locate the IV/salt separator ("===").
    let mut spacer_finder = data.windows(space.len()).position(|x| x == space);
    let mut inital_space: usize;
    match spacer_finder {
        Some(x) => inital_space = x,
        None => panic!("Error no spacers found when decrypted doc"),
    }
    // Bytes before the first spacer are the IV.
    for i in 0..inital_space{
        iv[i] = data[i];
    }
    let len_of_first_slice = inital_space + 3; //or spacer_finder.iter().len();
    data.drain(0..len_of_first_slice);
    // Bytes before the second spacer are the salt.
    spacer_finder = data.windows(space.len()).position(|x| x == space);
    match spacer_finder {
        Some(x) => inital_space = x,
        None => panic!("Error no spacers found when decrypted doc"),
    }
    for i in 0..inital_space{
        salt[i] = data[i];
    }
    let len_of_second_slice = inital_space + 3;
    data.drain(0..len_of_second_slice);
    // Re-derive the AES key with the recovered salt.
    pbkdf2::derive(
        pbkdf2::PBKDF2_HMAC_SHA256,
        n_iter,
        &salt,
        pwd.as_bytes(),
        &mut pbkdf2_hash,
    );
    // print_key_info(&salt, &iv, &pbkdf2_hash);
    type Aes256Cbc = Cbc<Aes256, Pkcs7>;
    let dec_cipher = Aes256Cbc::new_var(&pbkdf2_hash, &iv).unwrap();
    let decrypted_ciphertext = dec_cipher.decrypt_vec(&data).expect("Error decrpyting doc");
    let mut buffer_to_write = File::create(location_of_file).expect("Error Creating New File");
    buffer_to_write.write_all(&decrypted_ciphertext).expect("Blah");
}
/// Read one line from stdin and return it with trailing whitespace
/// (including the newline) removed. Panics if stdin cannot be read.
fn get_user_input() -> String {
    let mut line = String::new();
    io::stdin().read_line(&mut line).expect("Read Line Failure");
    line.trim_end().to_string()
}
/// True when the two password entries are identical.
/// (Misspelled name and `&mut` parameters kept — callers depend on them.)
fn verfiy_passwords_match(pwd_one: &mut String, pwd_two: &mut String) -> bool {
    pwd_one == pwd_two
}
// For debugging
fn print_key_info(salt: &[u8],iv: &[u8],pbkdf2_hash: &[u8]){
println!("Salt In Dec: {}", HEXUPPER.encode(&salt));
println!("IV In Dec: {}", HEXUPPER.encode(&iv));
println!("PDKDF2 hash In Dec: {}", HEXUPPER.encode(&pbkdf2_hash));
} |
// Copyright 2016 taskqueue developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
use std::boxed::FnBox;
use std::collections::VecDeque;
use std::io;
use std::thread;
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::mpsc::*;
use mioco::{self, Mioco, Config, Scheduler, SchedulerThread, Handler, CoroutineControl};
use mioco::mail::*;
lazy_static! {
    /// Sender half of a global mailbox feeding closures into a dedicated
    /// mioco run loop. First use spawns a background thread, builds the run
    /// loop with the FIFO scheduler, and receives the mailbox sender back
    /// over a rendezvous channel before returning.
    static ref INSTANCE: MailboxOuterEnd<Box<FnBox() + Send + 'static>> = {
        let (tx, rx) = sync_channel(0);
        thread::spawn(move || {
            Mioco::new_configured({
                let mut config = Config::new();
                config.set_scheduler(Box::new(FifoScheduler::new()));
                config.set_userdata(Userdata::RoundRobin);
                config.set_catch_panics(false);
                config
            }).start(move || {
                // Create the mailbox inside the loop, publish its sender,
                // then serve queued closures forever.
                let(tx_fn, rx_fn) = mailbox::<Box<FnBox() + Send + 'static>>();
                tx.send(tx_fn).unwrap();
                loop {
                    rx_fn.read()();
                }
            });
        });
        rx.recv().unwrap()
    };
}
/// Per-coroutine scheduling hint, read by `FifoSchedulerThread::spawned`.
pub enum Userdata
{
    // Migrate newly spawned coroutines across threads in round-robin order.
    RoundRobin,
    // Keep newly spawned coroutines on the current scheduler thread.
    SameThread,
}
/// Schedule `f` as a new coroutine on the shared run loop.
/// The closure is shipped through the global `INSTANCE` mailbox and spawned
/// from inside the event loop; its children default to round-robin placement.
pub fn new_coroutine<F: FnOnce() -> io::Result<()> + Send + 'static>(f: F)
{
    INSTANCE.send(Box::new(move || {
        mioco::set_children_userdata(Some(Userdata::RoundRobin));
        mioco::spawn(f);
    }));
}
/// Read one value from `rx`, blocking appropriately for the caller's context.
///
/// Inside a coroutine the read blocks only that coroutine. Outside, a helper
/// coroutine performs the read and hands the value back over a rendezvous
/// channel, so the calling OS thread blocks without stalling the event loop.
pub fn read_from_mailbox<T: Send + 'static>(rx: MailboxInnerEnd<T>) -> T {
    // `if` over `match` on a bool (clippy::match_bool).
    if mioco::in_coroutine() {
        rx.read()
    } else {
        let (tx, rx_result) = sync_channel(0);
        new_coroutine(move || {
            tx.send(rx.read()).unwrap();
            Ok(())
        });
        rx_result.recv().unwrap()
    }
}
/// Run `f` on a fresh single-threaded mioco loop, blocking the calling
/// thread until the loop finishes. Panics are not caught by the loop.
pub fn blocking_mioco_run_loop<F: FnOnce() -> io::Result<()> + Send + 'static>(f: F) {
    Mioco::new_configured({
        let mut config = Config::new();
        config.set_thread_num(1);
        config.set_catch_panics(false);
        config
    }).start(f);
}
/// Coroutine scheduler that spreads newly spawned coroutines across threads
/// in round-robin order and defers yielding coroutines to the next tick.
struct FifoScheduler {
    // Shared count of scheduler threads spawned so far.
    thread_num: Arc<AtomicUsize>,
}
impl FifoScheduler {
    /// Create a scheduler with no threads registered yet.
    pub fn new() -> Self {
        FifoScheduler { thread_num: Arc::new(AtomicUsize::new(0)) }
    }
}
/// Per-thread scheduler state for `FifoScheduler`.
struct FifoSchedulerThread {
    // Round-robin cursor: last thread index a coroutine was migrated to.
    thread_i: usize,
    // Shared total thread count (grows as threads are spawned).
    thread_num: Arc<AtomicUsize>,
    // Coroutines that yielded this tick, resumed at `tick`.
    delayed: VecDeque<CoroutineControl>,
}
impl Scheduler for FifoScheduler {
    /// Register one more scheduler thread and return its per-thread state.
    /// Each new thread starts its round-robin cursor at 0.
    fn spawn_thread(&self) -> Box<SchedulerThread> {
        self.thread_num.fetch_add(1, Ordering::Relaxed);
        Box::new(FifoSchedulerThread {
            thread_i: 0,
            thread_num: self.thread_num.clone(),
            delayed: VecDeque::new(),
        })
    }
}
impl FifoSchedulerThread {
    /// Advance the round-robin cursor and return the next target thread
    /// index, wrapping when it reaches the current thread count.
    fn thread_next_i(&mut self) -> usize {
        self.thread_i += 1;
        if self.thread_i >= self.thread_num() {
            self.thread_i = 0;
        }
        self.thread_i
    }
    /// Current number of scheduler threads.
    /// Relaxed load: the counter only grows and an occasionally stale value
    /// merely skews the round-robin target — presumably acceptable here.
    fn thread_num(&self) -> usize {
        self.thread_num.load(Ordering::Relaxed)
    }
}
impl SchedulerThread for FifoSchedulerThread {
    /// A coroutine was just spawned on this thread. `SameThread` coroutines
    /// resume here immediately; everything else is migrated to the next
    /// thread in round-robin order.
    fn spawned(&mut self,
    event_loop: &mut mioco::mio::EventLoop<Handler>,
    coroutine_ctrl: CoroutineControl) {
        match coroutine_ctrl.get_userdata::<Userdata>() {
            Some(&Userdata::SameThread) => {
                trace!("Using newly spawn Coroutine on current thread");
                coroutine_ctrl.resume(event_loop)
            },
            _ => {
                let thread_i = self.thread_next_i();
                trace!("Migrating newly spawn Coroutine to thread {}", thread_i);
                coroutine_ctrl.migrate(event_loop, thread_i);
            }
        }
    }
    /// A coroutine became runnable. Yielding coroutines are parked until the
    /// end of the tick so they cannot starve others; the rest resume now.
    fn ready(&mut self,
    event_loop: &mut mioco::mio::EventLoop<Handler>,
    coroutine_ctrl: CoroutineControl) {
        if coroutine_ctrl.is_yielding() {
            self.delayed.push_back(coroutine_ctrl);
        } else {
            coroutine_ctrl.resume(event_loop);
        }
    }
    /// End of tick: resume the coroutines that yielded during this tick.
    /// Only `len` entries are drained, so a coroutine that yields again while
    /// resuming is deferred to the next tick rather than looping here.
    fn tick(&mut self, event_loop: &mut mioco::mio::EventLoop<Handler>) {
        let len = self.delayed.len();
        for _ in 0..len {
            let coroutine_ctrl = self.delayed.pop_front().unwrap();
            coroutine_ctrl.resume(event_loop);
        }
    }
}
|
//! main is our command line application implementation.
// Copyright (2017) Jeremy A. Wall.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate clap;
extern crate rustyline;
#[macro_use]
mod macros;
mod tokenizer;
mod parser;
mod errors;
mod nock;
use clap::{App, Arg};
use rustyline::Editor;
use errors::WrappedError;
/// Interactive line source for the repl: prompts via rustyline and keeps
/// reading continuation lines until `is_complete` accepts the buffer.
struct PromptingLineParser {
    read_prompt: String,     // prompt for the first line of an expression
    continue_prompt: String, // prompt for continuation lines
    is_complete: fn(&Vec<String>) -> bool, // expression-completeness predicate
    editor: Editor<()>,      // rustyline editor (line editing + history)
}
impl PromptingLineParser {
    /// Build a parser from its two prompts and a completeness predicate,
    /// with a fresh rustyline editor.
    fn new(read_prompt: String,
           continue_prompt: String,
           is_complete: fn(&Vec<String>) -> bool)
           -> PromptingLineParser {
        let editor = Editor::<()>::new();
        PromptingLineParser {
            editor: editor,
            read_prompt: read_prompt,
            continue_prompt: continue_prompt,
            is_complete: is_complete,
        }
    }
}
impl tokenizer::ExpressionReader for PromptingLineParser {
    /// Read one complete expression, prompting for continuation lines until
    /// `is_complete` accepts the buffer, then record it in readline history.
    ///
    /// Readline errors (EOF / interrupt) propagate as `WrappedError`.
    fn read(&mut self) -> Result<Vec<String>, WrappedError> {
        let mut buffer = Vec::new();
        let mut prompt = &self.read_prompt;
        loop {
            let line = try!(self.editor.readline(prompt));
            buffer.push(line);
            if (self.is_complete)(&buffer) {
                break;
            }
            // All lines after the first use the continuation prompt.
            prompt = &self.continue_prompt;
        }
        self.editor.add_history_entry(&buffer.join("\n"));
        // Idiom: expression tail instead of explicit `return`.
        Ok(buffer)
    }
}
use std::fs::File;
use std::io::BufReader;
use std::io::BufRead;
/// Expression source that reads from a file, grouping lines into complete
/// expressions with the same predicate the repl uses.
struct FileExpressionReader {
    name: String,                         // path of the file to read
    buff_reader: Option<BufReader<File>>, // set by `open`
    is_complete: fn(&Vec<String>) -> bool, // expression-completeness predicate
    eof: bool,                            // true once the file is exhausted
}
impl FileExpressionReader {
    /// Build a reader for `file_name`; `open` must be called before reading.
    pub fn new<S: Into<String>>(file_name: S, is_complete: fn(&Vec<String>) -> bool) -> FileExpressionReader {
        FileExpressionReader {
            name: file_name.into(),
            buff_reader: None,
            is_complete: is_complete,
            eof: false,
        }
    }

    /// Open the backing file and set up buffered reading.
    pub fn open(&mut self) -> Result<(), WrappedError> {
        let reader = BufReader::new(try!(File::open(&self.name)));
        self.buff_reader = Some(reader);
        Ok(())
    }
}
impl tokenizer::ExpressionReader for FileExpressionReader {
    /// Read the next complete expression from the file.
    ///
    /// Returns an error once end-of-file was reached on a previous call.
    /// Fix: on a zero-byte read we now break out of the loop even when the
    /// expression is still incomplete — the original kept appending empty
    /// lines forever if the file ended with unbalanced brackets.
    fn read(&mut self) -> Result<Vec<String>, WrappedError> {
        if self.eof {
            return Err(WrappedError::new("End of File"));
        }
        let mut buffer = Vec::new();
        loop {
            let mut line = String::new();
            let rdr = self.buff_reader.as_mut().expect("read called before open");
            let num_read = try!(rdr.read_line(&mut line));
            buffer.push(line);
            if num_read < 1 {
                // End of file: remember it and stop instead of spinning on
                // zero-byte reads when the expression never completes.
                self.eof = true;
                break;
            }
            if (self.is_complete)(&buffer) {
                break;
            }
        }
        Ok(buffer)
    }
}
/// Parse command-line flags. `-f/--file FILE` evaluates a nock file instead
/// of starting the repl.
fn do_flags<'a>() -> clap::ArgMatches<'a> {
    return App::new("nock")
        .version("0.1")
        // Fix: the author string was missing its closing '>'.
        .author("Jeremy Wall <jeremy@marzhillstudios.com>")
        .about("A simple Nock interpreter and repl")
        .arg(Arg::with_name("file")
            .short("f")
            .long("file")
            .value_name("FILE")
            .help("Execute the nock file.")
            .takes_value(true))
        .get_matches();
}
/// True when the buffered lines contain balanced `[`/`]` brackets — i.e. the
/// nock expression is complete. (A surplus of `]` also yields zero overall
/// only if matched; the check is a running depth count ending at 0.)
fn is_complete_expr(lines: &Vec<String>) -> bool {
    let mut depth = 0;
    for ch in lines.iter().flat_map(|l| l.chars()) {
        match ch {
            '[' => depth += 1,
            ']' => depth -= 1,
            _ => {}
        }
    }
    depth == 0
}
/// Entry point: with `-f FILE`, evaluate expressions from the file;
/// otherwise run an interactive repl on stdin.
fn main() {
    let matches = do_flags();
    // Shared driver: parse expressions until the reader errors (EOF/Ctrl-D),
    // computing and printing each result or evaluation error.
    fn eval_exprs(mut nock_parser: parser::Parser) {
        while let Ok(expr) = nock_parser.parse() {
            match nock::compute(expr) {
                Ok(noun) => println!("{}", noun),
                Err(err) => println!("{}", err),
            }
        }
    }
    if let Some(filename) = matches.value_of("file") {
        // parse and execute file stream.
        let mut reader = FileExpressionReader::new(filename, is_complete_expr);
        reader.open().expect("Failed to open file!");
        let nock_parser = parser::Parser::new(Box::new(reader));
        eval_exprs(nock_parser);
    } else {
        // parse and execute stdin.
        println!("Welcome to the nock repl!");
        println!("Type nock expressions at the prompt.");
        println!("Ctrl-D to quit...\n");
        let reader =
            PromptingLineParser::new("nock> ".to_string(), "> ".to_string(), is_complete_expr);
        let nock_parser = parser::Parser::new(Box::new(reader));
        eval_exprs(nock_parser);
    }
}
|
use cocoa::base::id;
use sys::MTLArrayType;
use {DataType, FromRaw, StructType};
pub struct ArrayType(id);
impl ArrayType {
pub fn array_length(&self) -> usize {
unsafe { self.0.arrayLength() as usize }
}
pub fn element_type(&self) -> DataType {
unsafe { self.0.elementType().into() }
}
pub fn stride(&self) -> usize {
unsafe { self.0.stride() as usize() }
}
pub fn element_array_type(&self) -> Option<Self> {
unsafe { Self::from_raw(self.0.elementArrayType()).ok() }
}
pub fn element_struct_type(&self) -> Option<StructType> {
unsafe { StructType::from_raw(self.0.elementStructType()).ok() }
}
}
impl_from_into_raw!(ArrayType, of class "MTLArrayType");
|
use super::{CombatStats, GameLog, InBackpack, Name, Player, Position, State, Viewport, Viewshed};
use rltk::{Point, Rltk, VirtualKeyCode, RGB};
use specs::prelude::*;
// ------------------------------------------------------------------------------------------------------------------ //
/// Draw the inventory overlay: a boxed, vertically centered list of the
/// player's carried items, each labelled with a selection letter from 'a'.
/// NOTE(review): the letter labels assume the same join order as
/// `menu_inventory`'s selection lookup — confirm they stay in sync.
fn draw_inventory(world: &World, ctx: &mut Rltk, viewport: &Viewport) {
    let player_entity = world.fetch::<Entity>();
    let names = world.read_storage::<Name>();
    let backpack = world.read_storage::<InBackpack>();
    // Only items whose InBackpack.owner is the player.
    let inventory: Vec<(&InBackpack, &Name)> = (&backpack, &names)
        .join()
        .filter(|item| item.0.owner == *player_entity)
        .collect();
    let count = inventory.len() as i32;
    // Center the list vertically within the map area.
    let mut y = (viewport.map_height / 2 - count / 2) as i32;
    ctx.draw_box(
        15,
        y - 2,
        31,
        (count + 3) as i32,
        RGB::named(rltk::WHITE),
        RGB::named(rltk::BLACK),
    );
    ctx.print_color(
        18,
        y - 2,
        RGB::named(rltk::YELLOW),
        RGB::named(rltk::BLACK),
        "Inventory",
    );
    ctx.print_color(
        18,
        y + count as i32 + 1,
        RGB::named(rltk::YELLOW),
        RGB::named(rltk::BLACK),
        "ESCAPE to cancel",
    );
    // One "(a) Item Name" row per item; 97 is ASCII 'a'.
    let mut j = 0;
    for (_pack, name) in inventory {
        ctx.set(
            17,
            y,
            RGB::named(rltk::WHITE),
            RGB::named(rltk::BLACK),
            rltk::to_cp437('('),
        );
        ctx.set(
            18,
            y,
            RGB::named(rltk::YELLOW),
            RGB::named(rltk::BLACK),
            97 + j as rltk::FontCharType,
        );
        ctx.set(
            19,
            y,
            RGB::named(rltk::WHITE),
            RGB::named(rltk::BLACK),
            rltk::to_cp437(')'),
        );
        ctx.print(21, y, &name.name.to_string());
        y += 1;
        j += 1;
    }
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Render the HUD: the log-panel border, each player's HP text and bar, the
/// most recent game-log lines, and (optionally) the inventory overlay.
pub fn draw_ui(world: &World, ctx: &mut Rltk, viewport: &Viewport, show_inventory: bool) {
    // draw border around the log panel below the map
    ctx.draw_box(
        0,
        viewport.map_height,
        viewport.map_width - 1,
        viewport.log_height - 1,
        RGB::named(rltk::WHITE),
        RGB::named(rltk::BLACK),
    );
    // draw player health — one row per player entity
    let combat_stats = world.read_storage::<CombatStats>();
    let players = world.read_storage::<Player>();
    for (index, info) in (&players, &combat_stats).join().enumerate() {
        let (_player, stats) = info;
        let health = format!("HP: {} / {}", stats.hp, stats.max_hp);
        // Fix: was `(index * 1) as i32` — the `* 1` was a no-op.
        let vert_offset = index as i32;
        ctx.print_color(
            12,
            viewport.map_height + vert_offset,
            RGB::named(rltk::YELLOW),
            RGB::named(rltk::BLACK),
            &health,
        );
        ctx.draw_bar_horizontal(
            28,
            viewport.map_height + vert_offset,
            51,
            stats.hp,
            stats.max_hp,
            RGB::named(rltk::RED),
            RGB::named(rltk::BLACK),
        );
    }
    // draw log, newest entry first, clipped to the panel
    let log = world.fetch::<GameLog>();
    let y = viewport.map_height + 2;
    for (i, s) in log.entries.iter().rev().enumerate() {
        let yoff = y + (i as i32);
        if yoff < viewport.map_height + viewport.log_height - 1 {
            ctx.print(2, yoff, s);
        }
    }
    if show_inventory {
        draw_inventory(world, ctx, viewport);
    }
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Outcome of one frame of inventory-menu input handling.
#[derive(PartialEq, Copy, Clone)]
pub enum ItemMenuResult {
    Cancel,           // player pressed ESC
    NoResponse,       // no key, or a key that maps to nothing
    Selected(Entity), // the chosen inventory item
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Outcome of one frame of ranged-targeting input handling.
#[derive(PartialEq, Copy, Clone)]
pub enum ItemTargetingResult {
    Cancel,          // targeting aborted
    NoResponse,      // still waiting for a target
    Targeted(Point), // the chosen map tile
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Handle keyboard input while the inventory menu is open.
/// Letter keys select a slot ('a' = first item, matching the labels
/// `draw_inventory` prints); ESC cancels; anything else is ignored.
pub fn menu_inventory(gs: &mut State, ctx: &mut Rltk) -> ItemMenuResult {
    let world = &gs.ecs;
    let player_entity = world.fetch::<Entity>();
    let backpack = world.read_storage::<InBackpack>();
    let entities = world.entities();
    // Only items owned by the player, in the same order they are drawn.
    let inventory: Vec<(Entity, &InBackpack)> = (&entities, &backpack)
        .join()
        .filter(|item| item.1.owner == *player_entity)
        .collect();
    let count = inventory.len() as i32;
    match ctx.key {
        None => ItemMenuResult::NoResponse,
        Some(key) => match key {
            VirtualKeyCode::Escape => ItemMenuResult::Cancel,
            _ => {
                // Maps 'a'.. onto 0..; -1 means "not a letter key".
                let selection = rltk::letter_to_option(key);
                if selection > -1 && selection < count {
                    ItemMenuResult::Selected(inventory[selection as usize].0)
                } else {
                    ItemMenuResult::NoResponse
                }
            }
        },
    }
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Begin target selection for a ranged item: highlight every visible tile
/// within `range` of the player and print the prompt.
///
/// NOTE(review): `available_cells` is collected but never read afterwards —
/// presumably intended for validating a mouse click. The cursor-drawing /
/// click-handling step is still a TODO, so this currently always returns
/// `NoResponse` (or `Cancel` when the player has no viewshed).
pub fn ranged_target(gs: &mut State, ctx: &mut Rltk, range: i32) -> ItemTargetingResult {
    let player_entity = gs.ecs.fetch::<Entity>();
    let player_pos = gs.ecs.fetch::<Point>();
    let viewsheds = gs.ecs.read_storage::<Viewshed>();
    ctx.print_color(
        5,
        0,
        RGB::named(rltk::YELLOW),
        RGB::named(rltk::BLACK),
        "Select Target:",
    );
    // highlight available target cells
    let mut available_cells = Vec::new();
    let visible = viewsheds.get(*player_entity);
    if let Some(visible) = visible {
        // we have a viewshed
        for visible_p in visible.visible_tiles.iter() {
            let distance = rltk::DistanceAlg::Pythagoras.distance2d(*player_pos, *visible_p);
            if distance <= range as f32 {
                ctx.set_bg(visible_p.x, visible_p.y, RGB::named(rltk::BLUE));
                available_cells.push(visible_p);
            }
        }
    } else {
        return ItemTargetingResult::Cancel;
    }
    // draw mouse cursor
    ItemTargetingResult::NoResponse
}
|
use actix_web::{web, HttpRequest, HttpResponse, Responder};
use identity::application::role::GetAll;
use crate::authorization::auth;
use crate::container::Container;
use crate::error::PublicError;
/// GET /roles — authenticate the request, then return every role as JSON.
/// Auth failures and use-case errors surface as `PublicError` responses
/// via the `?`/`map_err` conversions.
async fn get_all(req: HttpRequest, c: web::Data<Container>) -> impl Responder {
    let auth_id = auth(&req, &c).await?;
    GetAll::new(c.identity.role_repo(), c.identity.user_repo())
        .exec(auth_id)
        .await
        .map(|res| HttpResponse::Ok().json(res))
        .map_err(PublicError::from)
}
/// Mount the role endpoints under the `/roles` scope.
pub fn routes(cfg: &mut web::ServiceConfig) {
    let scope = web::scope("/roles").route("", web::get().to(get_all));
    cfg.service(scope);
}
|
use actix_web::{App, HttpServer};
use server::db;
use server::routes;
/// Boot the HTTP server: create one shared DB pool, register the room
/// routes, and listen on localhost:8080.
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let pool = db::create_pool();
    HttpServer::new(move || {
        App::new()
            // Each worker receives a clone of the pool handle.
            .data(pool.clone())
            .configure(routes::room::add_route)
    })
    .bind("127.0.0.1:8080")?
    .run()
    .await
}
|
use command::Command;
use event::Event;
use json::ComponentDefinition;
use quote::Tokens;
use schema_type::ReferencedUserType;
use syn::Ident;
use to_rust_qualified_name;
/// Code-generation model of one component, built from the parsed schema
/// JSON (`ComponentDefinition`).
pub struct Component {
    name: String,                                // bare component name
    pub qualified_name: Vec<String>,             // schema name split on '.'
    pub rust_qualified_name: String,             // Rust-path form of the name
    pub data_reference_type: ReferencedUserType, // the component's data type
    pub events: Vec<Event>,                      // event definitions
    pub commands: Vec<Command>,                  // command definitions
    pub component_id: u32,                       // numeric schema component id
}
impl From<ComponentDefinition> for Component {
    /// Build the codegen model from a parsed schema-JSON component.
    fn from(value: ComponentDefinition) -> Component {
        // Each command conversion needs the component definition again, so
        // keep a copy to move into the closure below.
        let command_component_value = value.clone();
        Component {
            name: value.name,
            component_id: value.id,
            // "a.b.c" -> ["a", "b", "c"]; char pattern and point-free map
            // over the string-literal/closure forms (clippy idioms).
            qualified_name: value
                .qualifiedName
                .split('.')
                .map(String::from)
                .collect(),
            rust_qualified_name: to_rust_qualified_name(value.qualifiedName.as_str()),
            data_reference_type: ReferencedUserType::from(value.dataDefinition),
            events: value
                .eventDefinitions
                .into_iter()
                .map(Event::from)
                .collect(),
            commands: value
                .commandDefinitions
                .into_iter()
                .map(|command_def| {
                    Command::from((command_def, command_component_value.clone()))
                })
                .collect(),
        }
    }
}
impl Component {
    /// Identifier used for this component's variant in the generated
    /// `ComponentData` / `ComponentUpdate` enums (e.g. `Component123`).
    pub fn enum_name(&self) -> String {
        format!("Component{}", self.component_id)
    }
    /// Emits the full token stream for one component: the component type,
    /// its `*Data` / `*Update` structs, command code, and the
    /// (de)serialisation glue targeting the `ffi::Schema_*` C API
    /// (appears to be the SpatialOS worker SDK C schema API — confirm).
    pub fn get_code(&self) -> Tokens {
        // Identifiers spliced into the generated code below.
        let name = Ident::new(self.name.as_str());
        let data_name = Ident::new(format!("{}Data", &self.name));
        let update_name = Ident::new(format!("{}Update", &self.name));
        let component_id = self.component_id;
        let enum_name = Ident::new(self.enum_name().as_str());
        let fields = &self.data_reference_type.get().unwrap().borrow().fields;
        // Per-field / per-event / per-command code fragments; each iterator
        // is consumed exactly once by a #(...)* repetition in the quote!.
        let field_definitions = fields.iter().map(|field| field.definition_in_struct(false));
        let field_data_definitions = fields.iter().map(|field| field.definition_in_struct(true));
        let event_data_definitions = self.events.iter().map(|event| event.definition_in_struct());
        let field_serialise_from_data = fields.iter().map(|field| field.serialise_from_data());
        let field_deserialise_into_data = fields.iter().map(|field| field.deserialise_into_data());
        let event_initial_code = self.events.iter().map(|event| event.initial_code());
        let field_update_definitions = fields.iter().map(|field| field.definition_in_update());
        let event_update_definitions = self.events.iter().map(|event| event.definition_in_update());
        let field_apply_from_update = fields.iter().map(|field| field.apply_from_update());
        let event_apply_from_update = self.events.iter().map(|event| event.apply_from_update());
        let field_serialise_from_dirty_data =
            fields.iter().map(|field| field.serialise_from_dirty_data());
        let event_serialise_from_dirty_data = self.events
            .iter()
            .map(|event| event.serialise_from_dirty_data());
        let field_deserialise_into_update =
            fields.iter().map(|field| field.deserialise_into_update());
        let event_deserialise_into_update = self.events
            .iter()
            .map(|event| event.deserialise_into_update());
        let snapshot_to_data_fields = fields.iter().map(|field| field.snapshot_to_data());
        // Same generator as event_initial_code; needed twice because each
        // iterator can only be spliced once.
        let snapshot_event_initial_code = self.events.iter().map(|event| event.initial_code());
        let clear_events = self.events.iter().map(|event| event.clear_events_code());
        let contains_events = self.events.iter().map(|event| event.contains_events_code());
        let command_getters = self.commands.iter().map(|command| command.getter_code());
        let command_code = self.commands.iter().map(|command| command.get_code());
        quote!{
            #[allow(dead_code, unused_variables)]
            #[derive(Default)]
            pub struct #name{
                #(#field_definitions,)*
            }
            #[allow(dead_code, unused_variables)]
            impl #name {
                #(#command_getters)*
            }
            #(#command_code)*
            #[allow(dead_code, unused_variables)]
            impl Component<Schema> for #name {
                type Data = #data_name;
                type Update = #update_name;
                fn component_id() -> ComponentId {
                    #component_id
                }
                fn apply_update_to_data(data: &mut Self::Data, update: &Self::Update) {
                    data.apply_update(update);
                }
                fn extract_data_borrow(data: &<Schema as GeneratedSchema>::ComponentData)
                    -> Option<&Self::Data> {
                    match data {
                        &ComponentData::#enum_name(ref data) => Some(data),
                        _ => None
                    }
                }
                fn extract_data(data: <Schema as GeneratedSchema>::ComponentData)
                    -> Option<Self::Data> {
                    match data {
                        ComponentData::#enum_name(data) => Some(data),
                        _ => None
                    }
                }
                fn extract_update(update: &<Schema as GeneratedSchema>::ComponentUpdate)
                    -> Option<&Self::Update> {
                    match update {
                        &ComponentUpdate::#enum_name(ref update) => Some(update),
                        _ => None
                    }
                }
                fn serialise_snapshot(self) -> Box<ffi::Schema_ComponentData> {
                    let data = #data_name {
                        is_dirty: false,
                        #(#snapshot_to_data_fields,)*
                        #(#snapshot_event_initial_code,)*
                    };
                    data.serialise_data()
                }
            }
            #[allow(dead_code, unused_variables)]
            #[derive(Clone, Debug)]
            pub struct #update_name {
                #(#field_update_definitions,)*
                #(#event_update_definitions,)*
            }
            #[allow(dead_code, unused_variables)]
            #[derive(Clone, Debug, Default)]
            pub struct #data_name {
                is_dirty: bool,
                #(#field_data_definitions,)*
                #(#event_data_definitions,)*
            }
            #[allow(dead_code, unused_variables)]
            impl #data_name {
                pub unsafe fn deserialise(object: *mut Schema_Object) -> #data_name {
                    #data_name {
                        is_dirty: false,
                        #(#field_deserialise_into_data,)*
                        #(#event_initial_code,)*
                    }
                }
                pub unsafe fn serialise(&self, object: *mut Schema_Object) {
                    #(#field_serialise_from_data;)*
                }
            }
            #[allow(dead_code, unused_variables)]
            impl #data_name {
                pub fn apply_update(&mut self, update: &#update_name) {
                    #(#field_apply_from_update;)*
                    #(#event_apply_from_update;)*
                }
            }
            #[allow(dead_code, unused_variables)]
            impl ComponentDataInterface<Schema> for #data_name {
                fn deserialise_data(data: Box<ffi::Schema_ComponentData>)
                    -> <Schema as GeneratedSchema>::ComponentData {
                    unsafe {
                        let data_raw = Box::into_raw(data);
                        let fields = ffi::Schema_GetComponentDataFields(data_raw);
                        Box::from_raw(data_raw);
                        ComponentData::#enum_name(#data_name::deserialise(fields))
                    }
                }
                fn serialise_data(&self) -> Box<ffi::Schema_ComponentData> {
                    unsafe {
                        let data = ffi::Schema_CreateComponentData(#name::component_id());
                        let fields = ffi::Schema_GetComponentDataFields(data);
                        self.serialise(fields);
                        Box::from_raw(data)
                    }
                }
                fn serialise_update(&mut self) -> Box<ffi::Schema_ComponentUpdate> {
                    unsafe {
                        let update = ffi::Schema_CreateComponentUpdate(#name::component_id());
                        let fields = ffi::Schema_GetComponentUpdateFields(update);
                        let events = ffi::Schema_GetComponentUpdateEvents(update);
                        #(#field_serialise_from_dirty_data)*
                        #(#event_serialise_from_dirty_data)*
                        Box::from_raw(update)
                    }
                }
                fn make_dirty(&mut self) {
                    self.is_dirty = true;
                }
                fn get_and_clear_dirty_bit(&mut self) -> bool {
                    let dirty = self.is_dirty;
                    self.is_dirty = false;
                    dirty
                }
                fn cleanup_after_frame(&mut self) {
                    #(#clear_events;)*
                }
            }
            #[allow(dead_code, unused_variables)]
            impl ComponentUpdateInterface<Schema> for #update_name {
                fn deserialise_update(update_box: Box<ffi::Schema_ComponentUpdate>)
                    -> <Schema as GeneratedSchema>::ComponentUpdate {
                    unsafe {
                        let update = Box::into_raw(update_box);
                        let fields = ffi::Schema_GetComponentUpdateFields(update);
                        Box::from_raw(update);
                        ComponentUpdate::#enum_name(#update_name {
                            #(#field_deserialise_into_update,)*
                            #(#event_deserialise_into_update,)*
                        })
                    }
                }
                fn contains_events(&self) -> bool {
                    #(#contains_events ||)* false
                }
            }
        }
    }
}
|
// Copyright 2019-2020 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! Uncles functionality for Substrate.
#![forbid(unsafe_code, missing_docs)]
use log::warn;
use sc_client_api::ProvideUncles;
use sp_authorship;
use sp_consensus::SelectChain;
use sp_inherents::InherentDataProviders;
use sp_runtime::traits::{Block as BlockT, Header};
use std::sync::Arc;
/// Maximum uncles generations we may provide to the runtime.
const MAX_UNCLE_GENERATIONS: u32 = 8;
/// Register uncles inherent data provider, if not registered already.
pub fn register_uncles_inherent_data_provider<B, C, SC>(
client: Arc<C>,
select_chain: SC,
inherent_data_providers: &InherentDataProviders,
) -> Result<(), sp_consensus::Error>
where
B: BlockT,
C: ProvideUncles<B> + Send + Sync + 'static,
SC: SelectChain<B> + 'static,
{
if !inherent_data_providers.has_provider(&sp_authorship::INHERENT_IDENTIFIER) {
inherent_data_providers
.register_provider(sp_authorship::InherentDataProvider::new(move || {
let chain_head = match select_chain.best_chain() {
Ok(x) => x,
Err(e) => {
warn!(target: "uncles", "Unable to get chain head: {:?}", e);
return Vec::new()
},
};
match client.uncles(chain_head.hash(), MAX_UNCLE_GENERATIONS.into()) {
Ok(uncles) => uncles,
Err(e) => {
warn!(target: "uncles", "Unable to get uncles: {:?}", e);
Vec::new()
},
}
}))
.map_err(|err| sp_consensus::Error::InherentData(err.into()))?;
}
Ok(())
}
|
pub mod common;
pub mod pages;
mod shared;
mod util;
|
#[macro_use]
extern crate actix_web;
mod comp;
mod db;
mod error;
mod field;
mod input;
mod insert;
mod model;
mod query;
mod utils;
mod view;
use comp::{Comp, CompBox, DataTable, Doc};
use db::Db;
use field::{BoolField, DateTimeField, IdField, TextField};
use indexmap::IndexMap;
use insert::Insert;
use model::{Model, Models};
use query::Query;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use utils::{DateTimeVal, html_resp, json_resp};
use std::path::PathBuf;
use actix_web::{
http::StatusCode,
middleware,
web::{self, Data},
App, Error, HttpResponse, HttpServer, ResponseError,
};
use actix_files as fs;
impl ResponseError for error::Error {
    /// Render any application error as a plain-text 500 response.
    fn error_response(&self) -> HttpResponse {
        // `to_string()` goes through the same `Display` impl as
        // `format!("{}", self)` without the extra format machinery.
        HttpResponse::InternalServerError().body(self.to_string())
    }
}
fn define_models() -> Models {
let mut models = Models::new();
models.add(
Model::new("key_values")
.add_field(TextField::primary("key"))
.add_field(TextField::text("value")),
);
models.add(
Model::new("error")
.add_field(IdField::id())
.add_field(TextField::text("request_url"))
.add_field(TextField::text("ip"))
.add_field(TextField::text("user_agent"))
.add_field(TextField::text("referrer"))
.add_field(TextField::text("message"))
.add_field(DateTimeField::date("time"))
.add_field(BoolField::boolean("on_client"))
.add_field(TextField::text("file"))
.add_field(TextField::text("line"))
.add_field(TextField::text("col"))
.add_field(TextField::text("details")),
);
models.add(
Model::new("visit")
.add_field(IdField::id())
.add_field(TextField::text("request_url"))
.add_field(TextField::text("ip"))
.add_field(TextField::text("user_agent"))
.add_field(TextField::text("referrer"))
.add_field(DateTimeField::date("time"))
.add_field(TextField::text("details")),
);
models
}
#[get("/visits")]
async fn visits(
    data: Data<AppData>,
) -> Result<HttpResponse, Error> {
    // Render the 500 most recent visits as an HTML data table.
    let db = Db::conn(data.models.clone())?;
    let mut query = Query::new("visit");
    query.select(vec![
        "time",
        "request_url",
        "ip",
        "user_agent",
        "referrer",
        "details",
    ]);
    query.order_by("id", false);
    query.limit(500);
    let table = CompBox::new(Box::new(DataTable { query }), Vec::new());
    let page = CompBox::new(
        Box::new(Doc::new(vec!["/assets/style.css".into()])),
        vec![table],
    );
    Ok(html_resp(CompBox::do_html(&page, &db)?))
}
#[get("/errors")]
async fn errors(
    data: Data<AppData>,
) -> Result<HttpResponse, Error> {
    // Render the 500 most recent client errors as an HTML data table.
    let db = Db::conn(data.models.clone())?;
    let mut query = Query::new("error");
    query.select(vec![
        "time",
        "request_url",
        "ip",
        "user_agent",
        "referrer",
        "message",
        "on_client",
        "file",
        "line",
        "col",
        "details",
    ]);
    query.order_by("id", false);
    query.limit(500);
    let table = CompBox::new(Box::new(DataTable { query }), Vec::new());
    let page = CompBox::new(
        Box::new(Doc::new(vec!["/assets/style.css".into()])),
        vec![table],
    );
    Ok(html_resp(CompBox::do_html(&page, &db)?))
}
/// JSON payload for `POST /insert`: the target model name plus a map of
/// field name -> raw JSON value (insertion order preserved by `IndexMap`).
#[derive(Deserialize)]
pub struct InsertJson {
    model: String,
    inputs: IndexMap<String, serde_json::Value>,
}
/// JSON body acknowledging an insert request.
#[derive(Serialize)]
pub struct InsertResponse {
    success: bool,
}
impl InsertResponse {
pub fn success() -> Self {
InsertResponse { success: true }
}
}
#[post("/insert")]
async fn insert_req(
    data: Data<AppData>,
    input: web::Json<InsertJson>,
) -> Result<HttpResponse, Error> {
    // Resolve the target model, then convert every JSON value into the
    // field's typed input; unknown models/fields or bad values bail early.
    let model = data.models.get(&input.model)?;
    let mut insert = Insert::new(&input.model);
    for (name, raw_value) in &input.inputs {
        let field = model.get_field(name)?;
        insert.value(name, field.json_to_input(raw_value)?);
    }
    let db = Db::conn(data.models.clone())?;
    insert.execute(&db)?;
    Ok(json_resp(InsertResponse::success()))
}
/// Opens a connection and creates all model tables if they don't exist.
///
/// (A commented-out seeding example previously lived here; removed as dead
/// code — recover it from version control if seeding is ever needed.)
fn init_db(models: Arc<Models>) -> error::Result<()> {
    let db = Db::conn(models)?;
    db.create_tables()?;
    Ok(())
}
#[get("/assets/style.css")]
async fn style(data: Data<AppData>) -> Result<fs::NamedFile, Error> {
    // Serve the stylesheet from the configured assets directory.
    let css_path = data.assets.join("style.css");
    Ok(fs::NamedFile::open(css_path)?)
}
/// Drops and re-creates the database tables via `init_db`.
///
/// NOTE(review): exposing this on an unauthenticated GET route is a big
/// risk — any request to /reset-db wipes the data. Protect or remove
/// before production.
#[get("/reset-db")]
async fn reset_db(data: Data<AppData>) -> Result<HttpResponse, Error> {
    Db::reset_db()?;
    init_db(data.models.clone())?;
    Ok(html_resp("Reset db".into()))
}
/// Shared application state handed to every request handler.
pub struct AppData {
    // Schema definitions; cloned Arc is used to open DB connections.
    models: Arc<Models>,
    // Directory static assets (e.g. style.css) are served from.
    assets: PathBuf
}
#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    // Force actix request logging on regardless of the environment.
    std::env::set_var("RUST_LOG", "actix_web=info");
    env_logger::init();
    let models = Arc::new(define_models());
    // Create tables up front; abort startup if the database is unusable.
    init_db(models.clone()).unwrap();
    // Assets folder
    let assets = PathBuf::from("/var/lib/log_server/assets");
    // The factory closure runs once per worker thread; state is cloned in.
    HttpServer::new(move || {
        App::new()
            .app_data(Data::new(AppData {
                models: models.clone(),
                assets: assets.clone()
            }))
            // Always register last
            .wrap(middleware::Logger::default())
            .service(style)
            .service(visits)
            .service(errors)
            .service(insert_req)
            .service(reset_db)
    })
    .bind("0.0.0.0:7006")?
    .run()
    .await
}
|
use std::fs::OpenOptions;
use std::io::Write;
use std::sync::{Arc, Mutex};
use std::thread;
use std::time::{Duration, SystemTime};
use rppal::gpio::{Gpio, Level, Mode};
use libdriver::{api, util};
use libutil::SoftPwm;
use crate::{Error, Result};
// sensor input pins in BCM numbering
const GPIO_IR_L: u8 = 4;
const GPIO_IR_R: u8 = 17;
const GPIO_LINE_L: u8 = 5;
const GPIO_LINE_R: u8 = 27;
// single pin shared as trigger output and echo input by scan_distance()
const GPIO_SONAR: u8 = 20;
const SOUND_SPEED: u32 = 343000; // in mm/s
// motors control pins in BCM numbering (two pins per motor: one per direction)
const GPIO_MOTOR_L1: u8 = 16;
const GPIO_MOTOR_L2: u8 = 19;
const GPIO_MOTOR_R1: u8 = 13;
const GPIO_MOTOR_R2: u8 = 12;
// pan/tilt servo control pins in BCM numbering
#[allow(dead_code)]
const GPIO_PAN_SERVO: u8 = 25;
#[allow(dead_code)]
const GPIO_TILT_SERVO: u8 = 24;
// pan limits
// NOTE(review): the *_PWIDTH values below are in Servoblaster ticks
// (presumably 10 µs units) — confirm against the servoblaster setup.
const PAN_L_CUT_DEGREES: i16 = 90;
const PAN_R_CUT_DEGREES: i16 = -90;
const PAN_C_DEGREES: i16 = 0;
const PAN_L_CUT_PWIDTH: i16 = 220;
const PAN_R_CUT_PWIDTH: i16 = 55;
const PAN_C_PWIDTH: i16 = 138;
// tilt limits
const TILT_U_CUT_DEGREES: i16 = -90;
const TILT_D_CUT_DEGREES: i16 = 80;
const TILT_C_DEGREES: i16 = 0;
const TILT_U_CUT_PWIDTH: i16 = 65;
const TILT_D_CUT_PWIDTH: i16 = 210;
const TILT_C_PWIDTH: i16 = 138;
// Servoblaster control
// NOTE(review): servoblaster's device node is usually /dev/servoblaster —
// confirm this /extdev/ path is intentional for the target image.
const SERVOBLASTER: &str = "/extdev/servoblaster";
/// Driver for a RoboHAT-style Raspberry Pi rover: shared GPIO handle plus
/// one (forward, reverse) software-PWM pair per drive motor.
pub struct RobohatRover {
    gpio: Arc<Mutex<Gpio>>,
    left_motor: (SoftPwm, SoftPwm),
    right_motor: (SoftPwm, SoftPwm),
}
impl RobohatRover {
    /// Initializes GPIO, configures the sensor pins as inputs, and creates
    /// the software-PWM pairs for both drive motors.
    pub fn new() -> Result<RobohatRover> {
        let gpio = Arc::new(Mutex::new(Gpio::new()?));
        {
            // Scoped lock: released before the SoftPwm workers clone the handle.
            let mut g = gpio.lock().unwrap();
            g.set_mode(GPIO_IR_L, Mode::Input);
            g.set_mode(GPIO_IR_R, Mode::Input);
            g.set_mode(GPIO_LINE_L, Mode::Input);
            g.set_mode(GPIO_LINE_R, Mode::Input);
        }
        let left_motor = (
            SoftPwm::new(Arc::clone(&gpio), GPIO_MOTOR_L1, 10.0, 0.0),
            SoftPwm::new(Arc::clone(&gpio), GPIO_MOTOR_L2, 10.0, 0.0),
        );
        let right_motor = (
            SoftPwm::new(Arc::clone(&gpio), GPIO_MOTOR_R1, 10.0, 0.0),
            SoftPwm::new(Arc::clone(&gpio), GPIO_MOTOR_R2, 10.0, 0.0),
        );
        Ok(RobohatRover {
            gpio,
            left_motor,
            right_motor,
        })
    }
    /// Drives one motor (a forward/reverse SoftPwm pair) at `speed` (0-255);
    /// `forward` selects which pin of the pair is energized. Speed 0 stops both.
    fn set_motor_speed(motor: &mut (SoftPwm, SoftPwm), speed: u8, forward: bool) -> Result<()> {
        let frequency = 100_f32;
        let duty_cycle = speed as f32 / 255.0;
        if speed == 0 {
            motor.0.set_duty_cycle(0.0)?;
            motor.1.set_duty_cycle(0.0)?;
        } else if forward {
            // Fix: zero the opposing pin first and set the frequency before
            // raising the duty cycle, matching the reverse branch. The old
            // forward branch raised the duty cycle first, briefly driving the
            // pin at a stale frequency while the reverse pin was still live.
            motor.1.set_duty_cycle(0.0)?;
            motor.0.set_frequency(frequency)?;
            motor.0.set_duty_cycle(duty_cycle)?;
        } else {
            motor.0.set_duty_cycle(0.0)?;
            motor.1.set_frequency(frequency)?;
            motor.1.set_duty_cycle(duty_cycle)?;
        }
        Ok(())
    }
    /// Converts pan/tilt angles (degrees) into Servoblaster pulse widths,
    /// linearly interpolating between the calibrated end stops and clamping
    /// to the mechanical limits.
    fn map_degrees_to_pulse_width(h: i16, v: i16) -> (i16, i16) {
        let deg_to_pw = |deg: i16,
                         e1_deg: i16,
                         e2_deg: i16,
                         mid_deg: i16,
                         e1_pw: i16,
                         e2_pw: i16,
                         mid_pw: i16|
         -> i16 {
            let deg_lo = e1_deg.min(e2_deg);
            let deg_hi = e1_deg.max(e2_deg);
            let deg_span = deg_hi - deg_lo;
            let pw_lo = e1_pw.min(e2_pw);
            let pw_hi = e1_pw.max(e2_pw);
            let pw_span = pw_hi - pw_lo;
            // pw = mid_pw + (deg - mid_deg) * (pulse width per degree)
            let cvt_coef = pw_span as f32 / deg_span as f32;
            let pw = mid_pw as f32 + ((deg - mid_deg) as f32 * cvt_coef);
            if pw > pw_hi as f32 {
                pw_hi
            } else if pw < pw_lo as f32 {
                pw_lo
            } else {
                pw.round() as i16
            }
        };
        let pan_pw = deg_to_pw(
            h,
            PAN_R_CUT_DEGREES,
            PAN_L_CUT_DEGREES,
            PAN_C_DEGREES,
            PAN_R_CUT_PWIDTH,
            PAN_L_CUT_PWIDTH,
            PAN_C_PWIDTH,
        );
        let tilt_pw = deg_to_pw(
            v,
            TILT_D_CUT_DEGREES,
            TILT_U_CUT_DEGREES,
            TILT_C_DEGREES,
            TILT_D_CUT_PWIDTH,
            TILT_U_CUT_PWIDTH,
            TILT_C_PWIDTH,
        );
        (pan_pw, tilt_pw)
    }
}
impl api::Mover for RobohatRover {
    type Error = Error;
    /// Halt both drive motors.
    fn stop(&mut self) -> Result<()> {
        Self::set_motor_speed(&mut self.left_motor, 0, false)?;
        Self::set_motor_speed(&mut self.right_motor, 0, false)
    }
    /// Drive both motors forward at `speed` (0-255).
    fn move_forward(&mut self, speed: u8) -> Result<()> {
        Self::set_motor_speed(&mut self.left_motor, speed, true)?;
        Self::set_motor_speed(&mut self.right_motor, speed, true)
    }
    /// Drive both motors backward at `speed` (0-255).
    fn move_backward(&mut self, speed: u8) -> Result<()> {
        Self::set_motor_speed(&mut self.left_motor, speed, false)?;
        Self::set_motor_speed(&mut self.right_motor, speed, false)
    }
    /// Rotate in place clockwise: left forward, right backward.
    fn spin_right(&mut self, speed: u8) -> Result<()> {
        Self::set_motor_speed(&mut self.left_motor, speed, true)?;
        Self::set_motor_speed(&mut self.right_motor, speed, false)
    }
    /// Rotate in place counter-clockwise: left backward, right forward.
    fn spin_left(&mut self, speed: u8) -> Result<()> {
        Self::set_motor_speed(&mut self.left_motor, speed, false)?;
        Self::set_motor_speed(&mut self.right_motor, speed, true)
    }
    /// Resetting the rover just means stopping the motors.
    fn reset(&mut self) -> Result<()> {
        self.stop()
    }
}
impl api::Looker for RobohatRover {
    type Error = Error;
    /// Points the pan/tilt mount at the given angles (degrees), driving the
    /// servos through the Servoblaster device (channels 7 = pan, 6 = tilt).
    fn look_at(&mut self, h: i16, v: i16) -> Result<()> {
        let (hpw, vpw) = RobohatRover::map_degrees_to_pulse_width(h, v);
        // Propagate the open failure instead of panicking inside a
        // Result-returning method (Error already converts from io::Error,
        // as the writes below rely on).
        let mut servo_ctl = OpenOptions::new()
            .write(true)
            .open(SERVOBLASTER)?;
        // write_all: `write` may perform a partial write and silently drop
        // the tail of the command.
        servo_ctl.write_all(format!("7={}\n", hpw).as_bytes())?;
        servo_ctl.write_all(format!("6={}\n", vpw).as_bytes())?;
        Ok(())
    }
}
impl api::Sensor for RobohatRover {
    type Error = Error;
    /// Reads the two IR obstacle sensors; a Low level is reported as
    /// "detected" (active-low wiring assumed — TODO confirm polarity).
    fn get_obstacles(&self) -> Result<Vec<bool>> {
        let gpio = self.gpio.lock().unwrap();
        Ok(vec![
            gpio.read(GPIO_IR_L)? == Level::Low,
            gpio.read(GPIO_IR_R)? == Level::Low,
        ])
    }
    /// Reads the two line-follower sensors; same active-low convention as
    /// the IR pair (TODO confirm polarity).
    fn get_lines(&self) -> Result<Vec<bool>> {
        let gpio = self.gpio.lock().unwrap();
        Ok(vec![
            gpio.read(GPIO_LINE_L)? == Level::Low,
            gpio.read(GPIO_LINE_R)? == Level::Low,
        ])
    }
    /// Measures distance with the single-pin sonar: emit a 10 µs trigger
    /// pulse, then time the echo pulse; distance = speed * time / 2
    /// (in mm, given SOUND_SPEED is mm/s).
    ///
    /// NOTE(review): this busy-waits while holding the GPIO mutex for up to
    /// ~200 ms (two 100 ms timeouts), blocking all other sensor/motor access.
    fn scan_distance(&mut self) -> Result<f32> {
        let mut gpio = self.gpio.lock().unwrap();
        // Trigger: drive the shared pin high for 10 µs, then switch it back
        // to input to listen for the echo.
        gpio.set_mode(GPIO_SONAR, Mode::Output);
        gpio.write(GPIO_SONAR, Level::High);
        thread::sleep(Duration::from_micros(10));
        gpio.write(GPIO_SONAR, Level::Low);
        gpio.set_mode(GPIO_SONAR, Mode::Input);
        let timeout = Duration::from_millis(100);
        let mut timeout_guard = SystemTime::now();
        let mut pulse_start = timeout_guard.clone();
        // Spin until the rising edge; pulse_start keeps the last Low sample.
        while gpio.read(GPIO_SONAR)? == Level::Low && timeout_guard.elapsed()? < timeout {
            pulse_start = SystemTime::now();
        }
        timeout_guard = SystemTime::now();
        let mut pulse_end = timeout_guard.clone();
        // Spin until the falling edge; pulse_end keeps the last High sample.
        while gpio.read(GPIO_SONAR)? == Level::High && timeout_guard.elapsed()? < timeout {
            pulse_end = SystemTime::now();
        }
        let pulse_width = pulse_end.duration_since(pulse_start)?;
        let pulse_width_f32 =
            pulse_width.as_secs() as f32 + pulse_width.subsec_nanos() as f32 / 1000000000.0;
        // The echo time covers the round trip, so halve the distance.
        let distance = SOUND_SPEED as f32 * pulse_width_f32;
        Ok(distance / 2.0)
    }
}
// Marker impl — presumably relies entirely on SplittableRover's provided
// default methods; nothing rover-specific to override here.
impl util::splittable::SplittableRover for RobohatRover {}
|
/// Struct to represent an Autonomous System (AS): a routing domain
/// identified by its number (ASN) and operator name.
///
/// `Default` yields ASN 0 with an empty name.
#[derive(Default, Clone, PartialEq, Eq, Hash, Debug)]
pub struct Asn {
    /// Autonomous System number
    pub number: u32,
    /// Autonomous System name
    pub name: String,
}
|
pub use crate::rand::*;
pub use crate::test_vectors::*;
pub use crate::*;
// re-export serde and file IO
pub use serde::{self, de::DeserializeOwned, Deserialize, Serialize};
pub use serde_json::Value;
pub use std::fs::File;
pub use std::io::{prelude::*, BufReader};
|
use std::net::{IpAddr, Ipv4Addr, SocketAddr};
use hyper::{Request, Response, StatusCode, Body};
use hyper::server::conn::AddrStream;
use hyper::service::{make_service_fn, service_fn_ok};
use hyper::server::Server;
use hyper::rt::Future;
use super::router::Router;
use super::builder::BuilderWithHandlers;
/// Configuration shared by the HTTP and serverless entry points.
#[derive(Debug, Clone)]
pub struct ServerOptions {
    /// TCP port to bind (default 7070).
    pub port: u16,
    /// Cache lifetime handed to the router; the 24*3600*3 default suggests
    /// seconds (3 days) — confirm in `Router`.
    pub cache_max_age: i32,
    /// Address to bind (default 127.0.0.1).
    pub ip: IpAddr,
}
impl Default for ServerOptions {
fn default() -> Self {
Self {
// cache 3 days
cache_max_age: 24 * 3600 * 3,
port: 7070,
ip: IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),
}
}
}
/// Runs a blocking hyper server (pre-async/await, hyper 0.12-era API:
/// `service_fn_ok` + `hyper::rt::run`) routing every request through `Router`.
pub fn serve_http(build: BuilderWithHandlers, options: ServerOptions) {
    let addr = SocketAddr::new(options.ip, options.port);
    // One Router per accepted connection: builder and options are cloned
    // into each service instance.
    let service = make_service_fn(move |_: &AddrStream| {
        let router = Router::new(build.clone(), options.clone());
        service_fn_ok(move |req: Request<Body>| {
            match router.route(req) {
                Ok(router_response) => router_response.response(),
                Err(error) => {
                    // Routing failed: log and return an opaque 500.
                    eprintln!("service error: {:?}", error);
                    let mut response = Response::new(Body::empty());
                    *response.status_mut() = StatusCode::INTERNAL_SERVER_ERROR;
                    response
                }
            }
        })
    });
    let server = Server::bind(&addr)
        .serve(service)
        .map_err(|e| eprintln!("server error: {}", e));
    println!("Running on: {}", addr);
    // Blocks the current thread on the tokio 0.1-style runtime.
    hyper::rt::run(server)
}
pub fn serve_serverless(
req: now_lambda::Request, build: BuilderWithHandlers, options: ServerOptions
) -> Result<impl now_lambda::IntoResponse, now_lambda::error::NowError> {
let router = Router::new(build, options);
match router.route(req) {
Ok(router_response) => Ok(router_response),
Err(error) => {
let error_message = format!("service error: {:?}", error);
eprintln!("{}", error_message);
Err(now_lambda::error::NowError::new(&error_message))
}
}
}
|
//! Defining a custom asset and format.
extern crate amethyst_assets;
extern crate futures;
extern crate rayon;
use std::str::{Utf8Error, from_utf8};
use std::sync::Arc;
use amethyst_assets::*;
use rayon::{Configuration, ThreadPool};
/// Example asset payload: a string with the context's prefix prepended.
#[derive(Clone, Debug)]
struct DummyAsset(String);
impl Asset for DummyAsset {
    // Creation/updating is delegated to `DummyContext`.
    type Context = DummyContext;
}
/// Context that prefixes every loaded asset with a static string.
struct DummyContext(&'static str);
impl Context for DummyContext {
    type Result = Result<DummyAsset, NoError>;
    type Asset = DummyAsset;
    type Data = String;
    type Error = NoError;
    // Cache/category key for assets produced by this context.
    fn category(&self) -> &str {
        "dummy"
    }
    // Turns parsed string data into the asset by prepending the prefix.
    fn create_asset(&self, mut data: String, _: &ThreadPool) -> Result<DummyAsset, Self::Error> {
        data.insert_str(0, self.0);
        Ok(DummyAsset(data))
    }
    // Hot reload is deliberately unsupported in this example.
    fn update(&self, _spec: &AssetSpec, _asset: AssetFuture<Self::Asset>) {
        unimplemented!()
    }
}
/// Format that parses `.dum` files as UTF-8 text.
struct DummyFormat;
impl Format for DummyFormat {
    const EXTENSIONS: &'static [&'static str] = &["dum"];
    type Result = Result<String, Utf8Error>;
    type Data = String;
    type Error = Utf8Error;
    /// Interprets the raw bytes as UTF-8 and returns an owned `String`.
    fn parse(&self, bytes: Vec<u8>, _: &ThreadPool) -> Self::Result {
        from_utf8(&bytes).map(str::to_owned)
    }
}
fn main() {
    use futures::Future;
    // Assets live next to the example inside the crate.
    let path = format!("{}/examples/assets", env!("CARGO_MANIFEST_DIR"));
    let cfg = Configuration::new().num_threads(8);
    let pool = Arc::new(ThreadPool::new(cfg).expect("Invalid config"));
    let mut loader = Loader::new(&path, pool);
    loader.register(DummyContext(">> "));
    // Loads "whatever.dum" (extension comes from DummyFormat::EXTENSIONS)
    // and blocks on the future until the asset is ready.
    let dummy = loader.load("whatever", DummyFormat);
    let dummy: DummyAsset = dummy.wait().expect("Failed to load dummy asset");
    println!("dummy: {:?}", dummy);
}
|
// Auto-generated (svd2rust-style) reader types for the ETZPC_HWCFGR register.
#[doc = "Reader of register ETZPC_HWCFGR"]
pub type R = crate::R<u32, super::ETZPC_HWCFGR>;
#[doc = "Reader of field `NUM_TZMA`"]
pub type NUM_TZMA_R = crate::R<u8, u8>;
#[doc = "Reader of field `NUM_PER_SEC`"]
pub type NUM_PER_SEC_R = crate::R<u8, u8>;
#[doc = "Reader of field `NUM_AHB_SEC`"]
pub type NUM_AHB_SEC_R = crate::R<u8, u8>;
#[doc = "Reader of field `CHUNKS1N4`"]
pub type CHUNKS1N4_R = crate::R<u8, u8>;
// Auto-generated accessors: each extracts one byte-wide field from the
// 32-bit hardware-configuration register value.
impl R {
    #[doc = "Bits 0:7 - NUM_TZMA"]
    #[inline(always)]
    pub fn num_tzma(&self) -> NUM_TZMA_R {
        NUM_TZMA_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 8:15 - NUM_PER_SEC"]
    #[inline(always)]
    pub fn num_per_sec(&self) -> NUM_PER_SEC_R {
        NUM_PER_SEC_R::new(((self.bits >> 8) & 0xff) as u8)
    }
    #[doc = "Bits 16:23 - NUM_AHB_SEC"]
    #[inline(always)]
    pub fn num_ahb_sec(&self) -> NUM_AHB_SEC_R {
        NUM_AHB_SEC_R::new(((self.bits >> 16) & 0xff) as u8)
    }
    #[doc = "Bits 24:31 - CHUNKS1N4"]
    #[inline(always)]
    pub fn chunks1n4(&self) -> CHUNKS1N4_R {
        CHUNKS1N4_R::new(((self.bits >> 24) & 0xff) as u8)
    }
}
|
fn main() {
    // Sample data; quicksort sorts in place over the inclusive index
    // range [0, len - 1].
    let mut arr = [9, 4, 13, 2, 22, 17, 8, 9, 10];
    let end = arr.len() - 1;
    quicksort(&mut arr[..], 0, end);
    // Fix: stray `ZZ` tokens after this statement were a syntax error.
    println!("Sorted array:");
    print_arr(&arr[..]);
}
/// Sorts `arr[start..=end]` in place using Lomuto-partition quicksort.
///
/// `start` and `end` are inclusive indices; callers pass `0` and `len - 1`.
fn quicksort(arr: &mut[i32], start:usize, end:usize) {
    // Empty or single-element range: already sorted.
    if start >= end {
        return;
    }
    let p = partition(arr, start, end);
    // Fix: the original returned early whenever `p == 0`, which skipped
    // sorting the right partition entirely (e.g. [1, 5, 4, 0] stayed
    // unsorted past index 0). Guard only the left recursion instead;
    // `p > start` also prevents the unsigned `p - 1` underflow.
    if p > start {
        quicksort(arr, start, p - 1);
    }
    quicksort(arr, p + 1, end);
}
/// Lomuto partition: uses `arr[end]` as the pivot, moves everything <= the
/// pivot to its left, places the pivot at its final index and returns it.
fn partition(arr: &mut[i32], start:usize, end:usize) -> usize {
    // Degenerate range: nothing to partition.
    if end <= start {
        return end;
    }
    let pivot = end;
    let mut i = start;
    for j in start..end {
        if arr[j] <= arr[pivot] {
            arr.swap(i, j);
            i += 1;
        }
    }
    // Swap the pivot into its final slot.
    arr.swap(i, end);
    i
}
/// Prints the slice space-separated on a single line (trailing space kept).
fn print_arr(a: &[i32]) -> () {
    for value in a.iter() {
        print!("{} ", value);
    }
    println!();
}
|
use core::ops;
use proc_macro2::TokenStream;
use quote::ToTokens;
use syn::{
parse::{Parse, ParseStream},
Fields, Ident, ItemEnum, Result, Type,
};
/// A structure to make trait implementation to enums more efficient.
/// A structure to make trait implementation to enums more efficient.
pub struct EnumData {
    // The parsed enum item; exposed read-only through `Deref`.
    repr: ItemEnum,
    // The single unnamed-field type of each variant, in declaration order.
    field_types: Vec<Type>,
}
impl EnumData {
    /// Returns an iterator over field types.
    ///
    /// ```text
    /// enum Enum<TypeA, TypeB> {
    ///     VariantA(TypeA),
    ///              ^^^^^
    ///     VariantB(TypeB),
    ///              ^^^^^
    /// }
    /// ```
    pub fn field_types(&self) -> impl ExactSizeIterator<Item = &Type> + Clone {
        self.field_types.iter()
    }
    /// Returns an iterator over variant names.
    ///
    /// ```text
    /// enum Enum<TypeA, TypeB> {
    ///     VariantA(TypeA),
    ///     ^^^^^^^^
    ///     VariantB(TypeB),
    ///     ^^^^^^^^
    /// }
    /// ```
    pub fn variant_idents(&self) -> impl ExactSizeIterator<Item = &Ident> + Clone {
        // `self.variants` resolves through `Deref<Target = ItemEnum>`.
        self.variants.iter().map(|v| &v.ident)
    }
}
// Lets callers use `ItemEnum` fields/methods (e.g. `.variants`, `.ident`)
// directly on an `EnumData`.
impl ops::Deref for EnumData {
    type Target = ItemEnum;
    fn deref(&self) -> &Self::Target {
        &self.repr
    }
}
impl From<EnumData> for ItemEnum {
    /// Unwraps back into the underlying parsed `ItemEnum`.
    fn from(other: EnumData) -> Self {
        other.repr
    }
}
impl Parse for EnumData {
    // Parses an enum and validates that it is usable for derivation:
    // at least one variant, no explicit discriminants, and exactly one
    // unnamed (tuple) field per variant. `bail!` is a crate-local macro —
    // presumably it returns a spanned `syn::Error`; confirm in its definition.
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        let item: ItemEnum = input.parse()?;
        if item.variants.is_empty() {
            bail!(item, "may not be used on enums without variants");
        }
        // try_fold short-circuits on the first invalid variant while
        // collecting each variant's single field type.
        let field_types = item.variants.iter().try_fold(
            Vec::with_capacity(item.variants.len()),
            |mut field_types, v| {
                if let Some((_, e)) = &v.discriminant {
                    bail!(e, "may not be used on enums with discriminants");
                }
                if v.fields.is_empty() {
                    bail!(v, "may not be used on enums with variants with zero fields");
                } else if v.fields.len() != 1 {
                    bail!(v, "may not be used on enums with variants with multiple fields");
                }
                match &v.fields {
                    Fields::Unnamed(f) => {
                        field_types.push(f.unnamed.iter().next().unwrap().ty.clone());
                        Ok(field_types)
                    }
                    Fields::Named(_) => {
                        bail!(v, "may not be used on enums with variants with named fields");
                    }
                    // Unit variants were rejected above by the is_empty check.
                    Fields::Unit => unreachable!(),
                }
            },
        )?;
        Ok(Self { repr: item, field_types })
    }
}
impl ToTokens for EnumData {
    // Emits the original enum unchanged; derived impls are produced elsewhere.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.repr.to_tokens(tokens);
    }
}
|
// Auto-generated (svd2rust-style) reader/writer types for DDRPERFM_CTL.
#[doc = "Reader of register DDRPERFM_CTL"]
pub type R = crate::R<u32, super::DDRPERFM_CTL>;
#[doc = "Writer for register DDRPERFM_CTL"]
pub type W = crate::W<u32, super::DDRPERFM_CTL>;
#[doc = "Register DDRPERFM_CTL `reset()`'s with value 0"]
// Auto-generated: writes via `reset()` restore the all-zero reset state.
impl crate::ResetValue for super::DDRPERFM_CTL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Write proxy for field `START`"]
// Borrows the register writer; methods return it for call chaining.
pub struct START_W<'a> {
    w: &'a mut W,
}
impl<'a> START_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 0, then splice in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Write proxy for field `STOP`"]
// Borrows the register writer; methods return it for call chaining.
pub struct STOP_W<'a> {
    w: &'a mut W,
}
impl<'a> STOP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 1, then splice in the new value.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
// Generated placeholder: DDRPERFM_CTL exposes no readable fields.
impl R {}
// Entry points returning one write proxy per register field.
impl W {
    #[doc = "Bit 0 - START"]
    #[inline(always)]
    pub fn start(&mut self) -> START_W {
        START_W { w: self }
    }
    #[doc = "Bit 1 - STOP"]
    #[inline(always)]
    pub fn stop(&mut self) -> STOP_W {
        STOP_W { w: self }
    }
}
|
//! This is so nasty!
//!
//! We need to support 3DES to provide compatibility with Yubico's braindead
//! implementation of key management...
// use cortex_m_semihosting::{dbg, hprintln};
use core::convert::TryInto;
// needed to even get ::new() from des...
use des::cipher::{BlockDecrypt, BlockEncrypt, NewBlockCipher};
use crate::api::*;
use crate::error::Error;
use crate::service::*;
use crate::types::*;
#[cfg(feature = "tdes")]
impl Encrypt for super::Tdes
{
    /// Encrypts a single block. Let's hope we don't have to support ECB!!
    #[inline(never)]
    fn encrypt(keystore: &mut impl Keystore, request: &request::Encrypt)
        -> Result<reply::Encrypt, Error>
    {
        // 3DES works on 64-bit blocks; reject any other length up front.
        if request.message.len() != 8 {
            return Err(Error::WrongMessageLength);
        }
        let key_id = request.key;
        // Pull the 24-byte (three-key EDE3) secret out of the keystore.
        let raw_key: [u8; 24] = keystore
            .load_key(key::Secrecy::Secret, None, &key_id)?
            .material
            .as_slice()
            .try_into()
            .map_err(|_| Error::InternalError)?;
        let tdes = des::TdesEde3::new(GenericArray::from_slice(&raw_key));
        // Encrypt in place on a copy of the request message.
        let mut block = request.message.clone();
        tdes.encrypt_block(GenericArray::from_mut_slice(&mut block));
        Ok(reply::Encrypt {
            ciphertext: block,
            nonce: Default::default(),
            tag: Default::default(),
        })
    }
}
#[cfg(feature = "tdes")]
impl Decrypt for super::Tdes
{
    /// Decrypts a single block. Let's hope we don't have to support ECB!!
    #[inline(never)]
    fn decrypt(keystore: &mut impl Keystore, request: &request::Decrypt)
        -> Result<reply::Decrypt, Error>
    {
        // 3DES works on 64-bit blocks; reject any other length up front.
        if request.message.len() != 8 {
            return Err(Error::WrongMessageLength);
        }
        let key_id = request.key;
        // Pull the 24-byte (three-key EDE3) secret out of the keystore.
        let raw_key: [u8; 24] = keystore
            .load_key(key::Secrecy::Secret, None, &key_id)?
            .material
            .as_slice()
            .try_into()
            .map_err(|_| Error::InternalError)?;
        let tdes = des::TdesEde3::new(GenericArray::from_slice(&raw_key));
        // Decrypt in place on a copy of the request message.
        let mut block = request.message.clone();
        tdes.decrypt_block(GenericArray::from_mut_slice(&mut block));
        Ok(reply::Decrypt { plaintext: Some(block) })
    }
}
|
//! Session information.
//!
//! Initially created from database client and used through server, sql-processor, and storage-engine.
//!
//! A session holds these information:
//!
//! - Open database (0/1)
//! - Beginning transaction (0/1 if a database is open; 0 if any database isn't open)
//!
//! Only storage-engine has direct access to database and transaction.
//! Other components create/modify/get database and transaction through access methods' call with session.
//!
//! Note that session is free from physical connection implementation.
//! Therefore, for example, client-server's transport is independent from Session and can be any of TCP, direct method call, and so on.
pub(crate) mod session_id;
pub(crate) mod with_db;
pub(crate) mod with_tx;
pub(crate) mod without_db;
use crate::{SessionId, SessionWithDb, SessionWithTx, SessionWithoutDb};
use serde::{Deserialize, Serialize};
/// Session types
/// Session types
///
/// The variants mirror the lifecycle described in the module docs:
/// no database open -> database open -> transaction begun.
#[derive(Hash, Debug, Serialize, Deserialize)]
pub enum Session {
    /// session without open database
    WithoutDb(SessionWithoutDb),
    /// session with open database
    WithDb(SessionWithDb),
    /// session with open transaction
    WithTx(SessionWithTx),
}
impl Default for Session {
    /// A fresh session starts with no database open.
    fn default() -> Self {
        Self::WithoutDb(SessionWithoutDb::default())
    }
}
// Wraps a no-database session into the unified enum.
impl From<SessionWithoutDb> for Session {
    fn from(s: SessionWithoutDb) -> Self {
        Session::WithoutDb(s)
    }
}
// Wraps an open-database session into the unified enum.
impl From<SessionWithDb> for Session {
    fn from(s: SessionWithDb) -> Self {
        Session::WithDb(s)
    }
}
// Wraps an open-transaction session into the unified enum.
impl From<SessionWithTx> for Session {
    fn from(s: SessionWithTx) -> Self {
        Session::WithTx(s)
    }
}
impl Session {
    /// get session ID
    ///
    /// Delegates to whichever lifecycle state is active (presumably the ID
    /// stays stable across state transitions — confirm in with_db/with_tx).
    pub fn get_id(&self) -> &SessionId {
        match self {
            Session::WithoutDb(s) => s.get_id(),
            Session::WithDb(s) => s.get_id(),
            Session::WithTx(s) => s.get_id(),
        }
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Cluster resource properties (generated model; JSON field names are
/// preserved via the `serde(rename)` attributes on each field).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ClusterProperties {
    #[serde(rename = "clusterId", default, skip_serializing_if = "Option::is_none")]
    pub cluster_id: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<cluster_properties::ProvisioningState>,
    #[serde(rename = "isDoubleEncryptionEnabled", default, skip_serializing_if = "Option::is_none")]
    pub is_double_encryption_enabled: Option<bool>,
    #[serde(rename = "isAvailabilityZonesEnabled", default, skip_serializing_if = "Option::is_none")]
    pub is_availability_zones_enabled: Option<bool>,
    #[serde(rename = "billingType", default, skip_serializing_if = "Option::is_none")]
    pub billing_type: Option<BillingType>,
    #[serde(rename = "keyVaultProperties", default, skip_serializing_if = "Option::is_none")]
    pub key_vault_properties: Option<KeyVaultProperties>,
    #[serde(rename = "lastModifiedDate", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_date: Option<String>,
    #[serde(rename = "createdDate", default, skip_serializing_if = "Option::is_none")]
    pub created_date: Option<String>,
    #[serde(rename = "associatedWorkspaces", default, skip_serializing_if = "Vec::is_empty")]
    pub associated_workspaces: Vec<AssociatedWorkspace>,
    #[serde(rename = "capacityReservationProperties", default, skip_serializing_if = "Option::is_none")]
    pub capacity_reservation_properties: Option<CapacityReservationProperties>,
}
pub mod cluster_properties {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProvisioningState {
Creating,
Succeeded,
Failed,
Canceled,
Deleting,
ProvisioningAccount,
Updating,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ClusterPatchProperties {
#[serde(rename = "keyVaultProperties", default, skip_serializing_if = "Option::is_none")]
pub key_vault_properties: Option<KeyVaultProperties>,
#[serde(rename = "billingType", default, skip_serializing_if = "Option::is_none")]
pub billing_type: Option<BillingType>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ClusterPatch {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<ClusterPatchProperties>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub identity: Option<Identity>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub sku: Option<ClusterSku>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Cluster {
#[serde(flatten)]
pub tracked_resource: TrackedResource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub identity: Option<Identity>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub sku: Option<ClusterSku>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<ClusterProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ClusterListResult {
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<Cluster>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyVaultProperties {
#[serde(rename = "keyVaultUri", default, skip_serializing_if = "Option::is_none")]
pub key_vault_uri: Option<String>,
#[serde(rename = "keyName", default, skip_serializing_if = "Option::is_none")]
pub key_name: Option<String>,
#[serde(rename = "keyVersion", default, skip_serializing_if = "Option::is_none")]
pub key_version: Option<String>,
#[serde(rename = "keyRsaSize", default, skip_serializing_if = "Option::is_none")]
pub key_rsa_size: Option<i32>,
}
/// Billing granularity selector; serialized as the JSON strings
/// `"Cluster"` or `"Workspaces"`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum BillingType {
    Cluster,
    Workspaces,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ClusterSku {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub capacity: Option<i64>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<cluster_sku::Name>,
}
pub mod cluster_sku {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Name {
CapacityReservation,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Identity {
#[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")]
pub principal_id: Option<String>,
#[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
pub tenant_id: Option<String>,
#[serde(rename = "type")]
pub type_: identity::Type,
#[serde(rename = "userAssignedIdentities", default, skip_serializing_if = "Option::is_none")]
pub user_assigned_identities: Option<serde_json::Value>,
}
pub mod identity {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Type {
SystemAssigned,
UserAssigned,
None,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UserIdentityProperties {
#[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")]
pub principal_id: Option<String>,
#[serde(rename = "clientId", default, skip_serializing_if = "Option::is_none")]
pub client_id: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AssociatedWorkspace {
#[serde(rename = "workspaceId", default, skip_serializing_if = "Option::is_none")]
pub workspace_id: Option<String>,
#[serde(rename = "workspaceName", default, skip_serializing_if = "Option::is_none")]
pub workspace_name: Option<String>,
#[serde(rename = "resourceId", default, skip_serializing_if = "Option::is_none")]
pub resource_id: Option<String>,
#[serde(rename = "associateDate", default, skip_serializing_if = "Option::is_none")]
pub associate_date: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CapacityReservationProperties {
#[serde(rename = "lastSkuUpdate", default, skip_serializing_if = "Option::is_none")]
pub last_sku_update: Option<String>,
#[serde(rename = "minCapacity", default, skip_serializing_if = "Option::is_none")]
pub min_capacity: Option<i64>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<Operation>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub display: Option<operation::Display>,
}
pub mod operation {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Display {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub provider: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub resource: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub operation: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TableProperties {
#[serde(rename = "retentionInDays", default, skip_serializing_if = "Option::is_none")]
pub retention_in_days: Option<i32>,
#[serde(rename = "isTroubleshootingAllowed", default, skip_serializing_if = "Option::is_none")]
pub is_troubleshooting_allowed: Option<bool>,
#[serde(rename = "isTroubleshootEnabled", default, skip_serializing_if = "Option::is_none")]
pub is_troubleshoot_enabled: Option<bool>,
#[serde(rename = "lastTroubleshootDate", default, skip_serializing_if = "Option::is_none")]
pub last_troubleshoot_date: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Table {
#[serde(flatten)]
pub proxy_resource: ProxyResource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<TableProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TablesListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<Table>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkspaceSku {
pub name: workspace_sku::Name,
#[serde(rename = "capacityReservationLevel", default, skip_serializing_if = "Option::is_none")]
pub capacity_reservation_level: Option<i32>,
#[serde(rename = "lastSkuUpdate", default, skip_serializing_if = "Option::is_none")]
pub last_sku_update: Option<String>,
}
pub mod workspace_sku {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Name {
Free,
Standard,
Premium,
PerNode,
#[serde(rename = "PerGB2018")]
PerGb2018,
Standalone,
CapacityReservation,
#[serde(rename = "LACluster")]
LaCluster,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkspaceCapping {
#[serde(rename = "dailyQuotaGb", default, skip_serializing_if = "Option::is_none")]
pub daily_quota_gb: Option<f64>,
#[serde(rename = "quotaNextResetTime", default, skip_serializing_if = "Option::is_none")]
pub quota_next_reset_time: Option<String>,
#[serde(rename = "dataIngestionStatus", default, skip_serializing_if = "Option::is_none")]
pub data_ingestion_status: Option<workspace_capping::DataIngestionStatus>,
}
pub mod workspace_capping {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DataIngestionStatus {
RespectQuota,
ForceOn,
ForceOff,
OverQuota,
SubscriptionSuspended,
ApproachingQuota,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkspaceProperties {
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<workspace_properties::ProvisioningState>,
#[serde(rename = "customerId", default, skip_serializing_if = "Option::is_none")]
pub customer_id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub sku: Option<WorkspaceSku>,
#[serde(rename = "retentionInDays", default, skip_serializing_if = "Option::is_none")]
pub retention_in_days: Option<i32>,
#[serde(rename = "workspaceCapping", default, skip_serializing_if = "Option::is_none")]
pub workspace_capping: Option<WorkspaceCapping>,
#[serde(rename = "createdDate", default, skip_serializing_if = "Option::is_none")]
pub created_date: Option<String>,
#[serde(rename = "modifiedDate", default, skip_serializing_if = "Option::is_none")]
pub modified_date: Option<String>,
#[serde(rename = "publicNetworkAccessForIngestion", default, skip_serializing_if = "Option::is_none")]
pub public_network_access_for_ingestion: Option<PublicNetworkAccessType>,
#[serde(rename = "publicNetworkAccessForQuery", default, skip_serializing_if = "Option::is_none")]
pub public_network_access_for_query: Option<PublicNetworkAccessType>,
#[serde(rename = "forceCmkForQuery", default, skip_serializing_if = "Option::is_none")]
pub force_cmk_for_query: Option<bool>,
#[serde(rename = "privateLinkScopedResources", default, skip_serializing_if = "Vec::is_empty")]
pub private_link_scoped_resources: Vec<PrivateLinkScopedResource>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub features: Option<WorkspaceFeatures>,
}
pub mod workspace_properties {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProvisioningState {
Creating,
Succeeded,
Failed,
Canceled,
Deleting,
ProvisioningAccount,
Updating,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkspaceFeatures {
#[serde(rename = "enableDataExport", default, skip_serializing_if = "Option::is_none")]
pub enable_data_export: Option<bool>,
#[serde(rename = "immediatePurgeDataOn30Days", default, skip_serializing_if = "Option::is_none")]
pub immediate_purge_data_on30_days: Option<bool>,
#[serde(
rename = "enableLogAccessUsingOnlyResourcePermissions",
default,
skip_serializing_if = "Option::is_none"
)]
pub enable_log_access_using_only_resource_permissions: Option<bool>,
#[serde(rename = "clusterResourceId", default, skip_serializing_if = "Option::is_none")]
pub cluster_resource_id: Option<String>,
#[serde(rename = "disableLocalAuth", default, skip_serializing_if = "Option::is_none")]
pub disable_local_auth: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkScopedResource {
#[serde(rename = "resourceId", default, skip_serializing_if = "Option::is_none")]
pub resource_id: Option<String>,
#[serde(rename = "scopeId", default, skip_serializing_if = "Option::is_none")]
pub scope_id: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Workspace {
#[serde(flatten)]
pub tracked_resource: TrackedResource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<WorkspaceProperties>,
#[serde(rename = "eTag", default, skip_serializing_if = "Option::is_none")]
pub e_tag: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkspacePatch {
#[serde(flatten)]
pub azure_entity_resource: AzureEntityResource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<WorkspaceProperties>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkspaceListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<Workspace>,
}
/// Public-network access flag; serialized as the JSON strings
/// `"Enabled"` or `"Disabled"`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum PublicNetworkAccessType {
    Enabled,
    Disabled,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub error: Option<ErrorDetail>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDetail {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub code: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub message: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub target: Option<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub details: Vec<ErrorDetail>,
#[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")]
pub additional_info: Vec<ErrorAdditionalInfo>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorAdditionalInfo {
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub info: Option<serde_json::Value>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrackedResource {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
pub location: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyResource {
#[serde(flatten)]
pub resource: Resource,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureEntityResource {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub etag: Option<String>,
}
|
use hyper::{Body, Response};
use crate::db;
use crate::html;
use crate::http::util;
pub fn handle_get() -> Result<Response<Body>, hyper::Error> {
    // Fetch the game list up front; a database failure becomes an error page.
    let games = match db::DB::new().get_games() {
        Ok(games) => games,
        Err(err) => return util::db_error_page(err),
    };
    let page = html::pages::game_list::page(games);
    Ok(Response::new(Body::from(html::common::render_page(page))))
}
pub fn handle_post() -> Result<Response<Body>, hyper::Error> {
    // Insert a new game, then fall through to rendering the refreshed list.
    if let Err(err) = db::DB::new().add_game() {
        return util::db_error_page(err);
    }
    handle_get()
}
|
use block::Block;
use blocks::wm::*;
use wm::WindowManager;
use util::WindowManagers;
pub struct Wsp {
wm: Box<WindowManager>,
icon: String,
active_icon: String,
}
impl Wsp {
    /// Create a workspace block backed by the default window manager (bspwm)
    /// with empty icons.
    pub fn new() -> Wsp {
        Wsp {
            wm: Box::new(Bspwm::new()),
            icon: String::new(),
            active_icon: String::new(),
        }
    }
    /// Select which window manager backend to query.
    // NOTE(review): only bspwm is wired up; every variant currently falls
    // through to `Bspwm` (the I3 arm is commented out below).
    pub fn set_wm(&mut self, wm: WindowManagers) {
        self.wm = match wm {
            // I3 => Box::new(I3::new()),
            _ => Box::new(Bspwm::new()),
        };
    }
    /// Icon rendered for non-focused desktops.
    pub fn set_icon<T: Into<String>>(&mut self, icon: T) {
        self.icon = icon.into();
    }
    /// Icon rendered for the currently focused desktop.
    pub fn set_active_icon<T: Into<String>>(&mut self, active_icon: T) {
        self.active_icon = active_icon.into();
    }
}
impl Block for Wsp {
    fn new() -> Wsp {
        Wsp::new()
    }

    /// Render one icon per desktop, separated by single spaces; the focused
    /// desktop gets the active icon.
    fn output(&self) -> String {
        let focused = self.wm.current_desktop();
        let total = self.wm.num_desktops();
        let icons: Vec<&str> = (0..total)
            .map(|desktop| {
                if desktop == focused {
                    self.active_icon.as_str()
                } else {
                    self.icon.as_str()
                }
            })
            .collect();
        icons.join(" ")
    }
}
|
// Copyright 2019 Parity Technologies
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! `BitVec` specific serialization.
use bitvec::{
vec::BitVec, store::BitStore, order::BitOrder, slice::BitSlice, boxed::BitBox, view::BitView,
};
use crate::{
EncodeLike, Encode, Decode, Input, Output, Error, Compact, codec::decode_vec_with_len,
};
impl<O: BitOrder, T: BitStore + Encode> Encode for BitSlice<T, O> {
	// SCALE-encodes the slice as a compact bit count followed by the
	// underlying store elements, with unused trailing bits zeroed.
	fn encode_to<W: Output + ?Sized>(&self, dest: &mut W) {
		let bits = self.len();
		// Guard so the length always fits the u32 compact prefix and can be
		// decoded on a 32-bit machine.
		assert!(
			bits <= ARCH32BIT_BITSLICE_MAX_BITS,
			"Attempted to encode a BitSlice with too many bits.",
		);
		Compact(bits as u32).encode_to(dest);
		// Iterate over chunks
		// Re-pack each element-sized chunk into a fresh `T` so trailing bits
		// are deterministically zero (`element` starts at `T::ZERO`).
		for chunk in self.chunks(core::mem::size_of::<T>() * 8) {
			let mut element = T::ZERO;
			element.view_bits_mut::<O>()[..chunk.len()].copy_from_bitslice(chunk);
			element.encode_to(dest);
		}
	}
}
impl<O: BitOrder, T: BitStore + Encode> Encode for BitVec<T, O> {
	// Delegates to the `BitSlice` encoding (compact bit count + elements).
	fn encode_to<W: Output + ?Sized>(&self, dest: &mut W) {
		self.as_bitslice().encode_to(dest)
	}
}
impl<O: BitOrder, T: BitStore + Encode> EncodeLike for BitVec<T, O> {}
/// Equivalent of `BitStore::MAX_BITS` on 32bit machine.
/// Upper bound enforced by both encode (assert) and decode (error) so
/// payloads stay portable across architectures.
const ARCH32BIT_BITSLICE_MAX_BITS: usize = 0x1fff_ffff;
impl<O: BitOrder, T: BitStore + Decode> Decode for BitVec<T, O> {
	// Decodes a compact bit count, then exactly enough store elements to
	// hold that many bits, and truncates the trailing padding bits.
	fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
		<Compact<u32>>::decode(input).and_then(move |Compact(bits)| {
			// Otherwise it is impossible to store it on 32bit machine.
			if bits as usize > ARCH32BIT_BITSLICE_MAX_BITS {
				return Err("Attempt to decode a BitVec with too many bits".into());
			}
			// Read exactly the number of store elements needed for `bits` bits.
			let vec = decode_vec_with_len(input, bitvec::mem::elts::<T>(bits as usize))?;
			let mut result = Self::try_from_vec(vec)
				.map_err(|_| {
					Error::from("UNEXPECTED ERROR: `bits` is less or equal to
				`ARCH32BIT_BITSLICE_MAX_BITS`; So BitVec must be able to handle the number of
				segment needed for `bits` to be represented; qed")
				})?;
			assert!(bits as usize <= result.len());
			// Drop the padding bits that came with the final store element.
			result.truncate(bits as usize);
			Ok(result)
		})
	}
}
impl<O: BitOrder, T: BitStore + Encode> Encode for BitBox<T, O> {
	// Delegates to the `BitSlice` encoding (compact bit count + elements).
	fn encode_to<W: Output + ?Sized>(&self, dest: &mut W) {
		self.as_bitslice().encode_to(dest)
	}
}
impl<O: BitOrder, T: BitStore + Encode> EncodeLike for BitBox<T, O> {}
impl<O: BitOrder, T: BitStore + Decode> Decode for BitBox<T, O> {
	// A `BitBox` decodes exactly like a `BitVec`, then converts in place.
	fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
		Ok(BitVec::<T, O>::decode(input)?.into())
	}
}
#[cfg(test)]
mod tests {
use super::*;
use bitvec::{bitvec, order::{Msb0, Lsb0}};
use crate::{codec::MAX_PREALLOCATION, CompactLen};
macro_rules! test_data {
($inner_type:ident) => (
[
BitVec::<$inner_type, Msb0>::new(),
bitvec![$inner_type, Msb0; 0],
bitvec![$inner_type, Msb0; 1],
bitvec![$inner_type, Msb0; 0, 0],
bitvec![$inner_type, Msb0; 1, 0],
bitvec![$inner_type, Msb0; 0, 1],
bitvec![$inner_type, Msb0; 1, 1],
bitvec![$inner_type, Msb0; 1, 0, 1],
bitvec![$inner_type, Msb0; 0, 1, 0, 1, 0, 1, 1],
bitvec![$inner_type, Msb0; 0, 1, 0, 1, 0, 1, 1, 0],
bitvec![$inner_type, Msb0; 1, 1, 0, 1, 0, 1, 1, 0, 1],
bitvec![$inner_type, Msb0; 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0],
bitvec![$inner_type, Msb0; 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0],
bitvec![$inner_type, Msb0; 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0],
bitvec![$inner_type, Msb0; 0; 15],
bitvec![$inner_type, Msb0; 1; 16],
bitvec![$inner_type, Msb0; 0; 17],
bitvec![$inner_type, Msb0; 1; 31],
bitvec![$inner_type, Msb0; 0; 32],
bitvec![$inner_type, Msb0; 1; 33],
bitvec![$inner_type, Msb0; 0; 63],
bitvec![$inner_type, Msb0; 1; 64],
bitvec![$inner_type, Msb0; 0; 65],
bitvec![$inner_type, Msb0; 1; MAX_PREALLOCATION * 8 + 1],
bitvec![$inner_type, Msb0; 0; MAX_PREALLOCATION * 9],
bitvec![$inner_type, Msb0; 1; MAX_PREALLOCATION * 32 + 1],
bitvec![$inner_type, Msb0; 0; MAX_PREALLOCATION * 33],
]
)
}
#[test]
fn bitvec_u8() {
for v in &test_data!(u8) {
let encoded = v.encode();
assert_eq!(*v, BitVec::<u8, Msb0>::decode(&mut &encoded[..]).unwrap());
let elements = bitvec::mem::elts::<u8>(v.len());
let compact_len = Compact::compact_len(&(v.len() as u32));
assert_eq!(compact_len + elements, encoded.len(), "{}", v);
}
}
#[test]
fn bitvec_u16() {
for v in &test_data!(u16) {
let encoded = v.encode();
assert_eq!(*v, BitVec::<u16, Msb0>::decode(&mut &encoded[..]).unwrap());
let elements = bitvec::mem::elts::<u16>(v.len());
let compact_len = Compact::compact_len(&(v.len() as u32));
assert_eq!(compact_len + elements * 2, encoded.len(), "{}", v);
}
}
#[test]
fn bitvec_u32() {
for v in &test_data!(u32) {
let encoded = v.encode();
assert_eq!(*v, BitVec::<u32, Msb0>::decode(&mut &encoded[..]).unwrap());
let elements = bitvec::mem::elts::<u32>(v.len());
let compact_len = Compact::compact_len(&(v.len() as u32));
assert_eq!(compact_len + elements * 4, encoded.len(), "{}", v);
}
}
#[test]
fn bitvec_u64() {
for v in &test_data!(u64) {
let encoded = v.encode();
assert_eq!(*v, BitVec::<u64, Msb0>::decode(&mut &encoded[..]).unwrap());
let elements = bitvec::mem::elts::<u64>(v.len());
let compact_len = Compact::compact_len(&(v.len() as u32));
assert_eq!(compact_len + elements * 8, encoded.len(), "{}", v);
}
}
#[test]
fn bitslice() {
let data: &[u8] = &[0x69];
let slice = BitSlice::<u8, Msb0>::from_slice(data);
let encoded = slice.encode();
let decoded = BitVec::<u8, Msb0>::decode(&mut &encoded[..]).unwrap();
assert_eq!(slice, decoded.as_bitslice());
}
#[test]
fn bitbox() {
let data: &[u8] = &[5, 10];
let slice = BitSlice::<u8, Msb0>::from_slice(data);
let bb = BitBox::<u8, Msb0>::from_bitslice(slice);
let encoded = bb.encode();
let decoded = BitBox::<u8, Msb0>::decode(&mut &encoded[..]).unwrap();
assert_eq!(bb, decoded);
}
#[test]
fn bitvec_u8_encodes_as_expected() {
let cases = vec![
(bitvec![u8, Lsb0; 0, 0, 1, 1].encode(), (Compact(4u32), 0b00001100u8).encode()),
(bitvec![u8, Lsb0; 0, 1, 1, 1].encode(), (Compact(4u32), 0b00001110u8).encode()),
(bitvec![u8, Lsb0; 1, 1, 1, 1].encode(), (Compact(4u32), 0b00001111u8).encode()),
(bitvec![u8, Lsb0; 1, 1, 1, 1, 1].encode(), (Compact(5u32), 0b00011111u8).encode()),
(bitvec![u8, Lsb0; 1, 1, 1, 1, 1, 0].encode(), (Compact(6u32), 0b00011111u8).encode()),
(bitvec![u8, Lsb0; 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1].encode(), (Compact(12u32), 0b00011111u8, 0b00001011u8).encode()),
];
for (idx, (actual, expected)) in cases.into_iter().enumerate() {
assert_eq!(actual, expected, "case at index {} failed; encodings differ", idx);
}
}
}
|
#[doc = "Register `CMD` reader"]
pub type R = crate::R<CMD_SPEC>;
#[doc = "Register `CMD` writer"]
pub type W = crate::W<CMD_SPEC>;
#[doc = "Field `CMDINDEX` reader - Command index"]
pub type CMDINDEX_R = crate::FieldReader;
#[doc = "Field `CMDINDEX` writer - Command index"]
pub type CMDINDEX_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 6, O>;
#[doc = "Field `WAITRESP` reader - Wait for response bits"]
pub type WAITRESP_R = crate::FieldReader;
#[doc = "Field `WAITRESP` writer - Wait for response bits"]
pub type WAITRESP_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `WAITINT` reader - CPSM waits for interrupt request"]
pub type WAITINT_R = crate::BitReader;
#[doc = "Field `WAITINT` writer - CPSM waits for interrupt request"]
pub type WAITINT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `WAITPEND` reader - CPSM Waits for ends of data transfer (CmdPend internal signal)"]
pub type WAITPEND_R = crate::BitReader;
#[doc = "Field `WAITPEND` writer - CPSM Waits for ends of data transfer (CmdPend internal signal)"]
pub type WAITPEND_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CPSMEN` reader - Command path state machine (CPSM) Enable bit"]
pub type CPSMEN_R = crate::BitReader;
#[doc = "Field `CPSMEN` writer - Command path state machine (CPSM) Enable bit"]
pub type CPSMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SDIOSuspend` reader - SD I/O suspend command"]
pub type SDIOSUSPEND_R = crate::BitReader;
#[doc = "Field `SDIOSuspend` writer - SD I/O suspend command"]
pub type SDIOSUSPEND_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ENCMDcompl` reader - Enable CMD completion"]
pub type ENCMDCOMPL_R = crate::BitReader;
#[doc = "Field `ENCMDcompl` writer - Enable CMD completion"]
pub type ENCMDCOMPL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `nIEN` reader - not Interrupt Enable"]
pub type N_IEN_R = crate::BitReader;
#[doc = "Field `nIEN` writer - not Interrupt Enable"]
pub type N_IEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CE_ATACMD` reader - CE-ATA command"]
pub type CE_ATACMD_R = crate::BitReader;
#[doc = "Field `CE_ATACMD` writer - CE-ATA command"]
pub type CE_ATACMD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
#[doc = "Bits 0:5 - Command index"]
#[inline(always)]
pub fn cmdindex(&self) -> CMDINDEX_R {
CMDINDEX_R::new((self.bits & 0x3f) as u8)
}
#[doc = "Bits 6:7 - Wait for response bits"]
#[inline(always)]
pub fn waitresp(&self) -> WAITRESP_R {
WAITRESP_R::new(((self.bits >> 6) & 3) as u8)
}
#[doc = "Bit 8 - CPSM waits for interrupt request"]
#[inline(always)]
pub fn waitint(&self) -> WAITINT_R {
WAITINT_R::new(((self.bits >> 8) & 1) != 0)
}
#[doc = "Bit 9 - CPSM Waits for ends of data transfer (CmdPend internal signal)"]
#[inline(always)]
pub fn waitpend(&self) -> WAITPEND_R {
WAITPEND_R::new(((self.bits >> 9) & 1) != 0)
}
#[doc = "Bit 10 - Command path state machine (CPSM) Enable bit"]
#[inline(always)]
pub fn cpsmen(&self) -> CPSMEN_R {
CPSMEN_R::new(((self.bits >> 10) & 1) != 0)
}
#[doc = "Bit 11 - SD I/O suspend command"]
#[inline(always)]
pub fn sdiosuspend(&self) -> SDIOSUSPEND_R {
SDIOSUSPEND_R::new(((self.bits >> 11) & 1) != 0)
}
#[doc = "Bit 12 - Enable CMD completion"]
#[inline(always)]
pub fn encmdcompl(&self) -> ENCMDCOMPL_R {
ENCMDCOMPL_R::new(((self.bits >> 12) & 1) != 0)
}
#[doc = "Bit 13 - not Interrupt Enable"]
#[inline(always)]
pub fn n_ien(&self) -> N_IEN_R {
N_IEN_R::new(((self.bits >> 13) & 1) != 0)
}
#[doc = "Bit 14 - CE-ATA command"]
#[inline(always)]
pub fn ce_atacmd(&self) -> CE_ATACMD_R {
CE_ATACMD_R::new(((self.bits >> 14) & 1) != 0)
}
}
impl W {
#[doc = "Bits 0:5 - Command index"]
#[inline(always)]
#[must_use]
pub fn cmdindex(&mut self) -> CMDINDEX_W<CMD_SPEC, 0> {
CMDINDEX_W::new(self)
}
#[doc = "Bits 6:7 - Wait for response bits"]
#[inline(always)]
#[must_use]
pub fn waitresp(&mut self) -> WAITRESP_W<CMD_SPEC, 6> {
WAITRESP_W::new(self)
}
#[doc = "Bit 8 - CPSM waits for interrupt request"]
#[inline(always)]
#[must_use]
pub fn waitint(&mut self) -> WAITINT_W<CMD_SPEC, 8> {
WAITINT_W::new(self)
}
#[doc = "Bit 9 - CPSM Waits for ends of data transfer (CmdPend internal signal)"]
#[inline(always)]
#[must_use]
pub fn waitpend(&mut self) -> WAITPEND_W<CMD_SPEC, 9> {
WAITPEND_W::new(self)
}
#[doc = "Bit 10 - Command path state machine (CPSM) Enable bit"]
#[inline(always)]
#[must_use]
pub fn cpsmen(&mut self) -> CPSMEN_W<CMD_SPEC, 10> {
CPSMEN_W::new(self)
}
#[doc = "Bit 11 - SD I/O suspend command"]
#[inline(always)]
#[must_use]
pub fn sdiosuspend(&mut self) -> SDIOSUSPEND_W<CMD_SPEC, 11> {
SDIOSUSPEND_W::new(self)
}
#[doc = "Bit 12 - Enable CMD completion"]
#[inline(always)]
#[must_use]
pub fn encmdcompl(&mut self) -> ENCMDCOMPL_W<CMD_SPEC, 12> {
ENCMDCOMPL_W::new(self)
}
#[doc = "Bit 13 - not Interrupt Enable"]
#[inline(always)]
#[must_use]
pub fn n_ien(&mut self) -> N_IEN_W<CMD_SPEC, 13> {
N_IEN_W::new(self)
}
#[doc = "Bit 14 - CE-ATA command"]
#[inline(always)]
#[must_use]
pub fn ce_atacmd(&mut self) -> CE_ATACMD_W<CMD_SPEC, 14> {
CE_ATACMD_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "command register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cmd::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cmd::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CMD_SPEC;
impl crate::RegisterSpec for CMD_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`cmd::R`](R) reader structure"]
impl crate::Readable for CMD_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cmd::W`](W) writer structure"]
impl crate::Writable for CMD_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CMD to value 0"]
impl crate::Resettable for CMD_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
extern crate rand;
use self::rand::random;
use std::f32;
use std::iter::Cycle;
use std::slice::Iter;
use synth::module;
// TODO We could try smoothly mixing between the waves.
/// Available oscillator waveforms.
///
/// Discriminant order matters: `Waveform::from_data` maps a control value
/// onto these variants by integer discriminant (Sine = 0 … Noise = 4).
#[derive(Debug, Clone)]
pub enum Waveform {
    /// Pure sine tone.
    Sine,
    /// Pulse wave; high/low split is controlled by the duty-cycle input.
    Square,
    /// Rising ramp from -1 to 1.
    Sawtooth,
    /// Symmetric triangle.
    Triangle,
    /// Uniform random noise in [-1, 1).
    Noise
}
impl Waveform {
    /// Select a waveform from a control value in [0.0, 1.0); values that
    /// scale outside the variant range fall back to a sine wave.
    fn from_data(data: f32) -> Waveform {
        let index = (data * 5.0) as i32;
        if index == Waveform::Sine as i32 {
            Waveform::Sine
        } else if index == Waveform::Square as i32 {
            Waveform::Square
        } else if index == Waveform::Sawtooth as i32 {
            Waveform::Sawtooth
        } else if index == Waveform::Triangle as i32 {
            Waveform::Triangle
        } else if index == Waveform::Noise as i32 {
            Waveform::Noise
        } else {
            Waveform::Sine
        }
    }
    /// Sample this waveform at step `t` for normalized frequency `f`,
    /// frequency-modulation input `fm`, and duty cycle `d` (square only).
    fn oscillate(&self, t: f32, f: f32, fm: f32, d: f32) -> f32 {
        // Phase accumulator: FM input shifts the effective time position.
        let phase = f * (t + 10.0 * fm);
        match *self {
            Waveform::Sine => {
                let omega = 2.0 * f32::consts::PI;
                f32::sin(phase * omega)
            }
            Waveform::Square => {
                if phase % 1.0 < d {
                    0.5
                } else {
                    -0.5
                }
            }
            Waveform::Sawtooth => 2.0 * (phase % 1.0) - 1.0,
            Waveform::Triangle => {
                let saw = 2.0 * ((2.0 * phase) % 1.0);
                if phase % 1.0 < 0.5 {
                    saw - 1.0
                } else {
                    1.0 - saw
                }
            }
            Waveform::Noise => random::<f32>() * 2.0 - 1.0,
        }
    }
}
pub struct Oscillator {
    // Sample counter, advanced by 1.0 per generated sample.
    t: f32,
    // Sample rate used to normalize frequencies (Hz-valued frequencies are
    // divided by it in `get_freq`).
    rate: f32,
}
impl Oscillator {
    /// Convert a normalized note value (MIDI note / 127) into a frequency
    /// expressed in cycles per sample.
    fn get_freq(note: f32, rate: f32) -> f32 {
        // 69 is MIDI A4; equal temperament doubles pitch every 12 semitones.
        let semitones_from_a4 = (note * 127.0) - 69.0;
        let freq_hz = 440.0 * (2.0 as f32).powf(semitones_from_a4 / 12.0);
        freq_hz / rate
    }
    /// Build an oscillator wrapped in a MISO module for the synth graph.
    pub fn new(rate: f32) -> module::MisoModule<Oscillator> {
        module::MisoModule::new(Oscillator { t: 0.0, rate })
    }
    /// Produce the next sample for the waveform selected by `primary`.
    pub fn oscillate(&mut self, primary: f32, note: f32, fm_in: f32, duty_cycle_in: f32) -> f32 {
        let freq = Oscillator::get_freq(note, self.rate);
        let sample = Waveform::from_data(primary).oscillate(self.t, freq, fm_in, duty_cycle_in);
        // NOTE(review): `t` is an f32 counter and loses integer precision past
        // 2^24 samples (~6 min at 48 kHz) — confirm whether drift matters here.
        self.t += 1.0;
        sample
    }
}
impl module::MisoWorker for Oscillator {
    /// Declare the oscillator's four inputs and their default values.
    fn get_data(&self) -> Vec<module::DataIn> {
        let defaults = [
            ("primary", 0.0),
            ("freq_in", 0.0),
            ("fm_in", 0.0),
            ("duty_cycle_in", 0.5),
        ];
        defaults
            .iter()
            .map(|&(name, value)| module::DataIn::new(String::from(name), value))
            .collect()
    }

    /// Feed the resolved input values straight into `oscillate`.
    fn extract(&mut self, vals: &[f32]) -> f32 {
        self.oscillate(vals[0], vals[1], vals[2], vals[3])
    }
}
|
// vim: shiftwidth=2
use std::fs::{File, canonicalize, read_to_string};
use std::io::{self, BufRead};
use std::num::ParseIntError;
use std::path::{Path, PathBuf};
use std::collections::HashSet;
use crate::key_codes::KeyCode;
/// Print every detected keyboard as "name: /dev/input/eventN", one per line.
pub fn list_keyboards_to_stdout() -> io::Result<()> {
    let keyboards = list_keyboards()?;
    for keyboard in &keyboards {
        println!("{}: {}", keyboard.name, keyboard.dev_path.to_string_lossy());
    }
    Ok(())
}
// A keyboard as parsed from /proc/bus/input/devices, before resolving the
// sysfs path into a /dev/input node.
struct ExtractedProcBusKeyboard {
    sysfs_path: String, // "S: Sysfs=" value, relative to /sys
    name: String        // "N: Name=" value with surrounding quotes stripped
}
/// A detected keyboard with its resolved /dev/input event-node path.
pub struct ExtractedKeyboard {
    pub dev_path: PathBuf, // e.g. /dev/input/event3
    pub name: String       // human-readable device name
}
/// Parse a kernel bitmask string (space-separated 64-bit hex tokens, most
/// significant token first) into the set of set-bit indices.
///
/// Tokens are scanned right-to-left so token k contributes bits k*64..k*64+63.
///
/// # Errors
/// Returns the `ParseIntError` of the first token that is not valid hex.
fn parse_mask_hex(hex: &str) -> Result<HashSet<i32>, ParseIntError> {
    let mut res = HashSet::new();
    for (token_index, token) in hex.rsplit(' ').enumerate() {
        let num = u64::from_str_radix(token, 16)?;
        // All 64 bits of the token, including bit 63 (the previous
        // `0..63` range silently dropped the top bit of every token).
        for i in 0u8..64u8 {
            let mask = 1u64 << i;
            if (num & mask) != 0 {
                res.insert((i as i32) + (token_index as i32) * 64);
            }
        }
    }
    Ok(res)
}
fn extract_keyboards_from_proc_bus_input_devices(proc_bus_input_devices: &str) -> Vec<ExtractedProcBusKeyboard> {
let mut res = Vec::new();
let lines = proc_bus_input_devices.split('\n');
let mut working_sysfs_path = Box::new(None);
let mut working_name = Box::new(None);
let mut working_ev_mask = Box::new(None);
for line in lines {
if line.starts_with("I:") {
*working_sysfs_path = None;
*working_name = None;
*working_ev_mask = None;
}
else if line.starts_with("S: Sysfs=") {
let new_sysfs_path = line[9..].to_string();
*working_sysfs_path = Some(new_sysfs_path);
}
else if line.starts_with("N: Name=\"") {
let mut name = line[9..].to_string();
name = name.trim_end().to_string();
if name.ends_with('"') {
name = name[..name.len()-1].to_string();
}
*working_name = Some(name);
}
else if line.starts_with("B: EV=") {
*working_ev_mask = Some(line[6..].to_string());
}
else if line.starts_with("B: KEY=") {
let mut num_keys = 0;
for c in line[7..].chars() {
num_keys += match c {
'0' => 0, '1' => 1, '2' => 1, '3' => 2,
'4' => 1, '5' => 2, '6' => 2, '7' => 3,
'8' => 1, '9' => 2, 'a' => 2, 'b' => 3,
'c' => 2, 'd' => 3, 'e' => 3, 'f' => 4,
_ => 0
}
}
let key_set = parse_mask_hex(&line[7..]).unwrap_or(HashSet::new());
let ev_set = match &*working_ev_mask {
None => HashSet::new(),
Some(mask_hex) => {
parse_mask_hex(mask_hex.as_str()).unwrap_or(HashSet::new())
}
};
let num_normal_keys =
(key_set.contains(&(KeyCode::A as i32)) as i32)
+ (key_set.contains(&(KeyCode::B as i32)) as i32)
+ (key_set.contains(&(KeyCode::C as i32)) as i32)
+ (key_set.contains(&(KeyCode::SPACE as i32)) as i32)
+ (key_set.contains(&(KeyCode::LEFTSHIFT as i32)) as i32)
+ (key_set.contains(&(KeyCode::RIGHTSHIFT as i32)) as i32)
+ (key_set.contains(&(KeyCode::BACKSPACE as i32)) as i32)
+ (key_set.contains(&(KeyCode::ENTER as i32)) as i32)
+ (key_set.contains(&(KeyCode::ESC as i32)) as i32)
+ (key_set.contains(&(KeyCode::PAUSE as i32)) as i32)
;
let name = match &*working_name {
None => "".to_string(),
Some(name) => name.clone()
};
let has_scroll_down = key_set.contains(&(KeyCode::SCROLLDOWN as i32));
let lacks_leds = !ev_set.contains(&0x11);
let has_mouse_in_name = name.contains("Mouse");
let mousey = (has_scroll_down as i32) + (lacks_leds as i32) + (has_mouse_in_name as i32) >= 2;
let has_rel_motion = ev_set.contains(&0x2);
// Heuristic for what is a keyboard
if num_keys >= 20 && num_normal_keys >= 3 && !has_rel_motion && !mousey {
match &*working_sysfs_path {
None => (),
Some(p) => {
res.push(ExtractedProcBusKeyboard {
sysfs_path: p.to_string(),
name
});
}
}
}
}
}
res
}
/// Detect keyboards from /proc/bus/input/devices and resolve each to its
/// /dev/input event node, skipping virtual devices.
pub fn list_keyboards() -> io::Result<Vec<ExtractedKeyboard>> {
    let devices_text = read_to_string("/proc/bus/input/devices")?;
    let mut res = Vec::new();
    for dev in extract_keyboards_from_proc_bus_input_devices(&devices_text) {
        let sysfs = dev.sysfs_path;
        // Virtual devices (uinput etc.) are never physical keyboards.
        if sysfs.starts_with("/devices/virtual") {
            continue;
        }
        if let Some(dev_path) = dev_path_for_sysfs_name(&sysfs)? {
            res.push(ExtractedKeyboard { dev_path, name: dev.name });
        }
    }
    Ok(res)
}
/// Resolve a sysfs device path (as found in /proc/bus/input/devices, rooted
/// at /sys) to its /dev/input event node by reading the eventN/uevent file.
///
/// Takes `&str` instead of `&String` (callers passing `&String` still work
/// via deref coercion).
///
/// # Errors
/// Propagates I/O errors from reading the sysfs directory or uevent files.
fn dev_path_for_sysfs_name(sysfs_name: &str) -> io::Result<Option<PathBuf>> {
    // sysfs_name starts with '/', so string concatenation (not PathBuf::push,
    // which would replace the path on an absolute component) is intended.
    let sysfs_path = format!("/sys{}", sysfs_name);
    for entry in Path::new(&sysfs_path).read_dir()? {
        let entry = entry?;
        let path = entry.path();
        let is_event_node = path
            .file_name()
            .map_or(false, |n| n.to_string_lossy().starts_with("event"));
        if !is_event_node {
            continue;
        }
        let uevent_path = path.join("uevent");
        for line in io::BufReader::new(File::open(uevent_path)?).lines() {
            let line = line?;
            // DEVNAME is relative to /dev, e.g. "input/event3".
            if line.starts_with("DEVNAME=") {
                let dev_name = &line[8..];
                return Ok(Some(Path::new("/dev").join(dev_name)));
            }
        }
    }
    Ok(None)
}
/// Keep only those entries of `devices` whose canonical path matches a
/// detected keyboard's canonical device path.
pub fn filter_keyboards<'a>(devices: &Vec<&'a str>) -> io::Result<Vec<&'a str>> {
    let all_keyboards = list_keyboards()?;
    // Canonicalize keyboard paths so symlinked /dev entries still match.
    let mut canonical_set: HashSet<String> = HashSet::new();
    for keyboard in all_keyboards {
        if let Ok(canonical) = canonicalize(keyboard.dev_path) {
            if let Some(text) = canonical.to_str() {
                canonical_set.insert(text.to_string());
            }
        }
    }
    let mut res = Vec::new();
    for s in devices {
        // Devices that fail to canonicalize or aren't valid UTF-8 are skipped.
        if let Ok(canonical) = canonicalize(Path::new(s)) {
            if let Some(text) = canonical.to_str() {
                if canonical_set.contains(text) {
                    res.push(*s)
                }
            }
        }
    }
    Ok(res)
}
#[cfg(test)]
mod tests {
    use crate::example_hardware;
    use super::*;

    // Single-token mask: bits 0x00, 0x01, 0x04, 0x11, 0x14 are set in 0x120013.
    #[test]
    fn test_parse_mask() {
        let res = parse_mask_hex("120013").unwrap();
        for item in &res {
            println!(" * {:x}", item)
        }
        assert_eq!(res.len(), 5);
        for expected in [0x00, 0x01, 0x04, 0x11, 0x14].iter() {
            assert!(res.contains(expected));
        }
    }

    // Two tokens: the right token is bits 0..63, the left one bits 64..127.
    #[test]
    fn test_parse_mask_2() {
        let res = parse_mask_hex("1 1").unwrap();
        for item in &res {
            println!(" * {:x}", item)
        }
        assert_eq!(res.len(), 2);
        for expected in [0, 64].iter() {
            assert!(res.contains(expected));
        }
    }

    // A gaming mouse exposes many key bits; make sure the heuristic still
    // reports only the real keyboard.
    #[test]
    fn test_gaming_mouse_exclusion() {
        let text = example_hardware::GAMING_MOUSE_SETUP_1;
        let keyboards = extract_keyboards_from_proc_bus_input_devices(text);
        println!("Found:");
        for keyboard in &keyboards {
            println!(" * {}", keyboard.name);
        }
        println!("");
        let desired_name_set: HashSet<String> =
            ["AT Translated Set 2 keyboard".to_string()].iter().cloned().collect();
        let actual_name_set: HashSet<String> =
            keyboards.iter().map(|k| k.name.clone()).collect();
        for actual_name in &actual_name_set {
            if !desired_name_set.contains(actual_name) {
                panic!("Found spurious {}", actual_name);
            }
        }
        for desired_name in &desired_name_set {
            if !actual_name_set.contains(desired_name) {
                panic!("Failed to detect {}", desired_name);
            }
        }
    }
}
|
use super::point::Point;
use super::ray::Ray;
use super::vector::Vector;
/// A pinhole camera defined by an orthonormal-ish basis (u right, v up,
/// n backward) and an image plane `distance` units along -n.
pub struct Camera {
    location: Point, // eye position
    u: Vector,       // right axis, derived as v x n (normalized)
    v: Vector,       // up axis
    n: Vector,       // backward axis; viewing direction is -n
    distance: f64,   // eye-to-image-plane distance
    width: u32,      // image width in pixels
    height: u32      // image height in pixels
}
impl Camera {
    /// Build a camera at `location` with up-vector `v` and backward axis `n`;
    /// the right axis `u` is derived from their cross product.
    pub fn new(location: Point, v: Vector, n: Vector, distance: f64, width: u32, height: u32) -> Camera {
        let u = v.cross(&n).normalize();
        Camera { location, u, v, n, distance, width, height }
    }

    /// Ray from the eye through the center of pixel (x, y); (0, 0) is the
    /// top-left pixel, and the +0.5 offsets target pixel centers.
    pub fn camera_ray(&self, x: u32, y: u32) -> Ray {
        let horizontal = x as f64 - (self.width as f64 / 2.0) + 0.5;
        let vertical = (self.height as f64 / 2.0) - y as f64 - 0.5;
        let direction = self.u.scale(horizontal) +
                        self.v.scale(vertical) +
                        self.n.scale(-self.distance);
        Ray::new(self.location.clone(), direction.normalize())
    }
}
#[cfg(test)]
mod tests {
    use super::Camera;
    use super::super::point::Point;
    use super::super::vector::Vector;

    // Top-left pixel of a 640x480 view: direction is half-extents offset by
    // the 0.5 pixel-center shift, pushed back by the focal distance.
    #[test]
    fn test_camera_ray() {
        let camera = Camera::new(
            Point::new(0.0, 0.0, 100.0),
            Vector::new(0.0, 1.0, 0.0),
            Vector::new(0.0, 0.0, 1.0),
            100.0,
            640,
            480
        );
        let ray = camera.camera_ray(0, 0);
        let expected_direction = Vector::new(-319.5, 239.5, -100.0).normalize();
        assert_eq!(ray.origin, camera.location);
        assert_eq!(ray.direction, expected_direction);
    }
}
|
use crate::ast::*;
use crate::span::Spanned;
use crate::value::Value;
use crate::Pos;
use pest::error::LineColLocation;
use pest::iterators::Pair;
use pest::Parser;
use std::collections::BTreeMap;
use std::fmt;
// pest-generated parser driven by the grammar in query.pest; the derive also
// generates the `Rule` enum used throughout this module.
#[derive(Parser)]
#[grammar = "query.pest"]
struct QueryParser;
/// Parser error
// Display is implemented manually below, so no #[error] attribute is needed.
#[derive(Error, Debug, PartialEq)]
pub struct Error {
    pub pos: Pos,        // 1-based line/column where parsing failed
    pub message: String, // human-readable description from pest
}
impl fmt::Display for Error {
    // Only the message is displayed; the position is available via `pos`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.message.fmt(f)
    }
}
impl From<pest::error::Error<Rule>> for Error {
    /// Convert a pest error, keeping its message and start position.
    fn from(err: pest::error::Error<Rule>) -> Self {
        // Both location variants carry the start (line, column) first.
        let (line, column) = match err.line_col {
            LineColLocation::Pos(start) => start,
            LineColLocation::Span(start, _) => start,
        };
        Error {
            pos: Pos { line, column },
            message: err.to_string(),
        }
    }
}
/// Parser result, using this module's [`Error`] as the failure type.
pub type Result<T> = std::result::Result<T, Error>;
/// Parse a GraphQL query document into its AST.
///
/// # Errors
/// Returns a positioned [`Error`] when the input does not match the grammar.
pub fn parse_query<T: AsRef<str>>(input: T) -> Result<Document> {
    let document_pair: Pair<Rule> = QueryParser::parse(Rule::document, input.as_ref())?
        .next()
        .unwrap();
    let mut definitions = Vec::new();
    for pair in document_pair.into_inner() {
        // Map each top-level rule to a definition; EOI produces nothing.
        let definition = match pair.as_rule() {
            Rule::named_operation_definition => {
                Some(parse_named_operation_definition(pair)?.pack(Definition::Operation))
            }
            Rule::selection_set => Some(
                parse_selection_set(pair)?
                    .pack(OperationDefinition::SelectionSet)
                    .pack(Definition::Operation),
            ),
            Rule::fragment_definition => {
                Some(parse_fragment_definition(pair)?.pack(Definition::Fragment))
            }
            Rule::EOI => None,
            _ => unreachable!(),
        };
        if let Some(definition) = definition {
            definitions.push(definition);
        }
    }
    Ok(Document { definitions })
}
/// Parse a `query` / `mutation` / `subscription` operation into the
/// matching `OperationDefinition` variant.
fn parse_named_operation_definition(pair: Pair<Rule>) -> Result<Spanned<OperationDefinition>> {
    // Local tag for the operation keyword; resolved to a concrete AST type
    // only after all child pairs are collected.
    enum OperationType {
        Query,
        Mutation,
        Subscription,
    }
    let span = pair.as_span();
    // GraphQL default: an operation without a keyword is a query.
    let mut operation_type = OperationType::Query;
    let mut name = None;
    let mut variable_definitions = None;
    let mut directives = None;
    let mut selection_set = None;
    for pair in pair.into_inner() {
        match pair.as_rule() {
            Rule::operation_type => {
                operation_type = match pair.as_str() {
                    "query" => OperationType::Query,
                    "mutation" => OperationType::Mutation,
                    "subscription" => OperationType::Subscription,
                    _ => unreachable!(),
                };
            }
            Rule::name => {
                name = Some(Spanned::new(pair.as_str().to_string(), pair.as_span()));
            }
            Rule::variable_definitions => {
                variable_definitions = Some(parse_variable_definitions(pair)?);
            }
            Rule::directives => {
                directives = Some(parse_directives(pair)?);
            }
            Rule::selection_set => {
                selection_set = Some(parse_selection_set(pair)?);
            }
            _ => unreachable!(),
        }
    }
    // The three arms build structurally identical records but distinct AST
    // types, so they cannot be merged. The selection set is mandatory in the
    // grammar, hence the unwrap().
    Ok(match operation_type {
        OperationType::Query => Spanned::new(
            Query {
                name,
                variable_definitions: variable_definitions.unwrap_or_default(),
                directives: directives.unwrap_or_default(),
                selection_set: selection_set.unwrap(),
            },
            span,
        )
        .pack(OperationDefinition::Query),
        OperationType::Mutation => Spanned::new(
            Mutation {
                name,
                variable_definitions: variable_definitions.unwrap_or_default(),
                directives: directives.unwrap_or_default(),
                selection_set: selection_set.unwrap(),
            },
            span,
        )
        .pack(OperationDefinition::Mutation),
        OperationType::Subscription => Spanned::new(
            Subscription {
                name,
                variable_definitions: variable_definitions.unwrap_or_default(),
                directives: directives.unwrap_or_default(),
                selection_set: selection_set.unwrap(),
            },
            span,
        )
        .pack(OperationDefinition::Subscription),
    })
}
/// Parse the value inside a `default_value` rule.
fn parse_default_value(pair: Pair<Rule>) -> Result<Value> {
    // The grammar guarantees exactly one inner `value` pair.
    let mut inner = pair.into_inner();
    match inner.next() {
        Some(value_pair) if value_pair.as_rule() == Rule::value => parse_value(value_pair),
        _ => unreachable!(),
    }
}
/// Parse a type reference, recursing through list / non-null wrappers.
fn parse_type(pair: Pair<Rule>) -> Result<Type> {
    let inner = pair.into_inner().next().unwrap();
    Ok(match inner.as_rule() {
        Rule::nonnull_type => Type::NonNull(Box::new(parse_type(inner)?)),
        Rule::list_type => Type::List(Box::new(parse_type(inner)?)),
        Rule::name => Type::Named(inner.as_str().to_string()),
        Rule::type_ => return parse_type(inner),
        _ => unreachable!(),
    })
}
/// Parse one `$var: Type = default` declaration.
fn parse_variable_definition(pair: Pair<Rule>) -> Result<Spanned<VariableDefinition>> {
    let definition_span = pair.as_span();
    let mut variable = None;
    let mut ty = None;
    let mut default_value = None;
    for part in pair.into_inner() {
        let part_span = part.as_span();
        match part.as_rule() {
            Rule::variable => variable = Some(parse_variable(part)?),
            Rule::type_ => ty = Some(Spanned::new(parse_type(part)?, part_span)),
            Rule::default_value => {
                default_value = Some(Spanned::new(parse_default_value(part)?, part_span))
            }
            _ => unreachable!(),
        }
    }
    // Variable and type are mandatory per the grammar.
    Ok(Spanned::new(
        VariableDefinition {
            name: variable.unwrap(),
            var_type: ty.unwrap(),
            default_value,
        },
        definition_span,
    ))
}
/// Parse the parenthesized variable-definition list of an operation.
fn parse_variable_definitions(pair: Pair<Rule>) -> Result<Vec<Spanned<VariableDefinition>>> {
    pair.into_inner()
        .map(|def| match def.as_rule() {
            Rule::variable_definition => parse_variable_definition(def),
            _ => unreachable!(),
        })
        .collect()
}
/// Parse a single `@name(args)` directive.
fn parse_directive(pair: Pair<Rule>) -> Result<Spanned<Directive>> {
    let directive_span = pair.as_span();
    let mut name = None;
    let mut arguments = None;
    for part in pair.into_inner() {
        match part.as_rule() {
            Rule::name => name = Some(Spanned::new(part.as_str().to_string(), part.as_span())),
            Rule::arguments => arguments = Some(parse_arguments(part)?),
            _ => unreachable!(),
        }
    }
    Ok(Spanned::new(
        Directive {
            name: name.unwrap(),
            arguments: arguments.unwrap_or_default(),
        },
        directive_span,
    ))
}
/// Parse a run of directives into a vector.
fn parse_directives(pair: Pair<Rule>) -> Result<Vec<Spanned<Directive>>> {
    pair.into_inner()
        .map(|part| match part.as_rule() {
            Rule::directive => parse_directive(part),
            _ => unreachable!(),
        })
        .collect()
}
/// Parse a `$name` variable reference, returning the bare name.
fn parse_variable(pair: Pair<Rule>) -> Result<Spanned<String>> {
    match pair.into_inner().find(|p| p.as_rule() == Rule::name) {
        Some(name) => Ok(Spanned::new(name.as_str().to_string(), name.as_span())),
        None => unreachable!(),
    }
}
/// Parse a literal value (object, list, variable, number, string, enum,
/// boolean, or null).
fn parse_value(pair: Pair<Rule>) -> Result<Value> {
    // `value` always wraps exactly one concrete alternative.
    let pair = pair.into_inner().next().unwrap();
    Ok(match pair.as_rule() {
        Rule::object => parse_object_value(pair)?,
        Rule::array => parse_array_value(pair)?,
        Rule::variable => Value::Variable(parse_variable(pair)?.into_inner()),
        // The grammar already validated the lexeme, so parse() cannot fail.
        Rule::float => Value::Float(pair.as_str().parse().unwrap()),
        Rule::int => Value::Int(pair.as_str().parse().unwrap()),
        Rule::string => Value::String({
            // Pass the string's own start position so escape errors point
            // at the literal, not the enclosing value.
            let start_pos = pair.as_span().start_pos().line_col();
            unquote_string(
                pair.as_str(),
                Pos {
                    line: start_pos.0,
                    column: start_pos.1,
                },
            )?
        }),
        // A bare name in value position is an enum literal.
        Rule::name => Value::Enum(pair.as_str().to_string()),
        Rule::boolean => Value::Boolean(match pair.as_str() {
            "true" => true,
            "false" => false,
            _ => unreachable!(),
        }),
        Rule::null => Value::Null,
        _ => unreachable!(),
    })
}
/// Parse one `name: value` entry of an object literal (unspanned form).
fn parse_object_pair(pair: Pair<Rule>) -> Result<(String, Value)> {
    let mut key = None;
    let mut value = None;
    for part in pair.into_inner() {
        match part.as_rule() {
            Rule::name => key = Some(part.as_str().to_string()),
            Rule::value => value = Some(parse_value(part)?),
            _ => unreachable!(),
        }
    }
    Ok((key.unwrap(), value.unwrap()))
}
/// Parse an object literal into a sorted key/value map.
fn parse_object_value(pair: Pair<Rule>) -> Result<Value> {
    let mut map = BTreeMap::new();
    for entry in pair.into_inner() {
        match entry.as_rule() {
            Rule::pair => {
                let (key, value) = parse_object_pair(entry)?;
                map.insert(key, value);
            }
            _ => unreachable!(),
        }
    }
    Ok(Value::Object(map))
}
/// Parse a list literal into `Value::List`.
fn parse_array_value(pair: Pair<Rule>) -> Result<Value> {
    let items = pair
        .into_inner()
        .map(|item| match item.as_rule() {
            Rule::value => parse_value(item),
            _ => unreachable!(),
        })
        .collect::<Result<Vec<_>>>()?;
    Ok(Value::List(items))
}
/// Parse one `name: value` argument entry, keeping spans on both halves.
fn parse_pair(pair: Pair<Rule>) -> Result<(Spanned<String>, Spanned<Value>)> {
    let mut key = None;
    let mut value = None;
    for part in pair.into_inner() {
        let part_span = part.as_span();
        match part.as_rule() {
            Rule::name => key = Some(Spanned::new(part.as_str().to_string(), part_span)),
            Rule::value => value = Some(Spanned::new(parse_value(part)?, part_span)),
            _ => unreachable!(),
        }
    }
    Ok((key.unwrap(), value.unwrap()))
}
/// Parse a parenthesized argument list into spanned name/value pairs.
fn parse_arguments(pair: Pair<Rule>) -> Result<Vec<(Spanned<String>, Spanned<Value>)>> {
    pair.into_inner()
        .map(|part| match part.as_rule() {
            Rule::pair => parse_pair(part),
            _ => unreachable!(),
        })
        .collect()
}
/// Parse a field alias (`alias:`), returning the alias name.
fn parse_alias(pair: Pair<Rule>) -> Result<Spanned<String>> {
    match pair.into_inner().find(|p| p.as_rule() == Rule::name) {
        Some(name) => Ok(Spanned::new(name.as_str().to_string(), name.as_span())),
        None => unreachable!(),
    }
}
/// Parse a field selection: optional alias, name, arguments, directives,
/// and an optional nested selection set.
fn parse_field(pair: Pair<Rule>) -> Result<Spanned<Field>> {
    let field_span = pair.as_span();
    let mut alias = None;
    let mut name = None;
    let mut directives = None;
    let mut arguments = None;
    let mut selection_set = None;
    for part in pair.into_inner() {
        match part.as_rule() {
            Rule::alias => alias = Some(parse_alias(part)?),
            Rule::name => name = Some(Spanned::new(part.as_str().to_string(), part.as_span())),
            Rule::arguments => arguments = Some(parse_arguments(part)?),
            Rule::directives => directives = Some(parse_directives(part)?),
            Rule::selection_set => selection_set = Some(parse_selection_set(part)?),
            _ => unreachable!(),
        }
    }
    // Only the name is mandatory; everything else defaults to empty.
    Ok(Spanned::new(
        Field {
            alias,
            name: name.unwrap(),
            arguments: arguments.unwrap_or_default(),
            directives: directives.unwrap_or_default(),
            selection_set: selection_set.unwrap_or_default(),
        },
        field_span,
    ))
}
/// Parse a `...FragmentName` spread with optional directives.
fn parse_fragment_spread(pair: Pair<Rule>) -> Result<Spanned<FragmentSpread>> {
    let spread_span = pair.as_span();
    let mut fragment_name = None;
    let mut directives = None;
    for part in pair.into_inner() {
        match part.as_rule() {
            Rule::name => {
                fragment_name = Some(Spanned::new(part.as_str().to_string(), part.as_span()))
            }
            Rule::directives => directives = Some(parse_directives(part)?),
            _ => unreachable!(),
        }
    }
    Ok(Spanned::new(
        FragmentSpread {
            fragment_name: fragment_name.unwrap(),
            directives: directives.unwrap_or_default(),
        },
        spread_span,
    ))
}
/// Parse an `on TypeName` condition for fragments.
fn parse_type_condition(pair: Pair<Rule>) -> Result<Spanned<TypeCondition>> {
    match pair.into_inner().find(|p| p.as_rule() == Rule::name) {
        Some(name) => {
            let name_span = name.as_span();
            Ok(Spanned::new(
                TypeCondition::On(Spanned::new(name.as_str().to_string(), name_span.clone())),
                name_span,
            ))
        }
        None => unreachable!(),
    }
}
/// Parse an inline fragment: `... on Type? @dirs? { selections }`.
fn parse_inline_fragment(pair: Pair<Rule>) -> Result<Spanned<InlineFragment>> {
    let fragment_span = pair.as_span();
    let mut type_condition = None;
    let mut directives = None;
    let mut selection_set = None;
    for part in pair.into_inner() {
        match part.as_rule() {
            Rule::type_condition => type_condition = Some(parse_type_condition(part)?),
            Rule::directives => directives = Some(parse_directives(part)?),
            Rule::selection_set => selection_set = Some(parse_selection_set(part)?),
            _ => unreachable!(),
        }
    }
    // The selection set is mandatory; the type condition is not.
    Ok(Spanned::new(
        InlineFragment {
            type_condition,
            directives: directives.unwrap_or_default(),
            selection_set: selection_set.unwrap(),
        },
        fragment_span,
    ))
}
/// Parse a `{ ... }` selection set into fields, spreads, and inline
/// fragments.
fn parse_selection_set(pair: Pair<Rule>) -> Result<Spanned<SelectionSet>> {
    let set_span = pair.as_span();
    let mut items = Vec::new();
    // Each child is a `selection` wrapper; flatten to the concrete variant.
    for selection in pair.into_inner().flat_map(|p| p.into_inner()) {
        let item = match selection.as_rule() {
            Rule::field => parse_field(selection)?.pack(Selection::Field),
            Rule::fragment_spread => {
                parse_fragment_spread(selection)?.pack(Selection::FragmentSpread)
            }
            Rule::inline_fragment => {
                parse_inline_fragment(selection)?.pack(Selection::InlineFragment)
            }
            _ => unreachable!(),
        };
        items.push(item);
    }
    Ok(Spanned::new(SelectionSet { items }, set_span))
}
/// Parse `fragment Name on Type @dirs? { selections }`.
fn parse_fragment_definition(pair: Pair<Rule>) -> Result<Spanned<FragmentDefinition>> {
    let fragment_span = pair.as_span();
    let mut name = None;
    let mut type_condition = None;
    let mut directives = None;
    let mut selection_set = None;
    for part in pair.into_inner() {
        match part.as_rule() {
            Rule::name => name = Some(Spanned::new(part.as_str().to_string(), part.as_span())),
            Rule::type_condition => type_condition = Some(parse_type_condition(part)?),
            Rule::directives => directives = Some(parse_directives(part)?),
            Rule::selection_set => selection_set = Some(parse_selection_set(part)?),
            _ => unreachable!(),
        }
    }
    // Name, type condition, and selection set are all mandatory.
    Ok(Spanned::new(
        FragmentDefinition {
            name: name.unwrap(),
            type_condition: type_condition.unwrap(),
            directives: directives.unwrap_or_default(),
            selection_set: selection_set.unwrap(),
        },
        fragment_span,
    ))
}
fn unquote_string(s: &str, pos: Pos) -> Result<String> {
let mut res = String::with_capacity(s.len());
debug_assert!(s.starts_with('"') && s.ends_with('"'));
let mut chars = s[1..s.len() - 1].chars();
let mut temp_code_point = String::with_capacity(4);
while let Some(c) = chars.next() {
match c {
'\\' => {
match chars.next().expect("slash cant be at the end") {
c @ '"' | c @ '\\' | c @ '/' => res.push(c),
'b' => res.push('\u{0010}'),
'f' => res.push('\u{000C}'),
'n' => res.push('\n'),
'r' => res.push('\r'),
't' => res.push('\t'),
'u' => {
temp_code_point.clear();
for _ in 0..4 {
match chars.next() {
Some(inner_c) => temp_code_point.push(inner_c),
None => {
return Err(Error {
pos,
message: format!(
"\\u must have 4 characters after it, only found '{}'",
temp_code_point
),
});
}
}
}
// convert our hex string into a u32, then convert that into a char
match u32::from_str_radix(&temp_code_point, 16).map(std::char::from_u32) {
Ok(Some(unicode_char)) => res.push(unicode_char),
_ => {
return Err(Error {
pos,
message: format!(
"{} is not a valid unicode code point",
temp_code_point
),
});
}
}
}
c => {
return Err(Error {
pos,
message: format!("bad escaped char {:?}", c),
});
}
}
}
c => res.push(c),
}
}
Ok(res)
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;

    // Every fixture in tests/queries must at least tokenize per the grammar.
    #[test]
    fn test_parser() {
        for entry in fs::read_dir("tests/queries").unwrap().flatten() {
            QueryParser::parse(Rule::document, &fs::read_to_string(entry.path()).unwrap())
                .unwrap();
        }
    }

    // ... and must also round-trip through the full AST builder.
    #[test]
    fn test_parser_ast() {
        for entry in fs::read_dir("tests/queries").unwrap().flatten() {
            parse_query(fs::read_to_string(entry.path()).unwrap()).unwrap();
        }
    }
}
|
#[cfg(test)]
#[path = "../../../tests/unit/format/problem/reader_test.rs"]
mod reader_test;
#[path = "./job_reader.rs"]
mod job_reader;
#[path = "./fleet_reader.rs"]
mod fleet_reader;
#[path = "./objective_reader.rs"]
mod objective_reader;
use self::fleet_reader::{create_transport_costs, read_fleet, read_travel_limits};
use self::job_reader::{read_jobs_with_extra_locks, read_locks};
use self::objective_reader::create_objective;
use crate::constraints::*;
use crate::extensions::{get_route_modifier, OnlyVehicleActivityCost};
use crate::format::coord_index::CoordIndex;
use crate::format::problem::{deserialize_matrix, deserialize_problem, get_job_tasks, Matrix};
use crate::format::*;
use crate::utils::get_approx_transportation;
use crate::validation::ValidationContext;
use crate::{get_unique_locations, parse_time};
use hashbrown::HashSet;
use std::cmp::Ordering::Equal;
use std::io::{BufReader, Read};
use std::sync::Arc;
use vrp_core::construction::constraints::*;
use vrp_core::models::common::{MultiDimLoad, SingleDimLoad, TimeWindow, ValueDimension};
use vrp_core::models::problem::{ActivityCost, Fleet, TransportCost};
use vrp_core::models::{Extras, Lock, Problem};
use vrp_core::utils::{compare_floats, DefaultRandom, Random};
// Alias distinguishing the serialized (API) problem from the core model.
pub type ApiProblem = crate::format::problem::Problem;
/// Reads specific problem definition from various sources.
pub trait PragmaticProblem {
    /// Reads problem defined in pragmatic format.
    fn read_pragmatic(self) -> Result<Problem, Vec<FormatError>>;
}
impl<R: Read> PragmaticProblem for (BufReader<R>, Vec<BufReader<R>>) {
fn read_pragmatic(self) -> Result<Problem, Vec<FormatError>> {
let problem = deserialize_problem(self.0)?;
let mut matrices = vec![];
for matrix in self.1 {
matrices.push(deserialize_matrix(matrix)?);
}
map_to_problem_with_matrices(problem, matrices)
}
}
impl<R: Read> PragmaticProblem for BufReader<R> {
    // No matrices supplied: fall back to approximated transport data.
    fn read_pragmatic(self) -> Result<Problem, Vec<FormatError>> {
        let problem = deserialize_problem(self)?;
        map_to_problem_with_approx(problem)
    }
}
impl PragmaticProblem for (String, Vec<String>) {
    /// Read a problem plus matrices from in-memory JSON strings.
    fn read_pragmatic(self) -> Result<Problem, Vec<FormatError>> {
        let (problem_text, matrix_texts) = self;
        let problem = deserialize_problem(BufReader::new(problem_text.as_bytes()))?;
        // Short-circuits on the first matrix that fails to deserialize.
        let matrices = matrix_texts
            .iter()
            .map(|matrix| deserialize_matrix(BufReader::new(matrix.as_bytes())))
            .collect::<Result<Vec<_>, _>>()?;
        map_to_problem_with_matrices(problem, matrices)
    }
}
impl PragmaticProblem for String {
    // String without matrices: use approximated transport data.
    fn read_pragmatic(self) -> Result<Problem, Vec<FormatError>> {
        let problem = deserialize_problem(BufReader::new(self.as_bytes()))?;
        map_to_problem_with_approx(problem)
    }
}
impl PragmaticProblem for (ApiProblem, Vec<Matrix>) {
    // Already-deserialized problem and matrices: map directly.
    fn read_pragmatic(self) -> Result<Problem, Vec<FormatError>> {
        map_to_problem_with_matrices(self.0, self.1)
    }
}
impl PragmaticProblem for ApiProblem {
    // Already-deserialized problem without matrices.
    fn read_pragmatic(self) -> Result<Problem, Vec<FormatError>> {
        map_to_problem_with_approx(self)
    }
}
impl PragmaticProblem for (ApiProblem, Option<Vec<Matrix>>) {
    // Dispatch on matrix presence to one of the impls above.
    fn read_pragmatic(self) -> Result<Problem, Vec<FormatError>> {
        if let Some(matrices) = self.1 {
            (self.0, matrices).read_pragmatic()
        } else {
            self.0.read_pragmatic()
        }
    }
}
/// Feature flags detected once from the API problem so later stages can
/// enable only the constraint modules they need.
pub struct ProblemProperties {
    has_multi_dimen_capacity: bool,   // any capacity/demand with >1 dimension
    has_breaks: bool,                 // any vehicle shift defines breaks
    has_skills: bool,                 // any job declares required skills
    has_unreachable_locations: bool,  // any matrix carries error codes
    has_dispatch: bool,               // any shift defines dispatch points
    has_reloads: bool,                // any shift defines reload points
    has_order: bool,                  // any job task has a positive order
    has_area_limits: bool,            // any vehicle restricts allowed areas
    has_tour_size_limits: bool,       // any vehicle limits activities per tour
    max_job_value: Option<f64>,       // largest positive job value, if any
}
/// Build one approximated routing matrix per fleet profile when no real
/// matrices are supplied.
fn create_approx_matrices(problem: &ApiProblem) -> Vec<Matrix> {
    const DEFAULT_SPEED: f64 = 10.;
    // get each speed value once
    // (dedup via bit patterns since f64 is not Hash/Eq).
    let speeds = problem
        .fleet
        .profiles
        .iter()
        .map(|profile| profile.speed.unwrap_or(DEFAULT_SPEED))
        .map(|speed| speed.to_bits())
        .collect::<HashSet<u64>>();
    let speeds = speeds.into_iter().map(f64::from_bits).collect::<Vec<_>>();
    let locations = get_unique_locations(&problem);
    // One (durations, distances) pair per distinct speed.
    let approx_data = get_approx_transportation(&locations, speeds.as_slice());
    problem
        .fleet
        .profiles
        .iter()
        .map(move |profile| {
            let speed = profile.speed.unwrap_or(DEFAULT_SPEED);
            // Map the profile back to its deduplicated speed's data slot.
            let idx =
                speeds.iter().position(|s| compare_floats(*s, speed) == Equal).expect("Cannot find profile speed");
            Matrix {
                profile: Some(profile.name.clone()),
                timestamp: None,
                travel_times: approx_data[idx].0.clone(),
                distances: approx_data[idx].1.clone(),
                error_codes: None,
            }
        })
        .collect()
}
// Map an API problem without caller-supplied matrices, approximating
// transport data from coordinates when possible.
fn map_to_problem_with_approx(problem: ApiProblem) -> Result<Problem, Vec<FormatError>> {
    let coord_index = CoordIndex::new(&problem);
    // NOTE(review): `.1` seems to flag index-based (matrix) locations, for
    // which no approximation can be built — confirm against CoordIndex.
    let matrices = if coord_index.get_used_types().1 { vec![] } else { create_approx_matrices(&problem) };
    map_to_problem(problem, matrices, coord_index)
}
// Map an API problem using caller-supplied routing matrices.
fn map_to_problem_with_matrices(problem: ApiProblem, matrices: Vec<Matrix>) -> Result<Problem, Vec<FormatError>> {
    let coord_index = CoordIndex::new(&problem);
    map_to_problem(problem, matrices, coord_index)
}
/// Validate the API problem and assemble the core `Problem` model:
/// transport costs, fleet, jobs, locks, constraint pipeline, objective,
/// and extras. Steps are order-dependent (e.g. jobs need fleet+transport).
fn map_to_problem(
    api_problem: ApiProblem,
    matrices: Vec<Matrix>,
    coord_index: CoordIndex,
) -> Result<Problem, Vec<FormatError>> {
    // Fail fast with format errors before building anything.
    ValidationContext::new(&api_problem, Some(&matrices)).validate()?;
    let problem_props = get_problem_properties(&api_problem, &matrices);
    let coord_index = Arc::new(coord_index);
    let transport = create_transport_costs(&api_problem, &matrices).map_err(|err| {
        vec![FormatError::new(
            "E0002".to_string(),
            "cannot create transport costs".to_string(),
            format!("Check matrix routing data: '{}'", err),
        )]
    })?;
    let activity = Arc::new(OnlyVehicleActivityCost::default());
    let fleet = read_fleet(&api_problem, &problem_props, &coord_index);
    // TODO pass random from outside as there might be need to have it initialized with seed
    // at the moment, this random instance is used only by multi job permutation generator
    let random: Arc<dyn Random + Send + Sync> = Arc::new(DefaultRandom::default());
    let mut job_index = Default::default();
    let (jobs, locks) = read_jobs_with_extra_locks(
        &api_problem,
        &problem_props,
        &coord_index,
        &fleet,
        &transport,
        &mut job_index,
        &random,
    );
    // Combine locks implied by jobs with explicitly defined relation locks.
    let locks = locks.into_iter().chain(read_locks(&api_problem, &job_index).into_iter()).collect::<Vec<_>>();
    // Default limit function imposes no distance/duration limits.
    let limits = read_travel_limits(&api_problem).unwrap_or_else(|| Arc::new(|_| (None, None)));
    let mut constraint = create_constraint_pipeline(
        coord_index.clone(),
        &fleet,
        transport.clone(),
        activity.clone(),
        &problem_props,
        &locks,
        limits,
    );
    // The objective may register additional constraint modules, hence &mut.
    let objective = create_objective(&api_problem, &mut constraint, &problem_props);
    let constraint = Arc::new(constraint);
    let extras = Arc::new(create_extras(constraint.clone(), &problem_props, job_index, coord_index));
    Ok(Problem {
        fleet: Arc::new(fleet),
        jobs: Arc::new(jobs),
        locks,
        constraint,
        activity,
        transport,
        objective,
        extras,
    })
}
/// Assemble the constraint pipeline, adding only the modules that the
/// detected problem properties require.
fn create_constraint_pipeline(
    coord_index: Arc<CoordIndex>,
    fleet: &Fleet,
    transport: Arc<dyn TransportCost + Send + Sync>,
    activity: Arc<dyn ActivityCost + Send + Sync>,
    props: &ProblemProperties,
    locks: &[Arc<Lock>],
    limits: TravelLimitFunc,
) -> ConstraintPipeline {
    let mut constraint = ConstraintPipeline::default();
    // Only needed when some matrix reported unreachable locations.
    if props.has_unreachable_locations {
        constraint.add_module(Box::new(ReachableModule::new(transport.clone(), REACHABLE_CONSTRAINT_CODE)));
    }
    // Time windows and travel limits are always enforced.
    constraint.add_module(Box::new(TransportConstraintModule::new(
        transport.clone(),
        activity.clone(),
        limits,
        TIME_CONSTRAINT_CODE,
        DISTANCE_LIMIT_CONSTRAINT_CODE,
        DURATION_LIMIT_CONSTRAINT_CODE,
    )));
    add_capacity_module(&mut constraint, &props, transport.clone());
    if props.has_breaks {
        constraint.add_module(Box::new(BreakModule::new(transport.clone(), BREAK_CONSTRAINT_CODE)));
    }
    if props.has_skills {
        constraint.add_module(Box::new(SkillsModule::new(SKILL_CONSTRAINT_CODE)));
    }
    if props.has_dispatch {
        constraint.add_module(Box::new(DispatchModule::new(DISPATCH_CONSTRAINT_CODE)));
    }
    if !locks.is_empty() {
        constraint.add_module(Box::new(StrictLockingModule::new(fleet, locks, LOCKING_CONSTRAINT_CODE)));
    }
    if props.has_tour_size_limits {
        add_tour_size_module(&mut constraint)
    }
    if props.has_area_limits {
        add_area_module(&mut constraint, coord_index);
    }
    constraint
}
/// Add the capacity constraint, specialized on load dimensionality and on
/// whether reloads (multi-trip) are in play.
fn add_capacity_module(
    constraint: &mut ConstraintPipeline,
    props: &ProblemProperties,
    transport: Arc<dyn TransportCost + Send + Sync>,
) {
    constraint.add_module(if props.has_reloads {
        // NOTE(review): a reload is triggered once 90% of capacity is
        // consumed — confirm intended threshold.
        let threshold = 0.9;
        if props.has_multi_dimen_capacity {
            Box::new(CapacityConstraintModule::<MultiDimLoad>::new_with_multi_trip(
                transport,
                CAPACITY_CONSTRAINT_CODE,
                Arc::new(ReloadMultiTrip::new(Box::new(move |capacity| *capacity * threshold))),
            ))
        } else {
            Box::new(CapacityConstraintModule::<SingleDimLoad>::new_with_multi_trip(
                transport,
                CAPACITY_CONSTRAINT_CODE,
                Arc::new(ReloadMultiTrip::new(Box::new(move |capacity| *capacity * threshold))),
            ))
        }
    } else if props.has_multi_dimen_capacity {
        Box::new(CapacityConstraintModule::<MultiDimLoad>::new(transport, CAPACITY_CONSTRAINT_CODE))
    } else {
        Box::new(CapacityConstraintModule::<SingleDimLoad>::new(transport, CAPACITY_CONSTRAINT_CODE))
    });
}
/// Add the area constraint: vehicle "areas" dimension vs. actual lat/lng
/// of each visited location (resolved through the coordinate index).
fn add_area_module(constraint: &mut ConstraintPipeline, coord_index: Arc<CoordIndex>) {
    constraint.add_module(Box::new(AreaModule::new(
        Arc::new(|actor| actor.vehicle.dimens.get_value::<Vec<Area>>("areas")),
        Arc::new(move |location| {
            // A missing index entry is a programming error, not user input.
            coord_index
                .get_by_idx(location)
                .map_or_else(|| panic!("cannot find location!"), |location| location.to_lat_lng())
        }),
        AREA_CONSTRAINT_CODE,
    )));
}
/// Add the tour-size constraint driven by the vehicle "tour_size" dimension.
fn add_tour_size_module(constraint: &mut ConstraintPipeline) {
    constraint.add_module(Box::new(TourSizeModule::new(
        Arc::new(|actor| actor.vehicle.dimens.get_value::<usize>("tour_size").cloned()),
        TOUR_SIZE_CONSTRAINT_CODE,
    )));
}
/// Populate the problem's `Extras` map with data later pipeline stages
/// (solution writers, route modifiers) look up by string key.
fn create_extras(
    constraint: Arc<ConstraintPipeline>,
    props: &ProblemProperties,
    job_index: JobIndex,
    coord_index: Arc<CoordIndex>,
) -> Extras {
    let mut extras = Extras::default();
    extras.insert(
        "capacity_type".to_string(),
        Arc::new((if props.has_multi_dimen_capacity { "multi" } else { "single" }).to_string()),
    );
    extras.insert("coord_index".to_owned(), coord_index);
    // Cloned because the index is also moved into the route modifier below.
    extras.insert("job_index".to_owned(), Arc::new(job_index.clone()));
    if props.has_dispatch {
        extras.insert("route_modifier".to_owned(), Arc::new(get_route_modifier(constraint, job_index)));
    }
    extras
}
/// Convert a `[start, end]` pair of time strings into a `TimeWindow`.
/// Panics if the slice does not contain exactly two entries.
fn parse_time_window(tw: &[String]) -> TimeWindow {
    assert_eq!(tw.len(), 2);
    TimeWindow::new(parse_time(&tw[0]), parse_time(&tw[1]))
}
/// Scans the api problem and routing matrices to detect which optional
/// features are actually used, so that only the needed constraint modules
/// and goal components get wired up later.
fn get_problem_properties(api_problem: &ApiProblem, matrices: &[Matrix]) -> ProblemProperties {
    // Shared accessors to keep the feature probes below short.
    let vehicles = || api_problem.fleet.vehicles.iter();
    let jobs = || api_problem.plan.jobs.iter();

    // A matrix carrying error codes marks some locations as unreachable.
    let has_unreachable_locations = matrices.iter().any(|matrix| matrix.error_codes.is_some());

    // Multi-dimensional capacity is used when any vehicle or any task demand
    // has more than one dimension.
    let has_multi_dimen_capacity = vehicles().any(|vehicle| vehicle.capacity.len() > 1)
        || jobs().any(|job| {
            job.pickups
                .iter()
                .chain(job.deliveries.iter())
                .flat_map(|tasks| tasks.iter())
                .any(|task| matches!(&task.demand, Some(demand) if demand.len() > 1))
        });

    let has_breaks = vehicles()
        .flat_map(|vehicle| &vehicle.shifts)
        .any(|shift| matches!(&shift.breaks, Some(breaks) if !breaks.is_empty()));

    let has_skills = jobs().any(|job| job.skills.is_some());

    // Largest strictly positive job value, if any.
    let max_job_value = jobs()
        .filter_map(|job| job.value)
        .filter(|&value| value > 0.)
        .max_by(|a, b| compare_floats(*a, *b));

    let has_dispatch = vehicles()
        .any(|vehicle| vehicle.shifts.iter().any(|s| matches!(&s.dispatch, Some(dispatch) if !dispatch.is_empty())));

    let has_reloads = vehicles()
        .any(|vehicle| vehicle.shifts.iter().any(|s| matches!(&s.reloads, Some(reloads) if !reloads.is_empty())));

    let has_order = jobs()
        .flat_map(get_job_tasks)
        .any(|job_task| job_task.order.map_or(false, |order| order > 0));

    let has_area_limits = vehicles().any(|vehicle| {
        matches!(
            vehicle.limits.as_ref().and_then(|limits| limits.allowed_areas.as_ref()),
            Some(areas) if !areas.is_empty()
        )
    });

    let has_tour_size_limits =
        vehicles().any(|vehicle| matches!(&vehicle.limits, Some(limits) if limits.tour_size.is_some()));

    ProblemProperties {
        has_multi_dimen_capacity,
        has_breaks,
        has_skills,
        has_unreachable_locations,
        has_dispatch,
        has_reloads,
        has_order,
        has_area_limits,
        has_tour_size_limits,
        max_job_value,
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
/// Aggregate error type for all Key Vault client operations in this module.
///
/// Each variant transparently wraps the error type declared in the
/// corresponding operation's module (e.g. `create_key::Error`), so callers
/// can match on the failed operation. Generated code — keep in sync with the
/// operation functions below.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    // Keys and cryptographic operations.
    #[error(transparent)]
    CreateKey(#[from] create_key::Error),
    #[error(transparent)]
    ImportKey(#[from] import_key::Error),
    #[error(transparent)]
    DeleteKey(#[from] delete_key::Error),
    #[error(transparent)]
    GetKey(#[from] get_key::Error),
    #[error(transparent)]
    UpdateKey(#[from] update_key::Error),
    #[error(transparent)]
    GetKeyVersions(#[from] get_key_versions::Error),
    #[error(transparent)]
    GetKeys(#[from] get_keys::Error),
    #[error(transparent)]
    BackupKey(#[from] backup_key::Error),
    #[error(transparent)]
    RestoreKey(#[from] restore_key::Error),
    #[error(transparent)]
    Encrypt(#[from] encrypt::Error),
    #[error(transparent)]
    Decrypt(#[from] decrypt::Error),
    #[error(transparent)]
    Sign(#[from] sign::Error),
    #[error(transparent)]
    Verify(#[from] verify::Error),
    #[error(transparent)]
    WrapKey(#[from] wrap_key::Error),
    #[error(transparent)]
    UnwrapKey(#[from] unwrap_key::Error),
    #[error(transparent)]
    GetDeletedKeys(#[from] get_deleted_keys::Error),
    #[error(transparent)]
    GetDeletedKey(#[from] get_deleted_key::Error),
    #[error(transparent)]
    PurgeDeletedKey(#[from] purge_deleted_key::Error),
    #[error(transparent)]
    RecoverDeletedKey(#[from] recover_deleted_key::Error),
    // Secrets.
    #[error(transparent)]
    SetSecret(#[from] set_secret::Error),
    #[error(transparent)]
    DeleteSecret(#[from] delete_secret::Error),
    #[error(transparent)]
    GetSecret(#[from] get_secret::Error),
    #[error(transparent)]
    UpdateSecret(#[from] update_secret::Error),
    #[error(transparent)]
    GetSecrets(#[from] get_secrets::Error),
    #[error(transparent)]
    GetSecretVersions(#[from] get_secret_versions::Error),
    #[error(transparent)]
    GetDeletedSecrets(#[from] get_deleted_secrets::Error),
    #[error(transparent)]
    GetDeletedSecret(#[from] get_deleted_secret::Error),
    #[error(transparent)]
    PurgeDeletedSecret(#[from] purge_deleted_secret::Error),
    #[error(transparent)]
    RecoverDeletedSecret(#[from] recover_deleted_secret::Error),
    #[error(transparent)]
    BackupSecret(#[from] backup_secret::Error),
    #[error(transparent)]
    RestoreSecret(#[from] restore_secret::Error),
    // Certificates.
    #[error(transparent)]
    GetCertificates(#[from] get_certificates::Error),
    #[error(transparent)]
    DeleteCertificate(#[from] delete_certificate::Error),
    #[error(transparent)]
    GetCertificateContacts(#[from] get_certificate_contacts::Error),
    #[error(transparent)]
    SetCertificateContacts(#[from] set_certificate_contacts::Error),
    #[error(transparent)]
    DeleteCertificateContacts(#[from] delete_certificate_contacts::Error),
    #[error(transparent)]
    GetCertificateIssuers(#[from] get_certificate_issuers::Error),
    #[error(transparent)]
    GetCertificateIssuer(#[from] get_certificate_issuer::Error),
    #[error(transparent)]
    SetCertificateIssuer(#[from] set_certificate_issuer::Error),
    #[error(transparent)]
    UpdateCertificateIssuer(#[from] update_certificate_issuer::Error),
    #[error(transparent)]
    DeleteCertificateIssuer(#[from] delete_certificate_issuer::Error),
    #[error(transparent)]
    CreateCertificate(#[from] create_certificate::Error),
    #[error(transparent)]
    ImportCertificate(#[from] import_certificate::Error),
    #[error(transparent)]
    GetCertificateVersions(#[from] get_certificate_versions::Error),
    #[error(transparent)]
    GetCertificatePolicy(#[from] get_certificate_policy::Error),
    #[error(transparent)]
    UpdateCertificatePolicy(#[from] update_certificate_policy::Error),
    #[error(transparent)]
    GetCertificate(#[from] get_certificate::Error),
    #[error(transparent)]
    UpdateCertificate(#[from] update_certificate::Error),
    #[error(transparent)]
    GetCertificateOperation(#[from] get_certificate_operation::Error),
    #[error(transparent)]
    UpdateCertificateOperation(#[from] update_certificate_operation::Error),
    #[error(transparent)]
    DeleteCertificateOperation(#[from] delete_certificate_operation::Error),
    #[error(transparent)]
    MergeCertificate(#[from] merge_certificate::Error),
    #[error(transparent)]
    GetDeletedCertificates(#[from] get_deleted_certificates::Error),
    #[error(transparent)]
    GetDeletedCertificate(#[from] get_deleted_certificate::Error),
    #[error(transparent)]
    PurgeDeletedCertificate(#[from] purge_deleted_certificate::Error),
    #[error(transparent)]
    RecoverDeletedCertificate(#[from] recover_deleted_certificate::Error),
    // Storage accounts and SAS definitions.
    #[error(transparent)]
    GetStorageAccounts(#[from] get_storage_accounts::Error),
    #[error(transparent)]
    GetStorageAccount(#[from] get_storage_account::Error),
    #[error(transparent)]
    SetStorageAccount(#[from] set_storage_account::Error),
    #[error(transparent)]
    UpdateStorageAccount(#[from] update_storage_account::Error),
    #[error(transparent)]
    DeleteStorageAccount(#[from] delete_storage_account::Error),
    #[error(transparent)]
    RegenerateStorageAccountKey(#[from] regenerate_storage_account_key::Error),
    #[error(transparent)]
    GetSasDefinitions(#[from] get_sas_definitions::Error),
    #[error(transparent)]
    GetSasDefinition(#[from] get_sas_definition::Error),
    #[error(transparent)]
    SetSasDefinition(#[from] set_sas_definition::Error),
    #[error(transparent)]
    UpdateSasDefinition(#[from] update_sas_definition::Error),
    #[error(transparent)]
    DeleteSasDefinition(#[from] delete_sas_definition::Error),
}
/// Sends `POST {base_path}/keys/{key_name}/create` with `parameters` serialized
/// as JSON and deserializes a 200 response into `models::KeyBundle`.
/// Any other status is decoded as `models::KeyVaultError` and returned as
/// `create_key::Error::DefaultResponse`. Generated code (AutoRust).
pub async fn create_key(
    operation_config: &crate::OperationConfig,
    key_name: &str,
    parameters: &models::KeyCreateParameters,
) -> std::result::Result<models::KeyBundle, create_key::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/keys/{}/create", operation_config.base_path(), key_name);
    let mut url = url::Url::parse(url_str).map_err(create_key::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::POST);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(create_key::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(create_key::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(create_key::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(create_key::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyBundle =
                serde_json::from_slice(rsp_body).map_err(|source| create_key::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any non-200 status carries a service error payload.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError =
                serde_json::from_slice(rsp_body).map_err(|source| create_key::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(create_key::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error types for the `create_key` operation.
pub mod create_key {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Sends `PUT {base_path}/keys/{key_name}` with `parameters` serialized as JSON
/// and deserializes a 200 response into `models::KeyBundle`.
/// Any other status is decoded as `models::KeyVaultError` and returned as
/// `import_key::Error::DefaultResponse`. Generated code (AutoRust).
pub async fn import_key(
    operation_config: &crate::OperationConfig,
    key_name: &str,
    parameters: &models::KeyImportParameters,
) -> std::result::Result<models::KeyBundle, import_key::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/keys/{}", operation_config.base_path(), key_name);
    let mut url = url::Url::parse(url_str).map_err(import_key::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(import_key::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(import_key::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(import_key::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(import_key::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyBundle =
                serde_json::from_slice(rsp_body).map_err(|source| import_key::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any non-200 status carries a service error payload.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError =
                serde_json::from_slice(rsp_body).map_err(|source| import_key::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(import_key::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error types for the `import_key` operation.
pub mod import_key {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Sends `DELETE {base_path}/keys/{key_name}` with an empty body and
/// deserializes a 200 response into `models::DeletedKeyBundle`.
/// Any other status is decoded as `models::KeyVaultError` and returned as
/// `delete_key::Error::DefaultResponse`. Generated code (AutoRust).
pub async fn delete_key(
    operation_config: &crate::OperationConfig,
    key_name: &str,
) -> std::result::Result<models::DeletedKeyBundle, delete_key::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/keys/{}", operation_config.base_path(), key_name);
    let mut url = url::Url::parse(url_str).map_err(delete_key::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(delete_key::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(delete_key::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(delete_key::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeletedKeyBundle =
                serde_json::from_slice(rsp_body).map_err(|source| delete_key::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any non-200 status carries a service error payload.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError =
                serde_json::from_slice(rsp_body).map_err(|source| delete_key::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(delete_key::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error types for the `delete_key` operation.
pub mod delete_key {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_key(
operation_config: &crate::OperationConfig,
key_name: &str,
key_version: &str,
) -> std::result::Result<models::KeyBundle, get_key::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/keys/{}/{}", operation_config.base_path(), key_name, key_version);
let mut url = url::Url::parse(url_str).map_err(get_key::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_key::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_key::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_key::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::KeyBundle =
serde_json::from_slice(rsp_body).map_err(|source| get_key::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| get_key::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_key::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `get_key` operation.
pub mod get_key {
    use super::{models, API_VERSION};
    /// Everything that can fail while building, sending or decoding the request.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Sends `PATCH {base_path}/keys/{key_name}/{key_version}` with `parameters`
/// serialized as JSON and deserializes a 200 response into `models::KeyBundle`.
/// Any other status is decoded as `models::KeyVaultError` and returned as
/// `update_key::Error::DefaultResponse`. Generated code (AutoRust).
pub async fn update_key(
    operation_config: &crate::OperationConfig,
    key_name: &str,
    key_version: &str,
    parameters: &models::KeyUpdateParameters,
) -> std::result::Result<models::KeyBundle, update_key::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/keys/{}/{}", operation_config.base_path(), key_name, key_version);
    let mut url = url::Url::parse(url_str).map_err(update_key::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PATCH);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(update_key::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(update_key::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(update_key::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(update_key::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyBundle =
                serde_json::from_slice(rsp_body).map_err(|source| update_key::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any non-200 status carries a service error payload.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError =
                serde_json::from_slice(rsp_body).map_err(|source| update_key::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(update_key::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error types for the `update_key` operation.
pub mod update_key {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Sends `GET {base_path}/keys/{key_name}/versions` (optionally paged via
/// `maxresults`) and deserializes a 200 response into `models::KeyListResult`.
/// Any other status is decoded as `models::KeyVaultError` and returned as
/// `get_key_versions::Error::DefaultResponse`. Generated code (AutoRust).
pub async fn get_key_versions(
    operation_config: &crate::OperationConfig,
    key_name: &str,
    maxresults: Option<i32>,
) -> std::result::Result<models::KeyListResult, get_key_versions::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/keys/{}/versions", operation_config.base_path(), key_name);
    let mut url = url::Url::parse(url_str).map_err(get_key_versions::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_key_versions::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // Optional page-size query parameter.
    if let Some(maxresults) = maxresults {
        url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str());
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get_key_versions::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_key_versions::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyListResult =
                serde_json::from_slice(rsp_body).map_err(|source| get_key_versions::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any non-200 status carries a service error payload.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError =
                serde_json::from_slice(rsp_body).map_err(|source| get_key_versions::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_key_versions::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error types for the `get_key_versions` operation.
pub mod get_key_versions {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Sends `GET {base_path}/keys` (optionally paged via `maxresults`) and
/// deserializes a 200 response into `models::KeyListResult`.
/// Any other status is decoded as `models::KeyVaultError` and returned as
/// `get_keys::Error::DefaultResponse`. Generated code (AutoRust).
pub async fn get_keys(
    operation_config: &crate::OperationConfig,
    maxresults: Option<i32>,
) -> std::result::Result<models::KeyListResult, get_keys::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/keys", operation_config.base_path(),);
    let mut url = url::Url::parse(url_str).map_err(get_keys::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_keys::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // Optional page-size query parameter.
    if let Some(maxresults) = maxresults {
        url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str());
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get_keys::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_keys::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyListResult =
                serde_json::from_slice(rsp_body).map_err(|source| get_keys::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any non-200 status carries a service error payload.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError =
                serde_json::from_slice(rsp_body).map_err(|source| get_keys::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_keys::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error types for the `get_keys` operation.
pub mod get_keys {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Sends `POST {base_path}/keys/{key_name}/backup` with an empty body
/// (explicit `Content-Length: 0`) and deserializes a 200 response into
/// `models::BackupKeyResult`. Any other status is decoded as
/// `models::KeyVaultError` and returned as `backup_key::Error::DefaultResponse`.
/// Generated code (AutoRust).
pub async fn backup_key(
    operation_config: &crate::OperationConfig,
    key_name: &str,
) -> std::result::Result<models::BackupKeyResult, backup_key::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/keys/{}/backup", operation_config.base_path(), key_name);
    let mut url = url::Url::parse(url_str).map_err(backup_key::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::POST);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(backup_key::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    // POST with no payload: advertise a zero-length body explicitly.
    req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(backup_key::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(backup_key::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::BackupKeyResult =
                serde_json::from_slice(rsp_body).map_err(|source| backup_key::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any non-200 status carries a service error payload.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError =
                serde_json::from_slice(rsp_body).map_err(|source| backup_key::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(backup_key::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error types for the `backup_key` operation.
pub mod backup_key {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Sends `POST {base_path}/keys/restore` with `parameters` serialized as JSON
/// and deserializes a 200 response into `models::KeyBundle`.
/// Any other status is decoded as `models::KeyVaultError` and returned as
/// `restore_key::Error::DefaultResponse`. Generated code (AutoRust).
pub async fn restore_key(
    operation_config: &crate::OperationConfig,
    parameters: &models::KeyRestoreParameters,
) -> std::result::Result<models::KeyBundle, restore_key::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/keys/restore", operation_config.base_path(),);
    let mut url = url::Url::parse(url_str).map_err(restore_key::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::POST);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(restore_key::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(restore_key::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(restore_key::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(restore_key::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyBundle =
                serde_json::from_slice(rsp_body).map_err(|source| restore_key::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any non-200 status carries a service error payload.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError =
                serde_json::from_slice(rsp_body).map_err(|source| restore_key::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(restore_key::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error types for the `restore_key` operation.
pub mod restore_key {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Sends `POST {base_path}/keys/{key_name}/{key_version}/encrypt` with
/// `parameters` serialized as JSON and deserializes a 200 response into
/// `models::KeyOperationResult`. Any other status is decoded as
/// `models::KeyVaultError` and returned as `encrypt::Error::DefaultResponse`.
/// Generated code (AutoRust).
pub async fn encrypt(
    operation_config: &crate::OperationConfig,
    key_name: &str,
    key_version: &str,
    parameters: &models::KeyOperationsParameters,
) -> std::result::Result<models::KeyOperationResult, encrypt::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/keys/{}/{}/encrypt", operation_config.base_path(), key_name, key_version);
    let mut url = url::Url::parse(url_str).map_err(encrypt::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::POST);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(encrypt::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(encrypt::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(encrypt::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(encrypt::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyOperationResult =
                serde_json::from_slice(rsp_body).map_err(|source| encrypt::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any non-200 status carries a service error payload.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError =
                serde_json::from_slice(rsp_body).map_err(|source| encrypt::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(encrypt::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error types for the `encrypt` operation.
pub mod encrypt {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn decrypt(
operation_config: &crate::OperationConfig,
key_name: &str,
key_version: &str,
parameters: &models::KeyOperationsParameters,
) -> std::result::Result<models::KeyOperationResult, decrypt::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/keys/{}/{}/decrypt", operation_config.base_path(), key_name, key_version);
let mut url = url::Url::parse(url_str).map_err(decrypt::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(decrypt::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(decrypt::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(decrypt::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(decrypt::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::KeyOperationResult =
serde_json::from_slice(rsp_body).map_err(|source| decrypt::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| decrypt::Error::DeserializeError(source, rsp_body.clone()))?;
Err(decrypt::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for the [`decrypt`] operation.
pub mod decrypt {
    use super::{models, API_VERSION};
    /// Failure modes of `decrypt`: URL parsing, token acquisition, request
    /// building/execution, JSON (de)serialization, or a non-success response.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-OK status; `value` holds the
        /// service-reported `KeyVaultError` document.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The raw response bytes are retained for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn sign(
operation_config: &crate::OperationConfig,
key_name: &str,
key_version: &str,
parameters: &models::KeySignParameters,
) -> std::result::Result<models::KeyOperationResult, sign::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/keys/{}/{}/sign", operation_config.base_path(), key_name, key_version);
let mut url = url::Url::parse(url_str).map_err(sign::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(sign::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(sign::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(sign::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(sign::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::KeyOperationResult =
serde_json::from_slice(rsp_body).map_err(|source| sign::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| sign::Error::DeserializeError(source, rsp_body.clone()))?;
Err(sign::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for the [`sign`] operation.
pub mod sign {
    use super::{models, API_VERSION};
    /// Failure modes of `sign`: URL parsing, token acquisition, request
    /// building/execution, JSON (de)serialization, or a non-success response.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-OK status; `value` holds the
        /// service-reported `KeyVaultError` document.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The raw response bytes are retained for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn verify(
operation_config: &crate::OperationConfig,
key_name: &str,
key_version: &str,
parameters: &models::KeyVerifyParameters,
) -> std::result::Result<models::KeyVerifyResult, verify::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/keys/{}/{}/verify", operation_config.base_path(), key_name, key_version);
let mut url = url::Url::parse(url_str).map_err(verify::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(verify::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(verify::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(verify::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(verify::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVerifyResult =
serde_json::from_slice(rsp_body).map_err(|source| verify::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| verify::Error::DeserializeError(source, rsp_body.clone()))?;
Err(verify::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for the [`verify`] operation.
pub mod verify {
    use super::{models, API_VERSION};
    /// Failure modes of `verify`: URL parsing, token acquisition, request
    /// building/execution, JSON (de)serialization, or a non-success response.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-OK status; `value` holds the
        /// service-reported `KeyVaultError` document.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The raw response bytes are retained for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn wrap_key(
operation_config: &crate::OperationConfig,
key_name: &str,
key_version: &str,
parameters: &models::KeyOperationsParameters,
) -> std::result::Result<models::KeyOperationResult, wrap_key::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/keys/{}/{}/wrapkey", operation_config.base_path(), key_name, key_version);
let mut url = url::Url::parse(url_str).map_err(wrap_key::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(wrap_key::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(wrap_key::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(wrap_key::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(wrap_key::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::KeyOperationResult =
serde_json::from_slice(rsp_body).map_err(|source| wrap_key::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| wrap_key::Error::DeserializeError(source, rsp_body.clone()))?;
Err(wrap_key::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for the [`wrap_key`] operation.
pub mod wrap_key {
    use super::{models, API_VERSION};
    /// Failure modes of `wrap_key`: URL parsing, token acquisition, request
    /// building/execution, JSON (de)serialization, or a non-success response.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-OK status; `value` holds the
        /// service-reported `KeyVaultError` document.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The raw response bytes are retained for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn unwrap_key(
operation_config: &crate::OperationConfig,
key_name: &str,
key_version: &str,
parameters: &models::KeyOperationsParameters,
) -> std::result::Result<models::KeyOperationResult, unwrap_key::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/keys/{}/{}/unwrapkey", operation_config.base_path(), key_name, key_version);
let mut url = url::Url::parse(url_str).map_err(unwrap_key::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(unwrap_key::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(unwrap_key::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(unwrap_key::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(unwrap_key::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::KeyOperationResult =
serde_json::from_slice(rsp_body).map_err(|source| unwrap_key::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| unwrap_key::Error::DeserializeError(source, rsp_body.clone()))?;
Err(unwrap_key::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for the [`unwrap_key`] operation.
pub mod unwrap_key {
    use super::{models, API_VERSION};
    /// Failure modes of `unwrap_key`: URL parsing, token acquisition, request
    /// building/execution, JSON (de)serialization, or a non-success response.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-OK status; `value` holds the
        /// service-reported `KeyVaultError` document.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The raw response bytes are retained for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_deleted_keys(
operation_config: &crate::OperationConfig,
maxresults: Option<i32>,
) -> std::result::Result<models::DeletedKeyListResult, get_deleted_keys::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/deletedkeys", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(get_deleted_keys::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_deleted_keys::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(maxresults) = maxresults {
url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_deleted_keys::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_deleted_keys::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeletedKeyListResult =
serde_json::from_slice(rsp_body).map_err(|source| get_deleted_keys::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| get_deleted_keys::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_deleted_keys::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for the [`get_deleted_keys`] operation.
pub mod get_deleted_keys {
    use super::{models, API_VERSION};
    /// Failure modes of `get_deleted_keys`: URL parsing, token acquisition,
    /// request building/execution, JSON handling, or a non-success response.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-OK status; `value` holds the
        /// service-reported `KeyVaultError` document.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The raw response bytes are retained for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_deleted_key(
operation_config: &crate::OperationConfig,
key_name: &str,
) -> std::result::Result<models::DeletedKeyBundle, get_deleted_key::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/deletedkeys/{}", operation_config.base_path(), key_name);
let mut url = url::Url::parse(url_str).map_err(get_deleted_key::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_deleted_key::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_deleted_key::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_deleted_key::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeletedKeyBundle =
serde_json::from_slice(rsp_body).map_err(|source| get_deleted_key::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| get_deleted_key::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_deleted_key::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for the [`get_deleted_key`] operation.
pub mod get_deleted_key {
    use super::{models, API_VERSION};
    /// Failure modes of `get_deleted_key`: URL parsing, token acquisition,
    /// request building/execution, JSON handling, or a non-success response.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-OK status; `value` holds the
        /// service-reported `KeyVaultError` document.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The raw response bytes are retained for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn purge_deleted_key(
operation_config: &crate::OperationConfig,
key_name: &str,
) -> std::result::Result<(), purge_deleted_key::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/deletedkeys/{}", operation_config.base_path(), key_name);
let mut url = url::Url::parse(url_str).map_err(purge_deleted_key::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(purge_deleted_key::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(purge_deleted_key::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(purge_deleted_key::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| purge_deleted_key::Error::DeserializeError(source, rsp_body.clone()))?;
Err(purge_deleted_key::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for the [`purge_deleted_key`] operation.
pub mod purge_deleted_key {
    use super::{models, API_VERSION};
    /// Failure modes of `purge_deleted_key`: URL parsing, token acquisition,
    /// request building/execution, JSON handling, or a non-success response.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-204 status; `value` holds the
        /// service-reported `KeyVaultError` document.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The raw response bytes are retained for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn recover_deleted_key(
operation_config: &crate::OperationConfig,
key_name: &str,
) -> std::result::Result<models::KeyBundle, recover_deleted_key::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/deletedkeys/{}/recover", operation_config.base_path(), key_name);
let mut url = url::Url::parse(url_str).map_err(recover_deleted_key::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(recover_deleted_key::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(recover_deleted_key::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(recover_deleted_key::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::KeyBundle = serde_json::from_slice(rsp_body)
.map_err(|source| recover_deleted_key::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| recover_deleted_key::Error::DeserializeError(source, rsp_body.clone()))?;
Err(recover_deleted_key::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for the [`recover_deleted_key`] operation.
pub mod recover_deleted_key {
    use super::{models, API_VERSION};
    /// Failure modes of `recover_deleted_key`: URL parsing, token acquisition,
    /// request building/execution, JSON handling, or a non-success response.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-OK status; `value` holds the
        /// service-reported `KeyVaultError` document.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The raw response bytes are retained for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn set_secret(
operation_config: &crate::OperationConfig,
secret_name: &str,
parameters: &models::SecretSetParameters,
) -> std::result::Result<models::SecretBundle, set_secret::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/secrets/{}", operation_config.base_path(), secret_name);
let mut url = url::Url::parse(url_str).map_err(set_secret::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(set_secret::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(set_secret::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(set_secret::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(set_secret::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SecretBundle =
serde_json::from_slice(rsp_body).map_err(|source| set_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| set_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Err(set_secret::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for the [`set_secret`] operation.
pub mod set_secret {
    use super::{models, API_VERSION};
    /// Failure modes of `set_secret`: URL parsing, token acquisition, request
    /// building/execution, JSON (de)serialization, or a non-success response.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-OK status; `value` holds the
        /// service-reported `KeyVaultError` document.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The raw response bytes are retained for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn delete_secret(
operation_config: &crate::OperationConfig,
secret_name: &str,
) -> std::result::Result<models::DeletedSecretBundle, delete_secret::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/secrets/{}", operation_config.base_path(), secret_name);
let mut url = url::Url::parse(url_str).map_err(delete_secret::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete_secret::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete_secret::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(delete_secret::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeletedSecretBundle =
serde_json::from_slice(rsp_body).map_err(|source| delete_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| delete_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete_secret::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for the [`delete_secret`] operation.
pub mod delete_secret {
    use super::{models, API_VERSION};
    /// Failure modes of `delete_secret`: URL parsing, token acquisition,
    /// request building/execution, JSON handling, or a non-success response.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-OK status; `value` holds the
        /// service-reported `KeyVaultError` document.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The raw response bytes are retained for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_secret(
operation_config: &crate::OperationConfig,
secret_name: &str,
secret_version: &str,
) -> std::result::Result<models::SecretBundle, get_secret::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/secrets/{}/{}", operation_config.base_path(), secret_name, secret_version);
let mut url = url::Url::parse(url_str).map_err(get_secret::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_secret::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_secret::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_secret::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SecretBundle =
serde_json::from_slice(rsp_body).map_err(|source| get_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| get_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_secret::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for the [`get_secret`] operation.
pub mod get_secret {
    use super::{models, API_VERSION};
    /// Failure modes of `get_secret`: URL parsing, token acquisition, request
    /// building/execution, JSON handling, or a non-success response.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-OK status; `value` holds the
        /// service-reported `KeyVaultError` document.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The raw response bytes are retained for diagnostics.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn update_secret(
operation_config: &crate::OperationConfig,
secret_name: &str,
secret_version: &str,
parameters: &models::SecretUpdateParameters,
) -> std::result::Result<models::SecretBundle, update_secret::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/secrets/{}/{}", operation_config.base_path(), secret_name, secret_version);
let mut url = url::Url::parse(url_str).map_err(update_secret::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_secret::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(update_secret::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update_secret::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_secret::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SecretBundle =
serde_json::from_slice(rsp_body).map_err(|source| update_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| update_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update_secret::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update_secret {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_secrets(
operation_config: &crate::OperationConfig,
maxresults: Option<i32>,
) -> std::result::Result<models::SecretListResult, get_secrets::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/secrets", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(get_secrets::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_secrets::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(maxresults) = maxresults {
url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_secrets::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_secrets::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SecretListResult =
serde_json::from_slice(rsp_body).map_err(|source| get_secrets::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| get_secrets::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_secrets::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_secrets {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_secret_versions(
operation_config: &crate::OperationConfig,
secret_name: &str,
maxresults: Option<i32>,
) -> std::result::Result<models::SecretListResult, get_secret_versions::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/secrets/{}/versions", operation_config.base_path(), secret_name);
let mut url = url::Url::parse(url_str).map_err(get_secret_versions::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_secret_versions::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(maxresults) = maxresults {
url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_secret_versions::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_secret_versions::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SecretListResult = serde_json::from_slice(rsp_body)
.map_err(|source| get_secret_versions::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| get_secret_versions::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_secret_versions::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_secret_versions {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_deleted_secrets(
operation_config: &crate::OperationConfig,
maxresults: Option<i32>,
) -> std::result::Result<models::DeletedSecretListResult, get_deleted_secrets::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/deletedsecrets", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(get_deleted_secrets::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_deleted_secrets::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(maxresults) = maxresults {
url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_deleted_secrets::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_deleted_secrets::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeletedSecretListResult = serde_json::from_slice(rsp_body)
.map_err(|source| get_deleted_secrets::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| get_deleted_secrets::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_deleted_secrets::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_deleted_secrets {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_deleted_secret(
operation_config: &crate::OperationConfig,
secret_name: &str,
) -> std::result::Result<models::DeletedSecretBundle, get_deleted_secret::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/deletedsecrets/{}", operation_config.base_path(), secret_name);
let mut url = url::Url::parse(url_str).map_err(get_deleted_secret::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_deleted_secret::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_deleted_secret::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_deleted_secret::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeletedSecretBundle =
serde_json::from_slice(rsp_body).map_err(|source| get_deleted_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| get_deleted_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_deleted_secret::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_deleted_secret {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn purge_deleted_secret(
operation_config: &crate::OperationConfig,
secret_name: &str,
) -> std::result::Result<(), purge_deleted_secret::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/deletedsecrets/{}", operation_config.base_path(), secret_name);
let mut url = url::Url::parse(url_str).map_err(purge_deleted_secret::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(purge_deleted_secret::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(purge_deleted_secret::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(purge_deleted_secret::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| purge_deleted_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Err(purge_deleted_secret::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod purge_deleted_secret {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn recover_deleted_secret(
operation_config: &crate::OperationConfig,
secret_name: &str,
) -> std::result::Result<models::SecretBundle, recover_deleted_secret::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/deletedsecrets/{}/recover", operation_config.base_path(), secret_name);
let mut url = url::Url::parse(url_str).map_err(recover_deleted_secret::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(recover_deleted_secret::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(recover_deleted_secret::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(recover_deleted_secret::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SecretBundle = serde_json::from_slice(rsp_body)
.map_err(|source| recover_deleted_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| recover_deleted_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Err(recover_deleted_secret::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod recover_deleted_secret {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn backup_secret(
operation_config: &crate::OperationConfig,
secret_name: &str,
) -> std::result::Result<models::BackupSecretResult, backup_secret::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/secrets/{}/backup", operation_config.base_path(), secret_name);
let mut url = url::Url::parse(url_str).map_err(backup_secret::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(backup_secret::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(backup_secret::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(backup_secret::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::BackupSecretResult =
serde_json::from_slice(rsp_body).map_err(|source| backup_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| backup_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Err(backup_secret::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod backup_secret {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn restore_secret(
operation_config: &crate::OperationConfig,
parameters: &models::SecretRestoreParameters,
) -> std::result::Result<models::SecretBundle, restore_secret::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/secrets/restore", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(restore_secret::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(restore_secret::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(restore_secret::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(restore_secret::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(restore_secret::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SecretBundle =
serde_json::from_slice(rsp_body).map_err(|source| restore_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| restore_secret::Error::DeserializeError(source, rsp_body.clone()))?;
Err(restore_secret::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod restore_secret {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_certificates(
operation_config: &crate::OperationConfig,
maxresults: Option<i32>,
) -> std::result::Result<models::CertificateListResult, get_certificates::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(get_certificates::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_certificates::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(maxresults) = maxresults {
url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_certificates::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_certificates::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::CertificateListResult =
serde_json::from_slice(rsp_body).map_err(|source| get_certificates::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| get_certificates::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_certificates::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_certificates {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete_certificate(
operation_config: &crate::OperationConfig,
certificate_name: &str,
) -> std::result::Result<models::DeletedCertificateBundle, delete_certificate::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates/{}", operation_config.base_path(), certificate_name);
let mut url = url::Url::parse(url_str).map_err(delete_certificate::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete_certificate::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete_certificate::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(delete_certificate::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeletedCertificateBundle =
serde_json::from_slice(rsp_body).map_err(|source| delete_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| delete_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete_certificate::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete_certificate {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_certificate_contacts(
operation_config: &crate::OperationConfig,
) -> std::result::Result<models::Contacts, get_certificate_contacts::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates/contacts", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(get_certificate_contacts::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_certificate_contacts::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_certificate_contacts::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_certificate_contacts::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Contacts = serde_json::from_slice(rsp_body)
.map_err(|source| get_certificate_contacts::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| get_certificate_contacts::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_certificate_contacts::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_certificate_contacts {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn set_certificate_contacts(
operation_config: &crate::OperationConfig,
contacts: &models::Contacts,
) -> std::result::Result<models::Contacts, set_certificate_contacts::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates/contacts", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(set_certificate_contacts::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(set_certificate_contacts::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(contacts).map_err(set_certificate_contacts::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(set_certificate_contacts::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(set_certificate_contacts::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Contacts = serde_json::from_slice(rsp_body)
.map_err(|source| set_certificate_contacts::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| set_certificate_contacts::Error::DeserializeError(source, rsp_body.clone()))?;
Err(set_certificate_contacts::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod set_certificate_contacts {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete_certificate_contacts(
operation_config: &crate::OperationConfig,
) -> std::result::Result<models::Contacts, delete_certificate_contacts::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates/contacts", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(delete_certificate_contacts::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete_certificate_contacts::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(delete_certificate_contacts::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(delete_certificate_contacts::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Contacts = serde_json::from_slice(rsp_body)
.map_err(|source| delete_certificate_contacts::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| delete_certificate_contacts::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete_certificate_contacts::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete_certificate_contacts {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_certificate_issuers(
operation_config: &crate::OperationConfig,
maxresults: Option<i32>,
) -> std::result::Result<models::CertificateIssuerListResult, get_certificate_issuers::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates/issuers", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(get_certificate_issuers::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_certificate_issuers::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(maxresults) = maxresults {
url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_certificate_issuers::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_certificate_issuers::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::CertificateIssuerListResult = serde_json::from_slice(rsp_body)
.map_err(|source| get_certificate_issuers::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| get_certificate_issuers::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_certificate_issuers::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_certificate_issuers {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_certificate_issuer(
operation_config: &crate::OperationConfig,
issuer_name: &str,
) -> std::result::Result<models::IssuerBundle, get_certificate_issuer::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates/issuers/{}", operation_config.base_path(), issuer_name);
let mut url = url::Url::parse(url_str).map_err(get_certificate_issuer::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_certificate_issuer::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_certificate_issuer::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_certificate_issuer::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IssuerBundle = serde_json::from_slice(rsp_body)
.map_err(|source| get_certificate_issuer::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| get_certificate_issuer::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_certificate_issuer::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_certificate_issuer {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn set_certificate_issuer(
operation_config: &crate::OperationConfig,
issuer_name: &str,
parameter: &models::CertificateIssuerSetParameters,
) -> std::result::Result<models::IssuerBundle, set_certificate_issuer::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates/issuers/{}", operation_config.base_path(), issuer_name);
let mut url = url::Url::parse(url_str).map_err(set_certificate_issuer::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(set_certificate_issuer::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameter).map_err(set_certificate_issuer::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(set_certificate_issuer::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(set_certificate_issuer::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IssuerBundle = serde_json::from_slice(rsp_body)
.map_err(|source| set_certificate_issuer::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| set_certificate_issuer::Error::DeserializeError(source, rsp_body.clone()))?;
Err(set_certificate_issuer::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod set_certificate_issuer {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update_certificate_issuer(
operation_config: &crate::OperationConfig,
issuer_name: &str,
parameter: &models::CertificateIssuerUpdateParameters,
) -> std::result::Result<models::IssuerBundle, update_certificate_issuer::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates/issuers/{}", operation_config.base_path(), issuer_name);
let mut url = url::Url::parse(url_str).map_err(update_certificate_issuer::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_certificate_issuer::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameter).map_err(update_certificate_issuer::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(update_certificate_issuer::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_certificate_issuer::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IssuerBundle = serde_json::from_slice(rsp_body)
.map_err(|source| update_certificate_issuer::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| update_certificate_issuer::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update_certificate_issuer::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update_certificate_issuer {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete_certificate_issuer(
operation_config: &crate::OperationConfig,
issuer_name: &str,
) -> std::result::Result<models::IssuerBundle, delete_certificate_issuer::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates/issuers/{}", operation_config.base_path(), issuer_name);
let mut url = url::Url::parse(url_str).map_err(delete_certificate_issuer::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete_certificate_issuer::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(delete_certificate_issuer::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(delete_certificate_issuer::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IssuerBundle = serde_json::from_slice(rsp_body)
.map_err(|source| delete_certificate_issuer::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| delete_certificate_issuer::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete_certificate_issuer::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete_certificate_issuer {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_certificate(
operation_config: &crate::OperationConfig,
certificate_name: &str,
parameters: &models::CertificateCreateParameters,
) -> std::result::Result<models::CertificateOperation, create_certificate::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates/{}/create", operation_config.base_path(), certificate_name);
let mut url = url::Url::parse(url_str).map_err(create_certificate::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_certificate::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create_certificate::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_certificate::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_certificate::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: models::CertificateOperation =
serde_json::from_slice(rsp_body).map_err(|source| create_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| create_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_certificate::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_certificate {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn import_certificate(
operation_config: &crate::OperationConfig,
certificate_name: &str,
parameters: &models::CertificateImportParameters,
) -> std::result::Result<models::CertificateBundle, import_certificate::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates/{}/import", operation_config.base_path(), certificate_name);
let mut url = url::Url::parse(url_str).map_err(import_certificate::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(import_certificate::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(import_certificate::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(import_certificate::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(import_certificate::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::CertificateBundle =
serde_json::from_slice(rsp_body).map_err(|source| import_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| import_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
Err(import_certificate::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod import_certificate {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_certificate_versions(
operation_config: &crate::OperationConfig,
certificate_name: &str,
maxresults: Option<i32>,
) -> std::result::Result<models::CertificateListResult, get_certificate_versions::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates/{}/versions", operation_config.base_path(), certificate_name);
let mut url = url::Url::parse(url_str).map_err(get_certificate_versions::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_certificate_versions::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(maxresults) = maxresults {
url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_certificate_versions::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_certificate_versions::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::CertificateListResult = serde_json::from_slice(rsp_body)
.map_err(|source| get_certificate_versions::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| get_certificate_versions::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_certificate_versions::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_certificate_versions {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_certificate_policy(
operation_config: &crate::OperationConfig,
certificate_name: &str,
) -> std::result::Result<models::CertificatePolicy, get_certificate_policy::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates/{}/policy", operation_config.base_path(), certificate_name);
let mut url = url::Url::parse(url_str).map_err(get_certificate_policy::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_certificate_policy::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_certificate_policy::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_certificate_policy::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::CertificatePolicy = serde_json::from_slice(rsp_body)
.map_err(|source| get_certificate_policy::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| get_certificate_policy::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_certificate_policy::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_certificate_policy {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update_certificate_policy(
operation_config: &crate::OperationConfig,
certificate_name: &str,
certificate_policy: &models::CertificatePolicy,
) -> std::result::Result<models::CertificatePolicy, update_certificate_policy::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates/{}/policy", operation_config.base_path(), certificate_name);
let mut url = url::Url::parse(url_str).map_err(update_certificate_policy::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_certificate_policy::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(certificate_policy).map_err(update_certificate_policy::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(update_certificate_policy::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_certificate_policy::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::CertificatePolicy = serde_json::from_slice(rsp_body)
.map_err(|source| update_certificate_policy::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| update_certificate_policy::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update_certificate_policy::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update_certificate_policy {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_certificate(
operation_config: &crate::OperationConfig,
certificate_name: &str,
certificate_version: &str,
) -> std::result::Result<models::CertificateBundle, get_certificate::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/certificates/{}/{}",
operation_config.base_path(),
certificate_name,
certificate_version
);
let mut url = url::Url::parse(url_str).map_err(get_certificate::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_certificate::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_certificate::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_certificate::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::CertificateBundle =
serde_json::from_slice(rsp_body).map_err(|source| get_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| get_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_certificate::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_certificate {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Updates the attributes of a specific certificate version
/// (`PATCH {base}/certificates/{certificate_name}/{certificate_version}`).
/// Returns the updated [`models::CertificateBundle`] on HTTP 200; any other
/// status is returned as [`update_certificate::Error::DefaultResponse`].
pub async fn update_certificate(
    operation_config: &crate::OperationConfig,
    certificate_name: &str,
    certificate_version: &str,
    parameters: &models::CertificateUpdateParameters,
) -> std::result::Result<models::CertificateBundle, update_certificate::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/certificates/{}/{}",
        operation_config.base_path(),
        certificate_name,
        certificate_version
    );
    let mut url = url::Url::parse(url_str).map_err(update_certificate::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PATCH);
    // Attach a bearer token when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(update_certificate::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // PATCH carries a JSON body serialized from `parameters`.
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(update_certificate::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(update_certificate::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(update_certificate::Error::ExecuteRequestError)?;
    // 200 carries the certificate bundle; any other status carries a KeyVaultError body.
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::CertificateBundle =
                serde_json::from_slice(rsp_body).map_err(|source| update_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError =
                serde_json::from_slice(rsp_body).map_err(|source| update_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(update_certificate::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod update_certificate {
    use super::{models, API_VERSION};
    /// Failure modes of the `update_certificate` operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Fetches the pending operation of a certificate
/// (`GET {base}/certificates/{certificate_name}/pending`).
/// Returns a [`models::CertificateOperation`] on HTTP 200.
pub async fn get_certificate_operation(
    operation_config: &crate::OperationConfig,
    certificate_name: &str,
) -> std::result::Result<models::CertificateOperation, get_certificate_operation::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/certificates/{}/pending", operation_config.base_path(), certificate_name);
    let mut url = url::Url::parse(url_str).map_err(get_certificate_operation::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_certificate_operation::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET requests carry an empty body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(get_certificate_operation::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_certificate_operation::Error::ExecuteRequestError)?;
    // 200 carries the operation; any other status carries a KeyVaultError body.
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::CertificateOperation = serde_json::from_slice(rsp_body)
                .map_err(|source| get_certificate_operation::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
                .map_err(|source| get_certificate_operation::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_certificate_operation::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod get_certificate_operation {
    use super::{models, API_VERSION};
    /// Failure modes of the `get_certificate_operation` operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Updates the pending operation of a certificate
/// (`PATCH {base}/certificates/{certificate_name}/pending`), e.g. to cancel it.
/// Returns the updated [`models::CertificateOperation`] on HTTP 200.
pub async fn update_certificate_operation(
    operation_config: &crate::OperationConfig,
    certificate_name: &str,
    certificate_operation: &models::CertificateOperationUpdateParameter,
) -> std::result::Result<models::CertificateOperation, update_certificate_operation::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/certificates/{}/pending", operation_config.base_path(), certificate_name);
    let mut url = url::Url::parse(url_str).map_err(update_certificate_operation::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PATCH);
    // Attach a bearer token when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(update_certificate_operation::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // PATCH carries a JSON body serialized from `certificate_operation`.
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(certificate_operation).map_err(update_certificate_operation::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(update_certificate_operation::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(update_certificate_operation::Error::ExecuteRequestError)?;
    // 200 carries the operation; any other status carries a KeyVaultError body.
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::CertificateOperation = serde_json::from_slice(rsp_body)
                .map_err(|source| update_certificate_operation::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
                .map_err(|source| update_certificate_operation::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(update_certificate_operation::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod update_certificate_operation {
    use super::{models, API_VERSION};
    /// Failure modes of the `update_certificate_operation` operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn delete_certificate_operation(
operation_config: &crate::OperationConfig,
certificate_name: &str,
) -> std::result::Result<models::CertificateOperation, delete_certificate_operation::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/certificates/{}/pending", operation_config.base_path(), certificate_name);
let mut url = url::Url::parse(url_str).map_err(delete_certificate_operation::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete_certificate_operation::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(delete_certificate_operation::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(delete_certificate_operation::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::CertificateOperation = serde_json::from_slice(rsp_body)
.map_err(|source| delete_certificate_operation::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| delete_certificate_operation::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete_certificate_operation::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete_certificate_operation {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Merges a signed certificate into a pending certificate request
/// (`POST {base}/certificates/{certificate_name}/pending/merge`).
/// NOTE: success is HTTP 201 CREATED here, unlike the 200 used by most
/// other operations in this client.
pub async fn merge_certificate(
    operation_config: &crate::OperationConfig,
    certificate_name: &str,
    parameters: &models::CertificateMergeParameters,
) -> std::result::Result<models::CertificateBundle, merge_certificate::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/certificates/{}/pending/merge", operation_config.base_path(), certificate_name);
    let mut url = url::Url::parse(url_str).map_err(merge_certificate::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::POST);
    // Attach a bearer token when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(merge_certificate::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // POST carries a JSON body serialized from `parameters`.
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(merge_certificate::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(merge_certificate::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(merge_certificate::Error::ExecuteRequestError)?;
    // 201 carries the merged bundle; any other status carries a KeyVaultError body.
    match rsp.status() {
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: models::CertificateBundle =
                serde_json::from_slice(rsp_body).map_err(|source| merge_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError =
                serde_json::from_slice(rsp_body).map_err(|source| merge_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(merge_certificate::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod merge_certificate {
    use super::{models, API_VERSION};
    /// Failure modes of the `merge_certificate` operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists deleted certificates in the vault
/// (`GET {base}/deletedcertificates`), returning one page as a
/// [`models::DeletedCertificateListResult`]. `maxresults`, when given,
/// caps the page size via the `maxresults` query parameter.
pub async fn get_deleted_certificates(
    operation_config: &crate::OperationConfig,
    maxresults: Option<i32>,
) -> std::result::Result<models::DeletedCertificateListResult, get_deleted_certificates::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/deletedcertificates", operation_config.base_path(),);
    let mut url = url::Url::parse(url_str).map_err(get_deleted_certificates::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_deleted_certificates::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // Optional page-size cap.
    if let Some(maxresults) = maxresults {
        url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str());
    }
    // GET requests carry an empty body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(get_deleted_certificates::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_deleted_certificates::Error::ExecuteRequestError)?;
    // 200 carries the list page; any other status carries a KeyVaultError body.
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeletedCertificateListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| get_deleted_certificates::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
                .map_err(|source| get_deleted_certificates::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_deleted_certificates::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod get_deleted_certificates {
    use super::{models, API_VERSION};
    /// Failure modes of the `get_deleted_certificates` operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Fetches a single deleted certificate
/// (`GET {base}/deletedcertificates/{certificate_name}`).
/// Returns a [`models::DeletedCertificateBundle`] on HTTP 200.
pub async fn get_deleted_certificate(
    operation_config: &crate::OperationConfig,
    certificate_name: &str,
) -> std::result::Result<models::DeletedCertificateBundle, get_deleted_certificate::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/deletedcertificates/{}", operation_config.base_path(), certificate_name);
    let mut url = url::Url::parse(url_str).map_err(get_deleted_certificate::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_deleted_certificate::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET requests carry an empty body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(get_deleted_certificate::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_deleted_certificate::Error::ExecuteRequestError)?;
    // 200 carries the deleted bundle; any other status carries a KeyVaultError body.
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeletedCertificateBundle = serde_json::from_slice(rsp_body)
                .map_err(|source| get_deleted_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
                .map_err(|source| get_deleted_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_deleted_certificate::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod get_deleted_certificate {
    use super::{models, API_VERSION};
    /// Failure modes of the `get_deleted_certificate` operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn purge_deleted_certificate(
operation_config: &crate::OperationConfig,
certificate_name: &str,
) -> std::result::Result<(), purge_deleted_certificate::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/deletedcertificates/{}", operation_config.base_path(), certificate_name);
let mut url = url::Url::parse(url_str).map_err(purge_deleted_certificate::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(purge_deleted_certificate::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(purge_deleted_certificate::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(purge_deleted_certificate::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| purge_deleted_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
Err(purge_deleted_certificate::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod purge_deleted_certificate {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Recovers a deleted certificate back to its active state
/// (`POST {base}/deletedcertificates/{certificate_name}/recover`).
/// Returns the recovered [`models::CertificateBundle`] on HTTP 200.
pub async fn recover_deleted_certificate(
    operation_config: &crate::OperationConfig,
    certificate_name: &str,
) -> std::result::Result<models::CertificateBundle, recover_deleted_certificate::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/deletedcertificates/{}/recover", operation_config.base_path(), certificate_name);
    let mut url = url::Url::parse(url_str).map_err(recover_deleted_certificate::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::POST);
    // Attach a bearer token when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(recover_deleted_certificate::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // Body-less POST: the Content-Length: 0 header is set explicitly.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(recover_deleted_certificate::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(recover_deleted_certificate::Error::ExecuteRequestError)?;
    // 200 carries the recovered bundle; any other status carries a KeyVaultError body.
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::CertificateBundle = serde_json::from_slice(rsp_body)
                .map_err(|source| recover_deleted_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
                .map_err(|source| recover_deleted_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(recover_deleted_certificate::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod recover_deleted_certificate {
    use super::{models, API_VERSION};
    /// Failure modes of the `recover_deleted_certificate` operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists storage accounts managed by the vault (`GET {base}/storage`),
/// returning one page as a [`models::StorageListResult`]. `maxresults`,
/// when given, caps the page size via the `maxresults` query parameter.
pub async fn get_storage_accounts(
    operation_config: &crate::OperationConfig,
    maxresults: Option<i32>,
) -> std::result::Result<models::StorageListResult, get_storage_accounts::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/storage", operation_config.base_path(),);
    let mut url = url::Url::parse(url_str).map_err(get_storage_accounts::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_storage_accounts::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // Optional page-size cap.
    if let Some(maxresults) = maxresults {
        url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str());
    }
    // GET requests carry an empty body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get_storage_accounts::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_storage_accounts::Error::ExecuteRequestError)?;
    // 200 carries the list page; any other status carries a KeyVaultError body.
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::StorageListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| get_storage_accounts::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
                .map_err(|source| get_storage_accounts::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_storage_accounts::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod get_storage_accounts {
    use super::{models, API_VERSION};
    /// Failure modes of the `get_storage_accounts` operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_storage_account(
operation_config: &crate::OperationConfig,
storage_account_name: &str,
) -> std::result::Result<models::StorageBundle, get_storage_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/storage/{}", operation_config.base_path(), storage_account_name);
let mut url = url::Url::parse(url_str).map_err(get_storage_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_storage_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_storage_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_storage_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::StorageBundle = serde_json::from_slice(rsp_body)
.map_err(|source| get_storage_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| get_storage_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_storage_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_storage_account {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Creates or replaces a vault-managed storage account
/// (`PUT {base}/storage/{storage_account_name}`).
/// Returns the resulting [`models::StorageBundle`] on HTTP 200.
pub async fn set_storage_account(
    operation_config: &crate::OperationConfig,
    storage_account_name: &str,
    parameters: &models::StorageAccountCreateParameters,
) -> std::result::Result<models::StorageBundle, set_storage_account::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/storage/{}", operation_config.base_path(), storage_account_name);
    let mut url = url::Url::parse(url_str).map_err(set_storage_account::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    // Attach a bearer token when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(set_storage_account::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // PUT carries a JSON body serialized from `parameters`.
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(set_storage_account::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(set_storage_account::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(set_storage_account::Error::ExecuteRequestError)?;
    // 200 carries the storage bundle; any other status carries a KeyVaultError body.
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::StorageBundle = serde_json::from_slice(rsp_body)
                .map_err(|source| set_storage_account::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
                .map_err(|source| set_storage_account::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(set_storage_account::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod set_storage_account {
    use super::{models, API_VERSION};
    /// Failure modes of the `set_storage_account` operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::KeyVaultError,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Updates an existing key vault-managed storage account
/// (`PATCH {base}/storage/{storage_account_name}`).
///
/// Serializes `parameters` as the JSON request body. Returns the updated
/// `StorageBundle` on HTTP 200; any other status yields
/// `Error::DefaultResponse` carrying the service's `KeyVaultError` payload.
pub async fn update_storage_account(
operation_config: &crate::OperationConfig,
storage_account_name: &str,
parameters: &models::StorageAccountUpdateParameters,
) -> std::result::Result<models::StorageBundle, update_storage_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/storage/{}", operation_config.base_path(), storage_account_name);
let mut url = url::Url::parse(url_str).map_err(update_storage_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_storage_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(update_storage_account::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(update_storage_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_storage_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::StorageBundle = serde_json::from_slice(rsp_body)
.map_err(|source| update_storage_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200: the body is expected to be a KeyVaultError document.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| update_storage_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update_storage_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Errors returned by `update_storage_account`.
pub mod update_storage_account {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Deletes a key vault-managed storage account
/// (`DELETE {base}/storage/{storage_account_name}`).
///
/// Sends an empty request body. Returns the deleted account's
/// `StorageBundle` on HTTP 200; any other status yields
/// `Error::DefaultResponse` carrying the service's `KeyVaultError` payload.
pub async fn delete_storage_account(
operation_config: &crate::OperationConfig,
storage_account_name: &str,
) -> std::result::Result<models::StorageBundle, delete_storage_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/storage/{}", operation_config.base_path(), storage_account_name);
let mut url = url::Url::parse(url_str).map_err(delete_storage_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete_storage_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// DELETE carries no payload, so no content-type header is set.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(delete_storage_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(delete_storage_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::StorageBundle = serde_json::from_slice(rsp_body)
.map_err(|source| delete_storage_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200: the body is expected to be a KeyVaultError document.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| delete_storage_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete_storage_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Errors returned by `delete_storage_account`.
pub mod delete_storage_account {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Regenerates a key of a key vault-managed storage account
/// (`POST {base}/storage/{storage_account_name}/regeneratekey`).
///
/// Serializes `parameters` as the JSON request body. Returns the updated
/// `StorageBundle` on HTTP 200; any other status yields
/// `Error::DefaultResponse` carrying the service's `KeyVaultError` payload.
///
/// NOTE(review): "Regenerte" in the parameter type name is a typo inherited
/// from the generated models module — fix it at the generator/models level,
/// not here.
pub async fn regenerate_storage_account_key(
operation_config: &crate::OperationConfig,
storage_account_name: &str,
parameters: &models::StorageAccountRegenerteKeyParameters,
) -> std::result::Result<models::StorageBundle, regenerate_storage_account_key::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/storage/{}/regeneratekey", operation_config.base_path(), storage_account_name);
let mut url = url::Url::parse(url_str).map_err(regenerate_storage_account_key::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(regenerate_storage_account_key::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(regenerate_storage_account_key::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(regenerate_storage_account_key::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(regenerate_storage_account_key::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::StorageBundle = serde_json::from_slice(rsp_body)
.map_err(|source| regenerate_storage_account_key::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200: the body is expected to be a KeyVaultError document.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| regenerate_storage_account_key::Error::DeserializeError(source, rsp_body.clone()))?;
Err(regenerate_storage_account_key::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Errors returned by `regenerate_storage_account_key`.
pub mod regenerate_storage_account_key {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists SAS definitions for a storage account
/// (`GET {base}/storage/{storage_account_name}/sas`).
///
/// `maxresults`, when given, is forwarded as the `maxresults` query
/// parameter to cap the page size. Returns a `SasDefinitionListResult` on
/// HTTP 200; any other status yields `Error::DefaultResponse` carrying the
/// service's `KeyVaultError` payload.
pub async fn get_sas_definitions(
operation_config: &crate::OperationConfig,
storage_account_name: &str,
maxresults: Option<i32>,
) -> std::result::Result<models::SasDefinitionListResult, get_sas_definitions::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/storage/{}/sas", operation_config.base_path(), storage_account_name);
let mut url = url::Url::parse(url_str).map_err(get_sas_definitions::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_sas_definitions::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// Optional page-size limit.
if let Some(maxresults) = maxresults {
url.query_pairs_mut().append_pair("maxresults", maxresults.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_sas_definitions::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_sas_definitions::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SasDefinitionListResult = serde_json::from_slice(rsp_body)
.map_err(|source| get_sas_definitions::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200: the body is expected to be a KeyVaultError document.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| get_sas_definitions::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_sas_definitions::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Errors returned by `get_sas_definitions`.
pub mod get_sas_definitions {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Fetches a single SAS definition
/// (`GET {base}/storage/{storage_account_name}/sas/{sas_definition_name}`).
///
/// Sends an empty request body. Returns a `SasDefinitionBundle` on HTTP 200;
/// any other status yields `Error::DefaultResponse` carrying the service's
/// `KeyVaultError` payload.
pub async fn get_sas_definition(
operation_config: &crate::OperationConfig,
storage_account_name: &str,
sas_definition_name: &str,
) -> std::result::Result<models::SasDefinitionBundle, get_sas_definition::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/storage/{}/sas/{}",
operation_config.base_path(),
storage_account_name,
sas_definition_name
);
let mut url = url::Url::parse(url_str).map_err(get_sas_definition::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_sas_definition::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_sas_definition::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_sas_definition::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SasDefinitionBundle =
serde_json::from_slice(rsp_body).map_err(|source| get_sas_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200: the body is expected to be a KeyVaultError document.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| get_sas_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_sas_definition::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Errors returned by `get_sas_definition`.
pub mod get_sas_definition {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Creates or updates a SAS definition
/// (`PUT {base}/storage/{storage_account_name}/sas/{sas_definition_name}`).
///
/// Serializes `parameters` as the JSON request body. Returns the resulting
/// `SasDefinitionBundle` on HTTP 200; any other status yields
/// `Error::DefaultResponse` carrying the service's `KeyVaultError` payload.
pub async fn set_sas_definition(
operation_config: &crate::OperationConfig,
storage_account_name: &str,
sas_definition_name: &str,
parameters: &models::SasDefinitionCreateParameters,
) -> std::result::Result<models::SasDefinitionBundle, set_sas_definition::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/storage/{}/sas/{}",
operation_config.base_path(),
storage_account_name,
sas_definition_name
);
let mut url = url::Url::parse(url_str).map_err(set_sas_definition::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(set_sas_definition::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(set_sas_definition::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(set_sas_definition::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(set_sas_definition::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SasDefinitionBundle =
serde_json::from_slice(rsp_body).map_err(|source| set_sas_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200: the body is expected to be a KeyVaultError document.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError =
serde_json::from_slice(rsp_body).map_err(|source| set_sas_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Err(set_sas_definition::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Errors returned by `set_sas_definition`.
pub mod set_sas_definition {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Updates an existing SAS definition
/// (`PATCH {base}/storage/{storage_account_name}/sas/{sas_definition_name}`).
///
/// Serializes `parameters` as the JSON request body. Returns the updated
/// `SasDefinitionBundle` on HTTP 200; any other status yields
/// `Error::DefaultResponse` carrying the service's `KeyVaultError` payload.
pub async fn update_sas_definition(
operation_config: &crate::OperationConfig,
storage_account_name: &str,
sas_definition_name: &str,
parameters: &models::SasDefinitionUpdateParameters,
) -> std::result::Result<models::SasDefinitionBundle, update_sas_definition::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/storage/{}/sas/{}",
operation_config.base_path(),
storage_account_name,
sas_definition_name
);
let mut url = url::Url::parse(url_str).map_err(update_sas_definition::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_sas_definition::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(update_sas_definition::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(update_sas_definition::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_sas_definition::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SasDefinitionBundle = serde_json::from_slice(rsp_body)
.map_err(|source| update_sas_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200: the body is expected to be a KeyVaultError document.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| update_sas_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update_sas_definition::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Errors returned by `update_sas_definition`.
pub mod update_sas_definition {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Deletes a SAS definition
/// (`DELETE {base}/storage/{storage_account_name}/sas/{sas_definition_name}`).
///
/// Sends an empty request body. Returns the deleted definition's
/// `SasDefinitionBundle` on HTTP 200; any other status yields
/// `Error::DefaultResponse` carrying the service's `KeyVaultError` payload.
pub async fn delete_sas_definition(
operation_config: &crate::OperationConfig,
storage_account_name: &str,
sas_definition_name: &str,
) -> std::result::Result<models::SasDefinitionBundle, delete_sas_definition::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/storage/{}/sas/{}",
operation_config.base_path(),
storage_account_name,
sas_definition_name
);
let mut url = url::Url::parse(url_str).map_err(delete_sas_definition::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete_sas_definition::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// DELETE carries no payload, so no content-type header is set.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(delete_sas_definition::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(delete_sas_definition::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SasDefinitionBundle = serde_json::from_slice(rsp_body)
.map_err(|source| delete_sas_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200: the body is expected to be a KeyVaultError document.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::KeyVaultError = serde_json::from_slice(rsp_body)
.map_err(|source| delete_sas_definition::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete_sas_definition::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Errors returned by `delete_sas_definition`.
pub mod delete_sas_definition {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::KeyVaultError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
|
use poise::serenity_prelude::{Mentionable, User};
use crate::{
types::{Error, PoiseContext},
utils::{
apis::neko_api,
discord::{reply_embed, reply_plain},
},
};
/// Shared implementation for the neko-API interaction commands.
///
/// Maps `key` to an embed title template (two `{}` placeholders: author,
/// then target), fetches a matching gif from the neko API, and replies with
/// an embed. Unknown keys and a missing `url` in the API response are
/// reported to the user as plain-text replies rather than errors.
async fn _neko_command(ctx: PoiseContext<'_>, user: User, key: &str) -> Result<(), Error> {
    let title_builder = match key {
        "baka" => "{} calls {} baka",
        "cuddle" => "{} cuddles {}",
        "hug" => "{} hugs {}",
        "kiss" => "{} kisses {}",
        "pat" => "{} pats {}",
        "poke" => "{} pokes {}",
        "slap" => "{} slaps {}",
        "smug" => "{} smugs at {}",
        "tickle" => "{} tickles {}",
        _ => {
            reply_plain(ctx, "Wrong key").await?;
            return Ok(());
        }
    };
    let user_1 = ctx.author().mention();
    let user_2 = user.mention();
    let resp = neko_api(key, true).await?;
    // A single exit path instead of `return` inside an `if let` branch.
    match resp.get("url") {
        Some(url) => {
            reply_embed(ctx, |e| {
                e.description(
                    title_builder
                        .replacen("{}", &user_1.to_string(), 1)
                        .replacen("{}", &user_2.to_string(), 1),
                );
                e.image(url)
            })
            .await?;
        }
        None => {
            reply_plain(ctx, "Can't find a gif").await?;
        }
    }
    Ok(())
}
/// Call somebody baka.
///
/// Usage: `baka @user`
#[poise::command(slash_command, defer_response)]
pub async fn baka(ctx: PoiseContext<'_>, #[description = "Who?"] user: User) -> Result<(), Error> {
    // Tail expression instead of a needless `return ...;`.
    _neko_command(ctx, user, "baka").await
}
/// Cuddle somebody.
///
/// Usage: `cuddle @user`
#[poise::command(slash_command, defer_response)]
pub async fn cuddle(ctx: PoiseContext<'_>, #[description = "Who?"] user: User) -> Result<(), Error> {
    // Tail expression instead of a needless `return ...;`.
    _neko_command(ctx, user, "cuddle").await
}
/// Hug somebody.
///
/// Usage: `hug @user`
#[poise::command(slash_command, defer_response)]
pub async fn hug(ctx: PoiseContext<'_>, #[description = "Who?"] user: User) -> Result<(), Error> {
    // Tail expression instead of a needless `return ...;`.
    _neko_command(ctx, user, "hug").await
}
/// Kiss somebody.
///
/// Usage: `kiss @user`
#[poise::command(slash_command, defer_response)]
pub async fn kiss(ctx: PoiseContext<'_>, #[description = "Who?"] user: User) -> Result<(), Error> {
    // Tail expression instead of a needless `return ...;`.
    _neko_command(ctx, user, "kiss").await
}
/// Pat somebody.
///
/// Usage: `pat @user`
#[poise::command(slash_command, defer_response)]
pub async fn pat(ctx: PoiseContext<'_>, #[description = "Who?"] user: User) -> Result<(), Error> {
    // Tail expression instead of a needless `return ...;`.
    _neko_command(ctx, user, "pat").await
}
/// Poke somebody.
///
/// Usage: `poke @user`
#[poise::command(slash_command, defer_response)]
pub async fn poke(ctx: PoiseContext<'_>, #[description = "Who?"] user: User) -> Result<(), Error> {
    // Tail expression instead of a needless `return ...;`.
    _neko_command(ctx, user, "poke").await
}
/// Slap somebody.
///
/// Usage: `slap @user`
#[poise::command(slash_command, defer_response)]
pub async fn slap(ctx: PoiseContext<'_>, #[description = "Who?"] user: User) -> Result<(), Error> {
    // Tail expression instead of a needless `return ...;`.
    _neko_command(ctx, user, "slap").await
}
/// Smug at somebody.
///
/// Usage: `smug @user`
#[poise::command(slash_command, defer_response)]
pub async fn smug(ctx: PoiseContext<'_>, #[description = "Who?"] user: User) -> Result<(), Error> {
    // Tail expression instead of a needless `return ...;`.
    _neko_command(ctx, user, "smug").await
}
/// Tickle somebody.
///
/// Usage: `tickle @user`
#[poise::command(slash_command, defer_response)]
pub async fn tickle(ctx: PoiseContext<'_>, #[description = "Who?"] user: User) -> Result<(), Error> {
    // Tail expression instead of a needless `return ...;`.
    _neko_command(ctx, user, "tickle").await
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// UsageCustomReportsAttributes : The response containing attributes for custom reports.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UsageCustomReportsAttributes {
/// The date the specified custom report was computed.
#[serde(rename = "computed_on", skip_serializing_if = "Option::is_none")]
pub computed_on: Option<String>,
/// The ending date of custom report.
#[serde(rename = "end_date", skip_serializing_if = "Option::is_none")]
pub end_date: Option<String>,
/// The size of the custom report.
#[serde(rename = "size", skip_serializing_if = "Option::is_none")]
pub size: Option<i64>,
/// The starting date of custom report.
#[serde(rename = "start_date", skip_serializing_if = "Option::is_none")]
pub start_date: Option<String>,
/// A list of tags to apply to custom reports.
#[serde(rename = "tags", skip_serializing_if = "Option::is_none")]
pub tags: Option<Vec<String>>,
}
impl UsageCustomReportsAttributes {
/// The response containing attributes for custom reports.
pub fn new() -> UsageCustomReportsAttributes {
UsageCustomReportsAttributes {
computed_on: None,
end_date: None,
size: None,
start_date: None,
tags: None,
}
}
}
|
/// Abstraction over running an operation `O` to produce a result `R`.
///
/// Implementors decide how `operation` is carried out — presumably wrapping
/// it with behavior such as retries or pass-through execution (confirm
/// against the crate's implementors).
pub trait Policy<O, R>
{
/// Runs `operation` under this policy and returns its result.
fn execute(&self, operation: O) -> R;
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.