text stringlengths 8 4.13M |
|---|
extern crate bulletrs;
use bulletrs::sys as bt;
#[test]
fn broadphase_destcutor() {
    // Smoke test: constructs a btDbvtBroadphase with a null pair cache and
    // immediately runs its destructor through the raw FFI binding, verifying
    // that create/destroy does not crash.
    // NOTE(review): the test name has a typo ("destcutor" -> "destructor").
    unsafe {
        let mut broadphase = bt::btDbvtBroadphase::new(std::ptr::null_mut());
        bt::btDbvtBroadphase_btDbvtBroadphase_destructor(&mut broadphase as *mut _);
    }
}
#[test]
fn bindgen_generated_test() {
unsafe {
let mut broadphase = bt::btDbvtBroadphase::new(std::ptr::null_mut());
let info = bt::btDefaultCollisionConstructionInfo::new();
let mut collision_configuration =
bt::btDefaultCollisionConfiguration::new(&info as *const _);
let mut dispatcher =
bt::btCollisionDispatcher::new(&mut collision_configuration as *mut _ as *mut _);
let mut solver = bt::btSequentialImpulseConstraintSolver::new();
let mut dynamics_world = bt::btDiscreteDynamicsWorld::new(
&mut dispatcher as *mut _ as *mut _,
&mut broadphase as *mut _ as *mut _,
&mut solver as *mut _ as *mut _,
&mut collision_configuration as *mut _ as *mut _,
);
let gravity = bt::btVector3 {
m_floats: [0.0, -10.0, 0.0, 0.0],
};
bt::btDiscreteDynamicsWorld_setGravity(
&mut dynamics_world as *mut _ as *mut _,
&gravity as *const _ as *const _,
);
let mut fall_shape = bt::btSphereShape::new(2.0);
let sphere_rotation = bt::btMatrix3x3 {
m_el: [
bt::btVector3 {
m_floats: [1.0, 0.0, 0.0, 0.0],
},
bt::btVector3 {
m_floats: [0.0, 1.0, 0.0, 0.0],
},
bt::btVector3 {
m_floats: [0.0, 0.0, 1.0, 0.0],
},
],
};
let sphere_transform = bt::btTransform {
m_basis: sphere_rotation.clone(),
m_origin: bt::btVector3 {
m_floats: [0.0, 5.0, 0.0, 0.0],
},
};
let mut fall_motion_state = bt::btDefaultMotionState::new(&sphere_transform as *const _, bt::btTransform_getIdentity());
let mass = 1.0;
let mut fall_inertia = bt::btVector3 {
m_floats: [0.0, 0.0, 0.0, 0.0],
};
bt::btSphereShape_calculateLocalInertia(
&mut fall_shape as *mut _ as *mut _,
mass,
&mut fall_inertia as *mut _,
);
let world_rotation = bt::btMatrix3x3 {
m_el: [
bt::btVector3 {
m_floats: [1.0, 0.0, 0.0, 0.0],
},
bt::btVector3 {
m_floats: [0.0, 1.0, 0.0, 0.0],
},
bt::btVector3 {
m_floats: [0.0, 0.0, 1.0, 0.0],
},
],
};
let world_transform = bt::btTransform {
m_basis: world_rotation.clone(),
m_origin: bt::btVector3 {
m_floats: [0.0, 0.0, 0.0, 0.0],
},
};
let mut fall_rigid_body_ci = bt::btRigidBody_btRigidBodyConstructionInfo {
m_mass: mass,
m_motionState: &mut fall_motion_state as *mut _ as *mut _,
m_collisionShape: &mut fall_shape as *mut _ as *mut _,
m_localInertia: fall_inertia,
m_linearDamping: 0.0,
m_angularDamping: 0.0,
m_friction: 0.5,
m_rollingFriction: 0.0,
m_spinningFriction: 0.0,
m_restitution: 0.0,
m_linearSleepingThreshold: 0.8,
m_angularSleepingThreshold: 1.0,
m_additionalDamping: false,
m_additionalDampingFactor: 0.005,
m_additionalLinearDampingThresholdSqr: 0.01,
m_additionalAngularDampingThresholdSqr: 0.01,
m_additionalAngularDampingFactor: 0.01,
m_startWorldTransform: world_transform
};
let mut fall_rigid_body = bt::btRigidBody::new(&mut fall_rigid_body_ci as *mut _ as *mut _);
bt::btDiscreteDynamicsWorld_addRigidBody(&mut dynamics_world as *mut _ as *mut _, &mut fall_rigid_body as *mut _ as *mut _);
let mut old_y = 5.0;
for _ in 0 .. 100 {
bt::btDiscreteDynamicsWorld_stepSimulation(&mut dynamics_world as *mut _ as *mut _, 0.01, 0, 0.01);
let motion_state : *mut bt::btDefaultMotionState = fall_rigid_body.m_optionalMotionState as *mut _;
let graphics_world_trans = (*motion_state).m_graphicsWorldTrans;
let sphere_y = graphics_world_trans.m_origin.m_floats[1];
assert!(old_y > sphere_y);
old_y = sphere_y;
}
}
}
|
use actix_web::{get, web, HttpResponse};
use crate::appdata::AppData;
use prometheus::{TextEncoder, Encoder};
/// GET /metrics — exposes every metric registered in the app's Prometheus
/// registry using the text exposition format.
#[get("/metrics")]
pub async fn get_metrics(data: web::Data<AppData>) -> HttpResponse {
    let metric_families = data.prom.registry.gather();
    let encoder = TextEncoder::new();
    let mut encoded = Vec::new();
    // Encoding into a Vec and converting the text encoder's output to UTF-8
    // are both infallible in practice; a failure here is a programming error.
    encoder.encode(&metric_families, &mut encoded).unwrap();
    HttpResponse::Ok().body(String::from_utf8(encoded).unwrap())
}
|
#![allow(dead_code)]
use arc_runtime::prelude::*;
#[rewrite]
fn foo(x: i32) {
    // Recursively counts x down to 0 and prints once at the end.
    // NOTE(review): the `#[rewrite]` proc-macro threads a Context parameter
    // through calls (see the expansion comment below); `foo((a,))` packs the
    // argument in a tuple because the macro unpacks it — do not "fix" the
    // tuple without checking the macro's calling convention.
    let a: i32 = x - 1;
    if x == 0 {
        println!("Hello, world!");
    } else {
        foo((a,))
    }
}
// Expands into:
// fn _foo(x: i32, ctx: Context) {
// let a: i32 = x - 1;
// if x == 0 {
// println!("Hello, world!");
// } else {
// _foo(a, ctx)
// }
// }
#[rewrite(main)]
fn main() {
    // `String`, `unit`, and the free-function call style come from
    // arc_runtime's prelude; `#[rewrite(main)]` rewires them to GC-rooted
    // versions (see the expansion comment below) — this is not std::String.
    let x: String = String::from_str("Hello, world!");
    let y: &str = "Hello, world!";
    let _z: unit = String::push_str(x, y);
}
// Expands into:
// fn _bar() {
// let system = &KompactConfig::default().build().unwrap();
// let mutator = &mut instantiate_immix(ImmixOptions::default());
// let ctx = Context::new(system, mutator);
//
// let stack: &ShadowStack = &ctx.mutator.shadow_stack();
// let value = String::from_str("Hello, world!", ctx);
// #[allow(unused_unsafe)]
// let mut x = unsafe {
// ShadowStackInternal::<String>::construct(
// stack,
// stack.head.get(),
// core::mem::transmute::<_, TraitObject>(&value as &dyn Rootable).vtable as usize,
// value,
// )
// };
// #[allow(unused_unsafe)]
// stack.head.set(unsafe { core::mem::transmute(&mut x) });
// #[allow(unused_mut)]
// let mut x = unsafe { Rooted::construct(&mut x.value) };
// let y: &str = "Hello, world!";
// let _z: unit = String::push_str(x.clone(), y, ctx);
// }
|
/// Logical connective used to combine clauses.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Operator {
    Or,
    And,
}

impl std::fmt::Display for Operator {
    /// Renders the operator in its canonical upper-case form.
    fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        let text = match self {
            Operator::Or => "OR",
            Operator::And => "AND",
        };
        write!(formatter, "{}", text)
    }
}

impl From<&str> for Operator {
    /// Parses "OR" / "AND" (exact case).
    /// Panics on any other input, matching the original contract.
    fn from(item: &str) -> Self {
        match item {
            "OR" => Operator::Or,
            "AND" => Operator::And,
            other => panic!("could not convert {:?} to operator", other),
        }
    }
}
|
use crate::entity_type::{Count, EntityType};
use crate::util::clip;
use std::borrow::Borrow;
/// f32 newtype used for every emotional-state component; all arithmetic on it
/// saturates into [0, 1] (see the AddAssign/SubAssign impls below).
#[derive(PartialOrd, PartialEq, Copy, Clone, Debug, Default)]
#[repr(transparent)]
pub struct Wrapper(pub f32);
// One slot per entity-type preference plus the four named emotions.
const SIZE: usize = EntityType::COUNT + 4;
type WrapperArray = [Wrapper; SIZE];
/// Dense vector of an agent's per-entity food preferences and emotions.
/// Layout: indices [0, EntityType::COUNT) hold preferences, followed by
/// hunger, fear, tiredness and aggression (see the index constants in the impl).
#[derive(Clone, Debug)]
pub struct EmotionalState {
    arr: WrapperArray,
}
impl EmotionalState {
    // Indices of the named emotions, stored right after the preferences.
    const HUNGER: usize = EntityType::COUNT + 0;
    const FEAR: usize = EntityType::COUNT + 1;
    const TIREDNESS: usize = EntityType::COUNT + 2;
    const AGGRESSION: usize = EntityType::COUNT + 3;

    /// Preference for the given entity type.
    pub fn pref(&self, et: EntityType) -> Preference {
        self.arr[et.idx()]
    }

    /// Builds a state from (entity type, reward) pairs plus default hunger.
    /// Repeated entity types accumulate; each `+=` saturates into [0, 1].
    pub fn new(food_preferences: Vec<(EntityType, f32)>) -> Self {
        let mut es = Self {
            arr: Default::default(),
        };
        for (et, r) in food_preferences {
            es.arr[et.idx()] += r;
        }
        es += Hunger::default();
        es
    }

    /// All per-entity preferences as a slice.
    pub fn preferences(&self) -> &[Preference] {
        &self.arr[0..EntityType::COUNT]
    }

    /// Overwrites the preference for `et`.
    /// Implemented as saturate-to-zero then saturating add, so the stored
    /// value is effectively `clip(pref, 0, 1)`.
    pub fn set_preference(&mut self, et: EntityType, pref: f32) {
        let p = &mut self.arr[et.idx()];
        *p -= 1.0;
        *p += pref;
    }

    /// Current hunger level.
    pub fn hunger(&self) -> Hunger {
        Hunger(self.arr[Self::HUNGER].0)
    }

    /// Overwrites hunger (clear-to-zero then saturating add, as above).
    pub fn set_hunger(&mut self, hunger: Hunger) {
        *self -= Hunger(1.0);
        *self += hunger;
    }

    /// Current tiredness level.
    pub fn tiredness(&self) -> Tiredness {
        Tiredness(self.arr[Self::TIREDNESS].0)
    }

    /// Overwrites tiredness.
    pub fn set_tiredness(&mut self, tiredness: Tiredness) {
        *self -= Tiredness(1.0);
        *self += tiredness;
    }

    /// Current fear level.
    pub fn fear(&self) -> Fear {
        Fear(self.arr[Self::FEAR].0)
    }

    /// Overwrites fear.
    pub fn set_fear(&mut self, fear: Fear) {
        *self -= Fear(1.0);
        *self += fear;
    }

    /// Current aggression level.
    pub fn aggression(&self) -> Aggression {
        Aggression(self.arr[Self::AGGRESSION].0)
    }

    /// Overwrites aggression.
    pub fn set_aggression(&mut self, aggression: Aggression) {
        *self -= Aggression(1.0);
        *self += aggression;
    }

    /// Element-wise arithmetic mean of the given states.
    /// An empty iterator yields the all-zero state.
    pub fn average<B>(iter: impl Iterator<Item = B>) -> Self
    where
        B: Borrow<Self>,
    {
        // BUG FIX: the divisor was previously hard-coded to 1, so this
        // returned the element-wise *sum*, never the mean.
        let mut count: usize = 0;
        let mut sum = Self {
            arr: Default::default(),
        };
        for b in iter {
            let em = b.borrow();
            for i in 0..sum.arr.len() {
                sum.arr[i].0 += em.arr[i].0;
            }
            count += 1;
        }
        // max(1) keeps the empty-iterator result at zeros without dividing by 0.
        let inv = 1.0 / count.max(1) as f32;
        for e in &mut sum.arr {
            e.0 *= inv;
        }
        sum
    }

    /// Raw f32 view of the state (e.g. for feeding into a model).
    pub(crate) fn encode(&self) -> &[f32; SIZE] {
        // Safe because Wrapper is #[repr(transparent)] over f32.
        unsafe { std::mem::transmute(&self.arr) }
    }
    pub(crate) const SIZE: usize = SIZE;
}
// NOTE(review): `Reward` is currently unused in this view of the file.
type Reward = f32;
pub type Preference = Wrapper;

impl std::ops::AddAssign<&EmotionalState> for EmotionalState {
    // NOTE(review): despite the `+=` spelling this computes the element-wise
    // *midpoint* of the two states (an exponential blend), not a sum —
    // confirm callers expect that before changing or renaming it.
    fn add_assign(&mut self, rhs: &Self) {
        for i in 0..self.arr.len() {
            self.arr[i].0 = 0.5 * (self.arr[i].0 + rhs.arr[i].0)
        }
    }
}

impl std::ops::AddAssign<f32> for Wrapper {
    // Saturating add: result is clipped back into [0, 1].
    fn add_assign(&mut self, rhs: f32) {
        self.0 = clip(self.0 + rhs, 0.0, 1.0);
    }
}

impl std::ops::SubAssign<f32> for Wrapper {
    // Saturating subtract: result is clipped back into [0, 1].
    fn sub_assign(&mut self, rhs: f32) {
        self.0 = clip(self.0 - rhs, 0.0, 1.0);
    }
}
/// Hunger level; stored in EmotionalState at index HUNGER.
#[derive(PartialOrd, PartialEq, Copy, Clone, Debug)]
#[repr(transparent)]
pub struct Hunger(pub f32);
impl Default for Hunger {
    fn default() -> Self {
        // Newly created agents start moderately hungry.
        Self(0.39)
    }
}
impl std::ops::AddAssign<Hunger> for EmotionalState {
    // Saturating add into the hunger slot.
    fn add_assign(&mut self, rhs: Hunger) {
        self.arr[Self::HUNGER] += rhs.0;
    }
}
impl std::ops::SubAssign<Hunger> for EmotionalState {
    // Saturating subtract from the hunger slot.
    fn sub_assign(&mut self, rhs: Hunger) {
        self.arr[Self::HUNGER] -= rhs.0;
    }
}
/// Tiredness level; stored in EmotionalState at index TIREDNESS.
#[derive(PartialOrd, PartialEq, Copy, Clone, Debug)]
#[repr(transparent)]
pub struct Tiredness(pub f32);
impl Default for Tiredness {
    fn default() -> Self {
        Self(0.0)
    }
}
impl std::ops::AddAssign<Tiredness> for EmotionalState {
    fn add_assign(&mut self, rhs: Tiredness) {
        self.arr[Self::TIREDNESS] += rhs.0;
    }
}
impl std::ops::SubAssign<Tiredness> for EmotionalState {
    fn sub_assign(&mut self, rhs: Tiredness) {
        self.arr[Self::TIREDNESS] -= rhs.0;
    }
}
/// Fear level; stored in EmotionalState at index FEAR.
#[derive(PartialOrd, PartialEq, Copy, Clone, Debug)]
#[repr(transparent)]
pub struct Fear(pub f32);
impl Default for Fear {
    fn default() -> Self {
        Self(0.0)
    }
}
impl std::ops::AddAssign<Fear> for EmotionalState {
    fn add_assign(&mut self, rhs: Fear) {
        self.arr[Self::FEAR] += rhs.0;
    }
}
impl std::ops::SubAssign<Fear> for EmotionalState {
    fn sub_assign(&mut self, rhs: Fear) {
        self.arr[Self::FEAR] -= rhs.0;
    }
}
/// Aggression level; stored in EmotionalState at index AGGRESSION.
#[derive(PartialOrd, PartialEq, Copy, Clone, Debug)]
#[repr(transparent)]
pub struct Aggression(pub f32);
impl Default for Aggression {
    fn default() -> Self {
        Self(0.0)
    }
}
impl std::ops::AddAssign<Aggression> for EmotionalState {
    fn add_assign(&mut self, rhs: Aggression) {
        self.arr[Self::AGGRESSION] += rhs.0;
    }
}
impl std::ops::SubAssign<Aggression> for EmotionalState {
    fn sub_assign(&mut self, rhs: Aggression) {
        self.arr[Self::AGGRESSION] -= rhs.0;
    }
}
|
use std::io::{self, Write};
/// Returns the single-word English name for numbers that have one
/// (0-20, the tens, and the powers 100/1000/10^6/10^9/10^12);
/// every other value is delegated back to `num_to_words`.
fn lookup_helper(num: i64) -> String {
    let word: Option<&str> = match num {
        0 => Some("zero"),
        1 => Some("one"),
        2 => Some("two"),
        3 => Some("three"),
        4 => Some("four"),
        5 => Some("five"),
        6 => Some("six"),
        7 => Some("seven"),
        8 => Some("eight"),
        9 => Some("nine"),
        10 => Some("ten"),
        11 => Some("eleven"),
        12 => Some("twelve"),
        13 => Some("thirteen"),
        14 => Some("fourteen"),
        15 => Some("fifteen"),
        16 => Some("sixteen"),
        17 => Some("seventeen"),
        18 => Some("eighteen"),
        19 => Some("nineteen"),
        20 => Some("twenty"),
        30 => Some("thirty"),
        40 => Some("forty"),
        50 => Some("fifty"),
        60 => Some("sixty"),
        70 => Some("seventy"),
        80 => Some("eighty"),
        90 => Some("ninety"),
        100 => Some("hundred"),
        1000 => Some("thousand"),
        1000000 => Some("million"),
        1000000000 => Some("billion"),
        1000000000000 => Some("trillion"),
        _ => None,
    };
    match word {
        Some(w) => w.to_string(),
        None => num_to_words(num),
    }
}

/// Converts any i64 to English words, e.g. 345 -> "three hundred forty five".
/// BUG FIX: negative inputs previously produced an empty string; they now get
/// a "negative " prefix. Arithmetic runs in u64 (`unsigned_abs`) so i64::MIN,
/// which has no i64 negation, is also handled correctly.
fn num_to_words(num: i64) -> String {
    if num == 0 {
        return lookup_helper(0);
    }
    let prefix = if num < 0 { "negative " } else { "" };
    let mut remaining = num.unsigned_abs();
    // Scan units from largest to smallest; each step strips one named group.
    const UNITS: [u64; 32] = [
        1000000000000, 1000000000, 1000000, 1000, 100,
        90, 80, 70, 60, 50, 40, 30, 20, 19, 18, 17,
        16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5,
        4, 3, 2, 1,
    ];
    let mut words = String::new();
    for &unit in UNITS.iter() {
        if unit > remaining {
            continue;
        }
        // Division replaces the original repeated-subtraction loop.
        let multiplier = remaining / unit;
        remaining %= unit;
        // Only "hundred" and larger units carry a spoken multiplier
        // ("three hundred"); tens and below appear bare ("forty").
        let count = if unit >= 100 {
            lookup_helper(multiplier as i64) + " "
        } else {
            String::from("")
        };
        words.push(' ');
        words.push_str(&count);
        words.push_str(&lookup_helper(unit as i64));
        if remaining == 0 {
            break;
        }
    }
    format!("{}{}", prefix, words.trim())
}
fn test() {
println!("10 is {}", num_to_words(10));
println!("11 is {}", num_to_words(11));
println!("35 is {}", num_to_words(35));
println!("345 is {}", num_to_words(345));
println!("305 is {}", num_to_words(305));
println!("9305 is {}", num_to_words(9305));
println!("109305 is {}", num_to_words(109305));
println!("3109305 is {}", num_to_words(3109305));
println!("123109305 is {}", num_to_words(123109305));
println!("444123109305 is {}", num_to_words(444123109305));
println!("2444123109305 is {}", num_to_words(2444123109305));
println!("25444123109305 is {}", num_to_words(25444123109305));
}
fn main() {
    // Interactive REPL: reads a line, handles the control words "quit" and
    // "test", otherwise parses the input as an i64 and prints its English form.
    println!("i turn numbers into words");
    loop {
        print!("try me: ");
        // `print!` does not flush; flush so the prompt shows before read_line blocks.
        io::stdout().flush().unwrap();
        let mut ss = String::new();
        match io::stdin().read_line(&mut ss) {
            Ok(bytes_read) => {
                ss = ss.trim().to_string();
                // 0 bytes read means EOF (e.g. Ctrl-D), not an empty line.
                if bytes_read == 0 {
                    println!("okay bye (EOF)");
                    break
                }
                if ss == "quit" {
                    println!("bye");
                    break
                }
                if ss == "test" {
                    test();
                    continue
                }
                // Type of `n` (i64) is inferred from the num_to_words call below.
                let n = match ss.parse() {
                    Ok(z) => z,
                    Err(_) => {
                        println!("i don't understand");
                        continue;
                    }
                };
                println!("{} is {}", ss, num_to_words(n));
            },
            // Unreadable stdin: give up silently.
            Err(_) => break,
        }
    }
}
|
use std::collections::HashMap;
use std::f32::consts::FRAC_PI_2;
pub fn create_hall(world: &mut ::specs::World) {
    // Builds the "hall" hub level: a rectangular maze with the player spawn in
    // one corner and one teleport per configured level along the top and side
    // walls. A teleport is active only once the previous level has a score.
    let number_of_levels = ::CONFIG.levels.len() as isize;
    // Teleports are split between the top wall and the side wall.
    let levels_on_top = number_of_levels / 2;
    let levels_on_side = number_of_levels - levels_on_top;
    // 3 cells of spacing per teleport plus a 3-cell border on each axis.
    let size = ::na::Vector2::new(levels_on_top.max(1)*3+3, levels_on_side*3+3);
    let mut maze = ::maze::Maze::new_rectangle(size, 1.0);
    maze.circle();
    maze.extend(1);
    maze.circle();
    let maze_size = maze.size.clone();
    let mut maze_colors = HashMap::new();
    // Build Player
    let start_cell = ::na::Vector2::new(maze_size[0] - 3, maze_size[1] - 2);
    maze_colors.insert(start_cell, (::CONFIG.start_color, false));
    maze.walls.remove(&start_cell);
    // Spawn facing -y, nudged 0.2 back from the cell centre along that direction.
    let dir = ::na::Vector3::new(0.0, -1.0, 0.0);
    let player_pos = maze.to_world(&start_cell)
        - 0.2 * ::na::Vector3::new(dir[0] as f32, dir[1] as f32, 0.0);
    world.write_resource::<::resource::PlayerControl>().pointer =
        [(-dir[1] as f32).atan2(dir[0] as f32), 0.0];
    ::entity::create_player_w(player_pos, false, world);
    // Build Teleport
    // Top-wall teleports pitch by -PI/2; side-wall teleports roll by +PI/2.
    let teleport_cells = (0isize..levels_on_top)
        .map(|i| (::na::Vector2::new(maze_size[0] - 4 - i*3, 1), ::na::Vector3::new(-FRAC_PI_2, 0.0, 0.0)))
        .chain((0..levels_on_side)
            .map(|i| (::na::Vector2::new(1, i*3+3), ::na::Vector3::new(0.0, FRAC_PI_2, 0.0))));
    for (i, (teleport_cell, teleport_dir)) in teleport_cells.enumerate() {
        // The first teleport is always open; later ones unlock in sequence.
        let activated = i == 0 || world.read_resource::<::resource::Save>().score(i-1).is_some();
        maze_colors.insert(teleport_cell, (::CONFIG.end_color, activated));
        maze.walls.remove(&teleport_cell);
        // The score plate's offset and facing depend on which wall it sits on.
        let score_pos = if teleport_dir[1] == 0.0 {
            ::na::Isometry3::new(
                maze.to_world(&teleport_cell)+::na::Vector3::new(-0.7, 0.5, 0.3),
                ::na::Vector3::new(::std::f32::consts::FRAC_PI_2, 0.0, 0.0)
            ) * ::na::UnitQuaternion::new(::na::Vector3::new(0.0, ::std::f32::consts::PI, 0.0))
        } else {
            ::na::Isometry3::new(
                maze.to_world(&teleport_cell)+::na::Vector3::new(0.5, 0.7, 0.3),
                ::na::Vector3::new(::std::f32::consts::FRAC_PI_2, 0.0, 0.0)
            ) * ::na::UnitQuaternion::new(::na::Vector3::new(0.0, ::std::f32::consts::FRAC_PI_2, 0.0))
        };
        ::entity::draw_score(
            score_pos,
            i,
            world,
        );
        // Same wall-dependent placement for the level-number label.
        let number_pos = if teleport_dir[1] == 0.0 {
            ::na::Isometry3::new(
                maze.to_world(&teleport_cell)+::na::Vector3::new(0.0, -0.5, 0.0),
                ::na::Vector3::new(::std::f32::consts::FRAC_PI_2, 0.0, 0.0)
            ) * ::na::UnitQuaternion::new(::na::Vector3::new(0.0, ::std::f32::consts::PI, 0.0))
        } else {
            ::na::Isometry3::new(
                maze.to_world(&teleport_cell)+::na::Vector3::new(-0.5, 0.0, 0.0),
                ::na::Vector3::new(::std::f32::consts::FRAC_PI_2, 0.0, 0.0)
            ) * ::na::UnitQuaternion::new(::na::Vector3::new(0.0, ::std::f32::consts::FRAC_PI_2, 0.0))
        };
        ::entity::draw_number(
            number_pos,
            format!("{}", i+1),
            world,
        );
        if activated {
            ::entity::create_teleport_w(
                ::na::Isometry3::new(
                    maze.to_world(&teleport_cell),
                    teleport_dir,
                ),
                maze.scale,
                ::resource::LevelAction::Level(i),
                world,
            );
        }
    }
    // Build Maze
    ::entity::create_2d_maze_walls_w(&maze_colors, &maze, world);
    world.add_resource(::resource::Maze::Maze2D(maze));
}
|
/// Utility trait for types that can be cell values.
pub trait CellValue: std::fmt::Debug + PartialEq + Copy + Clone {}

// Blanket impl: anything Debug + PartialEq + Copy qualifies automatically.
impl<C: std::fmt::Debug + PartialEq + Copy + Clone> CellValue for C {}

/// A cell in a picross board.
#[derive(PartialEq, Copy, Clone, Debug)]
pub enum Cell<C: CellValue> {
    /// An empty cell.
    Empty,
    /// A crossed out cell.
    CrossedOut,
    /// A cell with a value.
    Filled(C),
}

impl<C: CellValue> Cell<C> {
    /// Whether or not this cell is ignored by constraints — i.e. it does not
    /// carry a value (`Empty` or `CrossedOut`).
    pub fn is_ignored(&self) -> bool {
        !matches!(self, Cell::Filled(_))
    }
}
|
use axum::{
http::{header::ACCEPT_LANGUAGE, HeaderMap},
response::IntoResponse,
};
use lazy_static::lazy_static;
use regex::Regex;
use tower_cookies::{Cookie, Cookies};
/// Request guard that combines the accept-language header & the language cookie.
pub struct UserLanguage(pub String);
// In this module we read the language settings the user is using and provide
// the correct compiled version from it.
// Name of the cookie that stores the user's explicit language choice.
const LANGUAGE_COOKIE_NAME: &str = "language";
/// This first checks if there is a language defined in the cookies. If this is
/// not the case, it tries to guess a language from the accept-language header.
pub fn user_language(headers: &HeaderMap, cookies: &mut Cookies) -> UserLanguage {
    let cookie_language = cookies
        .get(LANGUAGE_COOKIE_NAME)
        .map(|cookie| cookie.value().to_string())
        // A cookie may point at a language that has since been removed.
        .filter(|language| is_language_supported(language));
    match cookie_language {
        Some(language) => UserLanguage(language),
        // No usable cookie: fall back to the header, then to English.
        None => UserLanguage(parse_languages(headers).unwrap_or_else(|| "en".to_string())),
    }
}
/// Sets the user's language cookie if the language is supported.
/// Unsupported languages are silently ignored (no cookie is written).
pub async fn set_user_language(cookies: Cookies, language: String) -> impl IntoResponse {
    if is_language_supported(&language) {
        // Path "/" so the preference applies site-wide.
        cookies.add(
            Cookie::build(LANGUAGE_COOKIE_NAME, language)
                .path("/")
                .finish(),
        );
    }
}
/// Parses the "Accept-Language" header and returns the first supported language.
/// If no supported language is found, returns None.
fn parse_languages(headers: &HeaderMap) -> Option<String> {
    // The header must be present and readable as text.
    let header_str = headers.get(ACCEPT_LANGUAGE)?.to_str().ok()?;
    // Two-letter primary tag with an optional region ("fr" from "fr-CH").
    lazy_static! {
        static ref RE: Regex = Regex::new(r"(?i)([a-z]{2})(?:-[a-z]{2})?").unwrap();
    }
    RE.captures_iter(header_str)
        // Group 1 always participates in a match; keep just its text.
        .filter_map(|capture| capture.get(1))
        .map(|primary| primary.as_str())
        // First language we actually have a compiled build for wins.
        .find(|language| is_language_supported(language))
        .map(str::to_string)
}
/// Checks if a language is supported by PacoPlay by checking if there is a
/// compiled version of the language.
fn is_language_supported(lang: &str) -> bool {
    get_static_language_file(lang).is_some()
}

/// Returns the path to the minified compiled elm file if the language is supported.
pub fn get_static_language_file(lang: &str) -> Option<&'static str> {
    // Table-driven lookup over the supported language codes.
    const LANGUAGE_FILES: [(&str, &str); 6] = [
        ("en", "../target/js/elm.en.min.js"),
        ("nl", "../target/js/elm.nl.min.js"),
        ("eo", "../target/js/elm.eo.min.js"),
        ("de", "../target/js/elm.de.min.js"),
        ("sv", "../target/js/elm.sv.min.js"),
        ("es", "../target/js/elm.es.min.js"),
    ];
    LANGUAGE_FILES
        .iter()
        .find(|(code, _)| *code == lang)
        .map(|&(_, path)| path)
}
|
#![feature(custom_attribute,plugin)]
#![plugin(rocket_codegen)]
extern crate chrono;
#[macro_use] extern crate diesel;
#[macro_use] extern crate diesel_codegen;
#[macro_use] extern crate lazy_static;
extern crate r2d2;
extern crate r2d2_diesel;
extern crate rocket;
#[macro_use] extern crate rocket_contrib;
extern crate serde;
#[macro_use] extern crate serde_derive;
extern crate serde_json;
pub mod models;
use diesel::sqlite::SqliteConnection;
use r2d2::Pool;
use r2d2_diesel::ConnectionManager;
use rocket_contrib::JSON;
lazy_static! {
    // Process-wide SQLite connection pool, built lazily on first use.
    pub static ref CONN_POOL: Pool<ConnectionManager<SqliteConnection>> = models::create_conn_pool();
}

#[get("/")]
fn index() -> &'static str {
    // Health-check / landing route.
    "Hello, world!"
}

#[get("/foods")]
fn foods() -> JSON<Vec<models::Food>> {
    use diesel::LoadDsl;
    use models::schema::foods;
    let conn = CONN_POOL.get().unwrap();
    // NOTE(review): loads `models::food::Food` while the signature says
    // `models::Food` — presumably the latter re-exports the former; confirm.
    let foods = foods::table.load::<models::food::Food>(&*conn).unwrap();
    JSON(foods)
}

#[post("/foods", data="<food>")]
fn create_food(food: JSON<models::NewFood>) {
    use diesel::ExecuteDsl;
    use models::schema::foods;
    let conn = CONN_POOL.get().unwrap();
    // Insert panics (500) on constraint violations — no error mapping here.
    diesel::insert(&food.0).into(foods::table).execute(&*conn).unwrap();
}

#[get("/units")]
fn units() -> JSON<Vec<models::Unit>> {
    use diesel::LoadDsl;
    use models::schema::units;
    let conn = CONN_POOL.get().unwrap();
    // NOTE(review): same signature/load-type mismatch as `foods` above.
    let units = units::table.load::<models::units::Unit>(&*conn).unwrap();
    JSON(units)
}

#[post("/units", data="<unit>")]
fn create_unit(unit: JSON<models::NewUnit>) {
    use diesel::ExecuteDsl;
    use models::schema::units;
    let conn = CONN_POOL.get().unwrap();
    diesel::insert(&unit.0).into(units::table).execute(&*conn).unwrap();
}

#[get("/entries")]
fn entries() -> JSON<Vec<models::Entry>> {
    use diesel::LoadDsl;
    use models::schema::entries;
    let conn = CONN_POOL.get().unwrap();
    let entries = entries::table.load::<models::Entry>(&*conn).unwrap();
    JSON(entries)
}
// Request body for creating an entry; the date comes from the URL path.
#[derive(Deserialize)]
struct NewEntry {
    pub food: i32,
    pub unit: Option<i32>,
    pub quantity: f32,
}

#[post("/entries/<year>/<month>/<day>", data="<entry>")]
fn create_entry(year: u32, month: u32, day: u32, entry: JSON<NewEntry>) {
    use diesel::{Connection,ExecuteDsl};
    use models::schema::entries;
    let conn = CONN_POOL.get().unwrap();
    // SQLite does not enforce foreign keys unless enabled per connection.
    (&*conn).execute("PRAGMA foreign_keys = ON;").unwrap();
    let entry = models::NewEntry {
        // NOTE(review): from_ymd panics on invalid dates (e.g. month 13), so
        // this route 500s on bad input — confirm that is acceptable.
        date: chrono::NaiveDate::from_ymd(year as i32, month, day),
        food: entry.0.food,
        unit: entry.0.unit,
        quantity: entry.0.quantity,
    };
    diesel::insert(&entry).into(entries::table).execute(&*conn).unwrap();
}

fn main() {
    // Mounts every route at the root and starts the Rocket server.
    rocket::ignite().mount("/", routes![index, foods, create_food, units, create_unit, entries, create_entry]).launch();
}
|
extern crate sdl2;
use super::super::input::input::Input;
use super::super::network::socket::Socket;
use super::super::world::map::Map;
use super::super::world::player::Player;
use byteorder::WriteBytesExt;
/// Sends throttled movement packets for the locally-controlled player.
pub struct PlayerController {
    // Minimum number of seconds between two movement updates.
    delay: f32,
    // Server-assigned id; movement is suppressed until it is known.
    player_id: Option<u64>,
    // Timestamp of the most recent update (None until the first tick).
    last_move: Option<std::time::Instant>,
}

impl PlayerController {
    /// Creates a controller that throttles moves to one burst per `delay` seconds.
    pub fn new(delay: f32) -> PlayerController {
        Self {
            delay,
            player_id: None,
            last_move: None,
        }
    }

    /// Records the id the server assigned to the local player.
    pub fn set_player_id(&mut self, player_id: u64) {
        self.player_id = Some(player_id);
    }
}
impl PlayerController {
    /// At most once per `delay` seconds, translates the current input state
    /// into movement packets (u16 message type 2 followed by one direction
    /// byte: 0=up, 1=down, 2=left, 3=right) and sends them over the socket.
    /// NOTE(review): `map` and `player` are currently unused — presumably
    /// reserved for client-side prediction; confirm before removing.
    pub fn update(
        &mut self,
        now: std::time::Instant,
        input: &Input,
        map: &Map,
        player: &mut Player,
        socket: &mut Socket,
    ) {
        match self.player_id {
            Some(..) => {
                // First call only arms the timer; no packet is sent yet.
                if self.last_move.is_none() {
                    self.last_move = Some(now);
                    return;
                }
                // Throttle: at most one update burst per `delay` seconds.
                if (now - self.last_move.unwrap()).as_secs_f32() < self.delay {
                    return;
                }
                let mut packet = vec![];
                packet.write_u16::<byteorder::LittleEndian>(2).unwrap();
                // Vertical axis: act only when exactly one of up/down is held.
                if input.up() != input.down() {
                    if input.up() {
                        packet.push(0);
                        socket.send(packet);
                        self.last_move = Some(now);
                    } else {
                        packet.push(1);
                        socket.send(packet);
                        self.last_move = Some(now);
                    }
                }
                // Fresh buffer: the previous one may have been moved into send().
                packet = vec![];
                packet.write_u16::<byteorder::LittleEndian>(2).unwrap();
                // Horizontal axis, same exclusive-direction rule.
                if input.left() != input.right() {
                    if input.left() {
                        packet.push(2);
                        socket.send(packet);
                        self.last_move = Some(now);
                    } else {
                        packet.push(3);
                        socket.send(packet);
                        self.last_move = Some(now);
                    }
                }
            }
            // Until the server assigns an id there is nothing to control.
            None => {}
        }
    }
}
|
//! A vec basically.
//! Used with indexes of the vector as ids,
//! allowing parent and child references.
use std::marker::PhantomData;
// Can be used to newtype ids for clearer types
pub trait ArenaID {
    /// Builds an id from a raw vector index.
    fn from_usize(idx: usize) -> Self;
    /// Returns the raw vector index this id wraps.
    fn as_usize(&self) -> usize;
}

/// Arena backed by a Vec; elements are addressed by `I` ids wrapping the
/// vector index, allowing parent/child references without borrow cycles.
pub struct Arena<T, I: ArenaID> {
    data: Vec<T>,
    // Ties the arena to one id type without storing a value of it.
    _id: PhantomData<I>,
}
|
extern crate tokio;
extern crate tokio_core;
extern crate mio_uds;
extern crate capnp;
extern crate capnpc;
extern crate capnp_futures;
extern crate futures;
pub mod addressbook_capnp;
/// Generates the Rust code from the CapnProto schema.
fn main() {
    // Runs the capnp compiler on the schema file; unwrap makes the build fail
    // loudly if the schema is missing or invalid.
    ::capnpc::CompilerCommand::new()
        .file("addressbook.capnp")
        .run()
        .unwrap();
}
use super::Opt;
use serde_json::Value;
use std::io::prelude::*;
use url::Url;
/// Thin synchronous client for the Aha REST API, scoped to one account domain.
pub struct Aha<'a> {
    // Account subdomain, i.e. the `{domain}` in https://{domain}.aha.io.
    pub domain: String,
    // Pre-configured HTTP client (auth and JSON headers set in `new`).
    pub client: reqwest::Client,
    pub user_email: String,
    // CLI options; only `verbose` is read in this file.
    pub opt: &'a Opt,
}
impl<'a> Aha<'a> {
pub fn url_builder(&self) -> Url {
let uri = format!("https://{}.aha.io/api/v1/", self.domain);
Url::parse(&uri).unwrap()
}
pub fn new(domain: String, auth_token: String, email: String, opt: &Opt) -> Aha {
let mut headers = reqwest::header::HeaderMap::new();
let mut auth =
reqwest::header::HeaderValue::from_str(&format!("Bearer {}", auth_token)).unwrap();
auth.set_sensitive(true);
headers.insert(reqwest::header::AUTHORIZATION, auth);
headers.insert(
reqwest::header::USER_AGENT,
reqwest::header::HeaderValue::from_static("Rust aha api v1 (Becker@aha.io)"),
);
headers.insert(
reqwest::header::CONTENT_TYPE,
reqwest::header::HeaderValue::from_static("application/json"),
);
headers.insert(
reqwest::header::ACCEPT,
reqwest::header::HeaderValue::from_static("application/json"),
);
let client = reqwest::Client::builder()
.gzip(true)
.default_headers(headers)
.timeout(std::time::Duration::from_secs(50))
.build()
.unwrap();
Aha {
client,
domain,
user_email: email,
opt,
}
}
pub fn projects(&self) -> Vec<Value> {
let projects_url = self.url_builder().join("products?per_page=200").unwrap();
let projects = self
.get(projects_url, "products".to_string())
.expect("Can not load projects. Check your domain and api keys");
projects.as_array().unwrap().to_vec()
}
pub fn releases(&self, project_id: String) -> Vec<Value> {
let releases_url = self
.url_builder()
.join("products/")
.unwrap()
.join(&format!("{}/", project_id))
.unwrap()
.join("releases?exclude_shipped=true&per_page=200")
.unwrap();
let releases = self
.get(releases_url, "releases".to_string())
.expect("Can not load release. Check your access in Aha!");
releases.as_array().unwrap().to_vec()
}
pub fn features(&self, project_id: String) -> Vec<Value> {
let releases_url = self
.url_builder()
.join("releases/")
.unwrap()
.join(&format!("{}/", project_id))
.unwrap()
.join("features?per_page=200&fields=*")
.unwrap();
let releases = self
.get(releases_url, "features".to_string())
.expect("Can not load features. Check your access in Aha!");
releases.as_array().unwrap().to_vec()
}
pub fn send_feature(&self, feature: &FeatureCreate) -> Result<Value, serde_json::Error> {
let uri = format!("https://{}.aha.io/api/v1/features", self.domain);
let response = self.client.post(&uri).json(&feature).send();
let content = response.unwrap().text();
if self.opt.verbose {
println!("created {:?}", content);
}
serde_json::from_str(&content.unwrap_or("".to_string()))
}
pub fn send_requirement(
&self,
feature_ref: String,
requirement: &RequirementCreate,
) -> Result<Value, serde_json::Error> {
let uri = format!(
"https://{}.aha.io/api/v1/features/{}/requirements",
self.domain, feature_ref
);
let response = self.client.post(&uri).json(&requirement).send();
let content = response.unwrap().text();
if self.opt.verbose {
println!("created {:?}", content);
}
serde_json::from_str(&content.unwrap_or("".to_string()))
}
pub fn get(&self, url: Url, base: String) -> Result<Value, serde_json::Error> {
let uri = url.to_string();
if self.opt.verbose {
println!("{} url: {}", base, uri);
}
let response = self.client.get(&uri).send();
let content = response.unwrap().text();
if self.opt.verbose {
println!("{} text {:?}", base, content);
}
let feature: Result<Value, _> = serde_json::from_str(&content.unwrap_or("".to_string()));
if let Ok(mut fe) = feature {
Ok(fe[base].take())
} else {
let ex: Result<Value, serde_json::Error> = Err(feature.unwrap_err());
ex
}
}
pub fn get_json(&self, end_path: String, base: String) -> Result<Value, serde_json::Error> {
let uri = format!("https://{}.aha.io/api/v1/", self.domain);
let url = Url::parse(&uri).unwrap();
let api_url = if !end_path.is_empty() {
format!("/{}", end_path)
} else {
"".to_string()
};
let url = url.join(&format!("{}{}{}", base, "s", api_url)).unwrap();
self.get(url, base)
}
}
// keep
/// Payload for creating a feature via the Aha API, filled in step-by-step by
/// the interactive `advance` wizard.
#[derive(Serialize, Debug, Deserialize)]
pub struct FeatureCreate {
    pub name: String,
    pub description: String,
    pub release_id: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub custom_fields: Option<CustomNotes>,
}

// `new()` without `Default` trips clippy::new_without_default; provide both.
impl Default for FeatureCreate {
    fn default() -> Self {
        Self::new()
    }
}

impl FeatureCreate {
    /// Creates an empty payload; populate it via `advance`.
    pub fn new() -> FeatureCreate {
        FeatureCreate {
            name: "".to_string(),
            description: "".to_string(),
            release_id: "".to_string(),
            custom_fields: None,
        }
    }

    /// Feeds one line of user input into the wizard and returns the prompt for
    /// the next step, or `None` once the payload is complete.
    pub fn advance(&mut self, data: String) -> Option<&str> {
        if self.name.is_empty() {
            self.name = data;
            Some("Description")
        } else if self.description.is_empty() {
            self.description = data;
            Some("Needs notes? (Yes/No)")
        } else {
            // Any answer other than an exact "Yes"/"No" leaves custom_fields unset.
            match data.as_str() {
                "Yes" => {
                    self.custom_fields = Some(CustomNotes {
                        notes: "Required".to_string(),
                    })
                }
                "No" => {
                    self.custom_fields = Some(CustomNotes {
                        notes: "Not required".to_string(),
                    })
                }
                _ => {}
            }
            None
        }
    }
}
/// Payload for creating a requirement, filled in by the `advance` wizard.
#[derive(Serialize, Debug, Deserialize)]
pub struct RequirementCreate {
    pub name: String,
    pub description: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub custom_fields: Option<CustomNotes>,
}

// `new()` without `Default` trips clippy::new_without_default; provide both.
impl Default for RequirementCreate {
    fn default() -> Self {
        Self::new()
    }
}

impl RequirementCreate {
    /// Creates an empty payload; populate it via `advance`.
    pub fn new() -> RequirementCreate {
        RequirementCreate {
            name: "".to_string(),
            description: "".to_string(),
            custom_fields: None,
        }
    }

    /// Feeds one line of user input into the wizard and returns the prompt for
    /// the next step, or `None` once name and description are set.
    pub fn advance(&mut self, data: String) -> Option<&str> {
        if self.name.is_empty() {
            self.name = data;
            Some("Description")
        } else if self.description.is_empty() {
            self.description = data;
            None
        } else {
            None
        }
    }
}
// keep
/// Partial feature-update payload; every field is optional and omitted from
/// the serialized JSON when None.
#[derive(Serialize, Debug, Deserialize)]
pub struct FeatureUpdateCreate {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub assigned_to_user: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub custom_fields: Option<CustomFieldGithub>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub workflow_status: Option<WorkflowStatusUpdate>,
}
// keep
/// Like FeatureUpdateCreate but with private fields and no description.
#[derive(Serialize, Debug, Deserialize)]
pub struct FeatureUpdate {
    #[serde(skip_serializing_if = "Option::is_none")]
    assigned_to_user: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    custom_fields: Option<CustomFieldGithub>,
    #[serde(skip_serializing_if = "Option::is_none")]
    workflow_status: Option<WorkflowStatusUpdate>,
}
// keep
/// Workflow status referenced by display name.
#[derive(Serialize, Debug, Deserialize)]
pub struct WorkflowStatusUpdate {
    pub name: String,
}
// keep
/// Custom "release notes" field; serialized under Aha's `release_notes1` key.
#[derive(Serialize, Debug, Deserialize)]
pub struct CustomNotes {
    #[serde(rename = "release_notes1")]
    notes: String,
}
// keep
/// Custom field carrying the linked GitHub pull-request URL.
#[derive(Serialize, Debug, Deserialize)]
pub struct CustomFieldGithub {
    #[serde(rename = "pull_request")]
    github_url: String,
}
|
use chrono::prelude::*;
use regex::Regex;
use serde::Serialize;
pub mod get;
pub mod search;
//#region User
/// Application user as serialized by the API (camelCase keys in JSON).
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct User {
    id: String,
    name: String,
    // Optional profile data; None when the user has not provided it.
    start_date: Option<NaiveDate>,
    address: Option<String>,
    phone_number: Option<String>,
    username: String,
    email: String,
    // Profile photo reference, if any.
    photo: Option<String>,
}
impl User {
    /// Validation regex for a field, or `None` when the field is unconstrained.
    fn get_field_regex_pattern(field_name: &str) -> Option<&'static str> {
        match field_name {
            "name" => Some(r"^.{2,50}$"),
            "address" => Some(r"^.{1,500}$"),
            "phone_number" => Some(r"^[\d+]{4,20}$"),
            "username" => Some(r"^\w{1,20}$"),
            "email" => Some(r"^\w+@\w+\.\w{2,}$"),
            _ => None,
        }
    }

    /// Checks `value` against the field's pattern; unconstrained fields pass.
    fn validate_field_str(field_name: &str, value: &str) -> Result<(), String> {
        // The patterns above are compile-time constants, so Regex::new cannot fail.
        match Self::get_field_regex_pattern(field_name) {
            Some(pattern) if !Regex::new(pattern).unwrap().is_match(value) => Err(format!(
                "Field '{}' is invalid. Expected pattern: {}.",
                field_name, pattern
            )),
            _ => Ok(()),
        }
    }
}
//#endregion
|
// svd2rust-generated reader/writer plumbing for the I2C_CR2 register.
// FieldReader/FieldWriter widths and the const bit offset `O` mirror the
// field layout consumed by the accessors in `impl R` / `impl W`.
#[doc = "Register `I2C_CR2` reader"]
pub type R = crate::R<I2C_CR2_SPEC>;
#[doc = "Register `I2C_CR2` writer"]
pub type W = crate::W<I2C_CR2_SPEC>;
#[doc = "Field `SADD` reader - SADD"]
pub type SADD_R = crate::FieldReader<u16>;
#[doc = "Field `SADD` writer - SADD"]
pub type SADD_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 10, O, u16>;
#[doc = "Field `RD_WRN` reader - RD_WRN"]
pub type RD_WRN_R = crate::BitReader;
#[doc = "Field `RD_WRN` writer - RD_WRN"]
pub type RD_WRN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ADD10` reader - ADD10"]
pub type ADD10_R = crate::BitReader;
#[doc = "Field `ADD10` writer - ADD10"]
pub type ADD10_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `HEAD10R` reader - HEAD10R"]
pub type HEAD10R_R = crate::BitReader;
#[doc = "Field `HEAD10R` writer - HEAD10R"]
pub type HEAD10R_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `START` reader - START"]
pub type START_R = crate::BitReader;
#[doc = "Field `START` writer - START"]
pub type START_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `STOP` reader - STOP"]
pub type STOP_R = crate::BitReader;
#[doc = "Field `STOP` writer - STOP"]
pub type STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `NACK` reader - NACK"]
pub type NACK_R = crate::BitReader;
#[doc = "Field `NACK` writer - NACK"]
pub type NACK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `NBYTES` reader - NBYTES"]
pub type NBYTES_R = crate::FieldReader;
#[doc = "Field `NBYTES` writer - NBYTES"]
pub type NBYTES_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
#[doc = "Field `RELOAD` reader - RELOAD"]
pub type RELOAD_R = crate::BitReader;
#[doc = "Field `RELOAD` writer - RELOAD"]
pub type RELOAD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AUTOEND` reader - AUTOEND"]
pub type AUTOEND_R = crate::BitReader;
#[doc = "Field `AUTOEND` writer - AUTOEND"]
pub type AUTOEND_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PECBYTE` reader - PECBYTE"]
pub type PECBYTE_R = crate::BitReader;
#[doc = "Field `PECBYTE` writer - PECBYTE"]
pub type PECBYTE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read-side accessors: each extracts one field from the register value
// cached in `self.bits`. Bit offsets/widths are generated from the SVD
// description and must not be hand-edited.
impl R {
    #[doc = "Bits 0:9 - SADD"]
    #[inline(always)]
    pub fn sadd(&self) -> SADD_R {
        // 10-bit slave address occupies the low bits; mask then narrow.
        SADD_R::new((self.bits & 0x03ff) as u16)
    }
    #[doc = "Bit 10 - RD_WRN"]
    #[inline(always)]
    pub fn rd_wrn(&self) -> RD_WRN_R {
        RD_WRN_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - ADD10"]
    #[inline(always)]
    pub fn add10(&self) -> ADD10_R {
        ADD10_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - HEAD10R"]
    #[inline(always)]
    pub fn head10r(&self) -> HEAD10R_R {
        HEAD10R_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - START"]
    #[inline(always)]
    pub fn start(&self) -> START_R {
        START_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - STOP"]
    #[inline(always)]
    pub fn stop(&self) -> STOP_R {
        STOP_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - NACK"]
    #[inline(always)]
    pub fn nack(&self) -> NACK_R {
        NACK_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bits 16:23 - NBYTES"]
    #[inline(always)]
    pub fn nbytes(&self) -> NBYTES_R {
        // 8-bit transfer-length field.
        NBYTES_R::new(((self.bits >> 16) & 0xff) as u8)
    }
    #[doc = "Bit 24 - RELOAD"]
    #[inline(always)]
    pub fn reload(&self) -> RELOAD_R {
        RELOAD_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 25 - AUTOEND"]
    #[inline(always)]
    pub fn autoend(&self) -> AUTOEND_R {
        AUTOEND_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - PECBYTE"]
    #[inline(always)]
    pub fn pecbyte(&self) -> PECBYTE_R {
        PECBYTE_R::new(((self.bits >> 26) & 1) != 0)
    }
}
// Write-side accessors: each returns a field-writer proxy whose const
// generic parameter is the field's bit offset (matching the reader side
// above). Generated code — do not hand-edit offsets.
impl W {
    #[doc = "Bits 0:9 - SADD"]
    #[inline(always)]
    #[must_use]
    pub fn sadd(&mut self) -> SADD_W<I2C_CR2_SPEC, 0> {
        SADD_W::new(self)
    }
    #[doc = "Bit 10 - RD_WRN"]
    #[inline(always)]
    #[must_use]
    pub fn rd_wrn(&mut self) -> RD_WRN_W<I2C_CR2_SPEC, 10> {
        RD_WRN_W::new(self)
    }
    #[doc = "Bit 11 - ADD10"]
    #[inline(always)]
    #[must_use]
    pub fn add10(&mut self) -> ADD10_W<I2C_CR2_SPEC, 11> {
        ADD10_W::new(self)
    }
    #[doc = "Bit 12 - HEAD10R"]
    #[inline(always)]
    #[must_use]
    pub fn head10r(&mut self) -> HEAD10R_W<I2C_CR2_SPEC, 12> {
        HEAD10R_W::new(self)
    }
    #[doc = "Bit 13 - START"]
    #[inline(always)]
    #[must_use]
    pub fn start(&mut self) -> START_W<I2C_CR2_SPEC, 13> {
        START_W::new(self)
    }
    #[doc = "Bit 14 - STOP"]
    #[inline(always)]
    #[must_use]
    pub fn stop(&mut self) -> STOP_W<I2C_CR2_SPEC, 14> {
        STOP_W::new(self)
    }
    #[doc = "Bit 15 - NACK"]
    #[inline(always)]
    #[must_use]
    pub fn nack(&mut self) -> NACK_W<I2C_CR2_SPEC, 15> {
        NACK_W::new(self)
    }
    #[doc = "Bits 16:23 - NBYTES"]
    #[inline(always)]
    #[must_use]
    pub fn nbytes(&mut self) -> NBYTES_W<I2C_CR2_SPEC, 16> {
        NBYTES_W::new(self)
    }
    #[doc = "Bit 24 - RELOAD"]
    #[inline(always)]
    #[must_use]
    pub fn reload(&mut self) -> RELOAD_W<I2C_CR2_SPEC, 24> {
        RELOAD_W::new(self)
    }
    #[doc = "Bit 25 - AUTOEND"]
    #[inline(always)]
    #[must_use]
    pub fn autoend(&mut self) -> AUTOEND_W<I2C_CR2_SPEC, 25> {
        AUTOEND_W::new(self)
    }
    #[doc = "Bit 26 - PECBYTE"]
    #[inline(always)]
    #[must_use]
    pub fn pecbyte(&mut self) -> PECBYTE_W<I2C_CR2_SPEC, 26> {
        PECBYTE_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY: caller must ensure the raw value is valid for I2C_CR2.
        self.bits = bits;
        self
    }
}
#[doc = "Access: No wait states, except if a write access occurs while a write access to this register is ongoing. In this case, wait states are inserted in the second write access until the previous one is completed. The latency of the second write access can be up to 2 x i2c_pclk + 6 x i2c_ker_ck.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`i2c_cr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`i2c_cr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Marker type identifying the I2C_CR2 register; the trait impls below give
// it a 32-bit backing word plus read/write/reset capability.
pub struct I2C_CR2_SPEC;
impl crate::RegisterSpec for I2C_CR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`i2c_cr2::R`](R) reader structure"]
impl crate::Readable for I2C_CR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`i2c_cr2::W`](W) writer structure"]
impl crate::Writable for I2C_CR2_SPEC {
    // Generated as all-zero: no write-1-to-clear / write-0-to-clear fields.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets I2C_CR2 to value 0"]
impl crate::Resettable for I2C_CR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
#![allow(dead_code)]
/*
MATCH ARMS
match arms consist of a pattern and an expression to run in the value
matches the arm's pattern:
match VALUE {
PATTERN => EXPRESSION,
PATTERN => EXPRESSION,
PATTERN => EXPRESSION
}
match expressions need to be exhaustive.
The _ pattern will match anything, but it never binds to a variable.
IF LET EXPRESSIONS
if let expressions are used as a shorter way to write the equivalent of a
match than only matches one case.
WHILE LET CONDITIONAL LOOPS
while let loop allows a while loop to run for as long as a pattern continues
to match.
*/
/// Example enum used by the `match` demonstrations in `main`.
/// NOTE(review): Rust convention is UpperCamelCase (`Pattern` / `One`…);
/// kept as-is because `main` below refers to these exact names.
enum PATTERN {
    ONE,
    TWO,
    THREE,
}
fn main() {
println!("Hello, world!");
let a = PATTERN::ONE;
let favorite: Option<&str> = None;
let is_tuesday = false;
let age: Result<u8, _> = "28".parse();
match a {
PATTERN::ONE => println!("pattern is one"),
_ => println!("no"),
}
if let Some(color) = favorite {
println!("yes {}", color);
} else if is_tuesday {
println!("yes it's tuesday");
} else if let Ok(age) = age {
if age > 30 {
println!("greater than 30");
} else {
println!("underage");
}
} else {
println!("no favorite");
}
let mut stack = Vec::new();
stack.push(1);
stack.push(2);
stack.push(3);
while let Some(top) = stack.pop() {
// .pop returns an option with the last element of a vec
println!("{}", top);
}
// FOR LOOPS
/*
the enumerate method adapts an interator to produce a value and the
index in the iterator, inside a tuple.
*/
let v = vec!['a', 'b', 'c'];
for (index, value) in v.iter().enumerate() {
println!("{} is at index {}", value, index);
}
// MATCHING NAMED VARIABLES AND LITERALS
let x = 1;
match x {
// this match statement could literally go to infinity
1 => println!("yes"),
2 => println!("yes"),
_ => println!("no"),
}
let x = Some(5);
let y = 10;
match x {
Some(50) => println!("got 50!"),
Some(num) => println!("Matched, some num x = {:?}", num),
_ => println!("default case"),
}
//MULTIPLE PATTERNS AND RANGES
let x = 1;
match x {
1 | 2 => println!("one or two"),
3 => println!("three"),
_ => println!("anything"),
} // one or two
let x = 5;
match x {
1...5 => println!("it's a number from 1 to 5"),
_ => println!("some other number"),
}
let x = 'c';
match x {
'a'...'j' => println!("between a and j"),
'k'...'z' => println!("between k and z",),
_ => println!("something else"),
}
//DESTRUCTURING TO BREAK APART VALUES
// DESTRUCTURING STRUCTS
struct Point {
x: i32,
y: i32,
}
let p = Point { x: 0, y: 7 };
let Point { x: a, y: b } = p;
assert_eq!(0, a);
assert_eq!(7, b);
/*
above the vars a and b were created to match the pattern of the p variable
that is of type Point.
It can also be done as below
*/
let Point { x, y } = p;
println!("{}, {}", x, y); // 0, 7
match p {
Point { x, y: 0 } => println!("on the x axis at {}", x),
Point { x: 0, y } => println!("on the y axis at {}", y),
Point { x, y } => println!("on neither axis: {}, {}", x, y),
}
// DESTRUCTURING ENUMS
enum Message {
Quit,
Move { x: i32, y: i32 },
Write(String),
ChangeColor(i32, i32, i32),
}
let msg = Message::ChangeColor(0, 160, 255);
let mut a = 0;
match msg {
Message::Quit => {
println!("No data on Message::Quit");
}
Message::Move { x, y } => {
println!("Move in x: {}, and y: {}", x, y);
}
Message::Write(text) => println!("Text Message: {}", text),
Message::ChangeColor(r, g, b) => {
println!("change color r{}, g{}, b{}", r, g, b);
a = g;
}
}
println!("{}", a);
//NESTED ENUM
enum Color {
Rgb(i32, i32, i32),
Hsv(i32, i32, i32),
}
enum Display {
ChangeColor(Color),
On,
Off,
}
let var = Display::ChangeColor(Color::Hsv(0, 160, 255));
match var {
Display::ChangeColor(Color::Rgb(r, g, b)) => println!("change red, green, blue"),
Display::ChangeColor(Color::Hsv(h, s, v)) => println!("change hue, saturation, value"),
_ => (),
}
/*
we can use _ inside another pattern to ignore just part of a
value.
*/
let mut setting_value = Some(5);
let new_setting_value = None;
match (setting_value, new_setting_value) {
(Some(_), Some(_)) => {
println!("Can't overwrite an existing value");
}
_ => {
setting_value = new_setting_value;
}
}
println!("setting {:?}", setting_value);
//MATCH GUARDS
let num = Some(6);
match num {
Some(x) if x < 5 => println!("less than five: {}", x),
Some(x) => println!("{}", x),
None => (),
}
} // end main
|
use crate::method::{NativeMethod, StdMethod};
use crate::operator::Operator;
use crate::runtime::Runtime;
use crate::tuple::LangTuple;
use crate::variable::{FnResult, InnerVar, Variable};
/// Maps an operator to the native function implementing it for tuples.
///
/// Panics via `unimplemented!` for operators that have no tuple
/// implementation yet.
pub fn op_fn(o: Operator) -> NativeMethod<LangTuple> {
    match o {
        Operator::Equals => equals,
        Operator::Bool => bool,
        Operator::Str => str,
        Operator::Repr => repr,
        Operator::Hash => hash,
        _ => unimplemented!("tuple.{} unimplemented", o.name()),
    }
}
/// Binds `this` to the native implementation of `o`, producing a callable
/// method value.
pub fn get_operator(this: LangTuple, o: Operator) -> Variable {
    let bound_method = StdMethod::new_native(this, op_fn(o));
    bound_method.into()
}
/// Attribute lookup on a tuple: `"length"` yields the element count; any
/// other name is parsed as a numeric index into the tuple.
///
/// NOTE(review): an out-of-range numeric attribute will panic through the
/// indexing operator, and non-numeric names hit `unimplemented!` — confirm
/// callers pre-validate attribute names.
pub fn get_attr(this: LangTuple, s: &str) -> Variable {
    if s == "length" {
        return this.len().into();
    }
    match s.parse() {
        Result::Ok(x) => this[x].clone(),
        Result::Err(_) => unimplemented!("tuple.{}", s),
    }
}
/// Equality for tuples: pushes `true` to the runtime only if every
/// argument equals `this`.
///
/// Tuple arguments are compared element-wise after a length check; any
/// other argument is compared via its own `equals` with `this` as the
/// operand. Returns `false` on the first mismatch (short-circuit).
pub fn equals(this: LangTuple, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    for arg in args {
        match arg {
            Variable::Normal(InnerVar::Tuple(other)) => {
                // Cheap length check before element-wise comparison.
                if this.len() != other.len() {
                    return runtime.return_1(false.into());
                }
                for (x, y) in this.iter().zip(&other) {
                    // Element equality may itself run user code via the
                    // runtime, hence the `?` propagation.
                    if !x.clone().equals(y.clone(), runtime)? {
                        return runtime.return_1(false.into());
                    }
                }
            }
            _ => {
                // Non-tuple argument: delegate to its own equality.
                if !arg.equals(this.clone().into(), runtime)? {
                    return runtime.return_1(false.into());
                }
            }
        }
    }
    runtime.return_1(true.into())
}
/// Truthiness of a tuple: non-empty means `true`.
pub fn bool(this: LangTuple, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert!(args.is_empty());
    let truthy = !this.is_empty();
    runtime.return_1(truthy.into())
}
/// String conversion: delegates to the tuple's own `str`.
pub fn str(this: LangTuple, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert!(args.is_empty());
    runtime.return_1(this.str(runtime)?.into())
}
/// Debug-style representation: delegates to the tuple's own `repr`.
pub fn repr(this: LangTuple, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert!(args.is_empty());
    runtime.return_1(this.repr(runtime)?.into())
}
/// Hashing: delegates to the tuple's own `lang_hash`.
pub fn hash(this: LangTuple, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert!(args.is_empty());
    runtime.return_1(this.lang_hash(runtime)?.into())
}
|
use std::io::{stdout, Write};
use crossterm::{
event::{KeyEvent, KeyCode, Event},
event::{read, DisableMouseCapture, EnableMouseCapture},
execute,
style::{Color, Print, ResetColor, SetBackgroundColor, SetForegroundColor},
terminal, ExecutableCommand, Result,
};
fn main()-> Result<()> {
let mut buffer = String::new();
stdout()
.execute(SetForegroundColor(Color::Blue))?
.execute(SetBackgroundColor(Color::Red))?
.execute(Print("Styled text here."))?
.execute(ResetColor)?
// .execute(EnableMouseCapture)?;
terminal::enable_raw_mode()?;
loop {
match read()? {
Event::Key(KeyEvent {code, modifiers}) => {
match code {
KeyCode::Char(c) => {
buffer.push(c);
}
KeyCode::Enter => {
break;
}
};
println!("{:?}", event)
}
Event::Mouse(event) => {
println!("{:?}", event)
}
Event::Resize(width, height) => {
println!("width: {} and height: {}", width, height)
}
}
}
// stdout().execute(DisableMouseCapture)?;
terminal::disable_raw_mode()?;
Ok(())
} |
use std::env;
use std::error::Error;
use std::fs;
/// Prints the current working directory followed by each entry's path and
/// size in bytes. Any I/O or permission error is propagated to the caller.
fn ls_current() -> Result<(), Box<dyn Error>> {
    let cwd = env::current_dir()?;
    println!("list current dir: {}", cwd.display());
    for dir_entry in fs::read_dir(&cwd)? {
        let entry_path = dir_entry?.path();
        let meta = fs::metadata(&entry_path)?;
        println!(" {} {} bytes", entry_path.display(), meta.len());
    }
    Ok(())
}
fn main() {
ls_current().unwrap()
}
|
// Project-wide single-precision (`f32`) aliases for the cgmath types so
// the element type is fixed in exactly one place.
pub type Point3 = cgmath::Point3<f32>;
pub type Vector2 = cgmath::Vector2<f32>;
pub type Vector3 = cgmath::Vector3<f32>;
pub type Vector4 = cgmath::Vector4<f32>;
pub type Matrix4 = cgmath::Matrix4<f32>;
use ipnetwork::{IpNetwork, Ipv4Network};
use libvopono::*;
use nix::sys::signal::{kill, SIGKILL};
use std::net::Ipv4Addr;
/// Manual end-to-end test of the libvopono namespace/VPN plumbing. The
/// steps below are strictly order-dependent (namespace before devices,
/// bridge before NAT, NAT before wireguard) — do not reorder.
fn main() {
    // Create an isolated network namespace for the VPN.
    let mut netns = RawNetworkNamespace::new("testlobin");
    // Smoke-test command execution inside the namespace, then reap it.
    let handle = netns.exec_no_block(&["ip", "addr"], false);
    std::thread::sleep(std::time::Duration::from_secs(2));
    kill(handle, SIGKILL).expect("kill failed");
    netns.add_loopback();
    // 10.200.200.2/24 — namespace side of the veth pair.
    let srcip = ipnetwork::IpNetwork::new(
        std::net::IpAddr::V4(std::net::Ipv4Addr::new(10, 200, 200, 2)),
        24,
    )
    .expect("Failed to construct IP");
    // 10.200.200.1/24 — host side of the veth pair.
    let dstip = ipnetwork::IpNetwork::new(
        std::net::IpAddr::V4(std::net::Ipv4Addr::new(10, 200, 200, 1)),
        24,
    )
    .expect("Failed to construct IP");
    netns.add_veth_bridge("vpnsrc", "vpndst", &srcip, &dstip);
    std::thread::sleep(std::time::Duration::from_secs(1));
    // let handle = netns.exec_no_block(&["ip", "addr"], false);
    // std::thread::sleep(std::time::Duration::from_secs(2));
    // let handle = netns.exec_no_block(&["ip", "link"], false);
    // std::thread::sleep(std::time::Duration::from_secs(2));
    // let handle = netns.exec_no_block(&["ip", "route"], false);
    // std::thread::sleep(std::time::Duration::from_secs(2));
    // Note network passed to nftable rule with netlink must have 0s for insignificant bits!
    let ipnet = ipnetwork::IpNetwork::new(
        std::net::IpAddr::V4(std::net::Ipv4Addr::new(10, 200, 200, 0)),
        24,
    )
    .expect("Failed to construct IP");
    // NOTE(review): the outbound interface "wlp1s0" is hard-coded to this
    // development machine — parameterize before wider use.
    host_add_masquerade_nft("vopono_nat", "vopono_nat", "wlp1s0", ipnet);
    host_enable_ipv4_forwarding();
    // Standard internet connection should work at this point
    // Create Wireguard device - ip link
    // TODO: Wrap this so we can destroy it later
    netns.add_wireguard_device("vopono_wg");
    // Set Wireguard device config
    // NOTE(review): hard-coded user config path — also dev-machine only.
    let config = read_wg_config(std::path::Path::new(
        "/home/archie/.config/vopono/mv/wireguard/usa-us4.conf",
    ));
    netns.set_wireguard_device("vopono_wg", &config);
    netns.wg_dev_up("vopono_wg", &config);
    // Set up routing
    // Set up DNS
    // Set firewall rules
    // kill(handle, SIGKILL).expect("kill failed");
    // netns.destroy();
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
/// Aggregate error type for this AutoRust-generated HDInsight client:
/// one variant per operation, each wrapping that operation module's own
/// `Error` via `#[from]`. Non-exhaustive so regeneration can add variants.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    Clusters_Get(#[from] clusters::get::Error),
    #[error(transparent)]
    Clusters_Create(#[from] clusters::create::Error),
    #[error(transparent)]
    Clusters_Update(#[from] clusters::update::Error),
    #[error(transparent)]
    Clusters_Delete(#[from] clusters::delete::Error),
    #[error(transparent)]
    Clusters_ListByResourceGroup(#[from] clusters::list_by_resource_group::Error),
    #[error(transparent)]
    Clusters_Resize(#[from] clusters::resize::Error),
    #[error(transparent)]
    Clusters_UpdateAutoScaleConfiguration(#[from] clusters::update_auto_scale_configuration::Error),
    #[error(transparent)]
    Clusters_List(#[from] clusters::list::Error),
    #[error(transparent)]
    Clusters_RotateDiskEncryptionKey(#[from] clusters::rotate_disk_encryption_key::Error),
    #[error(transparent)]
    Clusters_GetGatewaySettings(#[from] clusters::get_gateway_settings::Error),
    #[error(transparent)]
    Clusters_UpdateGatewaySettings(#[from] clusters::update_gateway_settings::Error),
    #[error(transparent)]
    Clusters_GetAzureAsyncOperationStatus(#[from] clusters::get_azure_async_operation_status::Error),
    #[error(transparent)]
    Clusters_UpdateIdentityCertificate(#[from] clusters::update_identity_certificate::Error),
    #[error(transparent)]
    Operations_List(#[from] operations::list::Error),
    #[error(transparent)]
    ScriptActions_Delete(#[from] script_actions::delete::Error),
    #[error(transparent)]
    Clusters_ExecuteScriptActions(#[from] clusters::execute_script_actions::Error),
    #[error(transparent)]
    ScriptActions_ListByCluster(#[from] script_actions::list_by_cluster::Error),
    #[error(transparent)]
    ScriptActions_GetExecutionDetail(#[from] script_actions::get_execution_detail::Error),
    #[error(transparent)]
    ScriptExecutionHistory_ListByCluster(#[from] script_execution_history::list_by_cluster::Error),
    #[error(transparent)]
    ScriptExecutionHistory_Promote(#[from] script_execution_history::promote::Error),
    #[error(transparent)]
    ScriptActions_GetExecutionAsyncOperationStatus(#[from] script_actions::get_execution_async_operation_status::Error),
    #[error(transparent)]
    PrivateEndpointConnections_ListByCluster(#[from] private_endpoint_connections::list_by_cluster::Error),
    #[error(transparent)]
    PrivateEndpointConnections_Get(#[from] private_endpoint_connections::get::Error),
    #[error(transparent)]
    PrivateEndpointConnections_CreateOrUpdate(#[from] private_endpoint_connections::create_or_update::Error),
    #[error(transparent)]
    PrivateEndpointConnections_Delete(#[from] private_endpoint_connections::delete::Error),
    #[error(transparent)]
    Applications_ListByCluster(#[from] applications::list_by_cluster::Error),
    #[error(transparent)]
    Applications_Get(#[from] applications::get::Error),
    #[error(transparent)]
    Applications_Create(#[from] applications::create::Error),
    #[error(transparent)]
    Applications_Delete(#[from] applications::delete::Error),
    #[error(transparent)]
    Applications_GetAzureAsyncOperationStatus(#[from] applications::get_azure_async_operation_status::Error),
    #[error(transparent)]
    Locations_GetCapabilities(#[from] locations::get_capabilities::Error),
    #[error(transparent)]
    Locations_ListUsages(#[from] locations::list_usages::Error),
    #[error(transparent)]
    Locations_ListBillingSpecs(#[from] locations::list_billing_specs::Error),
    #[error(transparent)]
    Locations_GetAzureAsyncOperationStatus(#[from] locations::get_azure_async_operation_status::Error),
    #[error(transparent)]
    Locations_CheckNameAvailability(#[from] locations::check_name_availability::Error),
    #[error(transparent)]
    Locations_ValidateClusterCreateRequest(#[from] locations::validate_cluster_create_request::Error),
    #[error(transparent)]
    Configurations_List(#[from] configurations::list::Error),
    #[error(transparent)]
    Configurations_Get(#[from] configurations::get::Error),
    #[error(transparent)]
    Configurations_Update(#[from] configurations::update::Error),
    #[error(transparent)]
    Extensions_GetMonitoringStatus(#[from] extensions::get_monitoring_status::Error),
    #[error(transparent)]
    Extensions_EnableMonitoring(#[from] extensions::enable_monitoring::Error),
    #[error(transparent)]
    Extensions_DisableMonitoring(#[from] extensions::disable_monitoring::Error),
    #[error(transparent)]
    Extensions_GetAzureMonitorStatus(#[from] extensions::get_azure_monitor_status::Error),
    #[error(transparent)]
    Extensions_EnableAzureMonitor(#[from] extensions::enable_azure_monitor::Error),
    #[error(transparent)]
    Extensions_DisableAzureMonitor(#[from] extensions::disable_azure_monitor::Error),
    #[error(transparent)]
    Extensions_Get(#[from] extensions::get::Error),
    #[error(transparent)]
    Extensions_Create(#[from] extensions::create::Error),
    #[error(transparent)]
    Extensions_Delete(#[from] extensions::delete::Error),
    #[error(transparent)]
    Extensions_GetAzureAsyncOperationStatus(#[from] extensions::get_azure_async_operation_status::Error),
    #[error(transparent)]
    VirtualMachines_ListHosts(#[from] virtual_machines::list_hosts::Error),
    #[error(transparent)]
    VirtualMachines_RestartHosts(#[from] virtual_machines::restart_hosts::Error),
    #[error(transparent)]
    VirtualMachines_GetAsyncOperationStatus(#[from] virtual_machines::get_async_operation_status::Error),
    #[error(transparent)]
    PrivateLinkResources_ListByCluster(#[from] private_link_resources::list_by_cluster::Error),
    #[error(transparent)]
    PrivateLinkResources_Get(#[from] private_link_resources::get::Error),
}
pub mod clusters {
use super::{models, API_VERSION};
    /// GETs a single HDInsight cluster resource.
    ///
    /// Returns the deserialized `models::Cluster` on HTTP 200; any other
    /// status has its body decoded as `models::ErrorResponse` and is
    /// returned inside `get::Error::DefaultResponse`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        cluster_name: &str,
    ) -> std::result::Result<models::Cluster, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            cluster_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // GET carries no body.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::Cluster =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Any non-200 status: decode the service's error payload.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    pub mod get {
        use super::{models, API_VERSION};
        /// Error type for the `get` operation; one variant per failure
        /// stage of the request pipeline (generated alongside the fn).
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates (PUT) an HDInsight cluster from `parameters`.
    ///
    /// Returns the deserialized `models::Cluster` on HTTP 200; any other
    /// status is decoded as `models::ErrorResponse` and wrapped in
    /// `create::Error::DefaultResponse`.
    pub async fn create(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        cluster_name: &str,
        parameters: &models::ClusterCreateParametersExtended,
    ) -> std::result::Result<models::Cluster, create::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            cluster_name
        );
        let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // JSON body: the cluster-creation parameters.
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(parameters).map_err(create::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::Cluster =
                    serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Any non-200 status: decode the service's error payload.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(create::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    pub mod create {
        use super::{models, API_VERSION};
        /// Error type for the `create` operation; one variant per failure
        /// stage of the request pipeline (generated alongside the fn).
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Patches (PATCH) an existing HDInsight cluster with `parameters`.
    ///
    /// Returns the deserialized `models::Cluster` on HTTP 200; any other
    /// status is decoded as `models::ErrorResponse` and wrapped in
    /// `update::Error::DefaultResponse`.
    pub async fn update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        cluster_name: &str,
        parameters: &models::ClusterPatchParameters,
    ) -> std::result::Result<models::Cluster, update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            cluster_name
        );
        let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PATCH);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // JSON body: the patch parameters.
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::Cluster =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Any non-200 status: decode the service's error payload.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(update::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    pub mod update {
        use super::{models, API_VERSION};
        /// Error type for the `update` operation; one variant per failure
        /// stage of the request pipeline (generated alongside the fn).
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Deletes an HDInsight cluster.
    ///
    /// The service may answer 202 (accepted, async), 200 or 204 — each is
    /// surfaced as a distinct `delete::Response` variant; any other status
    /// is decoded as `models::ErrorResponse`.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        cluster_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            cluster_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // DELETE carries no body.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
            http::StatusCode::OK => Ok(delete::Response::Ok200),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            // Any other status: decode the service's error payload.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(delete::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    pub mod delete {
        use super::{models, API_VERSION};
        /// Success statuses the delete operation can return.
        #[derive(Debug)]
        pub enum Response {
            Accepted202,
            Ok200,
            NoContent204,
        }
        /// Error type for the `delete` operation; one variant per failure
        /// stage of the request pipeline (generated alongside the fn).
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists the HDInsight clusters in one resource group.
    ///
    /// Returns the deserialized `models::ClusterListResult` on HTTP 200;
    /// any other status is decoded as `models::ErrorResponse` and wrapped
    /// in `list_by_resource_group::Error::DefaultResponse`.
    pub async fn list_by_resource_group(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
    ) -> std::result::Result<models::ClusterListResult, list_by_resource_group::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters",
            operation_config.base_path(),
            subscription_id,
            resource_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_resource_group::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // GET carries no body.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(list_by_resource_group::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_resource_group::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ClusterListResult = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Any non-200 status: decode the service's error payload.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list_by_resource_group::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
pub mod list_by_resource_group {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Resizes the given role of an HDInsight cluster.
///
/// Sends `POST .../clusters/{cluster}/roles/{role}/resize` with `parameters`
/// serialized as the JSON body. HTTP 200 maps to `Response::Ok200` and 202 to
/// `Response::Accepted202`; any other status is deserialized as
/// `models::ErrorResponse` and returned via `Error::DefaultResponse`.
pub async fn resize(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
role_name: &str,
parameters: &models::ClusterResizeParameters,
) -> std::result::Result<resize::Response, resize::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/roles/{}/resize",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
role_name
);
let mut url = url::Url::parse(url_str).map_err(resize::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Bearer authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(resize::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// JSON request body built from `parameters`.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(resize::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(resize::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(resize::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(resize::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(resize::Response::Accepted202),
// Any other status: surface the service's error payload to the caller.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| resize::Error::DeserializeError(source, rsp_body.clone()))?;
Err(resize::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod resize {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Updates the autoscale configuration of the given cluster role.
///
/// Sends `POST .../clusters/{cluster}/roles/{role}/autoscale` with `parameters`
/// serialized as the JSON body. HTTP 200 maps to `Response::Ok200` and 202 to
/// `Response::Accepted202`; any other status is deserialized as
/// `models::ErrorResponse` and returned via `Error::DefaultResponse`.
pub async fn update_auto_scale_configuration(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
role_name: &str,
parameters: &models::AutoscaleConfigurationUpdateParameter,
) -> std::result::Result<update_auto_scale_configuration::Response, update_auto_scale_configuration::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/roles/{}/autoscale",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
role_name
);
let mut url = url::Url::parse(url_str).map_err(update_auto_scale_configuration::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Bearer authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_auto_scale_configuration::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// JSON request body built from `parameters`.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(update_auto_scale_configuration::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(update_auto_scale_configuration::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_auto_scale_configuration::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(update_auto_scale_configuration::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(update_auto_scale_configuration::Response::Accepted202),
// Any other status: surface the service's error payload to the caller.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| update_auto_scale_configuration::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update_auto_scale_configuration::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update_auto_scale_configuration {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists all HDInsight clusters in the subscription.
///
/// Sends `GET {base_path}/subscriptions/{sub}/providers/Microsoft.HDInsight/clusters`.
/// On HTTP 200 the body is deserialized into `models::ClusterListResult`; any
/// other status is deserialized as `models::ErrorResponse` and returned via
/// `Error::DefaultResponse`.
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::ClusterListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HDInsight/clusters",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// GET request: body is intentionally empty.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ClusterListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status: surface the service's error payload to the caller.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Rotates the disk encryption key of the given cluster.
///
/// Sends `POST .../clusters/{cluster}/rotatediskencryptionkey` with
/// `parameters` serialized as the JSON body. HTTP 200 maps to
/// `Response::Ok200` and 202 to `Response::Accepted202`; any other status is
/// deserialized as `models::ErrorResponse` and returned via
/// `Error::DefaultResponse`.
pub async fn rotate_disk_encryption_key(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
parameters: &models::ClusterDiskEncryptionParameters,
) -> std::result::Result<rotate_disk_encryption_key::Response, rotate_disk_encryption_key::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/rotatediskencryptionkey",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(rotate_disk_encryption_key::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Bearer authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(rotate_disk_encryption_key::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// JSON request body built from `parameters`.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(rotate_disk_encryption_key::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(rotate_disk_encryption_key::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(rotate_disk_encryption_key::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(rotate_disk_encryption_key::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(rotate_disk_encryption_key::Response::Accepted202),
// Any other status: surface the service's error payload to the caller.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| rotate_disk_encryption_key::Error::DeserializeError(source, rsp_body.clone()))?;
Err(rotate_disk_encryption_key::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod rotate_disk_encryption_key {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Retrieves the gateway settings of the given cluster.
///
/// Sends `POST .../clusters/{cluster}/getGatewaySettings` with an empty body
/// and an explicit `Content-Length: 0` header. On HTTP 200 the body is
/// deserialized into `models::GatewaySettings`; any other status is
/// deserialized as `models::ErrorResponse` and returned via
/// `Error::DefaultResponse`.
pub async fn get_gateway_settings(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
) -> std::result::Result<models::GatewaySettings, get_gateway_settings::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/getGatewaySettings",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(get_gateway_settings::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Bearer authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_gateway_settings::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// POST with no payload: empty body plus an explicit zero Content-Length.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_gateway_settings::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_gateway_settings::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::GatewaySettings = serde_json::from_slice(rsp_body)
.map_err(|source| get_gateway_settings::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status: surface the service's error payload to the caller.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_gateway_settings::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_gateway_settings::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_gateway_settings {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Updates the gateway settings of the given cluster.
///
/// Sends `POST .../clusters/{cluster}/updateGatewaySettings` with `parameters`
/// serialized as the JSON body. HTTP 200 maps to `Response::Ok200` and 202 to
/// `Response::Accepted202`; any other status is deserialized as
/// `models::ErrorResponse` and returned via `Error::DefaultResponse`.
pub async fn update_gateway_settings(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
parameters: &models::UpdateGatewaySettingsParameters,
) -> std::result::Result<update_gateway_settings::Response, update_gateway_settings::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/updateGatewaySettings",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(update_gateway_settings::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Bearer authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_gateway_settings::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// JSON request body built from `parameters`.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(update_gateway_settings::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(update_gateway_settings::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_gateway_settings::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(update_gateway_settings::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(update_gateway_settings::Response::Accepted202),
// Any other status: surface the service's error payload to the caller.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| update_gateway_settings::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update_gateway_settings::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update_gateway_settings {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Fetches the status of a long-running operation on the given cluster.
///
/// Sends `GET .../clusters/{cluster}/azureasyncoperations/{operation_id}`.
/// On HTTP 200 the body is deserialized into `models::AsyncOperationResult`;
/// any other status is deserialized as `models::ErrorResponse` and returned
/// via `Error::DefaultResponse`.
pub async fn get_azure_async_operation_status(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
operation_id: &str,
) -> std::result::Result<models::AsyncOperationResult, get_azure_async_operation_status::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/azureasyncoperations/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
operation_id
);
let mut url = url::Url::parse(url_str).map_err(get_azure_async_operation_status::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_azure_async_operation_status::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// GET request: body is intentionally empty.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_azure_async_operation_status::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_azure_async_operation_status::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AsyncOperationResult = serde_json::from_slice(rsp_body)
.map_err(|source| get_azure_async_operation_status::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status: surface the service's error payload to the caller.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_azure_async_operation_status::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_azure_async_operation_status::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_azure_async_operation_status {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Updates the cluster identity certificate of the given cluster.
///
/// Sends `POST .../clusters/{cluster}/updateClusterIdentityCertificate` with
/// `parameters` serialized as the JSON body. HTTP 200 maps to
/// `Response::Ok200` and 202 to `Response::Accepted202`; any other status is
/// deserialized as `models::ErrorResponse` and returned via
/// `Error::DefaultResponse`.
pub async fn update_identity_certificate(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
parameters: &models::UpdateClusterIdentityCertificateParameters,
) -> std::result::Result<update_identity_certificate::Response, update_identity_certificate::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/updateClusterIdentityCertificate",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(update_identity_certificate::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Bearer authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_identity_certificate::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// JSON request body built from `parameters`.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(update_identity_certificate::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(update_identity_certificate::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_identity_certificate::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(update_identity_certificate::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(update_identity_certificate::Response::Accepted202),
// Any other status: surface the service's error payload to the caller.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| update_identity_certificate::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update_identity_certificate::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update_identity_certificate {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Executes script actions on the given cluster.
///
/// Sends `POST .../clusters/{cluster}/executeScriptActions` with `parameters`
/// serialized as the JSON body. HTTP 202 maps to `Response::Accepted202`,
/// 200 to `Response::Ok200`, and 404 is reported as `Error::NotFound404`;
/// any other status is deserialized as `models::ErrorResponse` and returned
/// via `Error::DefaultResponse`.
pub async fn execute_script_actions(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
parameters: &models::ExecuteScriptActionParameters,
) -> std::result::Result<execute_script_actions::Response, execute_script_actions::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/executeScriptActions",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(execute_script_actions::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Bearer authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(execute_script_actions::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// JSON request body built from `parameters`.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(execute_script_actions::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(execute_script_actions::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(execute_script_actions::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(execute_script_actions::Response::Accepted202),
http::StatusCode::OK => Ok(execute_script_actions::Response::Ok200),
// 404 is modeled as a dedicated error variant; its body is not read.
http::StatusCode::NOT_FOUND => Err(execute_script_actions::Error::NotFound404 {}),
// Any other status: surface the service's error payload to the caller.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| execute_script_actions::Error::DeserializeError(source, rsp_body.clone()))?;
Err(execute_script_actions::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod execute_script_actions {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
Ok200,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Error response #response_type")]
NotFound404 {},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Operation group for the provider-level `operations` endpoint.
pub mod operations {
use super::{models, API_VERSION};
/// Lists the operations exposed by the Microsoft.HDInsight provider.
///
/// Sends `GET {base_path}/providers/Microsoft.HDInsight/operations`. On HTTP
/// 200 the body is deserialized into `models::OperationListResult`; any other
/// status is deserialized as `models::ErrorResponse` and returned via
/// `Error::DefaultResponse`.
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::OperationListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.HDInsight/operations", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// GET request: body is intentionally empty.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status: surface the service's error payload to the caller.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Errors produced by `operations::list`.
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
// Unexpected status; `value` holds the deserialized service error payload.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
// The request URL could not be parsed.
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
// The HTTP request could not be assembled.
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
// The HTTP request failed while executing.
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
// The request body could not be serialized to JSON.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
// The response body could not be deserialized; the raw bytes are kept.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
// A bearer token could not be obtained from the credential.
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod script_actions {
use super::{models, API_VERSION};
/// Deletes the named script action from the given cluster.
///
/// Sends `DELETE .../clusters/{cluster}/scriptActions/{script_name}`.
/// HTTP 200 maps to `Response::Ok200` and 204 to `Response::NoContent204`;
/// any other status is deserialized as `models::ErrorResponse` and returned
/// via `Error::DefaultResponse`.
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
script_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/scriptActions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
script_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
// Bearer authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// DELETE request: body is intentionally empty.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
// Any other status: surface the service's error payload to the caller.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_cluster(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
) -> std::result::Result<models::ScriptActionsList, list_by_cluster::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/scriptActions",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_cluster::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_cluster::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_cluster::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_cluster::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ScriptActionsList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_cluster::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_cluster::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_cluster::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_cluster {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_execution_detail(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
script_execution_id: &str,
) -> std::result::Result<models::RuntimeScriptActionDetail, get_execution_detail::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/scriptExecutionHistory/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
script_execution_id
);
let mut url = url::Url::parse(url_str).map_err(get_execution_detail::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_execution_detail::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_execution_detail::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_execution_detail::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::RuntimeScriptActionDetail = serde_json::from_slice(rsp_body)
.map_err(|source| get_execution_detail::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_execution_detail::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_execution_detail::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_execution_detail {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_execution_async_operation_status(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
operation_id: &str,
) -> std::result::Result<models::AsyncOperationResult, get_execution_async_operation_status::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/executeScriptActions/azureasyncoperations/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
operation_id
);
let mut url = url::Url::parse(url_str).map_err(get_execution_async_operation_status::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_execution_async_operation_status::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_execution_async_operation_status::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_execution_async_operation_status::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AsyncOperationResult = serde_json::from_slice(rsp_body)
.map_err(|source| get_execution_async_operation_status::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_execution_async_operation_status::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_execution_async_operation_status::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_execution_async_operation_status {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod script_execution_history {
use super::{models, API_VERSION};
pub async fn list_by_cluster(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
) -> std::result::Result<models::ScriptActionExecutionHistoryList, list_by_cluster::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/scriptExecutionHistory",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_cluster::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_cluster::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_cluster::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_cluster::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ScriptActionExecutionHistoryList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_cluster::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_cluster::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_cluster::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_cluster {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn promote(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
script_execution_id: &str,
) -> std::result::Result<(), promote::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/scriptExecutionHistory/{}/promote",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
script_execution_id
);
let mut url = url::Url::parse(url_str).map_err(promote::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(promote::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(promote::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(promote::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| promote::Error::DeserializeError(source, rsp_body.clone()))?;
Err(promote::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod promote {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod private_endpoint_connections {
use super::{models, API_VERSION};
pub async fn list_by_cluster(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
) -> std::result::Result<models::PrivateEndpointConnectionListResult, list_by_cluster::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/privateEndpointConnections",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_cluster::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_cluster::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_cluster::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_cluster::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PrivateEndpointConnectionListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_cluster::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_cluster::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_cluster::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_cluster {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
private_endpoint_connection_name: &str,
) -> std::result::Result<models::PrivateEndpointConnection, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/privateEndpointConnections/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
private_endpoint_connection_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PrivateEndpointConnection =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
private_endpoint_connection_name: &str,
parameters: &models::PrivateEndpointConnection,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/privateEndpointConnections/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
private_endpoint_connection_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PrivateEndpointConnection = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::PrivateEndpointConnection = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::PrivateEndpointConnection),
Created201(models::PrivateEndpointConnection),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
private_endpoint_connection_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/privateEndpointConnections/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
private_endpoint_connection_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
Ok200,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod applications {
use super::{models, API_VERSION};
pub async fn list_by_cluster(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
) -> std::result::Result<models::ApplicationListResult, list_by_cluster::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/applications",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_cluster::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_cluster::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_cluster::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_cluster::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ApplicationListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_cluster::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_cluster::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_cluster::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_cluster {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
application_name: &str,
) -> std::result::Result<models::Application, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/applications/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
application_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Application =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the applications `get` operation.
pub mod get {
    use super::{models, API_VERSION};
    /// Failure modes of the request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-success HTTP status and an error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The request URL could not be parsed.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// Assembling the HTTP request failed.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Sending the request failed.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Serializing the request body to JSON failed.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Deserializing the response failed; the raw bytes are kept for diagnosis.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// Acquiring an access token from the credential failed.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn create(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
application_name: &str,
parameters: &models::Application,
) -> std::result::Result<models::Application, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/applications/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
application_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Application =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the applications `create` operation.
pub mod create {
    use super::{models, API_VERSION};
    /// Failure modes of the request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-success HTTP status and an error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The request URL could not be parsed.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// Assembling the HTTP request failed.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Sending the request failed.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Serializing the request body to JSON failed.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Deserializing the response failed; the raw bytes are kept for diagnosis.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// Acquiring an access token from the credential failed.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
application_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/applications/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
application_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for the applications `delete` operation.
pub mod delete {
    use super::{models, API_VERSION};
    /// Success outcomes; the service distinguishes completed (200), accepted-for-async
    /// processing (202), and no-content (204).
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    /// Failure modes of the request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-success HTTP status and an error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The request URL could not be parsed.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// Assembling the HTTP request failed.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Sending the request failed.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Serializing the request body to JSON failed.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Deserializing the response failed; the raw bytes are kept for diagnosis.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// Acquiring an access token from the credential failed.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_azure_async_operation_status(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
application_name: &str,
operation_id: &str,
) -> std::result::Result<models::AsyncOperationResult, get_azure_async_operation_status::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/applications/{}/azureasyncoperations/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
application_name,
operation_id
);
let mut url = url::Url::parse(url_str).map_err(get_azure_async_operation_status::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_azure_async_operation_status::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_azure_async_operation_status::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_azure_async_operation_status::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AsyncOperationResult = serde_json::from_slice(rsp_body)
.map_err(|source| get_azure_async_operation_status::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_azure_async_operation_status::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_azure_async_operation_status::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the applications `get_azure_async_operation_status` operation.
pub mod get_azure_async_operation_status {
    use super::{models, API_VERSION};
    /// Failure modes of the request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-success HTTP status and an error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The request URL could not be parsed.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// Assembling the HTTP request failed.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Sending the request failed.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Serializing the request body to JSON failed.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Deserializing the response failed; the raw bytes are kept for diagnosis.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// Acquiring an access token from the credential failed.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
pub mod locations {
use super::{models, API_VERSION};
pub async fn get_capabilities(
operation_config: &crate::OperationConfig,
subscription_id: &str,
location: &str,
) -> std::result::Result<models::CapabilitiesResult, get_capabilities::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HDInsight/locations/{}/capabilities",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).map_err(get_capabilities::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_capabilities::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_capabilities::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_capabilities::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::CapabilitiesResult = serde_json::from_slice(rsp_body)
.map_err(|source| get_capabilities::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_capabilities::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_capabilities::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the locations `get_capabilities` operation.
pub mod get_capabilities {
    use super::{models, API_VERSION};
    /// Failure modes of the request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-success HTTP status and an error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The request URL could not be parsed.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// Assembling the HTTP request failed.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Sending the request failed.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Serializing the request body to JSON failed.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Deserializing the response failed; the raw bytes are kept for diagnosis.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// Acquiring an access token from the credential failed.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn list_usages(
operation_config: &crate::OperationConfig,
subscription_id: &str,
location: &str,
) -> std::result::Result<models::UsagesListResult, list_usages::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HDInsight/locations/{}/usages",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).map_err(list_usages::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_usages::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_usages::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_usages::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::UsagesListResult =
serde_json::from_slice(rsp_body).map_err(|source| list_usages::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list_usages::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_usages::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the locations `list_usages` operation.
pub mod list_usages {
    use super::{models, API_VERSION};
    /// Failure modes of the request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-success HTTP status and an error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The request URL could not be parsed.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// Assembling the HTTP request failed.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Sending the request failed.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Serializing the request body to JSON failed.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Deserializing the response failed; the raw bytes are kept for diagnosis.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// Acquiring an access token from the credential failed.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn list_billing_specs(
operation_config: &crate::OperationConfig,
subscription_id: &str,
location: &str,
) -> std::result::Result<models::BillingResponseListResult, list_billing_specs::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HDInsight/locations/{}/billingSpecs",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).map_err(list_billing_specs::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_billing_specs::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_billing_specs::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_billing_specs::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::BillingResponseListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_billing_specs::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_billing_specs::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_billing_specs::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the locations `list_billing_specs` operation.
pub mod list_billing_specs {
    use super::{models, API_VERSION};
    /// Failure modes of the request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-success HTTP status and an error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The request URL could not be parsed.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// Assembling the HTTP request failed.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Sending the request failed.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Serializing the request body to JSON failed.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Deserializing the response failed; the raw bytes are kept for diagnosis.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// Acquiring an access token from the credential failed.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_azure_async_operation_status(
operation_config: &crate::OperationConfig,
subscription_id: &str,
location: &str,
operation_id: &str,
) -> std::result::Result<models::AsyncOperationResult, get_azure_async_operation_status::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HDInsight/locations/{}/azureasyncoperations/{}",
operation_config.base_path(),
subscription_id,
location,
operation_id
);
let mut url = url::Url::parse(url_str).map_err(get_azure_async_operation_status::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_azure_async_operation_status::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_azure_async_operation_status::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_azure_async_operation_status::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AsyncOperationResult = serde_json::from_slice(rsp_body)
.map_err(|source| get_azure_async_operation_status::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_azure_async_operation_status::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_azure_async_operation_status::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the locations `get_azure_async_operation_status` operation.
pub mod get_azure_async_operation_status {
    use super::{models, API_VERSION};
    /// Failure modes of the request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-success HTTP status and an error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The request URL could not be parsed.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// Assembling the HTTP request failed.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Sending the request failed.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Serializing the request body to JSON failed.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Deserializing the response failed; the raw bytes are kept for diagnosis.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// Acquiring an access token from the credential failed.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn check_name_availability(
operation_config: &crate::OperationConfig,
subscription_id: &str,
location: &str,
parameters: &models::NameAvailabilityCheckRequestParameters,
) -> std::result::Result<models::NameAvailabilityCheckResult, check_name_availability::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HDInsight/locations/{}/checkNameAvailability",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).map_err(check_name_availability::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(check_name_availability::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(check_name_availability::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(check_name_availability::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(check_name_availability::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::NameAvailabilityCheckResult = serde_json::from_slice(rsp_body)
.map_err(|source| check_name_availability::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| check_name_availability::Error::DeserializeError(source, rsp_body.clone()))?;
Err(check_name_availability::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the locations `check_name_availability` operation.
pub mod check_name_availability {
    use super::{models, API_VERSION};
    /// Failure modes of the request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-success HTTP status and an error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The request URL could not be parsed.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// Assembling the HTTP request failed.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Sending the request failed.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Serializing the request body to JSON failed.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Deserializing the response failed; the raw bytes are kept for diagnosis.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// Acquiring an access token from the credential failed.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn validate_cluster_create_request(
operation_config: &crate::OperationConfig,
subscription_id: &str,
location: &str,
parameters: &models::ClusterCreateRequestValidationParameters,
) -> std::result::Result<models::ClusterCreateValidationResult, validate_cluster_create_request::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HDInsight/locations/{}/validateCreateRequest",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).map_err(validate_cluster_create_request::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(validate_cluster_create_request::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(validate_cluster_create_request::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(validate_cluster_create_request::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(validate_cluster_create_request::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ClusterCreateValidationResult = serde_json::from_slice(rsp_body)
.map_err(|source| validate_cluster_create_request::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| validate_cluster_create_request::Error::DeserializeError(source, rsp_body.clone()))?;
Err(validate_cluster_create_request::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the locations `validate_cluster_create_request` operation.
pub mod validate_cluster_create_request {
    use super::{models, API_VERSION};
    /// Failure modes of the request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-success HTTP status and an error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The request URL could not be parsed.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// Assembling the HTTP request failed.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Sending the request failed.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Serializing the request body to JSON failed.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Deserializing the response failed; the raw bytes are kept for diagnosis.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// Acquiring an access token from the credential failed.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
pub mod configurations {
use super::{models, API_VERSION};
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
) -> std::result::Result<models::ClusterConfigurations, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/configurations",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ClusterConfigurations =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the configurations `list` operation.
pub mod list {
    use super::{models, API_VERSION};
    /// Failure modes of the request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-success HTTP status and an error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The request URL could not be parsed.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// Assembling the HTTP request failed.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Sending the request failed.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Serializing the request body to JSON failed.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Deserializing the response failed; the raw bytes are kept for diagnosis.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// Acquiring an access token from the credential failed.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
configuration_name: &str,
) -> std::result::Result<models::ClusterConfiguration, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/configurations/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
configuration_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ClusterConfiguration =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the configurations `get` operation.
pub mod get {
    use super::{models, API_VERSION};
    /// Failure modes of the request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service answered with a non-success HTTP status and an error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The request URL could not be parsed.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// Assembling the HTTP request failed.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Sending the request failed.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// Serializing the request body to JSON failed.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Deserializing the response failed; the raw bytes are kept for diagnosis.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// Acquiring an access token from the credential failed.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
configuration_name: &str,
parameters: &models::ClusterConfiguration,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/configurations/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
configuration_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(update::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(update::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
    use super::{models, API_VERSION};
    /// Success statuses for the long-running `update` operation;
    /// the service returns no body for any of them.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    /// Error type for the `update` operation; one variant per
    /// failure stage of the request pipeline.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code;
        /// `value` holds the parsed service error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
pub mod extensions {
use super::{models, API_VERSION};
pub async fn get_monitoring_status(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
) -> std::result::Result<models::ClusterMonitoringResponse, get_monitoring_status::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/extensions/clustermonitoring",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(get_monitoring_status::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_monitoring_status::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_monitoring_status::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_monitoring_status::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ClusterMonitoringResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_monitoring_status::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_monitoring_status::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_monitoring_status::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_monitoring_status {
    use super::{models, API_VERSION};
    /// Error type for the `get_monitoring_status` operation; one
    /// variant per failure stage of the request pipeline.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code;
        /// `value` holds the parsed service error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn enable_monitoring(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
parameters: &models::ClusterMonitoringRequest,
) -> std::result::Result<enable_monitoring::Response, enable_monitoring::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/extensions/clustermonitoring",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(enable_monitoring::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(enable_monitoring::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(enable_monitoring::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(enable_monitoring::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(enable_monitoring::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(enable_monitoring::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(enable_monitoring::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| enable_monitoring::Error::DeserializeError(source, rsp_body.clone()))?;
Err(enable_monitoring::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod enable_monitoring {
    use super::{models, API_VERSION};
    /// Success statuses for the long-running `enable_monitoring`
    /// operation; the service returns no body for either.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
    }
    /// Error type for the `enable_monitoring` operation; one variant
    /// per failure stage of the request pipeline.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code;
        /// `value` holds the parsed service error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn disable_monitoring(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
) -> std::result::Result<disable_monitoring::Response, disable_monitoring::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/extensions/clustermonitoring",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(disable_monitoring::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(disable_monitoring::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(disable_monitoring::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(disable_monitoring::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(disable_monitoring::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(disable_monitoring::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(disable_monitoring::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| disable_monitoring::Error::DeserializeError(source, rsp_body.clone()))?;
Err(disable_monitoring::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod disable_monitoring {
    use super::{models, API_VERSION};
    /// Success statuses for the long-running `disable_monitoring`
    /// operation; the service returns no body for any of them.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    /// Error type for the `disable_monitoring` operation; one variant
    /// per failure stage of the request pipeline.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code;
        /// `value` holds the parsed service error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_azure_monitor_status(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
) -> std::result::Result<models::AzureMonitorResponse, get_azure_monitor_status::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/extensions/azureMonitor",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(get_azure_monitor_status::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_azure_monitor_status::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_azure_monitor_status::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_azure_monitor_status::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AzureMonitorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_azure_monitor_status::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_azure_monitor_status::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_azure_monitor_status::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_azure_monitor_status {
    use super::{models, API_VERSION};
    /// Error type for the `get_azure_monitor_status` operation; one
    /// variant per failure stage of the request pipeline.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code;
        /// `value` holds the parsed service error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn enable_azure_monitor(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
parameters: &models::AzureMonitorRequest,
) -> std::result::Result<enable_azure_monitor::Response, enable_azure_monitor::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/extensions/azureMonitor",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(enable_azure_monitor::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(enable_azure_monitor::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(enable_azure_monitor::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(enable_azure_monitor::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(enable_azure_monitor::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(enable_azure_monitor::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(enable_azure_monitor::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| enable_azure_monitor::Error::DeserializeError(source, rsp_body.clone()))?;
Err(enable_azure_monitor::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod enable_azure_monitor {
    use super::{models, API_VERSION};
    /// Success statuses for the long-running `enable_azure_monitor`
    /// operation; the service returns no body for either.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
    }
    /// Error type for the `enable_azure_monitor` operation; one
    /// variant per failure stage of the request pipeline.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code;
        /// `value` holds the parsed service error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn disable_azure_monitor(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
) -> std::result::Result<disable_azure_monitor::Response, disable_azure_monitor::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/extensions/azureMonitor",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(disable_azure_monitor::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(disable_azure_monitor::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(disable_azure_monitor::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(disable_azure_monitor::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(disable_azure_monitor::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(disable_azure_monitor::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(disable_azure_monitor::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| disable_azure_monitor::Error::DeserializeError(source, rsp_body.clone()))?;
Err(disable_azure_monitor::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod disable_azure_monitor {
    use super::{models, API_VERSION};
    /// Success statuses for the long-running `disable_azure_monitor`
    /// operation; the service returns no body for any of them.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    /// Error type for the `disable_azure_monitor` operation; one
    /// variant per failure stage of the request pipeline.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code;
        /// `value` holds the parsed service error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
extension_name: &str,
) -> std::result::Result<models::ClusterMonitoringResponse, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/extensions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
extension_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ClusterMonitoringResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
    use super::{models, API_VERSION};
    /// Error type for the extensions `get` operation; one variant per
    /// failure stage of the request pipeline.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code;
        /// `value` holds the parsed service error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn create(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
extension_name: &str,
parameters: &models::Extension,
) -> std::result::Result<create::Response, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/extensions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
extension_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(create::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(create::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create {
    use super::{models, API_VERSION};
    /// Success statuses for the long-running `create` operation;
    /// the service returns no body for either.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
    }
    /// Error type for the `create` operation; one variant per failure
    /// stage of the request pipeline.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code;
        /// `value` holds the parsed service error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
extension_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/extensions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
extension_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
    use super::{models, API_VERSION};
    /// Success statuses for the long-running `delete` operation;
    /// the service returns no body for any of them.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    /// Error type for the `delete` operation; one variant per failure
    /// stage of the request pipeline.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code;
        /// `value` holds the parsed service error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_azure_async_operation_status(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
extension_name: &str,
operation_id: &str,
) -> std::result::Result<models::AsyncOperationResult, get_azure_async_operation_status::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/extensions/{}/azureAsyncOperations/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name,
extension_name,
operation_id
);
let mut url = url::Url::parse(url_str).map_err(get_azure_async_operation_status::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_azure_async_operation_status::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_azure_async_operation_status::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_azure_async_operation_status::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AsyncOperationResult = serde_json::from_slice(rsp_body)
.map_err(|source| get_azure_async_operation_status::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_azure_async_operation_status::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_azure_async_operation_status::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_azure_async_operation_status {
    use super::{models, API_VERSION};
    /// Error type for the `get_azure_async_operation_status`
    /// operation; one variant per failure stage of the request pipeline.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code;
        /// `value` holds the parsed service error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
pub mod virtual_machines {
use super::{models, API_VERSION};
pub async fn list_hosts(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
cluster_name: &str,
) -> std::result::Result<models::HostInfoListResult, list_hosts::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/listHosts",
operation_config.base_path(),
subscription_id,
resource_group_name,
cluster_name
);
let mut url = url::Url::parse(url_str).map_err(list_hosts::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_hosts::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_hosts::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_hosts::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::HostInfoListResult =
serde_json::from_slice(rsp_body).map_err(|source| list_hosts::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list_hosts::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_hosts::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_hosts {
    use super::{models, API_VERSION};
    /// Error type for the `list_hosts` operation; one variant per
    /// failure stage of the request pipeline.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code;
        /// `value` holds the parsed service error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// POST `.../clusters/{clusterName}/restartHosts` — requests a restart of the
/// given hosts. Machine-generated Azure SDK operation; the service may answer
/// 200 (done) or 202 (accepted, long-running), hence the two-variant Response.
pub async fn restart_hosts(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    cluster_name: &str,
    hosts: &models::RestartHostsParameters,
) -> std::result::Result<restart_hosts::Response, restart_hosts::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/restartHosts",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        cluster_name
    );
    let mut url = url::Url::parse(url_str).map_err(restart_hosts::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::POST);
    // Bearer-token auth is optional: only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(restart_hosts::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // JSON body: the list of hosts to restart.
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(hosts).map_err(restart_hosts::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(restart_hosts::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(restart_hosts::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => Ok(restart_hosts::Response::Ok200),
        http::StatusCode::ACCEPTED => Ok(restart_hosts::Response::Accepted202),
        // Every other status is surfaced as DefaultResponse with the parsed error payload.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorResponse =
                serde_json::from_slice(rsp_body).map_err(|source| restart_hosts::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(restart_hosts::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Response and error types for the sibling `restart_hosts` operation (generated).
pub mod restart_hosts {
    use super::{models, API_VERSION};
    // 200 = restart completed synchronously; 202 = accepted as a long-running operation.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
    }
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// GET `.../restartHosts/azureasyncoperations/{operationId}` — polls the status
/// of a previously accepted (202) `restart_hosts` long-running operation.
/// Machine-generated Azure SDK operation.
pub async fn get_async_operation_status(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    cluster_name: &str,
    operation_id: &str,
) -> std::result::Result<models::AsyncOperationResult, get_async_operation_status::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/restartHosts/azureasyncoperations/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        cluster_name,
        operation_id
    );
    let mut url = url::Url::parse(url_str).map_err(get_async_operation_status::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Bearer-token auth is optional: only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_async_operation_status::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(get_async_operation_status::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_async_operation_status::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::AsyncOperationResult = serde_json::from_slice(rsp_body)
                .map_err(|source| get_async_operation_status::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Every other status is surfaced as DefaultResponse with the parsed error payload.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                .map_err(|source| get_async_operation_status::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get_async_operation_status::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for the sibling `get_async_operation_status` operation (generated).
pub mod get_async_operation_status {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
/// Operations for enumerating and fetching an HDInsight cluster's private link
/// resources (machine-generated Azure SDK module).
pub mod private_link_resources {
    use super::{models, API_VERSION};
    /// GET `.../clusters/{clusterName}/privateLinkResources` — lists all
    /// private link resources for the cluster.
    pub async fn list_by_cluster(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        cluster_name: &str,
    ) -> std::result::Result<models::PrivateLinkResourceListResult, list_by_cluster::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/privateLinkResources",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            cluster_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_cluster::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer-token auth is optional: only attached when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_cluster::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_by_cluster::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_cluster::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::PrivateLinkResourceListResult = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_cluster::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Every other status is surfaced as DefaultResponse with the parsed error payload.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_cluster::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list_by_cluster::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for `list_by_cluster` (generated).
    pub mod list_by_cluster {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// GET `.../privateLinkResources/{privateLinkResourceName}` — fetches one
    /// private link resource by name.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        cluster_name: &str,
        private_link_resource_name: &str,
    ) -> std::result::Result<models::PrivateLinkResource, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HDInsight/clusters/{}/privateLinkResources/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            cluster_name,
            private_link_resource_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer-token auth is optional: only attached when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::PrivateLinkResource =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Every other status is surfaced as DefaultResponse with the parsed error payload.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for `get` (generated).
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
|
use std::borrow::Cow;
use serde::{Deserialize, Deserializer};
use regex::Regex;
/// Strips configured (case-insensitive) regex patterns out of identifiers.
/// Built from a deserialized list of pattern strings — see the `Deserialize` impl.
#[derive(Debug, Clone)]
pub struct Cleaner {
    // Single compiled alternation of all configured patterns.
    pattern: Regex,
}
impl Cleaner {
    /// Remove every occurrence of the configured patterns from `id`.
    ///
    /// Returns a borrowed `Cow` (no allocation) when nothing matched.
    pub fn clean<'a>(&self, id: &'a str) -> Cow<'a, str> {
        let Self { pattern } = self;
        pattern.replace_all(id, "")
    }
}
impl<'de> Deserialize<'de> for Cleaner
{
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let config = Box::<[Box<str>]>::deserialize(deserializer)?;
let mut pattern = String::with_capacity(200);
pattern.push_str("(?i)"); // Case insensitive.
for pat in config.iter() {
pattern.push('(');
pattern.push_str(pat);
pattern.push_str(")|");
}
pattern.pop();
let pattern = Regex
::new(&pattern)
.map_err(serde::de::Error::custom)?;
Ok(
Cleaner {
pattern,
}
)
}
}
|
use super::game::Direction;
// A 2-D point in the same coordinate space as Rect (y grows downward,
// judging by the Up/Down handling in Rect::intersects — TODO confirm).
pub struct Point {
    pub x: f64,
    pub y: f64
}
// Axis-aligned rectangle: top-left corner plus width and height.
pub struct Rect {
    pub origin: Point,
    pub w: f64,
    pub h: f64,
}
impl Rect {
pub fn new(x: f64, y: f64, w: f64, h: f64) -> Self {
let p = Point {x,y};
Rect { origin:p,w,h }
}
pub fn contains(&self, p: Point) -> bool {
p.x > self.origin.x
&& p.x < self.w+self.origin.x
&& p.y > self.origin.y
&& p.y < self.origin.y+self.h
}
pub fn center(&self) -> Point {
Point {
x: self.x_center(),
y: self.origin.y+self.h/2.
}
}
pub fn x_center(&self) -> f64 {
self.origin.x+self.w/2.
}
pub fn intersects(&self, r: &Rect) -> Direction {
/*
check if self intersects r. return Direction::Empty if no intersect,
otherwise return the direction of intersection.
so if self intersects from above, return Direction::Up
*/
if !(
self.origin.x > r.origin.x + r.w ||
self.origin.x + self.w < r.origin.x ||
self.origin.y > r.origin.y + r.h ||
self.origin.y + self.h < r.origin.y
){
let (xd,yd) = self.center_difference(r);
//now we need to adjust based on width/height
//if abs(xd) < abs(yd) but the width of the rects is much smaller than height,
//the collision may be in the y direction
let y_axis = self.center().x > r.origin.x && xd < r.origin.x + r.w; //if xd between x bounds of r, then probably colliding from above or below
if y_axis {
if yd < 0. { return Direction::Up }
return Direction::Down
} else {
if xd < 0. { return Direction::Left } //self's center is to the left of r's center
return Direction::Right
}
}
Direction::Empty
}
pub fn center_difference(&self, r: &Rect) -> (f64, f64) { //should be a point?
let self_c = self.center();
let r_c = r.center();
(self_c.x - r_c.x, self_c.y - r_c.y)
}
} |
use std::{collections::HashMap, vec::Vec};
use Element::*;
// A molecule flattened to parallel atom and bond lists; Bond::from/to index
// into `atoms`.
pub struct FlatMol {
    pub atoms: Vec<Atom>,
    pub bonds: Vec<Bond>,
}
// One atom: its isotope, formal charge, optional chirality and optional
// atom-class label.
#[derive(Debug, PartialEq, Clone, Copy)]
pub struct Atom {
    pub isotope: Isotope,
    pub charge: Charge,
    pub chiral: Option<Chirality>,
    pub class: Option<Class>,
}
// An element together with a mass number (exact isotope or natural average).
#[derive(Debug, PartialEq, Clone, Copy)]
pub struct Isotope {
    pub elem: Element,
    pub mass_number: MassNumber,
}
/// Chemical elements. Variant order follows atomic number, with the
/// lanthanide/actinide series appended after the main block.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum Element {
    H,
    He,
    Li,
    Be,
    B,
    C,
    N,
    O,
    F,
    Ne,
    Na,
    Mg,
    Al,
    Si,
    P,
    S,
    Cl,
    Ar,
    K,
    Ca,
    Sc,
    Ti,
    V,
    Cr,
    Mn,
    Fe,
    Co,
    Ni,
    Cu,
    Zn,
    Ga,
    Ge,
    As,
    Se,
    Br,
    Kr,
    Rb,
    Sr,
    Y,
    Zr,
    Nb,
    Mo,
    Tc,
    Ru,
    Rh,
    Pd,
    Ag,
    Cd,
    In,
    Sn,
    Sb,
    Te,
    I,
    Xe,
    Cs,
    Ba,
    Hf,
    Ta,
    W,
    Re,
    Os,
    Ir,
    Pt,
    Au,
    Hg,
    Tl,
    Pb,
    Bi,
    Po,
    At,
    Rn,
    Fr,
    Ra,
    Rf,
    Db,
    Sg,
    Bh,
    Hs,
    Mt,
    Ds,
    Rg,
    Cn,
    Fl,
    Lv,
    La,
    Ce,
    Pr,
    Nd,
    Pm,
    Sm,
    Eu,
    Gd,
    Tb,
    Dy,
    Ho,
    Er,
    Tm,
    Yb,
    Lu,
    Ac,
    Th,
    Pa,
    U,
    Np,
    Pu,
    Am,
    Cm,
    Bk,
    Cf,
    Es,
    Fm,
    Md,
    No,
    Lr,
}
impl Element {
    /// Look up an element by its (case-sensitive) chemical symbol, e.g. "He".
    ///
    /// Returns `Err` with a descriptive message for an unknown symbol.
    ///
    /// Rewritten as a direct `match`: the previous version allocated two
    /// 112-element `Vec`s and built a `HashMap` on every call just to perform
    /// a single lookup, and its correctness depended on keeping two parallel
    /// lists in sync.
    pub fn get_element(element: &str) -> Result<Element, String> {
        let e = match element {
            "H" => Self::H, "He" => Self::He, "Li" => Self::Li, "Be" => Self::Be,
            "B" => Self::B, "C" => Self::C, "N" => Self::N, "O" => Self::O,
            "F" => Self::F, "Ne" => Self::Ne, "Na" => Self::Na, "Mg" => Self::Mg,
            "Al" => Self::Al, "Si" => Self::Si, "P" => Self::P, "S" => Self::S,
            "Cl" => Self::Cl, "Ar" => Self::Ar, "K" => Self::K, "Ca" => Self::Ca,
            "Sc" => Self::Sc, "Ti" => Self::Ti, "V" => Self::V, "Cr" => Self::Cr,
            "Mn" => Self::Mn, "Fe" => Self::Fe, "Co" => Self::Co, "Ni" => Self::Ni,
            "Cu" => Self::Cu, "Zn" => Self::Zn, "Ga" => Self::Ga, "Ge" => Self::Ge,
            "As" => Self::As, "Se" => Self::Se, "Br" => Self::Br, "Kr" => Self::Kr,
            "Rb" => Self::Rb, "Sr" => Self::Sr, "Y" => Self::Y, "Zr" => Self::Zr,
            "Nb" => Self::Nb, "Mo" => Self::Mo, "Tc" => Self::Tc, "Ru" => Self::Ru,
            "Rh" => Self::Rh, "Pd" => Self::Pd, "Ag" => Self::Ag, "Cd" => Self::Cd,
            "In" => Self::In, "Sn" => Self::Sn, "Sb" => Self::Sb, "Te" => Self::Te,
            "I" => Self::I, "Xe" => Self::Xe, "Cs" => Self::Cs, "Ba" => Self::Ba,
            "Hf" => Self::Hf, "Ta" => Self::Ta, "W" => Self::W, "Re" => Self::Re,
            "Os" => Self::Os, "Ir" => Self::Ir, "Pt" => Self::Pt, "Au" => Self::Au,
            "Hg" => Self::Hg, "Tl" => Self::Tl, "Pb" => Self::Pb, "Bi" => Self::Bi,
            "Po" => Self::Po, "At" => Self::At, "Rn" => Self::Rn, "Fr" => Self::Fr,
            "Ra" => Self::Ra, "Rf" => Self::Rf, "Db" => Self::Db, "Sg" => Self::Sg,
            "Bh" => Self::Bh, "Hs" => Self::Hs, "Mt" => Self::Mt, "Ds" => Self::Ds,
            "Rg" => Self::Rg, "Cn" => Self::Cn, "Fl" => Self::Fl, "Lv" => Self::Lv,
            "La" => Self::La, "Ce" => Self::Ce, "Pr" => Self::Pr, "Nd" => Self::Nd,
            "Pm" => Self::Pm, "Sm" => Self::Sm, "Eu" => Self::Eu, "Gd" => Self::Gd,
            "Tb" => Self::Tb, "Dy" => Self::Dy, "Ho" => Self::Ho, "Er" => Self::Er,
            "Tm" => Self::Tm, "Yb" => Self::Yb, "Lu" => Self::Lu, "Ac" => Self::Ac,
            "Th" => Self::Th, "Pa" => Self::Pa, "U" => Self::U, "Np" => Self::Np,
            "Pu" => Self::Pu, "Am" => Self::Am, "Cm" => Self::Cm, "Bk" => Self::Bk,
            "Cf" => Self::Cf, "Es" => Self::Es, "Fm" => Self::Fm, "Md" => Self::Md,
            "No" => Self::No, "Lr" => Self::Lr,
            _ => return Err(format!("Element '{}' does not exist", element)),
        };
        Ok(e)
    }
}
// Formal charge of an atom (signed).
type Charge = isize;
// Tetrahedral chirality as written in the source notation.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Chirality {
    Clockwise,
    AntiClockwise,
}
// Arbitrary atom-class label.
type Class = usize;
// Either an explicit isotope mass number or the natural-abundance average.
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum MassNumber {
    Exact(usize),
    Average,
}
// A bond between two atoms, referenced by their indices in FlatMol::atoms.
#[derive(Debug, PartialEq, Clone, Copy)]
pub struct Bond {
    pub order: Order,
    pub from: usize,
    pub to: usize,
}
// Bond order (multiplicity).
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum Order {
    Single,
    Double,
    Triple,
    Quadruple,
}
|
// svd2rust-generated accessors for the ADC sample-time register SMPR2.
// Do not hand-edit the structure: the reader/writer types must line up with
// the crate's generic register machinery.
#[doc = "Register `SMPR2` reader"]
pub type R = crate::R<SMPR2_SPEC>;
#[doc = "Register `SMPR2` writer"]
pub type W = crate::W<SMPR2_SPEC>;
#[doc = "Field `SMP0` reader - Channel 0 sample time selection"]
pub type SMP0_R = crate::FieldReader<SMP0_A>;
// Variant names encode tenths of a cycle: Cycles15 == 1.5 ADC clock cycles, etc.
#[doc = "Channel 0 sample time selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum SMP0_A {
    #[doc = "0: 1.5 ADC clock cycles"]
    Cycles15 = 0,
    #[doc = "1: 7.5 ADC clock cycles"]
    Cycles75 = 1,
    #[doc = "2: 13.5 ADC clock cycles"]
    Cycles135 = 2,
    #[doc = "3: 28.5 ADC clock cycles"]
    Cycles285 = 3,
    #[doc = "4: 41.5 ADC clock cycles"]
    Cycles415 = 4,
    #[doc = "5: 55.5 ADC clock cycles"]
    Cycles555 = 5,
    #[doc = "6: 71.5 ADC clock cycles"]
    Cycles715 = 6,
    #[doc = "7: 239.5 ADC clock cycles"]
    Cycles2395 = 7,
}
impl From<SMP0_A> for u8 {
    #[inline(always)]
    fn from(variant: SMP0_A) -> Self {
        // Safe: SMP0_A is #[repr(u8)] with explicit discriminants 0..=7.
        variant as _
    }
}
impl crate::FieldSpec for SMP0_A {
    // Raw storage type for this 3-bit field.
    type Ux = u8;
}
// Reader-side helpers for the 3-bit SMP0 field (svd2rust-generated).
impl SMP0_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SMP0_A {
        match self.bits {
            0 => SMP0_A::Cycles15,
            1 => SMP0_A::Cycles75,
            2 => SMP0_A::Cycles135,
            3 => SMP0_A::Cycles285,
            4 => SMP0_A::Cycles415,
            5 => SMP0_A::Cycles555,
            6 => SMP0_A::Cycles715,
            7 => SMP0_A::Cycles2395,
            // The field is masked to 3 bits on read, so 0..=7 is exhaustive.
            _ => unreachable!(),
        }
    }
    #[doc = "1.5 ADC clock cycles"]
    #[inline(always)]
    pub fn is_cycles1_5(&self) -> bool {
        *self == SMP0_A::Cycles15
    }
    #[doc = "7.5 ADC clock cycles"]
    #[inline(always)]
    pub fn is_cycles7_5(&self) -> bool {
        *self == SMP0_A::Cycles75
    }
    #[doc = "13.5 ADC clock cycles"]
    #[inline(always)]
    pub fn is_cycles13_5(&self) -> bool {
        *self == SMP0_A::Cycles135
    }
    #[doc = "28.5 ADC clock cycles"]
    #[inline(always)]
    pub fn is_cycles28_5(&self) -> bool {
        *self == SMP0_A::Cycles285
    }
    #[doc = "41.5 ADC clock cycles"]
    #[inline(always)]
    pub fn is_cycles41_5(&self) -> bool {
        *self == SMP0_A::Cycles415
    }
    #[doc = "55.5 ADC clock cycles"]
    #[inline(always)]
    pub fn is_cycles55_5(&self) -> bool {
        *self == SMP0_A::Cycles555
    }
    #[doc = "71.5 ADC clock cycles"]
    #[inline(always)]
    pub fn is_cycles71_5(&self) -> bool {
        *self == SMP0_A::Cycles715
    }
    #[doc = "239.5 ADC clock cycles"]
    #[inline(always)]
    pub fn is_cycles239_5(&self) -> bool {
        *self == SMP0_A::Cycles2395
    }
}
// Writer-side helpers for the 3-bit SMP0 field (svd2rust-generated).
// `O` is the bit offset of the field within the register.
#[doc = "Field `SMP0` writer - Channel 0 sample time selection"]
pub type SMP0_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 3, O, SMP0_A>;
impl<'a, REG, const O: u8> SMP0_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "1.5 ADC clock cycles"]
    #[inline(always)]
    pub fn cycles1_5(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles15)
    }
    #[doc = "7.5 ADC clock cycles"]
    #[inline(always)]
    pub fn cycles7_5(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles75)
    }
    #[doc = "13.5 ADC clock cycles"]
    #[inline(always)]
    pub fn cycles13_5(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles135)
    }
    #[doc = "28.5 ADC clock cycles"]
    #[inline(always)]
    pub fn cycles28_5(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles285)
    }
    #[doc = "41.5 ADC clock cycles"]
    #[inline(always)]
    pub fn cycles41_5(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles415)
    }
    #[doc = "55.5 ADC clock cycles"]
    #[inline(always)]
    pub fn cycles55_5(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles555)
    }
    #[doc = "71.5 ADC clock cycles"]
    #[inline(always)]
    pub fn cycles71_5(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles715)
    }
    #[doc = "239.5 ADC clock cycles"]
    #[inline(always)]
    pub fn cycles239_5(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles2395)
    }
}
#[doc = "Field `SMP1` reader - Channel 1 sample time selection"]
pub use SMP0_R as SMP1_R;
#[doc = "Field `SMP2` reader - Channel 2 sample time selection"]
pub use SMP0_R as SMP2_R;
#[doc = "Field `SMP3` reader - Channel 3 sample time selection"]
pub use SMP0_R as SMP3_R;
#[doc = "Field `SMP4` reader - Channel 4 sample time selection"]
pub use SMP0_R as SMP4_R;
#[doc = "Field `SMP5` reader - Channel 5 sample time selection"]
pub use SMP0_R as SMP5_R;
#[doc = "Field `SMP6` reader - Channel 6 sample time selection"]
pub use SMP0_R as SMP6_R;
#[doc = "Field `SMP7` reader - Channel 7 sample time selection"]
pub use SMP0_R as SMP7_R;
#[doc = "Field `SMP8` reader - Channel 8 sample time selection"]
pub use SMP0_R as SMP8_R;
#[doc = "Field `SMP9` reader - Channel 9 sample time selection"]
pub use SMP0_R as SMP9_R;
#[doc = "Field `SMP1` writer - Channel 1 sample time selection"]
pub use SMP0_W as SMP1_W;
#[doc = "Field `SMP2` writer - Channel 2 sample time selection"]
pub use SMP0_W as SMP2_W;
#[doc = "Field `SMP3` writer - Channel 3 sample time selection"]
pub use SMP0_W as SMP3_W;
#[doc = "Field `SMP4` writer - Channel 4 sample time selection"]
pub use SMP0_W as SMP4_W;
#[doc = "Field `SMP5` writer - Channel 5 sample time selection"]
pub use SMP0_W as SMP5_W;
#[doc = "Field `SMP6` writer - Channel 6 sample time selection"]
pub use SMP0_W as SMP6_W;
#[doc = "Field `SMP7` writer - Channel 7 sample time selection"]
pub use SMP0_W as SMP7_W;
#[doc = "Field `SMP8` writer - Channel 8 sample time selection"]
pub use SMP0_W as SMP8_W;
#[doc = "Field `SMP9` writer - Channel 9 sample time selection"]
pub use SMP0_W as SMP9_W;
// Field readers: each SMPx occupies 3 bits at offset 3*x, hence the
// right-shift by the offset and the `& 7` (3-bit) mask.
impl R {
    #[doc = "Bits 0:2 - Channel 0 sample time selection"]
    #[inline(always)]
    pub fn smp0(&self) -> SMP0_R {
        SMP0_R::new((self.bits & 7) as u8)
    }
    #[doc = "Bits 3:5 - Channel 1 sample time selection"]
    #[inline(always)]
    pub fn smp1(&self) -> SMP1_R {
        SMP1_R::new(((self.bits >> 3) & 7) as u8)
    }
    #[doc = "Bits 6:8 - Channel 2 sample time selection"]
    #[inline(always)]
    pub fn smp2(&self) -> SMP2_R {
        SMP2_R::new(((self.bits >> 6) & 7) as u8)
    }
    #[doc = "Bits 9:11 - Channel 3 sample time selection"]
    #[inline(always)]
    pub fn smp3(&self) -> SMP3_R {
        SMP3_R::new(((self.bits >> 9) & 7) as u8)
    }
    #[doc = "Bits 12:14 - Channel 4 sample time selection"]
    #[inline(always)]
    pub fn smp4(&self) -> SMP4_R {
        SMP4_R::new(((self.bits >> 12) & 7) as u8)
    }
    #[doc = "Bits 15:17 - Channel 5 sample time selection"]
    #[inline(always)]
    pub fn smp5(&self) -> SMP5_R {
        SMP5_R::new(((self.bits >> 15) & 7) as u8)
    }
    #[doc = "Bits 18:20 - Channel 6 sample time selection"]
    #[inline(always)]
    pub fn smp6(&self) -> SMP6_R {
        SMP6_R::new(((self.bits >> 18) & 7) as u8)
    }
    #[doc = "Bits 21:23 - Channel 7 sample time selection"]
    #[inline(always)]
    pub fn smp7(&self) -> SMP7_R {
        SMP7_R::new(((self.bits >> 21) & 7) as u8)
    }
    #[doc = "Bits 24:26 - Channel 8 sample time selection"]
    #[inline(always)]
    pub fn smp8(&self) -> SMP8_R {
        SMP8_R::new(((self.bits >> 24) & 7) as u8)
    }
    #[doc = "Bits 27:29 - Channel 9 sample time selection"]
    #[inline(always)]
    pub fn smp9(&self) -> SMP9_R {
        SMP9_R::new(((self.bits >> 27) & 7) as u8)
    }
}
// Field writers: the const generic argument is each field's bit offset
// (3 bits per channel, offset 3*x).
impl W {
    #[doc = "Bits 0:2 - Channel 0 sample time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp0(&mut self) -> SMP0_W<SMPR2_SPEC, 0> {
        SMP0_W::new(self)
    }
    #[doc = "Bits 3:5 - Channel 1 sample time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp1(&mut self) -> SMP1_W<SMPR2_SPEC, 3> {
        SMP1_W::new(self)
    }
    #[doc = "Bits 6:8 - Channel 2 sample time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp2(&mut self) -> SMP2_W<SMPR2_SPEC, 6> {
        SMP2_W::new(self)
    }
    #[doc = "Bits 9:11 - Channel 3 sample time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp3(&mut self) -> SMP3_W<SMPR2_SPEC, 9> {
        SMP3_W::new(self)
    }
    #[doc = "Bits 12:14 - Channel 4 sample time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp4(&mut self) -> SMP4_W<SMPR2_SPEC, 12> {
        SMP4_W::new(self)
    }
    #[doc = "Bits 15:17 - Channel 5 sample time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp5(&mut self) -> SMP5_W<SMPR2_SPEC, 15> {
        SMP5_W::new(self)
    }
    #[doc = "Bits 18:20 - Channel 6 sample time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp6(&mut self) -> SMP6_W<SMPR2_SPEC, 18> {
        SMP6_W::new(self)
    }
    #[doc = "Bits 21:23 - Channel 7 sample time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp7(&mut self) -> SMP7_W<SMPR2_SPEC, 21> {
        SMP7_W::new(self)
    }
    #[doc = "Bits 24:26 - Channel 8 sample time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp8(&mut self) -> SMP8_W<SMPR2_SPEC, 24> {
        SMP8_W::new(self)
    }
    #[doc = "Bits 27:29 - Channel 9 sample time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp9(&mut self) -> SMP9_W<SMPR2_SPEC, 27> {
        SMP9_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY: caller must ensure the raw value is valid for this register
        // (bits 30/31 are unused; each SMPx field accepts any 3-bit value).
        self.bits = bits;
        self
    }
}
#[doc = "sample time register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`smpr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`smpr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SMPR2_SPEC;
impl crate::RegisterSpec for SMPR2_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`smpr2::R`](R) reader structure"]
impl crate::Readable for SMPR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`smpr2::W`](W) writer structure"]
impl crate::Writable for SMPR2_SPEC {
    // No write-0-to-clear / write-1-to-clear bits in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SMPR2 to value 0"]
impl crate::Resettable for SMPR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Copyright (C) Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! This file is checked by clippy to make sure that the code generated by the derive macro
//! doesn't spew out warnings/errors in users' code.
use parity_scale_codec_derive::{Decode, Encode};
/// C-like (fieldless) enum with explicit discriminants; exists solely so the
/// derived `Encode`/`Decode` impls are compiled and checked by clippy.
#[repr(u8)]
#[derive(Decode, Encode)]
pub enum CLike {
    Foo = 0,
    Bar = 1,
}
|
mod command;
use std::collections::HashMap;
use std::mem;
use std::io::{stdin, stdout, Write};
use std::process::Command as Subprocess;
use ansi_term::Style;
use ansi_term::Colour::Cyan;
use parse::*;
use error::CommandError;
use ident::Ident;
use asset::Asset;
use data_backend::DataBackend;
use cli::command::Command;
use http::push::push_asset;
/// Control-flow signal returned by `Cli::exec`: whether the REPL should
/// terminate (`Break`) or keep reading commands (`Continue`).
pub enum Flow {
    Break,
    Continue
}
// The CLI's current selection: nothing, a single asset, or a result set
// produced by `find`.
enum CurrentAsset<A> where A: Asset {
    One(A),
    Multi(Vec<A>),
    None
}
impl<A> CurrentAsset<A> where A: Asset {
    /// Consume the selection and flatten it into a (possibly empty) vector,
    /// regardless of which variant it held.
    pub fn unwrap(self) -> Vec<A> {
        match self {
            CurrentAsset::None => Vec::new(),
            CurrentAsset::One(asset) => vec![asset],
            CurrentAsset::Multi(assets) => assets,
        }
    }
}
/// Interactive command-line front end over a `DataBackend`.
pub struct Cli<'a, D> where D: 'a + DataBackend {
    // Storage backend every command operates on.
    backend: &'a mut D,
    // Commands still pending from a `$`-separated input line (stored reversed;
    // see `repl`).
    command_queue: Vec<String>,
    // Currently selected asset(s) that subsequent commands act upon.
    current_asset: CurrentAsset<D::Asset>,
    // Named selections saved via the `store` command.
    variables: HashMap<String, CurrentAsset<D::Asset>>,
    // Name of the variable the current selection was loaded from, if any.
    current_variable: Option<String>
}
impl<'a, D> Cli<'a, D> where D: 'a + DataBackend {
/// Create a CLI with an empty command queue, no selection and no variables.
pub fn new(backend: &'a mut D) -> Self {
    Cli {
        backend,
        command_queue: Vec::new(),
        current_asset: CurrentAsset::None,
        variables: HashMap::new(),
        current_variable: None,
    }
}
/// Parse and execute one command line, returning whether the REPL should
/// keep going (`Flow::Continue`) or terminate (`Flow::Break`).
pub fn exec(&mut self, cmd: &str) -> Flow {
    let tokens: Vec<String> = quoted_split(cmd);
    match Command::new(tokens) {
        // `store <name>`: move the current selection into a named variable.
        Command::Store(name) => {
            let mut current_asset = CurrentAsset::None;
            mem::swap(&mut self.current_asset, &mut current_asset);
            if let CurrentAsset::None = current_asset {
                println!("Command requires an asset.");
            } else {
                self.variables.insert(name, current_asset);
            }
        },
        // `load <name>`: make a stored variable the current selection. The
        // entry is removed here; `repl` re-inserts it once the queued
        // commands have run.
        Command::Load(ref name) => {
            if let Some(value) = self.variables.remove(name) {
                self.current_asset = value;
                self.current_variable = Some(name.to_string());
            } else {
                println!("No variable has that name ☹");
            }
        },
        // `add <src> [tags...]`: register a new asset with the backend.
        Command::Add(src, tags) => {
            match self.backend.add_asset(src, tags) {
                Ok(f) => f.print(),
                Err(e) => println!("{:?}", e)
            }
        },
        // `get <uuid>`: select a single asset by id.
        Command::Get(uuid) => {
            match self.backend.get_asset(Ident::Uuid(uuid)) {
                Ok(f) => {
                    f.print();
                    self.current_asset = CurrentAsset::One(f);
                },
                Err(e) => match e {
                    CommandError::AssetNotFound => println!("Asset not found."),
                    _ => println!("{:?}", e)
                }
            }
        },
        // `find <query>`: select every asset matching the query.
        Command::Find(query) => {
            match self.backend.find_assets(query) {
                Ok(fs) => {
                    for f in &fs {
                        f.print();
                    }
                    self.current_asset = CurrentAsset::Multi(fs);
                },
                Err(e) => println!("{:?}", e)
            }
        },
        // Rename the selected asset; only valid on exactly one asset.
        Command::Name(name) => {
            match self.current_asset {
                CurrentAsset::One(ref mut asset) => {
                    if let Err(e) = asset.name(&name) {
                        println!("Naming asset failed: {:?}", e);
                    }
                },
                CurrentAsset::Multi(ref mut assets) => {
                    if assets.len() > 1 {
                        println!("Command can only be run on a single asset.");
                    } else if let Some(asset) = assets.get_mut(0) {
                        if let Err(e) = asset.name(&name) {
                            println!("Naming asset failed: {:?}", e);
                        }
                    } else {
                        println!("Command requires an asset.");
                    }
                },
                CurrentAsset::None => println!("Command requires an asset.")
            }
        },
        // Remove names from every selected asset.
        Command::UnName => {
            match self.current_asset {
                CurrentAsset::One(ref mut asset) => {
                    if let Err(e) = asset.unname() {
                        println!("Un-naming asset failed: {:?}", e);
                    }
                },
                CurrentAsset::Multi(ref mut assets) => {
                    for asset in assets {
                        if let Err(e) = asset.unname() {
                            println!("Un-naming asset failed: {:?}", e);
                        }
                    }
                },
                CurrentAsset::None => println!("Command requires an asset.")
            }
        },
        // Delete every selected asset; the selection is consumed.
        Command::Delete => {
            let mut current_asset = CurrentAsset::None;
            mem::swap(&mut self.current_asset, &mut current_asset);
            for asset in current_asset.unwrap() {
                if let Err(e) = asset.delete() {
                    println!("Deleting asset failed: {:?}", e);
                }
            }
        },
        // Attach tags to every selected asset.
        Command::AddTags(tags) => {
            match self.current_asset {
                CurrentAsset::One(ref mut asset) => {
                    if let Err(e) = asset.add_tags(tags) {
                        println!("Adding tags failed: {:?}", e);
                    }
                },
                CurrentAsset::Multi(ref mut assets) => {
                    for asset in assets {
                        if let Err(e) = asset.add_tags(tags.clone()) {
                            println!("Adding tags failed: {:?}", e);
                        }
                    }
                },
                CurrentAsset::None => println!("Command requires an asset.")
            }
        },
        // Open the selected asset(s) with the desktop's default handler.
        Command::Open => {
            match self.current_asset {
                CurrentAsset::One(ref mut asset) => {
                    // Fixed: previously spawned `sh -c "xdg-open {path}"`,
                    // which broke (and was shell-injectable) for paths with
                    // spaces or metacharacters. Invoke xdg-open directly and
                    // pass the path as a single argument instead.
                    let sp = Subprocess::new("xdg-open")
                        .arg(format!("{}", asset.get_path()))
                        .spawn();
                    if let Err(e) = sp {
                        println!("Failed to open asset: {:?}", e);
                    }
                },
                CurrentAsset::Multi(ref mut assets) => {
                    for asset in assets {
                        // Fixed: this branch opened `get_source()` while the
                        // single-asset branch opened `get_path()`; both now
                        // open the local path. (NOTE(review): confirm
                        // get_path is also correct for remote-sourced assets.)
                        let sp = Subprocess::new("xdg-open")
                            .arg(format!("{}", asset.get_path()))
                            .spawn();
                        if let Err(e) = sp {
                            println!("Failed to open asset: {:?}", e);
                        }
                    }
                },
                CurrentAsset::None => println!("Command requires an asset.")
            }
        },
        // Push the selected asset(s) to the remote server.
        Command::Push => {
            match self.current_asset {
                CurrentAsset::One(ref mut asset) => {
                    let res = push_asset("https://femboyse.men/", "scottpleb", asset);
                    match res {
                        Ok(_) => println!("Pushed!"),
                        Err(e) => println!("Push failed: {:?}", e)
                    }
                },
                CurrentAsset::Multi(ref mut assets) => {
                    for asset in assets {
                        let res = push_asset("https://femboyse.men/", "scottpleb", asset);
                        match res {
                            Ok(_) => println!("Pushed!"),
                            Err(e) => println!("Push failed: {:?}", e)
                        }
                    }
                },
                CurrentAsset::None => println!("Command requires an asset.")
            }
        },
        // `key <name>`: generate and store a signing key.
        Command::Key(name) => {
            match self.backend.gen_key(&name) {
                Ok(_) => println!("🔑 Key saved to keys/{}.key", name),
                Err(e) => println!("Key generation failed: {:?}", e)
            }
        },
        // TODO: fingerprinting is stubbed out pending backend support.
        Command::Fingerprint(_) => {
            /*match self.current_asset {
            CurrentAsset::One(ref mut asset) => {
            if let Err(e) = self.backend.set_fingerprint(&name, asset) {
            println!("Fingerprinting failed: {:?}", e);
            }
            },
            CurrentAsset::Multi(ref mut assets) => {
            if assets.len() > 1 {
            println!("Command can only be run on a single asset.");
            } else if let Some(asset) = assets.get_mut(0) {
            if let Err(e) = self.backend.set_fingerprint(&name, asset) {
            println!("Fingerprinting failed: {:?}", e);
            }
            } else {
            println!("Command requires an asset.");
            }
            },
            CurrentAsset::None => println!("Command requires an asset.")
            }*/
        },
        Command::Help => println!(r#"taguna ♥ organizing your beloved assets
add <src> [tag1 tag2 ...]
find <query>
add_tag <tag> <asset>
exit | quit
help | commands | ?"#),
        Command::Exit => return Flow::Break,
        Command::Unknown => println!("Unknown command."),
        Command::Malformed(s) => println!("Malformed command: {}", s),
        Command::None => return Flow::Continue
    };
    Flow::Continue
}
/// Interactive read-eval-print loop: prints a colored `taguna> ` prompt,
/// reads a line from stdin, splits it on `'$'` into a command queue, and
/// executes each segment via `exec` until a command returns `Flow::Break`.
pub fn repl(&mut self) {
    loop {
        let cmd: String = {
            if self.command_queue.is_empty() {
                // A fresh input line is about to be read, so first dispose of
                // the asset selection left over from the previous line: stash
                // it in the named variable if one was set, otherwise drop it.
                if let Some(ref name) = self.current_variable {
                    let mut value = CurrentAsset::None;
                    // Swap instead of clone: moves the selection out of `self`
                    // while leaving a valid `None` in its place.
                    mem::swap(&mut self.current_asset, &mut value);
                    self.variables.insert(name.to_string(), value);
                } else {
                    self.current_asset = CurrentAsset::None;
                }
                self.current_variable = None;
                let mut cmd = String::new();
                print!("{}", Style::new().fg(Cyan).paint("taguna> "));
                stdout().flush().unwrap();
                stdin().read_line(&mut cmd).unwrap();
                // '$' separates multiple commands on a single input line.
                self.command_queue = cmd.split('$').map(|s| s.to_string()).collect();
                self.command_queue.reverse();
            }
            // NOTE(review): the queue is reversed above but drained from the
            // front here, so '$'-separated segments run in reverse input
            // order. If first-to-last order is intended, either drop the
            // `reverse()` or use `pop()` (also O(1) vs. O(n)) — confirm.
            self.command_queue.remove(0)
        };
        if let Flow::Break = self.exec(&cmd) {
            break;
        }
    }
}
/// Non-interactive variant of `repl`: executes the single input line `cmd`
/// (split on `'$'` into segments) and returns once the queue is drained or a
/// command yields `Flow::Break`. No prompt and no stdin reads.
pub fn line(&mut self, cmd: &str) {
    loop {
        let cmd: String = {
            if self.command_queue.is_empty() {
                // Same selection-flush bookkeeping as `repl`: persist the
                // previous selection into a named variable, or discard it.
                if let Some(ref name) = self.current_variable {
                    let mut value = CurrentAsset::None;
                    mem::swap(&mut self.current_asset, &mut value);
                    self.variables.insert(name.to_string(), value);
                } else {
                    self.current_asset = CurrentAsset::None;
                }
                self.current_variable = None;
                self.command_queue = cmd.split('$').map(|s| s.to_string()).collect();
                self.command_queue.reverse();
            }
            // NOTE(review): as in `repl`, reverse() + remove(0) drains the
            // segments in reverse input order — confirm this is intended.
            self.command_queue.remove(0)
        };
        if let Flow::Break = self.exec(&cmd) {
            break;
        }
        // Unlike `repl`, stop once the one supplied line is fully consumed.
        if self.command_queue.is_empty() {
            break;
        }
    }
}
}
|
// svd2rust-generated reader types for the I3C RDR (receive data) register.
#[doc = "Register `RDR` reader"]
pub type R = crate::R<RDR_SPEC>;
#[doc = "Field `RDB0` reader - 8-bit received data on I3C bus."]
pub type RDB0_R = crate::FieldReader;
impl R {
    #[doc = "Bits 0:7 - 8-bit received data on I3C bus."]
    #[inline(always)]
    pub fn rdb0(&self) -> RDB0_R {
        // Extract the low byte of the 32-bit register value and hand it to
        // the field reader; bits 8..31 of RDR are not part of this field.
        let data_byte = (self.bits & 0xff) as u8;
        RDB0_R::new(data_byte)
    }
}
#[doc = "I3C receive data byte register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rdr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RDR_SPEC;
// RDR is accessed as a 32-bit register (only the low byte carries data).
impl crate::RegisterSpec for RDR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`rdr::R`](R) reader structure"]
impl crate::Readable for RDR_SPEC {}
#[doc = "`reset()` method sets RDR to value 0"]
impl crate::Resettable for RDR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::fmt;
// Simple struct that represents a point in 2D space.
//
// `Eq`, `Ord` and `Hash` are derived in addition to the original traits:
// both fields are `i64`, so equality is total and points can be used as
// `HashMap`/`BTreeMap` keys and sorted. The derived `Ord` matches the
// already-derived `PartialOrd` (lexicographic by `x`, then `y`).
// (`Debug`/`Display` are implemented manually elsewhere in this file.)
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
pub struct Point {
    pub x: i64,
    pub y: i64
}
impl Point {
    /// Returns `true` when both coordinates match.
    ///
    /// Kept for backward compatibility: it is equivalent to the derived
    /// `PartialEq`, so it now simply delegates to `==` instead of
    /// re-comparing the fields by hand (and drops the non-idiomatic
    /// explicit `return`).
    pub fn equal(&self, other: &Point) -> bool {
        self == other
    }
}
impl fmt::Display for Point {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "({}, {})", self.x, self.y)
}
}
impl fmt::Debug for Point {
    /// Debug output intentionally matches `Display` (`(x, y)`); delegate to
    /// it so the two representations cannot drift apart.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
} |
use lunar::io::{Score, IO};
use lunar::lander::Lander;
/// Asserts that two floating-point expressions agree once both are rounded
/// to two decimal places (the nearest hundredth).
macro_rules! assert_close {
    ($left:expr, $right:expr) => {{
        let rounded_left = ($left * 100.0).round() / 100.0;
        let rounded_right = ($right * 100.0).round() / 100.0;
        assert_eq!(rounded_left, rounded_right);
    }};
}
/// A line of output with current lander status, and the fuel-rate input
/// entered in response — one row of the expected game transcript.
struct Line(
    i32, // secs — elapsed time since start
    i32, // miles — whole-mile part of the altitude
    i32, // feet — leftover feet of the altitude (5280 ft = 1 mile)
    f64, // mph — downward velocity
    f64, // lbs — fuel remaining
    i32, // rate — fuel burn rate entered at this prompt
);
impl Line {
    /// Seconds elapsed since the start of the run.
    fn elapsed_secs(&self) -> i32 {
        self.0
    }
    /// Whole-mile component of the altitude.
    fn miles(&self) -> i32 {
        self.1
    }
    /// Leftover feet of the altitude beyond the whole miles.
    fn feet(&self) -> i32 {
        self.2
    }
    /// Downward velocity in miles per hour.
    fn v_mph(&self) -> f64 {
        self.3
    }
    /// Pounds of fuel left.
    fn fuel_remaining(&self) -> f64 {
        self.4
    }
    /// Fuel rate entered at this prompt.
    fn k(&self) -> i32 {
        self.5
    }
    /// Altitude in fractional miles: whole miles plus feet/5280.
    fn altitude(&self) -> f64 {
        f64::from(self.miles()) + f64::from(self.feet()) / 5280.0
    }
}
/// Implementation of `lunar::io::IO` used for testing.
struct TestIO<'a> {
    expected_lines: &'a [Line],  // one expected status row per fuel prompt
    expected_on_moon_secs: f64,  // expected touchdown time
    expected_impact_mph: f64,    // expected impact velocity
    expected_fuel_left: f64,     // expected fuel remaining at touchdown
    line_index: usize,           // next entry of `expected_lines` to check
}
impl<'a> TestIO<'a> {
    /// Creates a new instance of `TestIO` over the expected transcript rows
    /// and the expected end-of-game figures, starting at the first row.
    fn new(
        expected_lines: &'a [Line],
        expected_on_moon_secs: f64,
        expected_impact_mph: f64,
        expected_fuel_left: f64,
    ) -> TestIO {
        TestIO {
            // Field-init shorthand replaces the redundant `field: field`
            // form (clippy::redundant_field_names).
            expected_lines,
            expected_on_moon_secs,
            expected_impact_mph,
            expected_fuel_left,
            line_index: 0,
        }
    }
    /// Returns true if all expected lines have been processed.
    ///
    /// Panics (via `assert_eq!`) when some expected rows were never
    /// consumed, so a truncated run fails the test instead of passing.
    fn is_complete(&self) -> bool {
        assert_eq!(self.line_index, self.expected_lines.len());
        true
    }
}
impl<'a> IO for TestIO<'a> {
    fn start_game(&mut self, _lander: &Lander) {}
    // Called once per status prompt: verify the lander state against the
    // next expected transcript row, then reply with that row's fuel rate.
    fn get_fuel_rate(&mut self, lander: &Lander) -> f64 {
        let expected = &self.expected_lines[self.line_index];
        assert_eq!(
            expected.elapsed_secs(),
            lander.elapsed_secs().round() as i32
        );
        assert_close!(expected.altitude(), lander.altitude());
        assert_close!(expected.v_mph(), lander.v_mph());
        assert_close!(expected.fuel_remaining(), lander.fuel_remaining());
        self.line_index += 1;
        expected.k() as f64
    }
    fn fuel_out(&mut self, _l: f64) {}
    // Final check: touchdown time, impact speed and remaining fuel must all
    // match the expected figures to two decimal places (assert_close!).
    fn on_the_moon(&mut self, lander: &Lander, _score: Score) {
        assert_close!(self.expected_on_moon_secs, lander.elapsed_secs());
        assert_close!(self.expected_impact_mph, lander.v_mph());
        assert_close!(self.expected_fuel_left, lander.fuel_remaining());
    }
}
#[test]
fn good_landing() {
    // Expected transcript of a successful landing; columns mirror `Line`:
    // free-fall for 70 s, heavy burn to shed velocity, then a gentle
    // throttled descent to touchdown.
    #[rustfmt::skip]
    let expected_lines = [
        // TIME,SECS ALTITUDE,MILES+FEET VELOCITY,MPH FUEL,LBS FUEL RATE
        Line ( 0, 120, 0, 3600.00, 16000.0, 0 ),
        Line ( 10, 109, 5016, 3636.00, 16000.0, 0 ),
        Line ( 20, 99, 4224, 3672.00, 16000.0, 0 ),
        Line ( 30, 89, 2904, 3708.00, 16000.0, 0 ),
        Line ( 40, 79, 1056, 3744.00, 16000.0, 0 ),
        Line ( 50, 68, 3960, 3780.00, 16000.0, 0 ),
        Line ( 60, 58, 1056, 3816.00, 16000.0, 0 ),
        Line ( 70, 47, 2904, 3852.00, 16000.0, 180 ),
        Line ( 80, 37, 1626, 3518.79, 14200.0, 200 ),
        Line ( 90, 28, 438, 3118.26, 12200.0, 200 ),
        Line ( 100, 20, 71, 2686.18, 10200.0, 200 ),
        Line ( 110, 13, 1022, 2217.65, 8200.0, 200 ),
        Line ( 120, 7, 3868, 1706.49, 6200.0, 200 ),
        Line ( 130, 3, 4011, 1144.83, 4200.0, 200 ),
        Line ( 140, 1, 2263, 522.40, 2200.0, 130 ),
        Line ( 150, 0, 3000, 91.49, 900.0, 15 ),
        Line ( 160, 0, 1805, 71.39, 750.0, 15 ),
        Line ( 170, 0, 908, 50.80, 600.0, 15 ),
        Line ( 180, 0, 317, 29.70, 450.0, 14 ),
        Line ( 190, 0, 11, 11.96, 310.0, 30 ),
    ];
    // End-of-game expectations: a soft landing (~4.5 mph) with fuel to spare.
    let expected_on_moon_secs = 190.93;
    let expected_impact_mph = 4.53;
    let expected_fuel_left = 282.04;
    let mut io = TestIO::new(
        &expected_lines,
        expected_on_moon_secs,
        expected_impact_mph,
        expected_fuel_left,
    );
    let mut lander = Lander::default();
    lander.play_game(&mut io);
    // Every expected row must have been consumed by the run.
    assert!(io.is_complete());
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
/// Aggregate error type for this generated ARM client: one transparent
/// variant per operation, each wrapping that operation's own error enum.
/// Marked `#[non_exhaustive]` so new operations can be added without a
/// breaking change.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    // Deployments operations.
    #[error(transparent)]
    Deployments_Get(#[from] deployments::get::Error),
    #[error(transparent)]
    Deployments_CreateOrUpdate(#[from] deployments::create_or_update::Error),
    #[error(transparent)]
    Deployments_Delete(#[from] deployments::delete::Error),
    #[error(transparent)]
    Deployments_CheckExistence(#[from] deployments::check_existence::Error),
    #[error(transparent)]
    Deployments_Cancel(#[from] deployments::cancel::Error),
    #[error(transparent)]
    Deployments_Validate(#[from] deployments::validate::Error),
    #[error(transparent)]
    Deployments_List(#[from] deployments::list::Error),
    // Resource-provider operations.
    #[error(transparent)]
    Providers_Unregister(#[from] providers::unregister::Error),
    #[error(transparent)]
    Providers_Register(#[from] providers::register::Error),
    #[error(transparent)]
    Providers_List(#[from] providers::list::Error),
    #[error(transparent)]
    Providers_Get(#[from] providers::get::Error),
    // Resource-group operations.
    #[error(transparent)]
    ResourceGroups_ListResources(#[from] resource_groups::list_resources::Error),
    #[error(transparent)]
    ResourceGroups_Get(#[from] resource_groups::get::Error),
    #[error(transparent)]
    ResourceGroups_CreateOrUpdate(#[from] resource_groups::create_or_update::Error),
    #[error(transparent)]
    ResourceGroups_Patch(#[from] resource_groups::patch::Error),
    #[error(transparent)]
    ResourceGroups_Delete(#[from] resource_groups::delete::Error),
    #[error(transparent)]
    ResourceGroups_CheckExistence(#[from] resource_groups::check_existence::Error),
    #[error(transparent)]
    ResourceGroups_List(#[from] resource_groups::list::Error),
    // Individual resource operations.
    #[error(transparent)]
    Resources_MoveResources(#[from] resources::move_resources::Error),
    #[error(transparent)]
    Resources_List(#[from] resources::list::Error),
    #[error(transparent)]
    Resources_Get(#[from] resources::get::Error),
    #[error(transparent)]
    Resources_CreateOrUpdate(#[from] resources::create_or_update::Error),
    #[error(transparent)]
    Resources_Update(#[from] resources::update::Error),
    #[error(transparent)]
    Resources_Delete(#[from] resources::delete::Error),
    #[error(transparent)]
    Resources_CheckExistence(#[from] resources::check_existence::Error),
    // Tag operations.
    #[error(transparent)]
    Tags_CreateOrUpdateValue(#[from] tags::create_or_update_value::Error),
    #[error(transparent)]
    Tags_DeleteValue(#[from] tags::delete_value::Error),
    #[error(transparent)]
    Tags_CreateOrUpdate(#[from] tags::create_or_update::Error),
    #[error(transparent)]
    Tags_Delete(#[from] tags::delete::Error),
    #[error(transparent)]
    Tags_List(#[from] tags::list::Error),
    // Deployment-operation and provider-operation listings.
    #[error(transparent)]
    DeploymentOperations_Get(#[from] deployment_operations::get::Error),
    #[error(transparent)]
    DeploymentOperations_List(#[from] deployment_operations::list::Error),
    #[error(transparent)]
    ResourceProviderOperationDetails_List(#[from] resource_provider_operation_details::list::Error),
    // Policy definitions and assignments.
    #[error(transparent)]
    PolicyDefinitions_Get(#[from] policy_definitions::get::Error),
    #[error(transparent)]
    PolicyDefinitions_CreateOrUpdate(#[from] policy_definitions::create_or_update::Error),
    #[error(transparent)]
    PolicyDefinitions_Delete(#[from] policy_definitions::delete::Error),
    #[error(transparent)]
    PolicyAssignments_ListForResource(#[from] policy_assignments::list_for_resource::Error),
    #[error(transparent)]
    PolicyAssignments_ListForResourceGroup(#[from] policy_assignments::list_for_resource_group::Error),
    #[error(transparent)]
    PolicyAssignments_Get(#[from] policy_assignments::get::Error),
    #[error(transparent)]
    PolicyAssignments_Create(#[from] policy_assignments::create::Error),
    #[error(transparent)]
    PolicyAssignments_Delete(#[from] policy_assignments::delete::Error),
    #[error(transparent)]
    PolicyAssignments_GetById(#[from] policy_assignments::get_by_id::Error),
    #[error(transparent)]
    PolicyAssignments_CreateById(#[from] policy_assignments::create_by_id::Error),
    #[error(transparent)]
    PolicyAssignments_DeleteById(#[from] policy_assignments::delete_by_id::Error),
    #[error(transparent)]
    PolicyAssignments_List(#[from] policy_assignments::list::Error),
    #[error(transparent)]
    PolicyAssignments_ListForScope(#[from] policy_assignments::list_for_scope::Error),
    #[error(transparent)]
    Deployments_CalculateTemplateHash(#[from] deployments::calculate_template_hash::Error),
}
pub mod deployments {
use super::{models, API_VERSION};
/// Gets a deployment by name within a resource group (GET on the
/// `Microsoft.Resources/deployments` ARM endpoint).
pub async fn get(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    deployment_name: &str,
    subscription_id: &str,
) -> std::result::Result<models::DeploymentExtended, get::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.Resources/deployments/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        deployment_name
    );
    let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // A bearer token is only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeploymentExtended =
                serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any non-200 status is surfaced with the raw body for diagnosis.
        status_code => {
            let rsp_body = rsp.body();
            Err(get::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error type for `deployments::get`.
pub mod get {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        // Unused by this GET operation; kept because the error shape is
        // shared across all generated operations.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Creates or updates a named deployment (PUT). Returns `Ok200` when an
/// existing deployment was updated and `Created201` when a new one was made.
pub async fn create_or_update(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    deployment_name: &str,
    parameters: &models::Deployment,
    subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.Resources/deployments/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        deployment_name
    );
    let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(create_or_update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // The deployment definition is sent as a JSON body.
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(create_or_update::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeploymentExtended = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Ok200(rsp_value))
        }
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeploymentExtended = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Created201(rsp_value))
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(create_or_update::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Response and error types for `deployments::create_or_update`.
pub mod create_or_update {
    use super::{models, API_VERSION};
    // Both success statuses carry the resulting deployment.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::DeploymentExtended),
        Created201(models::DeploymentExtended),
    }
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Deletes a deployment (DELETE). `Accepted202` means the deletion is in
/// progress; `NoContent204` means the deployment is already gone.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    deployment_name: &str,
    subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.Resources/deployments/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        deployment_name
    );
    let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(delete::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
    match rsp.status() {
        // No response body is expected on either success status.
        http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
        http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        status_code => {
            let rsp_body = rsp.body();
            Err(delete::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Response and error types for `deployments::delete`.
pub mod delete {
    use super::{models, API_VERSION};
    #[derive(Debug)]
    pub enum Response {
        Accepted202,
        NoContent204,
    }
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn check_existence(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
deployment_name: &str,
subscription_id: &str,
) -> std::result::Result<(), check_existence::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.Resources/deployments/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
deployment_name
);
let mut url = url::Url::parse(url_str).map_err(check_existence::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::HEAD);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(check_existence::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(check_existence::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(check_existence::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
http::StatusCode::NOT_FOUND => Err(check_existence::Error::NotFound404 {}),
status_code => {
let rsp_body = rsp.body();
Err(check_existence::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod check_existence {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Error response #response_type")]
NotFound404 {},
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Cancels a running deployment by POSTing to its `/cancel` action; the
/// only success status is 204 No Content.
pub async fn cancel(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    deployment_name: &str,
    subscription_id: &str,
) -> std::result::Result<(), cancel::Error> {
    let client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.Resources/deployments/{}/cancel",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        deployment_name
    );
    let mut url = url::Url::parse(url_str).map_err(cancel::Error::ParseUrlError)?;
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let mut builder = http::request::Builder::new().method(http::Method::POST);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(cancel::Error::GetTokenError)?;
        builder = builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    // The POST carries no payload, but Content-Length is still set explicitly.
    builder = builder.header(http::header::CONTENT_LENGTH, 0);
    let empty_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    let request = builder
        .uri(url.as_str())
        .body(empty_body)
        .map_err(cancel::Error::BuildRequestError)?;
    let response = client
        .execute_request(request)
        .await
        .map_err(cancel::Error::ExecuteRequestError)?;
    match response.status() {
        http::StatusCode::NO_CONTENT => Ok(()),
        status_code => {
            let rsp_body = response.body();
            Err(cancel::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error type for `deployments::cancel`.
pub mod cancel {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn validate(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
deployment_name: &str,
parameters: &models::Deployment,
subscription_id: &str,
) -> std::result::Result<models::DeploymentValidateResult, validate::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.Resources/deployments/{}/validate",
operation_config.base_path(),
subscription_id,
resource_group_name,
deployment_name
);
let mut url = url::Url::parse(url_str).map_err(validate::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(validate::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(validate::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(validate::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(validate::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeploymentValidateResult =
serde_json::from_slice(rsp_body).map_err(|source| validate::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
http::StatusCode::BAD_REQUEST => {
let rsp_body = rsp.body();
let rsp_value: models::DeploymentValidateResult =
serde_json::from_slice(rsp_body).map_err(|source| validate::Error::DeserializeError(source, rsp_body.clone()))?;
Err(validate::Error::BadRequest400 { value: rsp_value })
}
status_code => {
let rsp_body = rsp.body();
Err(validate::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod validate {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Error response #response_type")]
BadRequest400 { value: models::DeploymentValidateResult },
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists deployments in a resource group (GET), with optional OData
/// `$filter` and `$top` query parameters.
pub async fn list(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    filter: Option<&str>,
    top: Option<i32>,
    subscription_id: &str,
) -> std::result::Result<models::DeploymentListResult, list::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.Resources/deployments/",
        operation_config.base_path(),
        subscription_id,
        resource_group_name
    );
    let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // Optional query parameters are only appended when supplied.
    if let Some(filter) = filter {
        url.query_pairs_mut().append_pair("$filter", filter);
    }
    if let Some(top) = top {
        url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeploymentListResult =
                serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(list::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error type for `deployments::list`.
pub mod list {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Computes the hash of an ARM template via the Resource Manager API.
///
/// Sends `POST {base_path}/providers/Microsoft.Resources/calculateTemplateHash`
/// with `template` serialized as the JSON request body. A `200 OK` response is
/// decoded as `models::TemplateHashResult`; any other status is decoded as a
/// `models::CloudError` and returned as `Error::DefaultResponse`.
pub async fn calculate_template_hash(
operation_config: &crate::OperationConfig,
template: &serde_json::Value,
) -> std::result::Result<models::TemplateHashResult, calculate_template_hash::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Resources/calculateTemplateHash",
operation_config.base_path(),
);
let mut url = url::Url::parse(url_str).map_err(calculate_template_hash::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Bearer-token auth is attached only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(calculate_template_hash::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(template).map_err(calculate_template_hash::Error::SerializeError)?;
// The URI is applied only after every query pair has been appended.
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(calculate_template_hash::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(calculate_template_hash::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TemplateHashResult = serde_json::from_slice(rsp_body)
.map_err(|source| calculate_template_hash::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Every non-200 status carries a service-defined CloudError payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError = serde_json::from_slice(rsp_body)
.map_err(|source| calculate_template_hash::Error::DeserializeError(source, rsp_body.clone()))?;
Err(calculate_template_hash::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`calculate_template_hash`]: one variant per stage of the
/// request pipeline (URL parsing, token acquisition, request build/send,
/// body (de)serialization), plus `DefaultResponse` carrying the service's
/// `CloudError` payload for any non-200 status.
pub mod calculate_template_hash {
// NOTE(review): the code generator re-imports both names for uniformity;
// `API_VERSION` appears unused in this module.
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod providers {
use super::{models, API_VERSION};
/// Unregisters a resource provider from the subscription.
///
/// Sends `POST {base_path}/subscriptions/{id}/providers/{namespace}/unregister`
/// with an empty body. A `200 OK` body is decoded as `models::Provider`; any
/// other status is returned as `Error::UnexpectedResponse` with the raw body.
pub async fn unregister(
operation_config: &crate::OperationConfig,
resource_provider_namespace: &str,
subscription_id: &str,
) -> std::result::Result<models::Provider, unregister::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/{}/unregister",
operation_config.base_path(),
subscription_id,
resource_provider_namespace
);
let mut url = url::Url::parse(url_str).map_err(unregister::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(unregister::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// Empty POST body: advertise a zero content length explicitly.
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(unregister::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(unregister::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Provider =
serde_json::from_slice(rsp_body).map_err(|source| unregister::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(unregister::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error type for [`unregister`]: one variant per request-pipeline stage,
/// plus `UnexpectedResponse` carrying the raw body of any unhandled status.
pub mod unregister {
// NOTE(review): generator-supplied imports; both may be unused here.
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Registers a resource provider with the subscription.
///
/// Sends `POST {base_path}/subscriptions/{id}/providers/{namespace}/register`
/// with an empty body; `200 OK` is decoded as `models::Provider`.
pub async fn register(
operation_config: &crate::OperationConfig,
resource_provider_namespace: &str,
subscription_id: &str,
) -> std::result::Result<models::Provider, register::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/{}/register",
operation_config.base_path(),
subscription_id,
resource_provider_namespace
);
let mut url = url::Url::parse(url_str).map_err(register::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(register::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// Empty POST body: advertise a zero content length explicitly.
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(register::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(register::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Provider =
serde_json::from_slice(rsp_body).map_err(|source| register::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(register::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error type for [`register`]; same shape as the other generated
/// operation error enums in this module.
pub mod register {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists the resource providers available to the subscription.
///
/// `top`, when `Some`, limits the page size via the `$top` query parameter.
/// Returns the first page only; paging via `next_link` is left to the caller.
pub async fn list(
operation_config: &crate::OperationConfig,
top: Option<i32>,
subscription_id: &str,
) -> std::result::Result<models::ProviderListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/subscriptions/{}/providers", operation_config.base_path(), subscription_id);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// Optional OData paging parameter.
if let Some(top) = top {
url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ProviderListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error type for [`list`]; same shape as the other generated
/// operation error enums in this module.
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Fetches a single resource provider by namespace.
///
/// `GET {base_path}/subscriptions/{id}/providers/{namespace}`;
/// a `200 OK` body is decoded as `models::Provider`.
pub async fn get(
operation_config: &crate::OperationConfig,
resource_provider_namespace: &str,
subscription_id: &str,
) -> std::result::Result<models::Provider, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/{}",
operation_config.base_path(),
subscription_id,
resource_provider_namespace
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Provider =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error type for [`get`]; same shape as the other generated
/// operation error enums in this module.
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod resource_groups {
use super::{models, API_VERSION};
/// Lists the resources inside a resource group.
///
/// Optional OData query parameters: `filter` (`$filter`), `expand`
/// (`$expand`), and `top` (`$top`). Returns the first page only;
/// paging via `next_link` is left to the caller.
pub async fn list_resources(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
filter: Option<&str>,
expand: Option<&str>,
top: Option<i32>,
subscription_id: &str,
) -> std::result::Result<models::ResourceListResult, list_resources::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/resources",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list_resources::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_resources::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// Optional OData query parameters are appended only when supplied.
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
if let Some(top) = top {
url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_resources::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_resources::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ResourceListResult =
serde_json::from_slice(rsp_body).map_err(|source| list_resources::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_resources::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error type for [`list_resources`]; same shape as the other generated
/// operation error enums in this module.
pub mod list_resources {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Fetches a single resource group by name.
///
/// `GET {base_path}/subscriptions/{id}/resourcegroups/{name}`;
/// a `200 OK` body is decoded as `models::ResourceGroup`.
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
) -> std::result::Result<models::ResourceGroup, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ResourceGroup =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error type for [`get`]; same shape as the other generated
/// operation error enums in this module.
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Creates a resource group, or updates it if it already exists.
///
/// `PUT {base_path}/subscriptions/{id}/resourcegroups/{name}` with
/// `parameters` as the JSON body. `201 Created` and `200 OK` both carry a
/// `models::ResourceGroup` body and are distinguished by the returned
/// `Response` enum variant.
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
parameters: &models::ResourceGroup,
subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
// 201: the group was newly created.
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::ResourceGroup = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
// 200: an existing group was updated.
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ResourceGroup = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
Err(create_or_update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Success and error types for [`create_or_update`]: `Response` separates
/// the 201 (created) and 200 (updated) outcomes; `Error` has the same shape
/// as the other generated operation error enums in this module.
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Created201(models::ResourceGroup),
Ok200(models::ResourceGroup),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Partially updates a resource group.
///
/// `PATCH {base_path}/subscriptions/{id}/resourcegroups/{name}` with
/// `parameters` as the JSON body; `200 OK` is decoded as
/// `models::ResourceGroup`.
pub async fn patch(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
parameters: &models::ResourceGroup,
subscription_id: &str,
) -> std::result::Result<models::ResourceGroup, patch::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(patch::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(patch::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(patch::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(patch::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(patch::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ResourceGroup =
serde_json::from_slice(rsp_body).map_err(|source| patch::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(patch::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error type for [`patch`]; same shape as the other generated
/// operation error enums in this module.
pub mod patch {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Deletes a resource group.
///
/// `DELETE {base_path}/subscriptions/{id}/resourcegroups/{name}`.
/// `202 Accepted` (deletion started asynchronously) and `200 OK` both map
/// to `delete::Response`; neither carries a body the caller needs.
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::OK => Ok(delete::Response::Ok200),
status_code => {
let rsp_body = rsp.body();
Err(delete::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Success and error types for [`delete`]: `Response` distinguishes the
/// 202 (accepted, in progress) and 200 (done) outcomes; `Error` mirrors
/// the other generated operation error enums in this module.
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
Ok200,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Checks whether a resource group exists.
///
/// `HEAD {base_path}/subscriptions/{id}/resourcegroups/{name}`.
/// `204 No Content` means the group exists (`Ok(())`); `404 Not Found`
/// maps to the dedicated `Error::NotFound404` variant so callers can
/// distinguish "absent" from transport failures.
pub async fn check_existence(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
) -> std::result::Result<(), check_existence::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(check_existence::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::HEAD);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(check_existence::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(check_existence::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(check_existence::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
http::StatusCode::NOT_FOUND => Err(check_existence::Error::NotFound404 {}),
status_code => {
let rsp_body = rsp.body();
Err(check_existence::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod check_existence {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Error response #response_type")]
NotFound404 {},
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists the resource groups in the subscription.
///
/// Optional OData query parameters: `filter` (`$filter`) and `top` (`$top`).
/// Returns the first page only; paging via `next_link` is left to the caller.
pub async fn list(
operation_config: &crate::OperationConfig,
filter: Option<&str>,
top: Option<i32>,
subscription_id: &str,
) -> std::result::Result<models::ResourceGroupListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/subscriptions/{}/resourcegroups", operation_config.base_path(), subscription_id);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// Optional OData query parameters are appended only when supplied.
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(top) = top {
url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ResourceGroupListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error type for [`list`]; same shape as the other generated
/// operation error enums in this module.
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod resources {
use super::{models, API_VERSION};
/// Moves resources out of one resource group into another.
///
/// `POST {base_path}/subscriptions/{id}/resourceGroups/{src}/moveResources`
/// with `parameters` (target group + resource ids) as the JSON body.
/// `202 Accepted` means the move runs asynchronously; `204 No Content`
/// means it completed; both map to `move_resources::Response`.
pub async fn move_resources(
operation_config: &crate::OperationConfig,
source_resource_group_name: &str,
parameters: &models::ResourcesMoveInfo,
subscription_id: &str,
) -> std::result::Result<move_resources::Response, move_resources::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/moveResources",
operation_config.base_path(),
subscription_id,
source_resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(move_resources::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(move_resources::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(move_resources::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(move_resources::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(move_resources::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(move_resources::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(move_resources::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
Err(move_resources::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Success and error types for [`move_resources`]: `Response` distinguishes
/// 202 (accepted, in progress) from 204 (completed); `Error` mirrors the
/// other generated operation error enums in this module.
pub mod move_resources {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists resources across the whole subscription.
///
/// Optional OData query parameters: `filter` (`$filter`), `expand`
/// (`$expand`), and `top` (`$top`). Returns the first page only;
/// paging via `next_link` is left to the caller.
pub async fn list(
operation_config: &crate::OperationConfig,
filter: Option<&str>,
expand: Option<&str>,
top: Option<i32>,
subscription_id: &str,
) -> std::result::Result<models::ResourceListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/subscriptions/{}/resources", operation_config.base_path(), subscription_id);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// Optional OData query parameters are appended only when supplied.
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
if let Some(top) = top {
url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ResourceListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Fetches a single generic resource by its full identity
/// (`GET {base}/subscriptions/{sub}/resourcegroups/{rg}/providers/{ns}/{parent}/{type}/{name}`).
///
/// # Errors
/// Returns a `get::Error` variant for URL-parse, token, request-build,
/// transport, unexpected-status or deserialization failures.
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
resource_provider_namespace: &str,
parent_resource_path: &str,
resource_type: &str,
resource_name: &str,
subscription_id: &str,
) -> std::result::Result<get::Response, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/{}/{}/{}/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_provider_namespace,
parent_resource_path,
resource_type,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// `uri()` must run after the query pair is appended — it snapshots the URL.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::GenericResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(get::Response::Ok200(rsp_value))
}
// NOTE(review): a 204 response normally carries an empty body, so this
// deserialization would yield DeserializeError — confirm against the
// service's actual 204 behavior (generated code).
http::StatusCode::NO_CONTENT => {
let rsp_body = rsp.body();
let rsp_value: models::GenericResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(get::Response::NoContent204(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::GenericResource),
NoContent204(models::GenericResource),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Creates or replaces a generic resource
/// (`PUT {base}/subscriptions/{sub}/resourcegroups/{rg}/providers/{ns}/{parent}/{type}/{name}`)
/// with `parameters` serialized as the JSON request body.
///
/// # Errors
/// Returns a `create_or_update::Error` variant for URL-parse, token,
/// serialization, request-build, transport, unexpected-status or
/// deserialization failures.
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
resource_provider_namespace: &str,
parent_resource_path: &str,
resource_type: &str,
resource_name: &str,
parameters: &models::GenericResource,
subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/{}/{}/{}/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_provider_namespace,
parent_resource_path,
resource_type,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
// `uri()` must run after the query pair is appended — it snapshots the URL.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
// 201 = newly created, 200 = existing resource replaced.
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::GenericResource = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::GenericResource = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
Err(create_or_update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Created201(models::GenericResource),
Ok200(models::GenericResource),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Patches an existing generic resource
/// (`PATCH {base}/subscriptions/{sub}/resourcegroups/{rg}/providers/{ns}/{parent}/{type}/{name}`)
/// with `parameters` serialized as the JSON request body.
///
/// # Errors
/// Returns an `update::Error` variant for URL-parse, token, serialization,
/// request-build, transport, unexpected-status or deserialization failures.
pub async fn update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
resource_provider_namespace: &str,
parent_resource_path: &str,
resource_type: &str,
resource_name: &str,
parameters: &models::GenericResource,
subscription_id: &str,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/{}/{}/{}/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_provider_namespace,
parent_resource_path,
resource_type,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
// `uri()` must run after the query pair is appended — it snapshots the URL.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::GenericResource =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
// 202 = update accepted but still in progress; no body expected.
http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::GenericResource),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Deletes a generic resource
/// (`DELETE {base}/subscriptions/{sub}/resourcegroups/{rg}/providers/{ns}/{parent}/{type}/{name}`).
///
/// # Errors
/// Returns a `delete::Error` variant for URL-parse, token, request-build,
/// transport or unexpected-status failures.
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
resource_provider_namespace: &str,
parent_resource_path: &str,
resource_type: &str,
resource_name: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/{}/{}/{}/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_provider_namespace,
parent_resource_path,
resource_type,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// `uri()` must run after the query pair is appended — it snapshots the URL.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
// 200 = deleted, 204 = did not exist, 202 = deletion accepted (async).
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(delete::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Checks whether a generic resource exists, via an HTTP `HEAD` request.
/// Returns `Ok(())` when the resource exists (204); a 404 is mapped to the
/// dedicated `check_existence::Error::NotFound404` variant.
///
/// # Errors
/// `NotFound404` when the resource does not exist, plus the usual variants
/// for URL-parse, token, request-build, transport and unexpected-status
/// failures.
pub async fn check_existence(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
resource_provider_namespace: &str,
parent_resource_path: &str,
resource_type: &str,
resource_name: &str,
subscription_id: &str,
) -> std::result::Result<(), check_existence::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/{}/{}/{}/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_provider_namespace,
parent_resource_path,
resource_type,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(check_existence::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::HEAD);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(check_existence::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// `uri()` must run after the query pair is appended — it snapshots the URL.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(check_existence::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(check_existence::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
http::StatusCode::NOT_FOUND => Err(check_existence::Error::NotFound404 {}),
status_code => {
let rsp_body = rsp.body();
Err(check_existence::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod check_existence {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Error response #response_type")]
NotFound404 {},
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod tags {
use super::{models, API_VERSION};
/// Creates (201) or confirms (200) a tag value under a tag name
/// (`PUT {base}/subscriptions/{sub}/tagNames/{tag_name}/tagValues/{tag_value}`).
/// The request carries no body; the value is identified by the URL path.
///
/// # Errors
/// Returns a `create_or_update_value::Error` variant for URL-parse, token,
/// request-build, transport, unexpected-status or deserialization failures.
pub async fn create_or_update_value(
operation_config: &crate::OperationConfig,
tag_name: &str,
tag_value: &str,
subscription_id: &str,
) -> std::result::Result<create_or_update_value::Response, create_or_update_value::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/tagNames/{}/tagValues/{}",
operation_config.base_path(),
subscription_id,
tag_name,
tag_value
);
let mut url = url::Url::parse(url_str).map_err(create_or_update_value::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update_value::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// `uri()` must run after the query pair is appended — it snapshots the URL.
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(create_or_update_value::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update_value::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TagValue = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update_value::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update_value::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::TagValue = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update_value::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update_value::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
Err(create_or_update_value::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod create_or_update_value {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::TagValue),
Created201(models::TagValue),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Deletes a tag value under a tag name
/// (`DELETE {base}/subscriptions/{sub}/tagNames/{tag_name}/tagValues/{tag_value}`).
///
/// # Errors
/// Returns a `delete_value::Error` variant for URL-parse, token,
/// request-build, transport or unexpected-status failures.
pub async fn delete_value(
operation_config: &crate::OperationConfig,
tag_name: &str,
tag_value: &str,
subscription_id: &str,
) -> std::result::Result<delete_value::Response, delete_value::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/tagNames/{}/tagValues/{}",
operation_config.base_path(),
subscription_id,
tag_name,
tag_value
);
let mut url = url::Url::parse(url_str).map_err(delete_value::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete_value::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// `uri()` must run after the query pair is appended — it snapshots the URL.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete_value::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(delete_value::Error::ExecuteRequestError)?;
// 200 = deleted, 204 = value did not exist.
match rsp.status() {
http::StatusCode::OK => Ok(delete_value::Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(delete_value::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
Err(delete_value::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod delete_value {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Creates (201) or confirms (200) a subscription-level tag name
/// (`PUT {base}/subscriptions/{sub}/tagNames/{tag_name}`). The request
/// carries no body; the tag is identified by the URL path.
///
/// # Errors
/// Returns a `create_or_update::Error` variant for URL-parse, token,
/// request-build, transport, unexpected-status or deserialization failures.
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
tag_name: &str,
subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/tagNames/{}",
operation_config.base_path(),
subscription_id,
tag_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// `uri()` must run after the query pair is appended — it snapshots the URL.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TagDetails = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::TagDetails = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
Err(create_or_update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::TagDetails),
Created201(models::TagDetails),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Deletes a subscription-level tag name
/// (`DELETE {base}/subscriptions/{sub}/tagNames/{tag_name}`).
///
/// # Errors
/// Returns a `delete::Error` variant for URL-parse, token, request-build,
/// transport or unexpected-status failures.
pub async fn delete(
operation_config: &crate::OperationConfig,
tag_name: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/tagNames/{}",
operation_config.base_path(),
subscription_id,
tag_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// `uri()` must run after the query pair is appended — it snapshots the URL.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
// 200 = deleted, 204 = tag name did not exist.
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
Err(delete::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists all tag names and values in the subscription
/// (`GET {base}/subscriptions/{sub}/tagNames`).
///
/// # Errors
/// Returns a `list::Error` variant for URL-parse, token, request-build,
/// transport, unexpected-status or deserialization failures.
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::TagsListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/subscriptions/{}/tagNames", operation_config.base_path(), subscription_id);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// `uri()` must run after the query pair is appended — it snapshots the URL.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TagsListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod deployment_operations {
use super::{models, API_VERSION};
/// Fetches a single operation of a deployment
/// (`GET {base}/subscriptions/{sub}/resourcegroups/{rg}/deployments/{deployment}/operations/{op}`).
///
/// # Errors
/// Returns a `get::Error` variant for URL-parse, token, request-build,
/// transport, unexpected-status or deserialization failures.
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
deployment_name: &str,
operation_id: &str,
subscription_id: &str,
) -> std::result::Result<models::DeploymentOperation, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/deployments/{}/operations/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
deployment_name,
operation_id
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// `uri()` must run after the query pair is appended — it snapshots the URL.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeploymentOperation =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists operations of a deployment
/// (`GET {base}/subscriptions/{sub}/resourcegroups/{rg}/deployments/{deployment}/operations`).
/// `top` maps to the `$top` query parameter and is omitted when `None`.
///
/// # Errors
/// Returns a `list::Error` variant for URL-parse, token, request-build,
/// transport, unexpected-status or deserialization failures.
pub async fn list(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
deployment_name: &str,
top: Option<i32>,
subscription_id: &str,
) -> std::result::Result<models::DeploymentOperationsListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/deployments/{}/operations",
operation_config.base_path(),
subscription_id,
resource_group_name,
deployment_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(top) = top {
url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// `uri()` must run after every `append_pair` above — it snapshots the URL.
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeploymentOperationsListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the deployment-operations `list` call above.
pub mod list {
    use super::{models, API_VERSION};
    /// Every failure mode of the operation, from URL construction through
    /// response deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
/// Lists the operations exposed by a resource provider
/// (GET /providers/{namespace}/operations).
pub mod resource_provider_operation_details {
    use super::{models, API_VERSION};
    /// Retrieves the operation details published by `resource_provider_namespace`.
    ///
    /// Distinguishes HTTP 200 and 204 responses via `list::Response`; any other
    /// status becomes `list::Error::UnexpectedResponse`.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        resource_provider_namespace: &str,
    ) -> std::result::Result<list::Response, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/providers/{}/operations",
            operation_config.base_path(),
            resource_provider_namespace
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is optional: only attached when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ResourceProviderOperationDetailListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(list::Response::Ok200(rsp_value))
            }
            // NOTE(review): a 204 normally carries no body, yet this branch
            // deserializes one — generated from the service spec; confirm the
            // service actually returns a payload with 204 before relying on it.
            http::StatusCode::NO_CONTENT => {
                let rsp_body = rsp.body();
                let rsp_value: models::ResourceProviderOperationDetailListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(list::Response::NoContent204(rsp_value))
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for the `list` call above.
    pub mod list {
        use super::{models, API_VERSION};
        /// Successful outcomes, keyed by the HTTP status the service returned.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::ResourceProviderOperationDetailListResult),
            NoContent204(models::ResourceProviderOperationDetailListResult),
        }
        /// Every failure mode of the operation.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Subscription-scoped CRUD operations on Microsoft.Authorization
/// policy definitions.
pub mod policy_definitions {
    use super::{models, API_VERSION};
    /// Retrieves a single policy definition by name.
    ///
    /// Returns the definition on HTTP 200; any other status becomes
    /// `get::Error::UnexpectedResponse` carrying the raw body.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        policy_definition_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::PolicyDefinition, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Authorization/policydefinitions/{}",
            operation_config.base_path(),
            subscription_id,
            policy_definition_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is optional: only attached when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::PolicyDefinition =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(get::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for the `get` call above.
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates or updates a policy definition via HTTP PUT.
    ///
    /// `parameters` is serialized as the JSON request body. Only HTTP 201
    /// (Created) is treated as success here; any other status — including a
    /// 200 — is reported as `create_or_update::Error::UnexpectedResponse`.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        policy_definition_name: &str,
        parameters: &models::PolicyDefinition,
        subscription_id: &str,
    ) -> std::result::Result<models::PolicyDefinition, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Authorization/policydefinitions/{}",
            operation_config.base_path(),
            subscription_id,
            policy_definition_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // Bearer auth is optional: only attached when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::PolicyDefinition = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(create_or_update::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for the `create_or_update` call above.
    pub mod create_or_update {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Deletes a policy definition by name. Success (HTTP 200) yields `()`;
    /// the response body is ignored.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        policy_definition_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<(), delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Authorization/policydefinitions/{}",
            operation_config.base_path(),
            subscription_id,
            policy_definition_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        // Bearer auth is optional: only attached when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(()),
            status_code => {
                let rsp_body = rsp.body();
                Err(delete::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for the `delete` call above.
    pub mod delete {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod policy_assignments {
use super::{models, API_VERSION};
/// Lists policy assignments that apply to a specific resource.
///
/// * `filter` — when `Some`, forwarded as the `$filter` query parameter.
///
/// Returns the deserialized list on HTTP 200; any other status becomes
/// `list_for_resource::Error::UnexpectedResponse` carrying the raw body.
pub async fn list_for_resource(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    resource_provider_namespace: &str,
    parent_resource_path: &str,
    resource_type: &str,
    resource_name: &str,
    filter: Option<&str>,
    subscription_id: &str,
) -> std::result::Result<models::PolicyAssignmentListResult, list_for_resource::Error> {
    let http_client = operation_config.http_client();
    // FIX: the original format string joined the resource name directly to
    // "providers" ("{}providers"), producing malformed URLs such as
    // ".../myResourceproviders/Microsoft.Authorization/...". The ARM route is
    // .../{resourceName}/providers/Microsoft.Authorization/policyAssignments,
    // matching every sibling operation in this module.
    let url_str = &format!(
        "{}/subscriptions/{}/resourcegroups/{}/providers/{}/{}/{}/{}/providers/Microsoft.Authorization/policyAssignments",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        resource_provider_namespace,
        parent_resource_path,
        resource_type,
        resource_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_for_resource::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Bearer auth is optional: only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_for_resource::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    if let Some(filter) = filter {
        url.query_pairs_mut().append_pair("$filter", filter);
    }
    // GET carries no request body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(list_for_resource::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_for_resource::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::PolicyAssignmentListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_for_resource::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(list_for_resource::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `list_for_resource` call above.
pub mod list_for_resource {
    use super::{models, API_VERSION};
    /// Every failure mode of the operation, from URL construction through
    /// response deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists policy assignments scoped to a resource group.
///
/// * `filter` — when `Some`, forwarded as the `$filter` query parameter.
pub async fn list_for_resource_group(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    filter: Option<&str>,
    subscription_id: &str,
) -> std::result::Result<models::PolicyAssignmentListResult, list_for_resource_group::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Authorization/policyAssignments",
        operation_config.base_path(),
        subscription_id,
        resource_group_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_for_resource_group::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Bearer auth is optional: only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_for_resource_group::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    if let Some(filter) = filter {
        url.query_pairs_mut().append_pair("$filter", filter);
    }
    // GET carries no request body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(list_for_resource_group::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_for_resource_group::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::PolicyAssignmentListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_for_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(list_for_resource_group::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `list_for_resource_group` call above.
pub mod list_for_resource_group {
    use super::{models, API_VERSION};
    /// Every failure mode of the operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Retrieves a single policy assignment by name at the given `scope`
/// (e.g. a subscription, resource group, or resource path).
pub async fn get(
    operation_config: &crate::OperationConfig,
    scope: &str,
    policy_assignment_name: &str,
) -> std::result::Result<models::PolicyAssignment, get::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/{}/providers/Microsoft.Authorization/policyAssignments/{}",
        operation_config.base_path(),
        scope,
        policy_assignment_name
    );
    let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Bearer auth is optional: only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::PolicyAssignment =
                serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(get::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the policy-assignment `get` call above.
pub mod get {
    use super::{models, API_VERSION};
    /// Every failure mode of the operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Creates a policy assignment at the given `scope` via HTTP PUT.
///
/// `parameters` is serialized as the JSON request body. Only HTTP 201
/// (Created) is treated as success; any other status becomes
/// `create::Error::UnexpectedResponse`.
pub async fn create(
    operation_config: &crate::OperationConfig,
    scope: &str,
    policy_assignment_name: &str,
    parameters: &models::PolicyAssignment,
) -> std::result::Result<models::PolicyAssignment, create::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/{}/providers/Microsoft.Authorization/policyAssignments/{}",
        operation_config.base_path(),
        scope,
        policy_assignment_name
    );
    let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    // Bearer auth is optional: only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(create::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(create::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: models::PolicyAssignment =
                serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(create::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the policy-assignment `create` call above.
pub mod create {
    use super::{models, API_VERSION};
    /// Every failure mode of the operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn delete(
operation_config: &crate::OperationConfig,
scope: &str,
policy_assignment_name: &str,
) -> std::result::Result<models::PolicyAssignment, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/providers/Microsoft.Authorization/policyAssignments/{}",
operation_config.base_path(),
scope,
policy_assignment_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PolicyAssignment =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(delete::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the policy-assignment `delete` call above.
pub mod delete {
    use super::{models, API_VERSION};
    /// Every failure mode of the operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Retrieves a policy assignment by its fully-qualified resource ID
/// (`policy_assignment_id`), which already encodes scope and name.
pub async fn get_by_id(
    operation_config: &crate::OperationConfig,
    policy_assignment_id: &str,
) -> std::result::Result<models::PolicyAssignment, get_by_id::Error> {
    let http_client = operation_config.http_client();
    // The ID is appended verbatim to the base path.
    let url_str = &format!("{}/{}", operation_config.base_path(), policy_assignment_id);
    let mut url = url::Url::parse(url_str).map_err(get_by_id::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Bearer auth is optional: only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_by_id::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get_by_id::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_by_id::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::PolicyAssignment =
                serde_json::from_slice(rsp_body).map_err(|source| get_by_id::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(get_by_id::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `get_by_id` call above.
pub mod get_by_id {
    use super::{models, API_VERSION};
    /// Every failure mode of the operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Creates a policy assignment addressed by its fully-qualified resource ID
/// via HTTP PUT. Only HTTP 201 (Created) is treated as success.
pub async fn create_by_id(
    operation_config: &crate::OperationConfig,
    policy_assignment_id: &str,
    parameters: &models::PolicyAssignment,
) -> std::result::Result<models::PolicyAssignment, create_by_id::Error> {
    let http_client = operation_config.http_client();
    // The ID is appended verbatim to the base path.
    let url_str = &format!("{}/{}", operation_config.base_path(), policy_assignment_id);
    let mut url = url::Url::parse(url_str).map_err(create_by_id::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    // Bearer auth is optional: only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(create_by_id::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(create_by_id::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(create_by_id::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(create_by_id::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: models::PolicyAssignment =
                serde_json::from_slice(rsp_body).map_err(|source| create_by_id::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(create_by_id::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `create_by_id` call above.
pub mod create_by_id {
    use super::{models, API_VERSION};
    /// Every failure mode of the operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Deletes a policy assignment addressed by its fully-qualified resource ID
/// and returns the deleted assignment echoed back on HTTP 200.
pub async fn delete_by_id(
    operation_config: &crate::OperationConfig,
    policy_assignment_id: &str,
) -> std::result::Result<models::PolicyAssignment, delete_by_id::Error> {
    let http_client = operation_config.http_client();
    // The ID is appended verbatim to the base path.
    let url_str = &format!("{}/{}", operation_config.base_path(), policy_assignment_id);
    let mut url = url::Url::parse(url_str).map_err(delete_by_id::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    // Bearer auth is optional: only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(delete_by_id::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(delete_by_id::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(delete_by_id::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::PolicyAssignment =
                serde_json::from_slice(rsp_body).map_err(|source| delete_by_id::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(delete_by_id::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `delete_by_id` call above.
pub mod delete_by_id {
    use super::{models, API_VERSION};
    /// Every failure mode of the operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists all policy assignments in the subscription.
///
/// * `filter` — when `Some`, forwarded as the `$filter` query parameter.
pub async fn list(
    operation_config: &crate::OperationConfig,
    filter: Option<&str>,
    subscription_id: &str,
) -> std::result::Result<models::PolicyAssignmentListResult, list::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/providers/Microsoft.Authorization/policyAssignments",
        operation_config.base_path(),
        subscription_id
    );
    let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Bearer auth is optional: only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    if let Some(filter) = filter {
        url.query_pairs_mut().append_pair("$filter", filter);
    }
    // GET carries no request body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::PolicyAssignmentListResult =
                serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(list::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `list` operation.
pub mod list {
use super::{models, API_VERSION};
/// All failure modes of `list`, from URL construction through response
/// deserialization.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
// Service returned a status other than 200 OK; the raw body is kept for diagnostics.
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
// NOTE(review): declared for symmetry with other generated operations; the
// GET request sends an empty body, so this variant is never produced here.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_for_scope(
operation_config: &crate::OperationConfig,
scope: &str,
filter: Option<&str>,
) -> std::result::Result<models::PolicyAssignmentListResult, list_for_scope::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/providers/Microsoft.Authorization/policyAssignments",
operation_config.base_path(),
scope
);
let mut url = url::Url::parse(url_str).map_err(list_for_scope::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_for_scope::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_for_scope::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_for_scope::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PolicyAssignmentListResult =
serde_json::from_slice(rsp_body).map_err(|source| list_for_scope::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_for_scope::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `list_for_scope` operation.
pub mod list_for_scope {
use super::{models, API_VERSION};
/// All failure modes of `list_for_scope`, from URL construction through
/// response deserialization.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
// Service returned a status other than 200 OK; the raw body is kept for diagnostics.
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
// NOTE(review): declared for symmetry with other generated operations; the
// GET request sends an empty body, so this variant is never produced here.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
|
use std::cell::RefCell;
use std::rc::Rc;
use crate::treenode::TreeNode;
pub fn sorted_array_to_bst(nums: Vec<i32>) -> Option<Rc<RefCell<TreeNode>>> {
fn core(nums: &[i32]) -> Option<Rc<RefCell<TreeNode>>> {
match nums.len() {
0 => None,
n => {
let mid = n / 2;
Some(Rc::new(RefCell::new(TreeNode {
val: nums[mid],
left: core(&nums[..mid]),
right: core(&nums[mid + 1..]),
})))
}
}
}
core(nums.as_slice())
} |
use crate::use_case::{TxUseCase, UseCaseInput, UseCaseOutput};
// use super::{
// delete_all::{DeleteAllUseCase, DeleteAllUseCaseInput},
// insert::{InsertUseCase, InsertUseCaseInput},
// };
use apllodb_immutable_schema_engine_domain::{
abstract_types::ImmutableSchemaAbstractTypes,
row_projection_result::RowProjectionResult,
row_selection_plan::RowSelectionPlan,
vtable::{id::VTableId, repository::VTableRepository, VTable},
};
use apllodb_shared_components::{ApllodbError, ApllodbResult, DatabaseName, Expression, SqlValue};
use apllodb_storage_engine_interface::{
ColumnName, Row, RowProjectionQuery, RowSchema, Rows, TableName,
};
// use apllodb_storage_engine_interface::ProjectionQuery;
use async_trait::async_trait;
use std::{collections::HashMap, fmt::Debug, marker::PhantomData};
use super::insert::{InsertUseCase, InsertUseCaseInput};
/// Input of the UPDATE use case: which table to update, the new column
/// values, and the plan selecting the rows to rewrite.
#[derive(PartialEq, Debug, new)]
pub struct UpdateUseCaseInput<'usecase, Types: ImmutableSchemaAbstractTypes> {
database_name: &'usecase DatabaseName,
table_name: &'usecase TableName,
// Column -> new-value expression; `validate_expression_type` restricts the
// accepted expression kinds.
column_values: HashMap<ColumnName, Expression>,
// Which rows to update.
selection: RowSelectionPlan<Types>,
}
impl<'usecase, Types: ImmutableSchemaAbstractTypes> UseCaseInput
    for UpdateUseCaseInput<'usecase, Types>
{
    /// The input is valid iff every expression to set is of a supported kind.
    fn validate(&self) -> ApllodbResult<()> {
        self.validate_expression_type()
    }
}
impl<'usecase, Types: ImmutableSchemaAbstractTypes> UpdateUseCaseInput<'usecase, Types> {
    /// Checks that every expression in `column_values` is one this use case
    /// can evaluate.
    ///
    /// # Failures
    ///
    /// - `FeatureNotSupported` when an expression is a column reference or a
    ///   boolean expression; only constants and unary-operator expressions
    ///   pass this validation.
    fn validate_expression_type(&self) -> ApllodbResult<()> {
        for (column_name, expr) in &self.column_values {
            match expr {
                Expression::ConstantVariant(_) | Expression::UnaryOperatorVariant(_, _) => {}
                Expression::SchemaIndexVariant(_) | Expression::BooleanExpressionVariant(_) => {
                    // The previous message was copy-pasted from the INSERT use
                    // case ("UpdateAll INTO ... VALUES"); phrase it for UPDATE.
                    return Err(ApllodbError::feature_not_supported(format!(
                        "trying to UPDATE `{:?}={:?}`; `expr` in `UPDATE ... SET column=expr` can only be a constant or a unary-operator expression",
                        column_name, expr
                    )));
                }
            }
        }
        Ok(())
    }
}
/// Output of the UPDATE use case. UPDATE produces no result set, so this is
/// a unit marker type.
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Default)]
pub struct UpdateUseCaseOutput;
impl UseCaseOutput for UpdateUseCaseOutput {}
/// UPDATE use case. Rows are never modified in place: `run_core` selects the
/// target rows, deletes them, and re-inserts them with the requested columns
/// overwritten.
pub struct UpdateUseCase<'usecase, Types: ImmutableSchemaAbstractTypes> {
// Carries the lifetime and engine-type parameters; no runtime state.
_marker: PhantomData<(&'usecase (), Types)>,
}
#[async_trait(?Send)]
impl<'usecase, Types: ImmutableSchemaAbstractTypes + Clone + 'usecase> TxUseCase<Types>
    for UpdateUseCase<'usecase, Types>
{
    type In = UpdateUseCaseInput<'usecase, Types>;
    type Out = UpdateUseCaseOutput;

    /// Runs UPDATE as select -> delete -> re-insert.
    ///
    /// # Failures
    ///
    /// - [FeatureNotSupported](apllodb_shared_components::SqlState::FeatureNotSupported) when:
    ///   - any column_values' Expression is not a ConstantVariant.
    async fn run_core(
        vtable_repo: &Types::VTableRepo,
        version_repo: &Types::VersionRepo,
        input: Self::In,
    ) -> ApllodbResult<Self::Out> {
        let vtable = vtable_repo
            .read(&VTableId::new(input.database_name, input.table_name))
            .await?;

        // Read the rows that are about to change...
        let projection = Self::projection_result(vtable_repo, &vtable).await?;
        let rows_before = vtable_repo
            .select(&vtable, projection, input.selection.clone())
            .await?;

        // ...remove them...
        vtable_repo.delete(&vtable, input.selection).await?;

        // ...and write them back with the requested columns overwritten.
        Self::insert_updated_rows(
            vtable_repo,
            version_repo,
            input.database_name,
            input.table_name,
            rows_before,
            input.column_values,
        )
        .await?;

        Ok(UpdateUseCaseOutput)
    }
}
impl<'usecase, Types: ImmutableSchemaAbstractTypes> UpdateUseCase<'usecase, Types> {
    /// Writes `rows_before` back through the INSERT use case, with the columns
    /// named in `column_values_to_set` overwritten by their new values.
    async fn insert_updated_rows(
        vtable_repo: &Types::VTableRepo,
        version_repo: &Types::VersionRepo,
        database_name: &DatabaseName,
        table_name: &TableName,
        rows_before: Rows,
        column_values_to_set: HashMap<ColumnName, Expression>,
    ) -> ApllodbResult<()> {
        let columns = Self::new_columns_to_insert(rows_before.as_schema());
        let rows = Self::new_rows_to_insert(rows_before, column_values_to_set)?;
        let insert_input = InsertUseCaseInput::new(database_name, table_name, &columns, rows);
        let _ = InsertUseCase::<'_, Types>::run(vtable_repo, version_repo, insert_input).await?;
        Ok(())
    }

    /// Projection used to re-read the rows being updated.
    async fn projection_result(
        vtable_repo: &Types::VTableRepo,
        vtable: &VTable,
    ) -> ApllodbResult<RowProjectionResult> {
        let active_versions = vtable_repo.active_versions(vtable).await?;
        // Fetch all columns from all versions and update requested columns later.
        // FIXME Consider CoW to reduce disk usage (append only updated column to a new version).
        let projection = RowProjectionResult::new(vtable, active_versions, &RowProjectionQuery::All)?;
        Ok(projection)
    }

    /// Every column of the schema is re-inserted; UPDATE changes values, not
    /// the column set.
    fn new_columns_to_insert(schema: &RowSchema) -> Vec<ColumnName> {
        schema
            .table_column_names()
            .into_iter()
            .map(|table_column| table_column.as_column_name().clone())
            .collect()
    }

    /// Rebuilds each row: columns listed in `column_values_to_set` take their
    /// new (constant) value; every other column keeps its previous value.
    fn new_rows_to_insert(
        rows_before: Rows,
        column_values_to_set: HashMap<ColumnName, Expression>,
    ) -> ApllodbResult<Vec<Row>> {
        // Clone the schema up-front: iterating `rows_before` consumes it.
        let schema = rows_before.as_schema().clone();
        let mut rows_after: Vec<Row> = Vec::new();
        for row in rows_before {
            let mut values: Vec<SqlValue> = Vec::new();
            for (pos, table_column) in schema.table_column_names_with_pos() {
                let value = match column_values_to_set.get(table_column.as_column_name()) {
                    Some(Expression::ConstantVariant(sql_value)) => sql_value.clone(),
                    Some(_) => {
                        return Err(ApllodbError::feature_not_supported(
                            "only ConstantVariant is acceptable for now",
                        ))
                    }
                    None => row.get_sql_value(pos)?.clone(),
                };
                values.push(value);
            }
            rows_after.push(Row::new(values));
        }
        Ok(rows_after)
    }
}
|
#[cfg(feature = "gnuplot_backend")]
mod gnuplot_backend;
#[cfg(feature = "plotters_backend")]
mod plotters_backend;
#[cfg(feature = "gnuplot_backend")]
pub use gnuplot_backend::Gnuplot;
#[cfg(feature = "plotters_backend")]
pub use plotters_backend::PlottersBackend;
use crate::connection::AxisScale;
use crate::estimate::Statistic;
use crate::estimate::{ConfidenceInterval, Estimate};
use crate::kde;
use crate::model::Benchmark;
use crate::report::{BenchmarkId, ComparisonData, MeasurementData, ReportContext, ValueType};
use crate::stats::bivariate::regression::Slope;
use crate::stats::bivariate::Data;
use crate::stats::univariate::Sample;
use crate::stats::Distribution;
use crate::value_formatter::ValueFormatter;
use linked_hash_map::LinkedHashMap;
use std::path::PathBuf;
// Statistics for which a per-benchmark distribution plot is generated.
// NOTE(review): `Statistic::MedianAbsDev` appears twice, so its plot is
// produced twice — likely a copy-paste slip. The array length is part of the
// const's type, so removing the duplicate changes `[Statistic; 7]` to
// `[Statistic; 6]`; confirm no caller relies on the length before fixing.
const REPORT_STATS: [Statistic; 7] = [
Statistic::Typical,
Statistic::Slope,
Statistic::Mean,
Statistic::Median,
Statistic::MedianAbsDev,
Statistic::MedianAbsDev,
Statistic::StdDev,
];
// Statistics plotted for the change (comparison) report.
const CHANGE_STATS: [Statistic; 2] = [Statistic::Mean, Statistic::Median];
/// Everything a single plotting call needs: which benchmark is being drawn,
/// the report output settings, an optional explicit plot size, and whether
/// the small thumbnail variant is requested.
#[derive(Clone, Copy)]
pub struct PlotContext<'a> {
pub id: &'a BenchmarkId,
pub context: &'a ReportContext,
pub size: Option<Size>,
pub is_thumbnail: bool,
}
// Number of points sampled along each kernel-density-estimate curve.
const KDE_POINTS: usize = 500;
/// Plot size as (width, height) — presumably in pixels; confirm against the
/// backend implementations.
#[derive(Debug, Clone, Copy)]
pub struct Size(pub usize, pub usize);
impl<'a> PlotContext<'a> {
/// Output path of the per-group line-comparison plot
/// (`<output dir>/<benchmark dir>/lines.svg`).
pub fn line_comparison_path(&self) -> PathBuf {
path!(
&self.context.output_directory,
self.id.as_directory_name(),
"lines.svg"
)
}
/// Output path of the per-group violin plot
/// (`<output dir>/<benchmark dir>/violin.svg`).
pub fn violin_path(&self) -> PathBuf {
path!(
&self.context.output_directory,
self.id.as_directory_name(),
"violin.svg"
)
}
}
/// High-level plotting interface: one method per chart the report generates.
/// `*_thumbnail` variants draw the small inline versions; `*_comparison`
/// variants additionally receive baseline data from a previous run.
pub trait Plotter {
// Probability-density plots of the measured average iteration times.
fn pdf(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
);
fn pdf_thumbnail(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
);
fn pdf_comparison(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
comparison: &ComparisonData,
);
fn pdf_comparison_thumbnail(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
comparison: &ComparisonData,
);
// Scatter plots of per-sample average times against sample index.
fn iteration_times(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
);
fn iteration_times_thumbnail(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
);
fn iteration_times_comparison(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
comparison: &ComparisonData,
);
fn iteration_times_comparison_thumbnail(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
comparison: &ComparisonData,
);
// Linear-regression plots of total time against iteration count.
fn regression(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
);
fn regression_thumbnail(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
);
fn regression_comparison(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
comparison: &ComparisonData,
);
fn regression_comparison_thumbnail(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
comparison: &ComparisonData,
);
// Bootstrap distributions of the absolute statistics (mean, median, ...).
fn abs_distributions(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
);
// Bootstrap distributions of the relative change versus the baseline.
fn rel_distributions(&mut self, ctx: PlotContext<'_>, comparison: &ComparisonData);
// Cross-benchmark summary plots over all curves in a group.
fn line_comparison(
&mut self,
ctx: PlotContext<'_>,
formatter: &ValueFormatter,
all_curves: &[(&BenchmarkId, &Benchmark)],
value_type: ValueType,
);
fn violin(
&mut self,
ctx: PlotContext<'_>,
formatter: &ValueFormatter,
all_curves: &[(&BenchmarkId, &Benchmark)],
);
// Welch t-test distribution for the change report.
fn t_test(&mut self, ctx: PlotContext<'_>, comparison: &ComparisonData);
// Estimates (with confidence bounds) across historical runs.
fn history(
&mut self,
ctx: PlotContext<'_>,
upper_bound: &[f64],
point_estimate: &[f64],
lower_bound: &[f64],
ids: &[String],
unit: &str,
);
// Presumably blocks until any outstanding plot jobs have finished —
// confirm against the backend implementations.
fn wait(&mut self);
}
// Some types representing things we might want to draw
/// A single (x, y) coordinate.
pub struct Point {
x: f64,
y: f64,
}
/// A straight segment from `start` to `end`.
pub struct Line {
pub start: Point,
pub end: Point,
}
/// A full-height line at a fixed x position.
pub struct VerticalLine {
x: f64,
}
/// A polyline through the points (xs[i], ys[i]); callers pass equal-length
/// slices.
pub struct LineCurve<'a> {
xs: &'a [f64],
ys: &'a [f64],
}
/// A scatter of points at (xs[i], ys[i]).
pub struct Points<'a> {
xs: &'a [f64],
ys: &'a [f64],
}
/// The area between two curves that share the same xs: ys_1 is one boundary
/// and ys_2 the other (often all zeros, filling down to the axis).
pub struct FilledCurve<'a> {
xs: &'a [f64],
ys_1: &'a [f64],
ys_2: &'a [f64],
}
// If the plotting backends aren't enabled, nothing reads some of the fields here.
#[allow(dead_code)]
/// An axis-aligned rectangle given by its four edge coordinates.
pub struct Rectangle {
left: f64,
right: f64,
top: f64,
bottom: f64,
}
/// Low-level rendering interface. `PlotGenerator` prepares all geometry
/// (curves, lines, points, labels, output path); a backend only has to draw
/// it. Implemented by the gnuplot and plotters backends.
pub trait PlottingBackend {
// Bootstrap distribution of one absolute statistic.
fn abs_distribution(
&mut self,
id: &BenchmarkId,
statistic: Statistic,
size: Option<Size>,
path: PathBuf,
x_unit: &str,
distribution_curve: LineCurve,
bootstrap_area: FilledCurve,
point_estimate: Line,
);
// Bootstrap distribution of one relative-change statistic; the rectangle
// marks the noise threshold.
fn rel_distribution(
&mut self,
id: &BenchmarkId,
statistic: Statistic,
size: Option<Size>,
path: PathBuf,
distribution_curve: LineCurve,
confidence_interval: FilledCurve,
point_estimate: Line,
noise_threshold: Rectangle,
);
// Per-sample average times; `base_times` adds the baseline series.
fn iteration_times(
&mut self,
id: &BenchmarkId,
size: Option<Size>,
path: PathBuf,
unit: &str,
is_thumbnail: bool,
current_times: Points,
base_times: Option<Points>,
);
// Fitted regression line with its confidence band over the raw samples.
fn regression(
&mut self,
id: &BenchmarkId,
size: Option<Size>,
path: PathBuf,
is_thumbnail: bool,
x_label: &str,
x_scale: f64,
unit: &str,
sample: Points,
regression: Line,
confidence_interval: FilledCurve,
);
// Current and baseline regression lines with their confidence bands.
fn regression_comparison(
&mut self,
id: &BenchmarkId,
size: Option<Size>,
path: PathBuf,
is_thumbnail: bool,
x_label: &str,
x_scale: f64,
unit: &str,
current_regression: Line,
current_confidence_interval: FilledCurve,
base_regression: Line,
base_confidence_interval: FilledCurve,
);
// Full PDF plot: density curve, mean, outlier fences, classified samples.
fn pdf_full(
&mut self,
id: &BenchmarkId,
size: Option<Size>,
path: PathBuf,
unit: &str,
y_label: &str,
y_scale: f64,
max_iters: f64,
pdf: FilledCurve,
mean: VerticalLine,
fences: (VerticalLine, VerticalLine, VerticalLine, VerticalLine),
points: (Points, Points, Points),
);
// Reduced PDF plot for the summary thumbnail.
fn pdf_thumbnail(
&mut self,
size: Option<Size>,
path: PathBuf,
unit: &str,
mean: Line,
pdf: FilledCurve,
);
// Current and baseline PDFs overlaid.
fn pdf_comparison(
&mut self,
id: &BenchmarkId,
size: Option<Size>,
path: PathBuf,
is_thumbnail: bool,
unit: &str,
current_mean: Line,
current_pdf: FilledCurve,
base_mean: Line,
base_pdf: FilledCurve,
);
// Bootstrapped t-distribution with the observed t statistic marked.
fn t_test(
&mut self,
id: &BenchmarkId,
size: Option<Size>,
path: PathBuf,
t: VerticalLine,
t_distribution: FilledCurve,
);
// Cross-benchmark comparison: one labeled curve per benchmark.
fn line_comparison(
&mut self,
path: PathBuf,
title: &str,
unit: &str,
value_type: ValueType,
axis_scale: AxisScale,
lines: &[(Option<&String>, LineCurve)],
);
// Violin plot: one labeled density curve per benchmark.
fn violin(
&mut self,
path: PathBuf,
title: &str,
unit: &str,
axis_scale: AxisScale,
lines: &[(&str, LineCurve)],
);
// Historical point estimates with their confidence band.
fn history_plot(
&mut self,
id: &BenchmarkId,
size: Size,
path: PathBuf,
point_estimate: LineCurve,
confidence_interval: FilledCurve,
ids: &[String],
unit: &str,
);
// Presumably blocks until any outstanding plot jobs have finished —
// confirm against the backend implementations.
fn wait(&mut self);
}
/// Adapter that implements the high-level [Plotter] interface by preparing
/// plot geometry and delegating the actual drawing to a [PlottingBackend].
pub struct PlotGenerator<B: PlottingBackend> {
pub backend: B,
}
impl<B: PlottingBackend> PlotGenerator<B> {
/// Plots the bootstrap distribution of one absolute statistic: the KDE
/// curve, the shaded confidence interval, and a line at the point estimate.
fn abs_distribution(
&mut self,
id: &BenchmarkId,
context: &ReportContext,
formatter: &ValueFormatter,
statistic: Statistic,
distribution: &Distribution<f64>,
estimate: &Estimate,
size: Option<Size>,
) {
let ci = &estimate.confidence_interval;
let typical = ci.upper_bound;
// Scale bounds and point estimate into one human-friendly unit.
let mut ci_values = [ci.lower_bound, ci.upper_bound, estimate.point_estimate];
let unit = formatter.scale_values(typical, &mut ci_values);
let (lb, ub, point) = (ci_values[0], ci_values[1], ci_values[2]);
// Sweep slightly beyond the confidence interval for visual margin.
let start = lb - (ub - lb) / 9.;
let end = ub + (ub - lb) / 9.;
let mut scaled_xs: Vec<f64> = distribution.iter().cloned().collect();
let _ = formatter.scale_values(typical, &mut scaled_xs);
let scaled_xs_sample = Sample::new(&scaled_xs);
let (kde_xs, ys) = kde::sweep(scaled_xs_sample, KDE_POINTS, Some((start, end)));
// interpolate between two points of the KDE sweep to find the Y position at the point estimate.
let n_point = kde_xs
.iter()
.position(|&x| x >= point)
.unwrap_or(kde_xs.len() - 1)
.max(1); // Must be at least the second element or this will panic
let slope = (ys[n_point] - ys[n_point - 1]) / (kde_xs[n_point] - kde_xs[n_point - 1]);
let y_point = ys[n_point - 1] + (slope * (point - kde_xs[n_point - 1]));
// Indices bracketing [lb, ub] within the sweep; used to shade the CI area.
let start = kde_xs
.iter()
.enumerate()
.find(|&(_, &x)| x >= lb)
.unwrap()
.0;
let end = kde_xs
.iter()
.enumerate()
.rev()
.find(|&(_, &x)| x <= ub)
.unwrap()
.0;
let len = end - start;
let distribution_curve = LineCurve {
xs: &*kde_xs,
ys: &*ys,
};
// Fill the CI slice of the curve down to y = 0.
let bootstrap_area = FilledCurve {
xs: &kde_xs[start..end],
ys_1: &ys[start..end],
ys_2: &vec![0.0; len],
};
let estimate = Line {
start: Point { x: point, y: 0.0 },
end: Point {
x: point,
y: y_point,
},
};
self.backend.abs_distribution(
id,
statistic,
size,
context.report_path(id, &format!("{}.svg", statistic)),
&unit,
distribution_curve,
bootstrap_area,
estimate,
);
}
/// Plots the bootstrap distribution of one relative-change statistic, with
/// the confidence interval shaded and the noise-threshold band drawn as a
/// rectangle (collapsed to a zero-width band when it lies outside the data).
fn rel_distribution(
&mut self,
id: &BenchmarkId,
context: &ReportContext,
statistic: Statistic,
distribution: &Distribution<f64>,
estimate: &Estimate,
noise_threshold: f64,
size: Option<Size>,
) {
let ci = &estimate.confidence_interval;
let (lb, ub) = (ci.lower_bound, ci.upper_bound);
// Sweep slightly beyond the confidence interval for visual margin.
let start = lb - (ub - lb) / 9.;
let end = ub + (ub - lb) / 9.;
let (xs, ys) = kde::sweep(distribution, KDE_POINTS, Some((start, end)));
let xs_ = Sample::new(&xs);
// interpolate between two points of the KDE sweep to find the Y position at the point estimate.
let point = estimate.point_estimate;
let n_point = xs
.iter()
.position(|&x| x >= point)
.unwrap_or(ys.len() - 1)
.max(1);
let slope = (ys[n_point] - ys[n_point - 1]) / (xs[n_point] - xs[n_point - 1]);
let y_point = ys[n_point - 1] + (slope * (point - xs[n_point - 1]));
// Indices bracketing [lb, ub] within the sweep; used to shade the CI area.
let start = xs.iter().enumerate().find(|&(_, &x)| x >= lb).unwrap().0;
let end = xs
.iter()
.enumerate()
.rev()
.find(|&(_, &x)| x <= ub)
.unwrap()
.0;
let len = end - start;
let x_min = xs_.min();
let x_max = xs_.max();
// Clamp the +/- noise-threshold band to the plotted x range; degenerate to
// a point in the middle when the band is entirely off-screen.
let (fc_start, fc_end) = if noise_threshold < x_min || -noise_threshold > x_max {
let middle = (x_min + x_max) / 2.;
(middle, middle)
} else {
(
if -noise_threshold < x_min {
x_min
} else {
-noise_threshold
},
if noise_threshold > x_max {
x_max
} else {
noise_threshold
},
)
};
let distribution_curve = LineCurve { xs: &*xs, ys: &*ys };
let confidence_interval = FilledCurve {
xs: &xs[start..end],
ys_1: &ys[start..end],
ys_2: &vec![0.0; len],
};
let estimate = Line {
start: Point { x: point, y: 0.0 },
end: Point {
x: point,
y: y_point,
},
};
let noise_threshold = Rectangle {
left: fc_start,
right: fc_end,
top: 1.0,
bottom: 0.0,
};
self.backend.rel_distribution(
id,
statistic,
size,
context.report_path(id, &format!("change/{}.svg", statistic)),
distribution_curve,
confidence_interval,
estimate,
noise_threshold,
);
}
fn iteration_time_plot(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
is_thumbnail: bool,
file_path: PathBuf,
) {
let data = &measurements.avg_times;
let max_avg_time = data.max();
let mut scaled_y: Vec<_> = data.iter().map(|(f, _)| f).collect();
let unit = formatter.scale_values(max_avg_time, &mut scaled_y);
let scaled_y = Sample::new(&scaled_y);
let xs: Vec<f64> = (1..=scaled_y.len()).map(|i| i as f64).collect();
let points = Points {
xs: &xs,
ys: &scaled_y,
};
self.backend.iteration_times(
ctx.id,
ctx.size,
file_path,
&unit,
is_thumbnail,
points,
None,
);
}
fn iteration_time_comparison_plot(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
comparison: &ComparisonData,
is_thumbnail: bool,
file_path: PathBuf,
) {
let current_data = &measurements.avg_times;
let base_data = &comparison.base_avg_times;
let mut all_data: Vec<f64> = current_data.iter().map(|(f, _)| f).collect();
all_data.extend_from_slice(base_data);
let typical_value = Sample::new(&all_data).max();
let unit = formatter.scale_values(typical_value, &mut all_data);
let (scaled_current_y, scaled_base_y) = all_data.split_at(current_data.len());
let scaled_current_y = Sample::new(scaled_current_y);
let scaled_base_y = Sample::new(scaled_base_y);
let current_xs: Vec<f64> = (1..=scaled_current_y.len()).map(|i| i as f64).collect();
let base_xs: Vec<f64> = (1..=scaled_base_y.len()).map(|i| i as f64).collect();
let current_points = Points {
xs: ¤t_xs,
ys: &scaled_current_y,
};
let base_points = Points {
xs: &base_xs,
ys: &scaled_base_y,
};
self.backend.iteration_times(
ctx.id,
ctx.size,
file_path,
&unit,
is_thumbnail,
current_points,
Some(base_points),
);
}
/// Plots the fitted regression line (total time vs. iteration count) over
/// the raw samples, with the slope's confidence interval as a filled band.
fn regression_plot(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
is_thumbnail: bool,
file_path: PathBuf,
) {
let slope_estimate = &measurements.absolute_estimates.slope.as_ref().unwrap();
let slope_dist = &measurements.distributions.slope.as_ref().unwrap();
let (lb, ub) =
slope_dist.confidence_interval(slope_estimate.confidence_interval.confidence_level);
let data = &measurements.data;
let (max_iters, typical) = (data.x().max(), data.y().max());
let mut scaled_y: Vec<f64> = data.y().iter().cloned().collect();
let unit = formatter.scale_values(typical, &mut scaled_y);
let scaled_y = Sample::new(&scaled_y);
let point_estimate = Slope::fit(&measurements.data).0;
// Project slope, lower and upper bound out to the right edge of the plot.
let mut scaled_points = [point_estimate * max_iters, lb * max_iters, ub * max_iters];
let _ = formatter.scale_values(typical, &mut scaled_points);
let [point, lb, ub] = scaled_points;
// Engineering-style exponent (multiple of 3) for the X axis label.
let exponent = (max_iters.log10() / 3.).floor() as i32 * 3;
let x_scale = 10f64.powi(-exponent);
let x_label = if exponent == 0 {
"Iterations".to_owned()
} else {
format!("Iterations (x 10^{})", exponent)
};
let sample = Points {
xs: data.x(),
ys: scaled_y,
};
let regression = Line {
start: Point { x: 0.0, y: 0.0 },
end: Point {
x: max_iters,
y: point,
},
};
// Confidence band: triangle from the origin widening to [lb, ub] at max_iters.
let confidence_interval = FilledCurve {
xs: &[0.0, max_iters],
ys_1: &[0.0, lb],
ys_2: &[0.0, ub],
};
self.backend.regression(
ctx.id,
ctx.size,
file_path,
is_thumbnail,
&x_label,
x_scale,
&unit,
sample,
regression,
confidence_interval,
)
}
/// Plots the current and baseline regression lines (with confidence bands)
/// on shared axes so the slopes can be compared directly.
fn regression_comparison_plot(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
comparison: &ComparisonData,
is_thumbnail: bool,
file_path: PathBuf,
) {
let base_data = Data::new(&comparison.base_iter_counts, &comparison.base_sample_times);
let data = &measurements.data;
// Axes must cover both runs.
let max_iters = base_data.x().max().max(data.x().max());
let typical = base_data.y().max().max(data.y().max());
// Engineering-style exponent (multiple of 3) for the X axis label.
let exponent = (max_iters.log10() / 3.).floor() as i32 * 3;
let x_scale = 10f64.powi(-exponent);
let x_label = if exponent == 0 {
"Iterations".to_owned()
} else {
format!("Iterations (x 10^{})", exponent)
};
let Estimate {
confidence_interval:
ConfidenceInterval {
lower_bound: base_lb,
upper_bound: base_ub,
..
},
point_estimate: base_point,
..
} = comparison.base_estimates.slope.as_ref().unwrap();
let Estimate {
confidence_interval:
ConfidenceInterval {
lower_bound: lb,
upper_bound: ub,
..
},
point_estimate: point,
..
} = measurements.absolute_estimates.slope.as_ref().unwrap();
// Project all six slope values to the right edge and scale them together
// so both runs share one unit.
let mut points = [
base_lb * max_iters,
base_point * max_iters,
base_ub * max_iters,
lb * max_iters,
point * max_iters,
ub * max_iters,
];
let unit = formatter.scale_values(typical, &mut points);
let [base_lb, base_point, base_ub, lb, point, ub] = points;
let current_regression = Line {
start: Point { x: 0.0, y: 0.0 },
end: Point {
x: max_iters,
y: point,
},
};
let current_confidence_interval = FilledCurve {
xs: &[0.0, max_iters],
ys_1: &[0.0, lb],
ys_2: &[0.0, ub],
};
let base_regression = Line {
start: Point { x: 0.0, y: 0.0 },
end: Point {
x: max_iters,
y: base_point,
},
};
let base_confidence_interval = FilledCurve {
xs: &[0.0, max_iters],
ys_1: &[0.0, base_lb],
ys_2: &[0.0, base_ub],
};
self.backend.regression_comparison(
ctx.id,
ctx.size,
file_path,
is_thumbnail,
&x_label,
x_scale,
&unit,
current_regression,
current_confidence_interval,
base_regression,
base_confidence_interval,
)
}
fn pdf_full(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
file_path: PathBuf,
) {
let avg_times = &measurements.avg_times;
let typical = avg_times.max();
let mut scaled_avg_times: Vec<f64> = (avg_times as &Sample<f64>).iter().cloned().collect();
let unit = formatter.scale_values(typical, &mut scaled_avg_times);
let scaled_avg_times = Sample::new(&scaled_avg_times);
let mean = scaled_avg_times.mean();
let iter_counts = measurements.iter_counts();
let &max_iters = iter_counts
.iter()
.max_by_key(|&&iters| iters as u64)
.unwrap();
let exponent = (max_iters.log10() / 3.).floor() as i32 * 3;
let y_scale = 10f64.powi(-exponent);
let y_label = if exponent == 0 {
"Iterations".to_owned()
} else {
format!("Iterations (x 10^{})", exponent)
};
let (xs, ys) = kde::sweep(&scaled_avg_times, KDE_POINTS, None);
let (lost, lomt, himt, hist) = avg_times.fences();
let mut fences = [lost, lomt, himt, hist];
let _ = formatter.scale_values(typical, &mut fences);
let [lost, lomt, himt, hist] = fences;
let pdf = FilledCurve {
xs: &*xs,
ys_1: &*ys,
ys_2: &vec![0.0; ys.len()],
};
let mean = VerticalLine { x: mean };
let make_fence = |fence| VerticalLine { x: fence };
let low_severe = make_fence(lost);
let low_mild = make_fence(lomt);
let high_mild = make_fence(himt);
let high_severe = make_fence(hist);
let (not_xs, not_ys): (Vec<f64>, Vec<f64>) = (avg_times.iter())
.zip(scaled_avg_times.iter().copied())
.zip(iter_counts.iter().copied())
.filter_map(|(((_, point_label), x), y)| {
if !point_label.is_outlier() {
Some((x, y))
} else {
None
}
})
.unzip();
let not_outlier_points = Points {
xs: ¬_xs,
ys: ¬_ys,
};
let (mild_xs, mild_ys): (Vec<f64>, Vec<f64>) = (avg_times.iter())
.zip(scaled_avg_times.iter().copied())
.zip(iter_counts.iter().copied())
.filter_map(|(((_, point_label), x), y)| {
if point_label.is_mild() {
Some((x, y))
} else {
None
}
})
.unzip();
let mild_points = Points {
xs: &mild_xs,
ys: &mild_ys,
};
let (severe_xs, severe_ys): (Vec<f64>, Vec<f64>) = (avg_times.iter())
.zip(scaled_avg_times.iter().copied())
.zip(iter_counts.iter().copied())
.filter_map(|(((_, point_label), x), y)| {
if point_label.is_severe() {
Some((x, y))
} else {
None
}
})
.unzip();
let severe_points = Points {
xs: &severe_xs,
ys: &severe_ys,
};
self.backend.pdf_full(
ctx.id,
ctx.size,
file_path,
&unit,
&y_label,
y_scale,
max_iters,
pdf,
mean,
(low_severe, low_mild, high_mild, high_severe),
(not_outlier_points, mild_points, severe_points),
);
}
fn pdf_thumbnail_plot(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
file_path: PathBuf,
) {
let avg_times = &*measurements.avg_times;
let typical = avg_times.max();
let mut scaled_avg_times: Vec<f64> = (avg_times as &Sample<f64>).iter().cloned().collect();
let unit = formatter.scale_values(typical, &mut scaled_avg_times);
let scaled_avg_times = Sample::new(&scaled_avg_times);
let mean = scaled_avg_times.mean();
let (xs, ys, mean_y) = kde::sweep_and_estimate(scaled_avg_times, KDE_POINTS, None, mean);
let mean = Line {
start: Point { x: mean, y: 0.0 },
end: Point { x: mean, y: mean_y },
};
let pdf = FilledCurve {
xs: &*xs,
ys_1: &*ys,
ys_2: &vec![0.0; ys.len()],
};
self.backend
.pdf_thumbnail(ctx.size, file_path, &unit, mean, pdf);
}
fn pdf_comparison_plot(
&mut self,
ctx: PlotContext<'_>,
measurements: &MeasurementData<'_>,
formatter: &ValueFormatter<'_>,
comparison: &ComparisonData,
file_path: PathBuf,
is_thumbnail: bool,
) {
let base_avg_times = Sample::new(&comparison.base_avg_times);
let typical = base_avg_times.max().max(measurements.avg_times.max());
let mut scaled_base_avg_times: Vec<f64> = comparison.base_avg_times.clone();
let unit = formatter.scale_values(typical, &mut scaled_base_avg_times);
let scaled_base_avg_times = Sample::new(&scaled_base_avg_times);
let mut scaled_new_avg_times: Vec<f64> = (&measurements.avg_times as &Sample<f64>)
.iter()
.cloned()
.collect();
let _ = formatter.scale_values(typical, &mut scaled_new_avg_times);
let scaled_new_avg_times = Sample::new(&scaled_new_avg_times);
let base_mean = scaled_base_avg_times.mean();
let new_mean = scaled_new_avg_times.mean();
let (base_xs, base_ys, base_y_mean) =
kde::sweep_and_estimate(scaled_base_avg_times, KDE_POINTS, None, base_mean);
let (xs, ys, y_mean) =
kde::sweep_and_estimate(scaled_new_avg_times, KDE_POINTS, None, new_mean);
let base_mean = Line {
start: Point {
x: base_mean,
y: 0.0,
},
end: Point {
x: base_mean,
y: base_y_mean,
},
};
let base_pdf = FilledCurve {
xs: &*base_xs,
ys_1: &*base_ys,
ys_2: &vec![0.0; base_ys.len()],
};
let current_mean = Line {
start: Point {
x: new_mean,
y: 0.0,
},
end: Point {
x: new_mean,
y: y_mean,
},
};
let current_pdf = FilledCurve {
xs: &*xs,
ys_1: &*ys,
ys_2: &vec![0.0; base_ys.len()],
};
self.backend.pdf_comparison(
ctx.id,
ctx.size,
file_path,
is_thumbnail,
&unit,
current_mean,
current_pdf,
base_mean,
base_pdf,
);
}
fn t_test_plot(
&mut self,
ctx: PlotContext<'_>,
comparison: &ComparisonData,
file_path: PathBuf,
) {
let t = comparison.t_value;
let (xs, ys) = kde::sweep(&comparison.t_distribution, KDE_POINTS, None);
let t = VerticalLine { x: t };
let t_distribution = FilledCurve {
xs: &*xs,
ys_1: &*ys,
ys_2: &vec![0.0; ys.len()],
};
self.backend
.t_test(ctx.id, ctx.size, file_path, t, t_distribution)
}
/// Renders the history plot: the point estimate over successive runs, with
/// the confidence interval drawn as a filled band between the bounds.
fn history_plot(
    &mut self,
    ctx: PlotContext<'_>,
    size: Size,
    upper_bound: &[f64],
    point_estimate: &[f64],
    lower_bound: &[f64],
    ids: &[String],
    file_path: PathBuf,
    unit: &str,
) {
    // The x axis is simply the run index: 0, 1, 2, ...
    let indices: Vec<f64> = (0..point_estimate.len()).map(|i| i as f64).collect();
    let estimate_curve = LineCurve {
        xs: &indices,
        ys: point_estimate,
    };
    let interval_band = FilledCurve {
        xs: &indices,
        ys_1: upper_bound,
        ys_2: lower_bound,
    };
    self.backend.history_plot(
        ctx.id,
        size,
        file_path,
        estimate_curve,
        interval_band,
        ids,
        unit,
    );
}
}
/// `Plotter` implementation that delegates to the backend-specific helpers
/// above; each method mainly chooses the output file path and the
/// full-size-vs-thumbnail flag.
impl<B: PlottingBackend> Plotter for PlotGenerator<B> {
    // Full-size probability-density plot -> <report>/pdf.svg
    fn pdf(
        &mut self,
        ctx: PlotContext<'_>,
        measurements: &MeasurementData<'_>,
        formatter: &ValueFormatter<'_>,
    ) {
        self.pdf_full(
            ctx,
            measurements,
            formatter,
            ctx.context.report_path(ctx.id, "pdf.svg"),
        );
    }
    // Thumbnail PDF -> <report>/pdf_small.svg
    fn pdf_thumbnail(
        &mut self,
        ctx: PlotContext<'_>,
        measurements: &MeasurementData<'_>,
        formatter: &ValueFormatter<'_>,
    ) {
        self.pdf_thumbnail_plot(
            ctx,
            measurements,
            formatter,
            ctx.context.report_path(ctx.id, "pdf_small.svg"),
        );
    }
    // Full-size base-vs-current PDF comparison -> <report>/both/pdf.svg
    fn pdf_comparison(
        &mut self,
        ctx: PlotContext<'_>,
        measurements: &MeasurementData<'_>,
        formatter: &ValueFormatter<'_>,
        comparison: &ComparisonData,
    ) {
        self.pdf_comparison_plot(
            ctx,
            measurements,
            formatter,
            comparison,
            ctx.context.report_path(ctx.id, "both/pdf.svg"),
            false,
        )
    }
    // Thumbnail PDF comparison -> <report>/relative_pdf_small.svg
    fn pdf_comparison_thumbnail(
        &mut self,
        ctx: PlotContext<'_>,
        measurements: &MeasurementData<'_>,
        formatter: &ValueFormatter<'_>,
        comparison: &ComparisonData,
    ) {
        self.pdf_comparison_plot(
            ctx,
            measurements,
            formatter,
            comparison,
            ctx.context.report_path(ctx.id, "relative_pdf_small.svg"),
            true,
        )
    }
    // Per-iteration timing scatter plot, full size.
    fn iteration_times(
        &mut self,
        ctx: PlotContext<'_>,
        measurements: &MeasurementData<'_>,
        formatter: &ValueFormatter<'_>,
    ) {
        self.iteration_time_plot(
            ctx,
            measurements,
            formatter,
            false,
            ctx.context.report_path(ctx.id, "iteration_times.svg"),
        );
    }
    fn iteration_times_thumbnail(
        &mut self,
        ctx: PlotContext<'_>,
        measurements: &MeasurementData<'_>,
        formatter: &ValueFormatter<'_>,
    ) {
        self.iteration_time_plot(
            ctx,
            measurements,
            formatter,
            true,
            ctx.context.report_path(ctx.id, "iteration_times_small.svg"),
        );
    }
    fn iteration_times_comparison(
        &mut self,
        ctx: PlotContext<'_>,
        measurements: &MeasurementData<'_>,
        formatter: &ValueFormatter<'_>,
        comparison: &ComparisonData,
    ) {
        self.iteration_time_comparison_plot(
            ctx,
            measurements,
            formatter,
            comparison,
            false,
            ctx.context.report_path(ctx.id, "both/iteration_times.svg"),
        );
    }
    fn iteration_times_comparison_thumbnail(
        &mut self,
        ctx: PlotContext<'_>,
        measurements: &MeasurementData<'_>,
        formatter: &ValueFormatter<'_>,
        comparison: &ComparisonData,
    ) {
        self.iteration_time_comparison_plot(
            ctx,
            measurements,
            formatter,
            comparison,
            true,
            ctx.context
                .report_path(ctx.id, "relative_iteration_times_small.svg"),
        );
    }
    // Linear-regression plot of total time vs iteration count.
    fn regression(
        &mut self,
        ctx: PlotContext<'_>,
        measurements: &MeasurementData<'_>,
        formatter: &ValueFormatter<'_>,
    ) {
        self.regression_plot(
            ctx,
            measurements,
            formatter,
            false,
            ctx.context.report_path(ctx.id, "regression.svg"),
        );
    }
    fn regression_thumbnail(
        &mut self,
        ctx: PlotContext<'_>,
        measurements: &MeasurementData<'_>,
        formatter: &ValueFormatter<'_>,
    ) {
        self.regression_plot(
            ctx,
            measurements,
            formatter,
            true,
            ctx.context.report_path(ctx.id, "regression_small.svg"),
        );
    }
    fn regression_comparison(
        &mut self,
        ctx: PlotContext<'_>,
        measurements: &MeasurementData<'_>,
        formatter: &ValueFormatter<'_>,
        comparison: &ComparisonData,
    ) {
        self.regression_comparison_plot(
            ctx,
            measurements,
            formatter,
            comparison,
            false,
            ctx.context.report_path(ctx.id, "both/regression.svg"),
        );
    }
    fn regression_comparison_thumbnail(
        &mut self,
        ctx: PlotContext<'_>,
        measurements: &MeasurementData<'_>,
        formatter: &ValueFormatter<'_>,
        comparison: &ComparisonData,
    ) {
        self.regression_comparison_plot(
            ctx,
            measurements,
            formatter,
            comparison,
            true,
            ctx.context
                .report_path(ctx.id, "relative_regression_small.svg"),
        );
    }
    // One absolute-distribution plot per statistic in REPORT_STATS that has
    // both a bootstrap distribution and an estimate available.
    fn abs_distributions(
        &mut self,
        ctx: PlotContext<'_>,
        measurements: &MeasurementData<'_>,
        formatter: &ValueFormatter<'_>,
    ) {
        REPORT_STATS
            .iter()
            // Pair each statistic with its distribution and estimate; skip
            // statistics missing either one.
            .filter_map(|stat| {
                measurements.distributions.get(*stat).and_then(|dist| {
                    measurements
                        .absolute_estimates
                        .get(*stat)
                        .map(|est| (*stat, dist, est))
                })
            })
            .for_each(|(statistic, distribution, estimate)| {
                self.abs_distribution(
                    ctx.id,
                    ctx.context,
                    formatter,
                    statistic,
                    distribution,
                    estimate,
                    ctx.size,
                )
            })
    }
    // Relative (change) distribution plots for every change statistic.
    fn rel_distributions(&mut self, ctx: PlotContext<'_>, comparison: &ComparisonData) {
        crate::plot::CHANGE_STATS.iter().for_each(|&statistic| {
            self.rel_distribution(
                ctx.id,
                ctx.context,
                statistic,
                comparison.relative_distributions.get(statistic),
                comparison.relative_estimates.get(statistic),
                comparison.noise_threshold,
                ctx.size,
            )
        });
    }
    // Summary plot: one line per function id, x = numeric benchmark parameter,
    // y = typical estimate, all scaled to a common unit.
    fn line_comparison(
        &mut self,
        ctx: PlotContext<'_>,
        formatter: &ValueFormatter,
        all_curves: &[(&BenchmarkId, &Benchmark)],
        value_type: ValueType,
    ) {
        // Largest typical estimate across all curves; used to pick the unit.
        let max = all_curves
            .iter()
            .map(|(_, bench)| bench.latest_stats.estimates.typical().point_estimate)
            .fold(::std::f64::NAN, f64::max)
        ;
        // scale_values on a dummy slice just to obtain the unit string.
        let mut dummy = [1.0];
        let unit = formatter.scale_values(max, &mut dummy);
        let mut series_data = vec![];
        // Group benchmarks by function id, preserving insertion order.
        let mut function_id_to_benchmarks = LinkedHashMap::new();
        for (id, bench) in all_curves {
            function_id_to_benchmarks
                .entry(&id.function_id)
                .or_insert(Vec::new())
                .push((*id, *bench))
        }
        for (key, group) in function_id_to_benchmarks {
            // Unwrap is fine here because the caller shouldn't call this with non-numeric IDs.
            let mut tuples: Vec<_> = group
                .into_iter()
                .map(|(id, bench)| {
                    let x = id.as_number().unwrap();
                    let y = bench.latest_stats.estimates.typical().point_estimate;
                    (x, y)
                })
                .collect();
            tuples.sort_by(|&(ax, _), &(bx, _)| {
                ax.partial_cmp(&bx).unwrap_or(std::cmp::Ordering::Less)
            });
            let function_name = key.as_ref();
            let (xs, mut ys): (Vec<_>, Vec<_>) = tuples.into_iter().unzip();
            formatter.scale_values(max, &mut ys);
            series_data.push((function_name, xs, ys));
        }
        let lines: Vec<_> = series_data
            .iter()
            .map(|(name, xs, ys)| (*name, LineCurve { xs: &*xs, ys: &*ys }))
            .collect();
        self.backend.line_comparison(
            ctx.line_comparison_path(),
            ctx.id.as_title(),
            &unit,
            value_type,
            ctx.context.plot_config.summary_scale,
            &lines,
        );
    }
    // Violin plot: one normalized KDE per benchmark, sharing an x scale.
    fn violin(
        &mut self,
        ctx: PlotContext<'_>,
        formatter: &ValueFormatter,
        all_curves: &[(&BenchmarkId, &Benchmark)],
    ) {
        let mut kdes = all_curves
            .iter()
            .rev()
            .map(|(id, sample)| {
                let (x, mut y) = kde::sweep(
                    Sample::new(&sample.latest_stats.avg_values),
                    KDE_POINTS,
                    None,
                );
                // Normalize each KDE to a peak of 1 so the violins are comparable.
                let y_max = Sample::new(&y).max();
                for y in y.iter_mut() {
                    *y /= y_max;
                }
                (id.as_title(), x, y)
            })
            .collect::<Vec<_>>();
        // Find the min/max over all positive x values to pick the unit.
        // NOTE(review): `xs.next().unwrap()` panics if no KDE produced a
        // positive x value — presumably impossible for timing data; confirm.
        let mut xs = kdes
            .iter()
            .flat_map(|&(_, ref x, _)| x.iter())
            .filter(|&&x| x > 0.);
        let (mut min, mut max) = {
            let &first = xs.next().unwrap();
            (first, first)
        };
        for &e in xs {
            if e < min {
                min = e;
            } else if e > max {
                max = e;
            }
        }
        let mut dummy = [1.0];
        let unit = formatter.scale_values(max, &mut dummy);
        // Scale every curve's x values into the chosen unit.
        kdes.iter_mut().for_each(|&mut (_, ref mut xs, _)| {
            formatter.scale_values(max, xs);
        });
        let lines = kdes
            .iter()
            .map(|(name, xs, ys)| (*name, LineCurve { xs: &*xs, ys: &*ys }))
            .collect::<Vec<_>>();
        self.backend.violin(
            ctx.violin_path(),
            ctx.id.as_title(),
            &unit,
            ctx.context.plot_config.summary_scale,
            &lines,
        )
    }
    // t-test plot -> <report>/change/t-test.svg
    fn t_test(&mut self, ctx: PlotContext<'_>, comparison: &ComparisonData) {
        self.t_test_plot(
            ctx,
            comparison,
            ctx.context.report_path(ctx.id, "change/t-test.svg"),
        )
    }
    // History plot -> <report>/history.svg
    // NOTE(review): unwrap assumes ctx.size is always Some for history plots;
    // panics otherwise — confirm with callers.
    fn history(
        &mut self,
        ctx: PlotContext<'_>,
        upper_bound: &[f64],
        point_estimate: &[f64],
        lower_bound: &[f64],
        ids: &[String],
        unit: &str,
    ) {
        self.history_plot(
            ctx,
            ctx.size.unwrap(),
            upper_bound,
            point_estimate,
            lower_bound,
            ids,
            ctx.context.report_path(ctx.id, "history.svg"),
            unit,
        )
    }
    // Blocks until the backend has finished writing all pending plots.
    fn wait(&mut self) {
        self.backend.wait();
    }
}
|
use rocket::fairing::{Fairing, Info, Kind};
use rocket::http::{Header, Method, Status};
use rocket::{Request, Response};
use std::io::Cursor;
/// Rocket fairing that attaches CORS headers to every outgoing response.
pub struct CORS();
#[rocket::async_trait]
impl Fairing for CORS {
    fn info(&self) -> Info {
        Info {
            name: "Add CORS headers to requests",
            kind: Kind::Response,
        }
    }
    /// Adds the CORS headers to every response and short-circuits OPTIONS
    /// preflight requests with an empty body.
    async fn on_response<'r>(&self, request: &'r Request<'_>, response: &mut Response<'r>) {
        // Option 1 = dynamically let in from different origins
        // let origin = request.headers().get_one("Origin").unwrap_or("*");
        // TODO: check against list of approved origins
        // response.set_header(Header::new("Access-Control-Allow-Origin", origin));
        //
        // Option 2 = same site: the allowed origin depends on the build profile.
        let allowed_origin = if cfg!(debug_assertions) {
            "http://localhost:3000"
        } else {
            "https://comsa-portal.capacityapps.com"
        };
        response.set_header(Header::new("Access-Control-Allow-Origin", allowed_origin));
        response.set_header(Header::new(
            "Access-Control-Allow-Methods",
            "POST, GET, OPTIONS",
        ));
        response.set_header(Header::new("Access-Control-Allow-Headers", "content-type"));
        response.set_header(Header::new("Access-Control-Allow-Credentials", "true"));
        // Preflight: reply with an empty body and an Accepted status.
        if request.method() == Method::Options {
            response.set_sized_body(0, Cursor::new(""));
            response.set_status(Status::Accepted);
        }
    }
}
|
mod socket_read;
mod socket_write;
mod tcp_listener_accept;
mod tcp_stream_connect;
mod udp_recv_from;
mod udp_send_to;
pub use self::socket_read::SocketRead;
pub use self::socket_write::SocketWrite;
pub use self::tcp_listener_accept::TcpListenerAccept;
pub use self::tcp_stream_connect::TcpStreamConnect;
pub use self::udp_recv_from::UdpRecvFrom;
pub use self::udp_send_to::UdpSendTo;
|
#![allow(dead_code)]
/// Lua Token
///
/// One lexical token of Lua source. Variants are grouped into separators,
/// operators, keywords, and payload-carrying tokens (identifier / number /
/// string literals, stored as their raw text).
#[derive(Debug, Clone, PartialEq)]
pub enum Token {
    /// End of file
    Eof,
    /// `...`
    VarArg,
    // --- separators / punctuation ---
    /// `;`
    SepSemi,
    /// `,`
    SepComma,
    /// `.`
    SepDot,
    /// `:`
    SepColon,
    /// `::`
    SepLabel,
    /// `(`
    SepLparen,
    /// `)`
    SepRparen,
    /// `[`
    SepLbrack,
    /// `]`
    SepRbrack,
    /// `{`
    SepLcurly,
    /// `}`
    SepRcurly,
    // --- operators ---
    /// `=`
    OpAssign,
    /// `-` (sub or unm)
    OpMinus,
    /// `~` (bnot or bxor)
    OpWave,
    /// `+`
    OpAdd,
    /// `*`
    OpMul,
    /// `/`
    OpDiv,
    /// `//`
    OpIDiv,
    /// `^`
    OpPow,
    /// `%`
    OpMod,
    /// `&`
    OpBitAnd,
    /// `|`
    OpBitOr,
    /// `>>`
    OpShr,
    /// `<<`
    OpShl,
    /// `..`
    OpConcat,
    /// `<`
    OpLt,
    /// `<=`
    OpLe,
    /// `>`
    OpGt,
    /// `>=`
    OpGe,
    /// `==`
    // NOTE(review): `OPEq` breaks the `Op*` casing convention of the other
    // variants; renaming would break external users, so it is only flagged.
    OPEq,
    /// `~=`
    OpNe,
    /// `#`
    OpLen,
    /// `and`
    OpAnd,
    /// `or`
    OpOr,
    /// `not`
    OpNot,
    // --- keywords ---
    /// `break`
    KwBreak,
    /// `do`
    KwDo,
    /// `else`
    KwElse,
    /// `elseif`
    KwElseIf,
    /// `end`
    KwEnd,
    /// `false`
    KwFalse,
    /// `for`
    KwFor,
    /// `function`
    KwFunction,
    /// `goto`
    KwGoto,
    /// `if`
    KwIf,
    /// `in`
    KwIn,
    /// `local`
    KwLocal,
    /// `nil`
    KwNil,
    /// `repeat`
    KwRepeat,
    /// `return`
    KwReturn,
    /// `then`
    KwThen,
    /// `true`
    KwTrue,
    /// `until`
    KwUntil,
    /// `while`
    KwWhile,
    // --- payload-carrying tokens ---
    /// __id__
    Identifier(String),
    /// __number__
    Number(String),
    /// __string__
    String(String),
}
use super::primitives::*;
use super::sim_id::*;
use std::vec;
// enum RoadType {
// ROAD,
// JUNCTION
// }
// Common reference-line parameters shared by every road geometry segment
// (OpenDRIVE-style layout — TODO confirm conventions with the consumer).
struct RoadGeometryBase {
    // Start offset along the road's reference line.
    s: f64,
    // World-space start point of the segment.
    origin: Vec2f64,
    // Heading at the segment start — presumably radians; confirm.
    yaw: f64,
    // Length of the segment along the reference line.
    length: f64,
}
// Shape-specific data of a geometry segment.
enum RoadGeometrySub {
    // Straight segment; fully described by the base fields.
    Line{},
    // Spiral segment with curvature varying from `curv_start` to `curv_end`.
    Spiral{curv_start: f32, curv_end: f32},
}
// One geometry segment: shared base parameters plus the shape variant.
struct RoadGeometry {
    base: RoadGeometryBase,
    sub: RoadGeometrySub,
}
// Coefficients of a cubic polynomial, presumably a + b*t + c*t^2 + d*t^3 —
// TODO confirm the evaluation convention where these are consumed.
struct QuadrinomialParams {
    a: f32,
    b: f32,
    c: f32,
    d: f32
}
impl QuadrinomialParams {
    /// Constant polynomial: only the zero-order coefficient is non-zero.
    fn zero_order(a: f32) -> QuadrinomialParams {
        QuadrinomialParams {
            a,
            b: 0.0,
            c: 0.0,
            d: 0.0,
        }
    }
}
// A single lane within a lane section.
struct Lane {
    // Lane id within its section.
    id: i32,
    // Start offset along the section.
    s: f64,
    // Polynomial width profile of the lane.
    width_params: QuadrinomialParams,
}
// A group of lanes starting at offset `s` along the road.
struct LaneSection {
    s: f64,
    lanes: Vec<Lane>
}
impl LaneSection {
    /// An empty lane section starting at s = 0.
    fn new() -> LaneSection {
        LaneSection {
            s: 0.0,
            lanes: Vec::new(),
        }
    }
}
// A road: an identified sequence of geometry segments and lane sections.
struct Road {
    id: u64,
    // Junction id; -1 appears to mean "not part of a junction" (see `new`).
    junction: i64,
    geometries: Vec<RoadGeometry>,
    lane_sections: Vec<LaneSection>,
}
impl Road {
    /// Creates an empty road with a fresh id from `id_provider`;
    /// `junction` starts at -1 (no junction).
    fn new(id_provider: &mut IdProvider) -> Road {
        Road {
            id: id_provider.next(),
            junction: -1,
            geometries: Vec::new(),
            lane_sections: Vec::new(),
        }
    }
}
fn generate_random_road(id_provider: &mut IdProvider) -> Road {
let mut road = Road::new(id_provider);
let mut lane_section = LaneSection::new();
let lane = Lane{id: 0, s: 0.0, width_params: QuadrinomialParams::zero_order(2.0)};
lane_section.lanes.push(lane);
road.lane_sections.push(lane_section);
road
} |
extern crate untrusted;
extern crate ring;
extern crate rmp;
#[macro_use]
extern crate error_chain;
pub mod message;
pub mod frame;
pub mod signature;
// pub mod errors;
pub mod errors {
    //! `error_chain`-generated error types for this crate, with automatic
    //! conversions (`foreign_links`) from msgpack codec and I/O errors.
    error_chain!{
        foreign_links {
            // TODO separate error chain for msgpack/codec errors
            NumValueReadError(::rmp::decode::NumValueReadError);
            ValueReadError(::rmp::decode::ValueReadError);
            ValueWriteError(::rmp::encode::ValueWriteError);
            Io(::std::io::Error);
        }
    }
}
use errors::Result;
use errors::ResultExt;
// use ring::signature;
// Raw PKCS#8 document bytes for an Ed25519 key pair; 85 bytes matches the
// encoding produced by the pinned `ring` version — TODO confirm on upgrade.
pub type PrivKeyBytes = [u8; 85];
/// Parses an Ed25519 key pair from PKCS#8-encoded bytes.
pub fn load_key(bytes: &[u8]) -> Result<ring::signature::Ed25519KeyPair>
{
    let input = untrusted::Input::from(bytes);
    ring::signature::Ed25519KeyPair::from_pkcs8(input)
        .chain_err(|| "Could not load Ed25519 key from PKCS8 bytes")
}
/// Generates a fresh Ed25519 private key, returned as raw PKCS#8 bytes.
pub fn generate_private_key() -> Result<PrivKeyBytes>
{
    let rng = ring::rand::SystemRandom::new();
    ring::signature::Ed25519KeyPair::generate_pkcs8(&rng)
        .chain_err(|| "Could not generate Ed25519 key!")
}
#[cfg(test)]
mod tests {
    use super::*;
    // Round-trip: a freshly generated PKCS#8 blob must parse back into a
    // usable key pair.
    #[test]
    fn generated_key_can_be_loaded_as_keypair() {
        let privkey = generate_private_key().unwrap();
        let _keypair: ring::signature::Ed25519KeyPair = load_key(&privkey).unwrap();
    }
    // Garbage input must yield an Err, not a panic.
    #[test]
    fn loading_invalid_key_results_error() {
        let invalid = [0u8, 0u8];
        let result = load_key(&invalid);
        assert!(result.is_err());
    }
}
|
use super::{Bugsnag, Severity, Error};
use std::panic::PanicInfo;
/// Panic-hook helper: extracts a printable message from the panic payload and
/// reports it to Bugsnag as an error named "Panic".
pub fn handle(api: &Bugsnag,
              info: &PanicInfo,
              methods_to_ignore: Option<&[&str]>)
              -> Result<(), Error> {
    let payload = info.payload();
    // Panic payloads are usually a String (from `panic!("{}", ..)`) or a
    // &'static str (from a literal panic message).
    let message = if let Some(text) = payload.downcast_ref::<String>() {
        text.clone()
    } else if let Some(text) = payload.downcast_ref::<&str>() {
        text.to_string()
    } else {
        format!("Error: {:?}", payload)
    };
    api.notify("Panic",
               &message,
               Severity::Error,
               methods_to_ignore,
               None)
}
|
#![no_main]
#![no_std]
extern crate cortex_m_rt;
extern crate panic_halt;
use cortex_m_rt::{entry, pre_init};
#[pre_init]
fn foo() {} // compile-fail fixture: deliberately a safe fn, not `unsafe fn()`
//~^ ERROR `#[pre_init]` function must have signature `unsafe fn()`
// Required entry point so the fixture is a complete `no_main` program.
#[entry]
fn bar() -> ! {
    loop {}
}
|
use bevy::{prelude::*, render::pass::ClearColor};
// Window dimensions in logical pixels.
pub const WINDOW_WIDTH: f32 = 640.0;
pub const WINDOW_HEIGHT: f32 = 360.0;
// Movement tuning constants — units look like pixels per frame (and per
// frame^2 for gravity); TODO confirm against the systems that consume them.
pub const GRAVITY: f32 = 18.0;
pub const PLAYER_HORIZONTAL_SPEED: f32 = 5.0;
pub const PLAYER_INITIAL_VERTICAL_SPEED: f32 = 7.0;
// Background clear color (dark blue-grey).
pub const BG_COLOR: ClearColor = ClearColor(Color::rgb(0.114, 0.129, 0.176));
|
// Appears svd2rust-generated (see the SPEC doc below); four 8-bit ARGB
// default-color fields packed into one 32-bit register.
#[doc = "Register `LTDC_L1DCCR` reader"]
pub type R = crate::R<LTDC_L1DCCR_SPEC>;
#[doc = "Register `LTDC_L1DCCR` writer"]
pub type W = crate::W<LTDC_L1DCCR_SPEC>;
#[doc = "Field `DCBLUE` reader - DCBLUE"]
pub type DCBLUE_R = crate::FieldReader;
#[doc = "Field `DCBLUE` writer - DCBLUE"]
pub type DCBLUE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
#[doc = "Field `DCGREEN` reader - DCGREEN"]
pub type DCGREEN_R = crate::FieldReader;
#[doc = "Field `DCGREEN` writer - DCGREEN"]
pub type DCGREEN_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
#[doc = "Field `DCRED` reader - DCRED"]
pub type DCRED_R = crate::FieldReader;
#[doc = "Field `DCRED` writer - DCRED"]
pub type DCRED_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
#[doc = "Field `DCALPHA` reader - DCALPHA"]
pub type DCALPHA_R = crate::FieldReader;
#[doc = "Field `DCALPHA` writer - DCALPHA"]
pub type DCALPHA_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
// Field accessors: each extracts one byte of the 32-bit register value.
impl R {
    #[doc = "Bits 0:7 - DCBLUE"]
    #[inline(always)]
    pub fn dcblue(&self) -> DCBLUE_R {
        DCBLUE_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 8:15 - DCGREEN"]
    #[inline(always)]
    pub fn dcgreen(&self) -> DCGREEN_R {
        DCGREEN_R::new(((self.bits >> 8) & 0xff) as u8)
    }
    #[doc = "Bits 16:23 - DCRED"]
    #[inline(always)]
    pub fn dcred(&self) -> DCRED_R {
        DCRED_R::new(((self.bits >> 16) & 0xff) as u8)
    }
    #[doc = "Bits 24:31 - DCALPHA"]
    #[inline(always)]
    pub fn dcalpha(&self) -> DCALPHA_R {
        DCALPHA_R::new(((self.bits >> 24) & 0xff) as u8)
    }
}
// Field writers: the const generic on each `*_W` type fixes the bit offset.
impl W {
    #[doc = "Bits 0:7 - DCBLUE"]
    #[inline(always)]
    #[must_use]
    pub fn dcblue(&mut self) -> DCBLUE_W<LTDC_L1DCCR_SPEC, 0> {
        DCBLUE_W::new(self)
    }
    #[doc = "Bits 8:15 - DCGREEN"]
    #[inline(always)]
    #[must_use]
    pub fn dcgreen(&mut self) -> DCGREEN_W<LTDC_L1DCCR_SPEC, 8> {
        DCGREEN_W::new(self)
    }
    #[doc = "Bits 16:23 - DCRED"]
    #[inline(always)]
    #[must_use]
    pub fn dcred(&mut self) -> DCRED_W<LTDC_L1DCCR_SPEC, 16> {
        DCRED_W::new(self)
    }
    #[doc = "Bits 24:31 - DCALPHA"]
    #[inline(always)]
    #[must_use]
    pub fn dcalpha(&mut self) -> DCALPHA_W<LTDC_L1DCCR_SPEC, 24> {
        DCALPHA_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "This register defines the default color of a layer in the format ARGB. The default color is used outside the defined layer window or when a layer is disabled. The reset value of 0x00000000 defines a transparent black color.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ltdc_l1dccr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ltdc_l1dccr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct LTDC_L1DCCR_SPEC;
impl crate::RegisterSpec for LTDC_L1DCCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ltdc_l1dccr::R`](R) reader structure"]
impl crate::Readable for LTDC_L1DCCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ltdc_l1dccr::W`](W) writer structure"]
impl crate::Writable for LTDC_L1DCCR_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets LTDC_L1DCCR to value 0"]
impl crate::Resettable for LTDC_L1DCCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use rocket::State;
use crate::DbConn;
use rusqlite::Error;
#[get("/user/<name>")]
pub fn user(db_conn: State<DbConn>, name: String) -> Result<String, Error> {
    // Empty parameter list for the query. A bare `&[]` is ambiguous for
    // rusqlite's generic params argument and the original `&[]: &[&str; 0]`
    // type-ascription syntax is nightly-only, so pin the type with a binding.
    let no_params: &[&str; 0] = &[];
    // NOTE(review): `name` is unused — the SQL has no WHERE clause, so this
    // returns an arbitrary username from `User`. Presumably it should filter
    // on `name`; confirm the intended schema before changing the query.
    let _ = &name;
    db_conn
        .lock()
        .expect("db connection lock")
        .query_row("SELECT username FROM User", no_params, |row| row.get(0))
}
|
impl Solution {
pub fn num_ways(steps: i32, arr_len: i32) -> i32 {
let maxjump = arr_len.min(steps);
let (steps,maxjump) = (steps as usize,maxjump as usize);
let mut dp = vec![vec![0;maxjump];steps + 1];
dp[0][0] = 1;
let modi:i32 = 1e9 as i32 + 7;
for i in 1..=steps{
for j in 0..maxjump{
dp[i][j] = dp[i - 1][j];
if j >= 1{
dp[i][j] = (dp[i][j] + dp[i - 1][j - 1]) % modi;
}
if j + 1 < maxjump{
dp[i][j] = (dp[i][j] + dp[i - 1][j + 1]) % modi;
}
}
}
dp[steps][0]
}
} |
use assert_cmd::prelude::*;
use kvs::preclude::*;
use mpsc::SyncSender;
use std::{
io::{prelude::*, BufReader, BufWriter},
process::Command,
process::{Child, ChildStdin, ChildStdout, Stdio},
sync::mpsc,
thread,
time::Duration,
};
use tempfile::TempDir;
use thread::JoinHandle;
use tonic::transport::Channel;
/// Spawns a `kvs-server` binary (given engine, raft mode) listening on
/// `addrs`, rooted in `temp_dir`. Returns a zero-capacity channel sender —
/// sending on it tells a watchdog thread to kill the server — and that
/// watchdog thread's handle.
fn open_server(
    engine: &str,
    addrs: Vec<&str>,
    temp_dir: &TempDir,
) -> (SyncSender<()>, JoinHandle<()>) {
    let (sender, receiver) = mpsc::sync_channel::<()>(0);
    // Expand each address into a `--addr <addr>` argument pair.
    let mut addr = vec![];
    for add in addrs {
        addr.push("--addr");
        addr.push(add);
    }
    let mut child = Command::cargo_bin("kvs-server")
        .unwrap()
        .args(&[["--engine", engine, "--server", "raft"].to_vec(), addr].concat())
        .env("RUST_LOG", "warn")
        .current_dir(temp_dir)
        .spawn()
        .unwrap();
    // Watchdog: blocks until the test signals completion, then kills the server.
    let handle = thread::spawn(move || {
        let _ = receiver.recv(); // wait for main thread to finish
        child.kill().expect("server exited before killed");
    });
    // Crude startup synchronization — give the server a second to come up.
    thread::sleep(Duration::from_secs(1));
    (sender, handle)
}
/// Drives one interactive `kvs-client txn` child process via its pipes.
struct ClientWrapper {
    child: Child,
    // Buffered stdout of the child; server replies are read line-by-line.
    reader: BufReader<ChildStdout>,
    // Buffered stdin of the child; commands are written here and flushed.
    writer: BufWriter<ChildStdin>,
}
impl ClientWrapper {
    /// Spawns an interactive `kvs-client txn` process against `addrs` and
    /// immediately opens a transaction on it.
    fn new(addrs: Vec<&str>) -> ClientWrapper {
        // Expand each address into a `--addr <addr>` argument pair.
        let mut addr = vec![];
        for add in addrs {
            addr.push("--addr");
            addr.push(add);
        }
        let mut child = Command::cargo_bin("kvs-client")
            .unwrap()
            .args(&[["txn"].to_vec(), addr].concat())
            .env("RUST_LOG", "warn")
            .stdout(Stdio::piped())
            .stdin(Stdio::piped())
            .stderr(Stdio::null())
            .spawn()
            .unwrap();
        // Give the client a moment to connect before driving it.
        thread::sleep(Duration::from_secs(1));
        let stdout = child.stdout.take().expect("Unable get stdout");
        let reader = BufReader::new(stdout);
        let stdin = child.stdin.take().expect("Unable get stdin");
        let writer = BufWriter::new(stdin);
        let mut client = ClientWrapper {
            child,
            reader,
            writer,
        };
        client.begin();
        client
    }
    /// Starts a transaction and waits for the server's acknowledgement.
    fn begin(&mut self) {
        // `write_all` instead of `write`: `write` may perform a short write
        // and silently drop part of the command.
        self.writer.write_all(b"begin\n").expect("Writer error");
        self.writer.flush().expect("Writer error");
        let mut reader_buf = String::new();
        self.reader.read_line(&mut reader_buf).unwrap();
        assert_eq!("Transaction Started", reader_buf.trim());
    }
    /// Sends `set <key> <value>`; no reply is read until commit.
    fn set(&mut self, key: &str, value: &str) {
        let buf = format!("set {} {}\n", key, value);
        self.writer.write_all(buf.as_bytes()).expect("Writer error");
        self.writer.flush().expect("Writer error");
    }
    /// Sends `get <key>` and asserts the reply contains `expected`.
    fn get(&mut self, key: &str, expected: &str) {
        let buf = format!("get {}\n", key);
        self.writer.write_all(buf.as_bytes()).expect("Writer error");
        self.writer.flush().expect("Writer error");
        let mut reader_buf = String::new();
        self.reader.read_line(&mut reader_buf).unwrap();
        assert!(reader_buf.trim().contains(expected.trim()));
    }
    /// Commits the transaction, asserts the verdict line contains
    /// `expected`, then shuts the client down.
    fn commit(&mut self, expected: &str) {
        self.writer.write_all(b"commit\n").expect("Writer error");
        self.writer.flush().expect("Writer error");
        let mut reader_buf = String::new();
        self.reader.read_line(&mut reader_buf).unwrap();
        assert!(reader_buf.trim().contains(expected.trim()));
        self.exit();
    }
    /// Asks the client process to exit and reaps it.
    fn exit(&mut self) {
        self.writer.write_all(b"exit\n").expect("Writer error");
        self.writer.flush().expect("Writer error");
        self.child.wait().expect("command wasn't running");
    }
}
#[test]
fn client_cli_txn_invalid() {
    let temp_dir = TempDir::new().unwrap();
    // Each malformed argument list must make the CLI exit with failure.
    for bad_args in &[
        vec!["txn", "extra"],
        vec!["txn", "--addr", "invalid-addr"],
        vec!["txn", "--unknown-flag"],
    ] {
        Command::cargo_bin("kvs-client")
            .unwrap()
            .args(bad_args)
            .current_dir(&temp_dir)
            .assert()
            .failure();
    }
}
// Single client, per engine: writes made inside an uncommitted transaction
// are not visible to the writer's own reads (reads see the committed
// snapshot), and the commit itself succeeds.
#[test]
fn client_cli_txn_single_access() {
    let addr = vec!["127.0.0.1:6001", "127.0.0.1:6002", "127.0.0.1:6003"];
    for engine in vec!["kvs", "sled"] {
        let temp_dir = TempDir::new().unwrap();
        let (sender, handle) = open_server(engine, addr.clone(), &temp_dir);
        let mut client0 = ClientWrapper::new(addr.clone());
        client0.set("key1", "100");
        client0.set("key2", "200");
        client0.get("key1", "Key not found");
        client0.get("key3", "Key not found");
        client0.commit("Transaction Success");
        // Shut the server down and wait for its watchdog.
        sender.send(()).unwrap();
        handle.join().unwrap();
    }
}
// Predicate-many-preceders, read side: client1's snapshot must not see key3
// even after client2 commits it mid-transaction.
#[test]
fn client_cli_txn_pmp_read_predicates() {
    let addr = vec!["127.0.0.1:6011", "127.0.0.1:6012", "127.0.0.1:6013"];
    for engine in vec!["kvs", "sled"] {
        let temp_dir = TempDir::new().unwrap();
        let (sender, handle) = open_server(engine, addr.clone(), &temp_dir);
        // Seed the store with committed data.
        let mut client0 = ClientWrapper::new(addr.clone());
        client0.set("key1", "100");
        client0.set("key2", "200");
        client0.commit("Transaction Success");
        let mut client1 = ClientWrapper::new(addr.clone());
        client1.get("key3", "Key not found");
        let mut client2 = ClientWrapper::new(addr.clone());
        client2.set("key3", "300");
        client2.commit("Transaction Success");
        // Still invisible to client1's older snapshot.
        client1.get("key3", "Key not found");
        sender.send(()).unwrap();
        handle.join().unwrap();
    }
}
// Predicate-many-preceders, write side: two concurrent transactions write
// overlapping keys; the first committer wins and the second must fail.
#[test]
fn client_cli_txn_pmp_write_predicates() {
    let addr = vec!["127.0.0.1:6021", "127.0.0.1:6022", "127.0.0.1:6023"];
    for engine in vec!["kvs", "sled"] {
        let temp_dir = TempDir::new().unwrap();
        let (sender, handle) = open_server(engine, addr.clone(), &temp_dir);
        let mut client0 = ClientWrapper::new(addr.clone());
        client0.set("key1", "100");
        client0.set("key2", "200");
        client0.commit("Transaction Success");
        let mut client1 = ClientWrapper::new(addr.clone());
        let mut client2 = ClientWrapper::new(addr.clone());
        client1.set("key1", "200");
        client1.set("key2", "300");
        // client1 still reads its snapshot value, not its own buffered write.
        client1.get("key2", "200");
        client2.set("key2", "400");
        client1.commit("Transaction Success");
        // key2 conflict: client2 started before client1 committed.
        client2.commit("Transaction Failed");
        sender.send(()).unwrap();
        handle.join().unwrap();
    }
}
// Lost-update prevention: both clients read key1 then write it; only the
// first commit succeeds, so neither update silently overwrites the other.
#[test]
fn client_cli_txn_lost_update() {
    let addr = vec!["127.0.0.1:6031", "127.0.0.1:6032", "127.0.0.1:6033"];
    for engine in vec!["kvs", "sled"] {
        let temp_dir = TempDir::new().unwrap();
        let (sender, handle) = open_server(engine, addr.clone(), &temp_dir);
        let mut client0 = ClientWrapper::new(addr.clone());
        client0.set("key1", "100");
        client0.set("key2", "200");
        client0.commit("Transaction Success");
        let mut client1 = ClientWrapper::new(addr.clone());
        let mut client2 = ClientWrapper::new(addr.clone());
        client1.get("key1", "100");
        client2.get("key1", "100");
        client1.set("key1", "101");
        client2.set("key1", "101");
        client1.commit("Transaction Success");
        client2.commit("Transaction Failed");
        sender.send(()).unwrap();
        handle.join().unwrap();
    }
}
// Read-skew prevention (read-only reader): client1 keeps seeing the values
// of its own snapshot even after client2 commits updates to both keys.
#[test]
fn client_cli_txn_read_skew_read_only() {
    let addr = vec!["127.0.0.1:6041", "127.0.0.1:6042", "127.0.0.1:6043"];
    for engine in vec!["kvs", "sled"] {
        let temp_dir = TempDir::new().unwrap();
        let (sender, handle) = open_server(engine, addr.clone(), &temp_dir);
        let mut client0 = ClientWrapper::new(addr.clone());
        client0.set("key1", "100");
        client0.set("key2", "200");
        client0.commit("Transaction Success");
        let mut client1 = ClientWrapper::new(addr.clone());
        let mut client2 = ClientWrapper::new(addr.clone());
        client1.get("key1", "100");
        client2.get("key1", "100");
        client2.get("key2", "200");
        client2.set("key1", "101");
        client2.set("key2", "201");
        client2.commit("Transaction Success");
        // Snapshot value, not client2's freshly committed 201.
        client1.get("key2", "200");
        sender.send(()).unwrap();
        handle.join().unwrap();
    }
}
// Read-skew with predicate dependencies: key3, committed after client1's
// snapshot was taken, must remain invisible to client1.
#[test]
fn client_cli_txn_read_skew_predicate_dependencies() {
    let addr = vec!["127.0.0.1:6051", "127.0.0.1:6052", "127.0.0.1:6053"];
    for engine in vec!["kvs", "sled"] {
        let temp_dir = TempDir::new().unwrap();
        let (sender, handle) = open_server(engine, addr.clone(), &temp_dir);
        let mut client0 = ClientWrapper::new(addr.clone());
        client0.set("key1", "100");
        client0.set("key2", "200");
        client0.commit("Transaction Success");
        let mut client1 = ClientWrapper::new(addr.clone());
        let mut client2 = ClientWrapper::new(addr.clone());
        client1.get("key1", "100");
        client1.get("key2", "200");
        client2.set("key3", "300");
        client2.commit("Transaction Success");
        client1.get("key3", "Key not found");
        sender.send(()).unwrap();
        handle.join().unwrap();
    }
}
// Read-skew, write side: client1's write to key2 conflicts with client2's
// already-committed update of key2, so client1's commit must fail.
#[test]
fn client_cli_txn_read_skew_write_predicate() {
    let addr = vec!["127.0.0.1:6061", "127.0.0.1:6062", "127.0.0.1:6063"];
    for engine in vec!["kvs", "sled"] {
        let temp_dir = TempDir::new().unwrap();
        let (sender, handle) = open_server(engine, addr.clone(), &temp_dir);
        let mut client0 = ClientWrapper::new(addr.clone());
        client0.set("key1", "100");
        client0.set("key2", "200");
        client0.commit("Transaction Success");
        let mut client1 = ClientWrapper::new(addr.clone());
        let mut client2 = ClientWrapper::new(addr.clone());
        client1.get("key1", "100");
        client2.get("key1", "100");
        client2.get("key2", "200");
        client2.set("key1", "101");
        client2.set("key2", "201");
        client2.commit("Transaction Success");
        client1.set("key2", "300");
        client1.commit("Transaction Failed");
        sender.send(()).unwrap();
        handle.join().unwrap();
    }
}
// Write skew: the two transactions read both keys but write *disjoint* keys,
// so both commits are allowed to succeed (write skew is permitted here, as
// under snapshot isolation — presumably intentional; see the expectations).
#[test]
fn client_cli_txn_write_skew() {
    let addr = vec!["127.0.0.1:6071", "127.0.0.1:6072", "127.0.0.1:6073"];
    for engine in vec!["kvs", "sled"] {
        let temp_dir = TempDir::new().unwrap();
        let (sender, handle) = open_server(engine, addr.clone(), &temp_dir);
        let mut client0 = ClientWrapper::new(addr.clone());
        client0.set("key1", "100");
        client0.set("key2", "200");
        client0.commit("Transaction Success");
        let mut client1 = ClientWrapper::new(addr.clone());
        let mut client2 = ClientWrapper::new(addr.clone());
        client1.get("key1", "100");
        client1.get("key2", "200");
        client2.get("key1", "100");
        client2.get("key2", "200");
        client1.set("key1", "101");
        client2.set("key2", "201");
        client1.commit("Transaction Success");
        client2.commit("Transaction Success");
        sender.send(()).unwrap();
        handle.join().unwrap();
    }
}
// Anti-dependency cycles: two concurrent writers of disjoint keys both
// commit, and a later transaction observes both of their writes.
#[test]
fn client_cli_txn_anti_dependency_cycles() {
    let addr = vec!["127.0.0.1:6081", "127.0.0.1:6082", "127.0.0.1:6083"];
    for engine in vec!["kvs", "sled"] {
        let temp_dir = TempDir::new().unwrap();
        let (sender, handle) = open_server(engine, addr.clone(), &temp_dir);
        let mut client0 = ClientWrapper::new(addr.clone());
        client0.set("key1", "100");
        client0.set("key2", "200");
        client0.commit("Transaction Success");
        let mut client1 = ClientWrapper::new(addr.clone());
        let mut client2 = ClientWrapper::new(addr.clone());
        client1.set("key3", "300");
        client2.set("key4", "400");
        client1.commit("Transaction Success");
        client2.commit("Transaction Success");
        let mut client3 = ClientWrapper::new(addr.clone());
        client3.get("key3", "300");
        client3.get("key4", "400");
        sender.send(()).unwrap();
        handle.join().unwrap();
    }
}
/// Fault-injecting gRPC proxy between a client and one kvs-server.
struct Proxy {
    // Address this proxy listens on.
    addr: String,
    // Address of the real server the proxy forwards to.
    server_addr: String,
    // Drop commit requests (except the primary's, unless `fail_primary`).
    drop_req: bool,
    // Forward commit requests but report data_loss to the caller.
    drop_resp: bool,
    // When dropping requests, also drop the primary commit.
    fail_primary: bool,
}
impl Proxy {
    /// Opens a lazily-connecting gRPC client to this proxy's upstream server.
    fn build_client(&self) -> KvRpcClient<Channel> {
        KvRpcClient::new(
            Channel::from_shared(format!("http://{}", self.server_addr))
                .unwrap()
                .connect_lazy()
                .unwrap(),
        )
    }
}
#[tonic::async_trait]
impl KvRpc for Proxy {
    // Every RPC except txn_commit is forwarded to the upstream server as-is.
    async fn get_timestamp(
        &self,
        request: tonic::Request<TsRequest>,
    ) -> std::result::Result<tonic::Response<TsReply>, tonic::Status> {
        self.build_client().get_timestamp(request).await
    }
    async fn txn_get(
        &self,
        request: tonic::Request<GetRequest>,
    ) -> std::result::Result<tonic::Response<GetReply>, tonic::Status> {
        self.build_client().txn_get(request).await
    }
    async fn txn_prewrite(
        &self,
        request: tonic::Request<PrewriteRequest>,
    ) -> std::result::Result<tonic::Response<PrewriteReply>, tonic::Status> {
        self.build_client().txn_prewrite(request).await
    }
    // Commit is where faults are injected:
    // * drop_req: swallow the request (data_loss) — except the primary
    //   commit, which still goes through unless fail_primary is also set;
    // * drop_resp: forward the request, then report data_loss regardless of
    //   the upstream outcome.
    async fn txn_commit(
        &self,
        request: tonic::Request<CommitRequest>,
    ) -> std::result::Result<tonic::Response<CommitReply>, tonic::Status> {
        let request = request.into_inner();
        let req = if self.drop_req {
            if request.is_primary && !self.fail_primary {
                self.build_client()
                    .txn_commit(tonic::Request::new(request))
                    .await
            } else {
                Err(tonic::Status::data_loss("Drop request"))
            }
        } else {
            self.build_client()
                .txn_commit(tonic::Request::new(request))
                .await
        };
        if self.drop_resp {
            Err(tonic::Status::data_loss("Drop response"))
        } else {
            req
        }
    }
}
// A bundle of proxies, one per server in the cluster.
struct MultiProxy {
    proxys: Vec<Proxy>,
}
impl MultiProxy {
    /// Builds one `Proxy` per (listen address, upstream address) pair, all
    /// sharing the same fault-injection flags.
    fn new(
        addr: Vec<&str>,
        server_addr: Vec<&str>,
        drop_req: bool,
        drop_resp: bool,
        fail_primary: bool,
    ) -> Self {
        let proxys = addr
            .into_iter()
            .zip(server_addr)
            .map(|(listen, upstream)| Proxy {
                addr: listen.to_owned(),
                server_addr: upstream.to_owned(),
                drop_req,
                drop_resp,
                fail_primary,
            })
            .collect();
        Self { proxys }
    }
}
// Starts one tonic server per proxy, each on its own OS thread with its own
// multi-threaded tokio runtime. The returned thread handles are never joined
// by the tests — the proxies serve until the test process exits.
fn proxy_hook(proxys: MultiProxy) -> Vec<JoinHandle<()>> {
    proxys
        .proxys
        .into_iter()
        .map(|proxy| {
            let threaded_rt = tokio::runtime::Builder::new_multi_thread()
                .enable_all()
                .build()
                .unwrap();
            std::thread::spawn(move || {
                threaded_rt
                    .block_on(async move {
                        let addr = proxy.addr.clone().parse().unwrap();
                        tonic::transport::Server::builder()
                            .add_service(KvRpcServer::new(proxy))
                            .serve(addr)
                            .await
                            .map_err(|e| KvError::StringError(e.to_string()))
                    })
                    .unwrap();
            })
        })
        .collect()
}
// Baseline proxy test: with no faults injected, behavior through the proxies
// matches the direct write-skew scenario (both disjoint-write commits pass).
#[test]
fn client_cli_txn_test_proxy_with_nothing_drop() {
    let server_addr = vec!["127.0.0.1:6091", "127.0.0.1:6092", "127.0.0.1:6093"];
    let addr = vec!["127.0.0.1:6101", "127.0.0.1:6102", "127.0.0.1:6103"];
    let proxy = MultiProxy::new(addr.clone(), server_addr.clone(), false, false, false);
    let _proxy_handle = proxy_hook(proxy);
    thread::sleep(Duration::from_secs(1));
    for engine in vec!["kvs", "sled"] {
        let temp_dir = TempDir::new().unwrap();
        let (sender, handle) = open_server(engine, server_addr.clone(), &temp_dir);
        let mut client0 = ClientWrapper::new(addr.clone());
        client0.set("key1", "100");
        client0.set("key2", "200");
        client0.commit("Transaction Success");
        let mut client1 = ClientWrapper::new(addr.clone());
        let mut client2 = ClientWrapper::new(addr.clone());
        client1.get("key1", "100");
        client1.get("key2", "200");
        client2.get("key1", "100");
        client2.get("key2", "200");
        client1.set("key1", "101");
        client2.set("key2", "201");
        client1.commit("Transaction Success");
        client2.commit("Transaction Success");
        sender.send(()).unwrap();
        handle.join().unwrap();
    }
}
// Percolator-style recovery: the proxy drops every commit request except the
// primary's. Because the primary commit lands, a later reader must still see
// all three writes (secondaries are presumably rolled forward on read —
// confirm against the server's recovery logic).
#[test]
fn client_cli_txn_commit_primary_drop_secondary_requests() {
    let server_addr = vec!["127.0.0.1:6111", "127.0.0.1:6112", "127.0.0.1:6113"];
    let addr = vec!["127.0.0.1:6121", "127.0.0.1:6122", "127.0.0.1:6123"];
    let proxy = MultiProxy::new(addr.clone(), server_addr.clone(), true, false, false);
    let _proxy_handle = proxy_hook(proxy);
    thread::sleep(Duration::from_secs(1));
    for engine in vec!["kvs", "sled"] {
        let temp_dir = TempDir::new().unwrap();
        let (sender, handle) = open_server(engine, server_addr.clone(), &temp_dir);
        let mut client0 = ClientWrapper::new(addr.clone());
        client0.set("key1", "100");
        client0.set("key2", "200");
        client0.set("key3", "300");
        client0.commit("Transaction Success");
        let mut client1 = ClientWrapper::new(addr.clone());
        client1.get("key1", "100");
        client1.get("key2", "200");
        client1.get("key3", "300");
        sender.send(()).unwrap();
        handle.join().unwrap();
    }
}
// NOTE(review): the fault-injection flags here (drop_req = true, others
// false) are identical to client_cli_txn_commit_primary_drop_secondary_requests
// above, so the two tests currently exercise the same scenario -- confirm
// whether a different flag combination was intended for "primary success".
#[test]
fn client_cli_txn_commit_primary_success() {
    let server_addr = vec!["127.0.0.1:6131", "127.0.0.1:6132", "127.0.0.1:6133"];
    let addr = vec!["127.0.0.1:6141", "127.0.0.1:6142", "127.0.0.1:6143"];
    let proxy = MultiProxy::new(addr.clone(), server_addr.clone(), true, false, false);
    let _proxy_handle = proxy_hook(proxy);
    // Give the proxy threads time to bind their listeners.
    thread::sleep(Duration::from_secs(1));
    for engine in vec!["kvs", "sled"] {
        let temp_dir = TempDir::new().unwrap();
        let (sender, handle) = open_server(engine, server_addr.clone(), &temp_dir);
        let mut client0 = ClientWrapper::new(addr.clone());
        client0.set("key1", "100");
        client0.set("key2", "200");
        client0.set("key3", "300");
        client0.commit("Transaction Success");
        let mut client1 = ClientWrapper::new(addr.clone());
        client1.get("key1", "100");
        client1.get("key2", "200");
        client1.get("key3", "300");
        sender.send(()).unwrap();
        handle.join().unwrap();
    }
}
// drop_resp = true: responses are dropped, so client0 observes the commit
// as failed ("Transaction Failed") even though the server applied it --
// a later client must still read all three committed values.
#[test]
fn client_cli_txn_commit_primary_success_without_response() {
    let server_addr = vec!["127.0.0.1:6151", "127.0.0.1:6152", "127.0.0.1:6153"];
    let addr = vec!["127.0.0.1:6161", "127.0.0.1:6162", "127.0.0.1:6163"];
    let proxy = MultiProxy::new(addr.clone(), server_addr.clone(), false, true, false);
    let _proxy_handle = proxy_hook(proxy);
    // Give the proxy threads time to bind their listeners.
    thread::sleep(Duration::from_secs(1));
    for engine in vec!["kvs", "sled"] {
        let temp_dir = TempDir::new().unwrap();
        let (sender, handle) = open_server(engine, server_addr.clone(), &temp_dir);
        let mut client0 = ClientWrapper::new(addr.clone());
        client0.set("key1", "100");
        client0.set("key2", "200");
        client0.set("key3", "300");
        client0.commit("Transaction Failed");
        let mut client1 = ClientWrapper::new(addr.clone());
        client1.get("key1", "100");
        client1.get("key2", "200");
        client1.get("key3", "300");
        sender.send(()).unwrap();
        handle.join().unwrap();
    }
}
// fail_primary = true (plus dropped requests): the primary commit fails,
// so the whole transaction must roll back -- no key may be visible.
#[test]
fn client_cli_txn_commit_primary_fail() {
    let server_addr = vec!["127.0.0.1:6171", "127.0.0.1:6172", "127.0.0.1:6173"];
    let addr = vec!["127.0.0.1:6181", "127.0.0.1:6182", "127.0.0.1:6183"];
    let proxy = MultiProxy::new(addr.clone(), server_addr.clone(), true, false, true);
    let _proxy_handle = proxy_hook(proxy);
    // Give the proxy threads time to bind their listeners.
    thread::sleep(Duration::from_secs(1));
    for engine in vec!["kvs", "sled"] {
        let temp_dir = TempDir::new().unwrap();
        let (sender, handle) = open_server(engine, server_addr.clone(), &temp_dir);
        let mut client0 = ClientWrapper::new(addr.clone());
        client0.set("key1", "100");
        client0.set("key2", "200");
        client0.set("key3", "300");
        client0.commit("Transaction Failed");
        let mut client1 = ClientWrapper::new(addr.clone());
        client1.get("key1", "Key not found");
        client1.get("key2", "Key not found");
        client1.get("key3", "Key not found");
        sender.send(()).unwrap();
        handle.join().unwrap();
    }
}
// Crash-recovery: a failed transaction (primary commit failed) must leave
// nothing behind after the server is shut down and restarted on the same
// data directory.
#[test]
fn client_cli_txn_server_crash() {
    let server_addr = vec!["127.0.0.1:6191", "127.0.0.1:6192", "127.0.0.1:6193"];
    let addr = vec!["127.0.0.1:6201", "127.0.0.1:6202", "127.0.0.1:6203"];
    let proxy = MultiProxy::new(addr.clone(), server_addr.clone(), true, false, true);
    let _proxy_handle = proxy_hook(proxy);
    // Give the proxy threads time to bind their listeners.
    thread::sleep(Duration::from_secs(1));
    for engine in vec!["kvs", "sled"] {
        let temp_dir = TempDir::new().unwrap();
        let (sender, handle) = open_server(engine, server_addr.clone(), &temp_dir);
        let mut client0 = ClientWrapper::new(addr.clone());
        client0.set("key1", "100");
        client0.set("key2", "200");
        client0.set("key3", "300");
        client0.commit("Transaction Failed");
        sender.send(()).unwrap();
        handle.join().unwrap();
        // Restart the server on the same data directory ("crash" recovery).
        let (sender, handle) = open_server(engine, server_addr.clone(), &temp_dir);
        let mut client1 = ClientWrapper::new(addr.clone());
        client1.get("key1", "Key not found");
        client1.get("key2", "Key not found");
        client1.get("key3", "Key not found");
        sender.send(()).unwrap();
        handle.join().unwrap();
    }
}
|
// Copyright 2019 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use error_chain::bail;
use nix::errno::Errno;
use nix::{self, ioctl_none, ioctl_write_ptr_bad, mount, request_code_none};
use serde::Deserialize;
use std::fs::{remove_dir, File};
use std::io::{Seek, SeekFrom};
use std::os::unix::io::AsRawFd;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::thread::sleep;
use std::time::Duration;
use tempdir::TempDir;
use crate::errors::*;
/// Mounts the partition labeled "boot" on `device` at a fresh temporary
/// mountpoint and returns the live mount handle.
pub fn mount_boot(device: &str) -> Result<Mount> {
    let dev = get_partition_with_label(device, "boot")?
        .chain_err(|| format!("couldn't find boot device for {}", device))?;
    if let Some(fstype) = dev.fstype {
        Mount::try_mount(&dev.path, &fstype)
    } else {
        Err(format!("couldn't get filesystem type of boot device for {}", device).into())
    }
}
/// Top-level shape of `lsblk --json` output.
#[derive(Deserialize)]
struct LsBlk {
    blockdevices: Vec<BlkDev>,
}
/// One block device entry from `lsblk --output PATH,LABEL,FSTYPE`.
#[derive(Deserialize)]
struct BlkDev {
    path: String,
    // Filesystem label; absent for unlabeled partitions.
    label: Option<String>,
    // Filesystem type; absent when lsblk could not detect one.
    fstype: Option<String>,
}
/// Runs `lsblk` on `device` and returns the unique partition whose
/// filesystem label equals `label`. Returns `Ok(None)` when nothing
/// matches and an error when the label is ambiguous or `lsblk` fails.
fn get_partition_with_label(device: &str, label: &str) -> Result<Option<BlkDev>> {
    let result = Command::new("lsblk")
        .arg("--json")
        .arg("--output")
        .arg("PATH,LABEL,FSTYPE")
        .arg(device)
        .output()
        .chain_err(|| "running lsblk")?;
    if !result.status.success() {
        // Relay lsblk's own diagnostics before bailing out.
        eprint!("{}", String::from_utf8_lossy(&*result.stderr));
        bail!("lsblk of {} failed", device);
    }
    let output: LsBlk =
        serde_json::from_slice(&*result.stdout).chain_err(|| "decoding lsblk JSON")?;
    // Lazily scan for matching labels; pulling a second match means the
    // label is ambiguous on this device.
    let mut matches = output
        .blockdevices
        .into_iter()
        .filter(|dev| dev.label.as_ref().map_or(false, |l| l.as_str() == label));
    let found = matches.next();
    if found.is_some() && matches.next().is_some() {
        bail!("found multiple devices on {} with label: {}", device, label);
    }
    Ok(found)
}
/// A mounted filesystem; unmounted and its mountpoint removed on drop.
#[derive(Debug)]
pub struct Mount {
    // Device node path, kept for error reporting on unmount.
    device: String,
    // Temporary directory the device is mounted on.
    mountpoint: PathBuf,
}
impl Mount {
    /// Mounts `device` (with filesystem type `fstype`) on a freshly created
    /// temporary directory and returns the tracking handle.
    fn try_mount(device: &str, fstype: &str) -> Result<Mount> {
        let tempdir =
            TempDir::new("coreos-installer").chain_err(|| "creating temporary directory")?;
        // avoid auto-cleanup of tempdir, which could recursively remove
        // the partition contents if umount failed
        let mountpoint = tempdir.into_path();
        mount::mount::<str, Path, str, str>(
            Some(device),
            &mountpoint,
            Some(fstype),
            mount::MsFlags::empty(),
            None,
        )
        .chain_err(|| format!("mounting device {} on {}", device, mountpoint.display()))?;
        Ok(Mount {
            device: device.to_string(),
            mountpoint,
        })
    }
    /// Path where the device is currently mounted.
    pub fn mountpoint(&self) -> &Path {
        self.mountpoint.as_path()
    }
}
impl Drop for Mount {
    fn drop(&mut self) {
        // Unmount sometimes fails immediately after closing the last open
        // file on the partition. Retry several times before giving up.
        for retries in (0..20).rev() {
            match mount::umount(&self.mountpoint) {
                Ok(_) => break,
                Err(err) => {
                    if retries == 0 {
                        // Out of retries: report and keep the mountpoint
                        // directory (removing it while mounted would fail).
                        eprintln!("umounting {}: {}", self.device, err);
                        return;
                    } else {
                        sleep(Duration::from_millis(100));
                    }
                }
            }
        }
        // Unmounted successfully; now clean up the temporary mountpoint.
        if let Err(err) = remove_dir(&self.mountpoint) {
            eprintln!("removing {}: {}", self.mountpoint.display(), err);
            return;
        }
    }
}
pub fn reread_partition_table(file: &mut File) -> Result<()> {
let fd = file.as_raw_fd();
// Reread sometimes fails inexplicably. Retry several times before
// giving up.
for retries in (0..20).rev() {
let result = unsafe { blkrrpart(fd) };
if result.is_ok() {
break;
} else if retries == 0 {
return result.and(Ok(())).chain_err(|| "rereading partition table");
} else {
sleep(Duration::from_millis(100));
}
}
Ok(())
}
/// Try discarding all blocks from the underlying block device.
/// Return true if successful, false if the underlying device doesn't
/// support discard, or an error otherwise.
pub fn try_discard_all(file: &mut File) -> Result<bool> {
// get device size
let length = file
.seek(SeekFrom::End(0))
.chain_err(|| "seeking device file")?;
file.seek(SeekFrom::Start(0))
.chain_err(|| "seeking device file")?;
// discard
let fd = file.as_raw_fd();
let range: [u64; 2] = [0, length];
match unsafe { blkdiscard(fd, &range) } {
Ok(_) => Ok(true),
Err(e) => {
if e == nix::Error::from_errno(Errno::EOPNOTSUPP) {
Ok(false)
} else {
Err(Error::with_chain(e, "discarding device contents"))
}
}
}
}
// create unsafe ioctl wrappers
// BLKRRPART (0x12, 95): tell the kernel to re-read the partition table.
ioctl_none!(blkrrpart, 0x12, 95);
// BLKDISCARD (0x12, 119): discard a byte range, passed as a [start, length]
// pair (see `try_discard_all`).
ioctl_write_ptr_bad!(blkdiscard, request_code_none!(0x12, 119), [u64; 2]);
/// Waits until udevd has processed all pending kernel events.
pub fn udev_settle() -> Result<()> {
    // "udevadm settle" silently no-ops if the udev socket is missing, and
    // then lsblk can't find partition labels. Catch this early.
    let udev_socket = Path::new("/run/udev/control");
    if !udev_socket.exists() {
        return Err(
            "udevd socket missing; are we running in a container without /run/udev mounted?".into(),
        );
    }
    // There's a potential window after rereading the partition table where
    // udevd hasn't yet received updates from the kernel, settle will return
    // immediately, and lsblk won't pick up partition labels. Try to sleep
    // our way out of this.
    sleep(Duration::from_millis(200));
    let status = Command::new("udevadm")
        .arg("settle")
        .status()
        .chain_err(|| "running udevadm settle")?;
    if status.success() {
        Ok(())
    } else {
        bail!("udevadm settle failed");
    }
}
|
use std::fmt::Debug;
use range_minimum_query::RangeMinimumQuery;
#[macro_use]
extern crate serde;
extern crate num;
pub mod bitvector;
pub mod range_minimum_query;
pub mod suffix_array;
pub mod suffix_tree;
#[cfg(test)]
mod tests {
    // Placeholder smoke test; real coverage lives in the `test` module
    // further down and in the per-structure submodules.
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}
/// Struct to carry out longest common prefix between two prefix of two strings.
/// Specifically, for two input string `xs` and `ys`,
/// This data structure answers the following query in O(1) time.
/// - Input: `i` (an index of `xs`) and `j` (an index of `ys`).
/// - Output: `LCP(&xs[i..], &ys[j..])`
#[derive(Debug, Clone)]
pub struct LongestCommonPrefix {
    // RMQ over the LCP array of the concatenation xs ++ [separator] ++ ys.
    range_minimum_query: range_minimum_query::LinearRangeMinimumQuery<usize>,
    // Suffix start position -> rank in the suffix array.
    inverse_suffix_array: Vec<usize>,
    lcp_array: Vec<usize>,
    // Length of xs, used to translate ys indices into the concatenation.
    xslen: usize,
}
impl LongestCommonPrefix {
    /// Builds the LCP oracle for `xs` and `ys` over `alphabet`.
    /// Symbols are mapped to 1-based ranks and the two strings are
    /// concatenated as `xs ++ [0] ++ ys` (0 acting as a unique separator),
    /// then suffix array + LCP array + RMQ are built over the result.
    ///
    /// Panics if a symbol of `xs`/`ys` does not occur in `alphabet`.
    pub fn new<T: Clone + Eq + Ord + Debug>(xs: &[T], ys: &[T], alphabet: &[T]) -> Self {
        let xslen = xs.len();
        // First, concatenate them.
        // Map each symbol to its 1-based rank in `alphabet`.
        let alphabet: Vec<(_, u8)> = alphabet
            .iter()
            .enumerate()
            .map(|(i, t)| (t, i as u8 + 1))
            .collect();
        let to_lex = |c: &T| alphabet.iter().find(|r| r.0 == c).unwrap().1;
        let xs = xs.iter().map(to_lex);
        let ys = ys.iter().map(to_lex);
        let input: Vec<_> = xs.chain(std::iter::once(0)).chain(ys).collect();
        // The lexical alphabet now is 0 (separator) plus the mapped ranks.
        let alphabet: Vec<_> = (0..alphabet.len() as u8 + 1).collect();
        let sa = suffix_array::SuffixArray::new(&input, &alphabet);
        let inverse_sa = sa.inverse();
        let lcp_array = suffix_array::longest_common_prefix(&input, &sa, &inverse_sa);
        let rmq = range_minimum_query::LinearRangeMinimumQuery::new(&lcp_array);
        Self {
            xslen,
            range_minimum_query: rmq,
            inverse_suffix_array: inverse_sa,
            lcp_array,
        }
    }
    /// Returns `LCP(&xs[xs_start..], &ys[ys_start..])` in O(1) via a
    /// range-minimum over the LCP array between the two suffix ranks.
    pub fn lcp(&self, xs_start: usize, ys_start: usize) -> usize {
        let x_suffix_rank = self.inverse_suffix_array[xs_start];
        // ys suffixes start after xs and the separator, hence the +1 offset.
        let y_suffix_rank = self.inverse_suffix_array[self.xslen + 1 + ys_start];
        let (start, end) = match x_suffix_rank < y_suffix_rank {
            true => (x_suffix_rank, y_suffix_rank),
            false => (y_suffix_rank, x_suffix_rank),
        };
        // NOTE(review): the +1 offsets assume the range convention of
        // `LinearRangeMinimumQuery::min` and the LCP-array indexing used by
        // `suffix_array::longest_common_prefix` -- confirm against those modules.
        let min_exact_match_idx = self.range_minimum_query.min(start + 1, end + 1);
        self.lcp_array[min_exact_match_idx]
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use rand::seq::SliceRandom;
    use rand::SeedableRng;
    use rand_xoshiro::Xoroshiro128PlusPlus;
    // Fixed small inputs; every (i, j) answer is cross-checked against a
    // naive character-by-character comparison.
    #[test]
    fn test_lcp() {
        let alphabet = b"ACGT";
        let xs = b"ACGTTTTT";
        let ys = b"ACGTCTCC";
        let lcp = LongestCommonPrefix::new(xs, ys, alphabet);
        // The two strings share exactly the 4-character prefix "ACGT".
        for i in 0..4 {
            assert_eq!(lcp.lcp(i, i), 4 - i);
        }
        let len = 4;
        for i in 0..len {
            for j in 0..len {
                let match_len = lcp.lcp(i, j);
                let answer = std::iter::zip(&xs[i..], &ys[j..])
                    .take_while(|(x, y)| x == y)
                    .count();
                assert_eq!(match_len, answer, "{},{}", i, j);
            }
        }
    }
    // Randomized cross-check against the naive answer, with a fixed seed
    // for reproducibility.
    #[test]
    fn test_lcp_random() {
        let len = 150;
        let alphabet_size = 10;
        let alphabet: Vec<_> = (0..alphabet_size).collect();
        // let alphabet = b"ACGT";
        let mut rng: Xoroshiro128PlusPlus = SeedableRng::seed_from_u64(348203);
        let xs: Vec<_> = (0..len)
            .filter_map(|_| alphabet.choose(&mut rng))
            .copied()
            .collect();
        let ys: Vec<_> = (0..len)
            .filter_map(|_| alphabet.choose(&mut rng))
            .copied()
            .collect();
        let lcp = LongestCommonPrefix::new(&xs, &ys, &alphabet);
        for i in 0..len {
            for j in 0..len {
                let match_len = lcp.lcp(i, j);
                let answer = std::iter::zip(&xs[i..], &ys[j..])
                    .take_while(|(x, y)| x == y)
                    .count();
                assert_eq!(match_len, answer, "{},{}", i, j);
            }
        }
    }
}
|
// Minimized reproducer for a Prusti internal compiler error (see the panic
// backtrace below): encoding the call to a closure with inferred argument
// types hits an `Option::unwrap()` on `None` in the procedure encoder.
// Do not "clean up" this code -- its exact shape is what triggers the bug.
fn main() {
    let x = |_, _| 0;
    let _ = x(0, 0);
}
/*
thread 'rustc' panicked at 'called `Option::unwrap()` on a `None` value', prusti-viper/src/encoder/procedure_encoder.rs:2582:57
stack backtrace:
0: rust_begin_unwind
at /rustc/8007b506ac5da629f223b755f5a5391edd5f6d01/library/std/src/panicking.rs:517:5
1: core::panicking::panic_fmt
at /rustc/8007b506ac5da629f223b755f5a5391edd5f6d01/library/core/src/panicking.rs:93:14
2: core::panicking::panic
at /rustc/8007b506ac5da629f223b755f5a5391edd5f6d01/library/core/src/panicking.rs:50:5
3: prusti_viper::encoder::procedure_encoder::ProcedureEncoder::encode_impure_function_call
4: prusti_viper::encoder::procedure_encoder::ProcedureEncoder::encode_terminator
5: prusti_viper::encoder::procedure_encoder::ProcedureEncoder::encode_statement_at
6: prusti_viper::encoder::procedure_encoder::ProcedureEncoder::encode_blocks_group
7: prusti_viper::encoder::procedure_encoder::ProcedureEncoder::encode
8: prusti_viper::encoder::encoder::Encoder::encode_procedure
9: prusti_viper::encoder::encoder::Encoder::process_encoding_queue
10: prusti_viper::verifier::Verifier::verify
11: prusti_driver::verifier::verify
12: <prusti_driver::callbacks::PrustiCompilerCalls as rustc_driver::Callbacks>::after_analysis
13: rustc_interface::queries::<impl rustc_interface::interface::Compiler>::enter
14: rustc_span::with_source_map
15: rustc_interface::interface::create_compiler_and_run
16: scoped_tls::ScopedKey<T>::set
*/
|
use std::marker::PhantomData;
use std::mem::{align_of, forget, size_of};
use std::ptr;
use raw::{Boxed, CallRawOnce, FnBox, Static, Trait};
use {Array, StaticFn};
/// A type-erased `FnOnce` wrapper that stores the callable inline in `data`
/// when it fits (falling back to a boxed payload otherwise -- see `new`).
pub struct FnStackOnce<'a, A, O, D = [u8; 16]>
where
    D: Array,
{
    // Inline storage for the erased callable payload.
    data: D,
    // Type-erased destructor for the value stored in `data`.
    drop: fn(*const u8),
    // Type-erased call-once entry point.
    ptr: fn(*const u8, A) -> O,
    // Ties the wrapper's lifetime/variance to the erased closure type.
    marker: PhantomData<FnOnce(A) -> O + 'a>,
}
impl<'a, A, O, D> FnStackOnce<'a, A, O, D>
where
    D: Array,
{
    /// Wraps `f`, storing it inline when it fits in `D` and boxing it
    /// otherwise.
    #[inline]
    pub fn new<F: 'a>(f: F) -> Self
    where
        F: FnOnce(A) -> O + 'a,
    {
        if size_of::<F>() < D::size() && align_of::<F>() <= D::align() {
            FnStackOnce::from_raw(Trait(f))
        } else {
            FnStackOnce::from_raw(Boxed(Box::new(f)))
        }
    }
    /// Builds a wrapper around a zero-sized static function type.
    #[inline]
    pub fn from_static<F>() -> Self
    where
        F: StaticFn<A, O>,
    {
        FnStackOnce::from_raw(Static(PhantomData::<F>))
    }
    #[inline]
    fn from_raw<R>(raw: R) -> Self
    where
        R: CallRawOnce<A, O>,
    {
        // BUG FIX: the original `assert!(cond1, cond2)` only checked the size
        // condition and used the alignment condition as the panic *payload*,
        // so misaligned payloads were never rejected. Both conditions must
        // hold before `raw` is written into the inline buffer.
        assert!(size_of::<R>() <= D::size() && align_of::<R>() <= D::align());
        unsafe {
            let mut data = D::uninitialized();
            // SAFETY: the assertion above guarantees `R` fits in `D` and is
            // suitably aligned; `data` is freshly created and exclusively owned.
            ptr::write(&mut data as *mut D as *mut R, raw);
            FnStackOnce {
                data,
                drop: R::drop_raw,
                ptr: R::call_raw_once,
                marker: PhantomData,
            }
        }
    }
    /// Invokes the stored callable, consuming the wrapper without running its
    /// destructor (ownership of the payload moves into the call itself).
    #[inline]
    pub fn call(self, args: A) -> O {
        let res = (self.ptr)(self.data.as_ptr(), args);
        forget(self);
        res
    }
}
impl<'a, A, O, D> Drop for FnStackOnce<'a, A, O, D>
where
    D: Array,
{
    fn drop(&mut self) {
        // Run the erased payload's destructor. `call` consumes the wrapper
        // via `forget`, so a called wrapper never reaches this path and the
        // payload is never dropped twice.
        (self.drop)(self.data.as_ptr())
    }
}
impl<'a, A, O, D, F> From<Box<F>> for FnStackOnce<'a, A, O, D>
where
    D: Array,
    F: FnBox<A, O> + ?Sized + 'a,
{
    /// Wraps an already-boxed (possibly unsized) callable without re-boxing.
    #[inline]
    fn from(f: Box<F>) -> Self {
        FnStackOnce::from_raw(Boxed(f))
    }
}
#[cfg(test)]
mod tests {
    use super::FnStackOnce;
    // Verifies that an un-called wrapper drops its captured payload exactly
    // once, for both the inline path and the boxed (oversized) path.
    #[test]
    fn test_drop() {
        let mut num_drops = 0;
        struct Droppable<'a>(&'a mut i32);
        impl<'a> Drop for Droppable<'a> {
            fn drop(&mut self) {
                *self.0 += 1;
            }
        }
        {
            // Small closure: stored inline in the default 16-byte buffer.
            let droppable = Droppable(&mut num_drops);
            let _closure: FnStackOnce<(), ()> = FnStackOnce::new(move |()| {
                let _d = droppable;
            });
        }
        assert_eq!(num_drops, 1);
        // 128-byte payload exceeds the 16-byte buffer, forcing the boxed path.
        struct ExceedsLimit<'a>(Droppable<'a>, [u8; 128]);
        {
            let obj = ExceedsLimit(Droppable(&mut num_drops), [0; 128]);
            let _closure: FnStackOnce<(), (), [u8; 16]> = FnStackOnce::new(move |()| {
                let _o = obj;
            });
        }
        assert_eq!(num_drops, 2);
    }
    // Compile-time check: a 'static wrapper must coerce to a shorter lifetime.
    #[test]
    fn variance_check() {
        fn takes_fn<'a>(f: FnStackOnce<'a, (), ()>) {
            f.call(());
        }
        const X: usize = 5;
        let x = &X;
        let f: FnStackOnce<'static, _, _> = FnStackOnce::new(move |()| {
            let _y = x;
        });
        takes_fn(f);
    }
}
|
// The camel is expected to not have ended the race yet.
// `count` is expected to be between 1 and 3, both inclusive.
// If count + position would go past the final box, the move is clamped to
// the final box (box 16).
pub fn move_camel(camel_id: u8, count: u8, circuit: &mut Vec<Vec<u8>>) {
    // Robustness: nothing to do on an empty circuit (the original indexed
    // unconditionally and would panic here).
    if circuit.is_empty() {
        return;
    }
    // Locate the camel as (box index, position inside the box). If the camel
    // is not on the circuit this falls back to (0, 0), matching the original
    // linear scan's defaults.
    let (box_index, camels_index) = circuit
        .iter()
        .enumerate()
        .find_map(|(i, camels)| camels.iter().position(|&c| c == camel_id).map(|j| (i, j)))
        .unwrap_or((0, 0));
    // Every camel stacked on top of the moving one travels with it.
    let stack = circuit[box_index].split_off(camels_index);
    // Clamp to box 16 (the finish) and, defensively, to the last existing box
    // so a shorter circuit cannot cause an out-of-bounds index.
    let destination = (box_index + count as usize).min(16).min(circuit.len() - 1);
    circuit[destination].extend(stack);
}
/// The race is won once any camel occupies the final box of the circuit.
pub fn camel_won(circuit: &Vec<Vec<u8>>) -> bool {
    circuit.last().map_or(false, |finish| !finish.is_empty())
}
|
use mold::core::eval;
use mold::core::eval::Environment;
use mold::core::parse;
use mold::core::rust;
use mold::stdlib;
use std::fmt;
use std::fs;
extern crate dylib;
extern crate term;
// TODO proper error handling for user input
/// Reads the whole script file into a `String`.
///
/// Still panics on failure (the surrounding TODO about proper error handling
/// stands), but now with a message that names the offending path instead of a
/// bare `unwrap`'s opaque `io::Error` debug dump.
fn read_script_from_file(file_path: &str) -> String {
    fs::read_to_string(file_path)
        .unwrap_or_else(|err| panic!("failed to read script file {}: {}", file_path, err))
}
/// Resolves the script text from the command line: `-e <code>` runs the code
/// given inline; anything else is treated as a path to a script file.
/// Panics when the expected arguments are absent.
fn get_script() -> String {
    let first_argument = std::env::args().nth(1).unwrap();
    if first_argument == "-e" {
        std::env::args().nth(2).unwrap()
    } else {
        read_script_from_file(first_argument.as_str())
    }
}
/// Prints `error` to stdout in bold red, then restores the terminal state.
/// The reset failure is deliberately ignored (best effort).
fn print_error<T: fmt::Display>(error: T) {
    let mut terminal = term::stdout().unwrap();
    terminal.fg(term::color::RED).unwrap();
    terminal.attr(term::Attr::Bold).unwrap();
    println!("{}", error);
    let _ = terminal.reset();
}
/// Registers the built-in native functions (`print`, `read_to_string`,
/// `write`) into the interpreter environment.
fn insert_stdlib(environment: &mut Environment) {
    // Obviously this needs to be magicalized
    environment.rust_functions.insert(
        parse::ast::Identifier::from("print"),
        mold::RustFunction {
            args: vec![parse::ast::Identifier::from("arg1")],
            native_function: rust::NativeFunction::Static1(rust::StaticNativeFunction1 {
                function: stdlib::io::print,
            }),
        },
    );
    environment.rust_functions.insert(
        parse::ast::Identifier::from("read_to_string"),
        mold::RustFunction {
            args: vec![parse::ast::Identifier::from("file_path")],
            native_function: rust::NativeFunction::Static1(rust::StaticNativeFunction1 {
                function: stdlib::io::read_to_string,
            }),
        },
    );
    // `write` is the only two-argument builtin so far.
    environment.rust_functions.insert(
        parse::ast::Identifier::from("write"),
        mold::RustFunction {
            args: vec![
                parse::ast::Identifier::from("file_path"),
                parse::ast::Identifier::from("content"),
            ],
            native_function: rust::NativeFunction::Static2(rust::StaticNativeFunction2 {
                function: stdlib::io::write,
            }),
        },
    );
}
/// Entry point: parse the script, then evaluate it against an environment
/// pre-populated with the native standard library. Errors at either stage
/// are rendered with `print_error`.
fn main() {
    let script = get_script();
    let expr = match parse::parse(&script) {
        Ok(expr) => expr,
        Err(e) => return print_error(e),
    };
    println!("{:?}", &expr);
    let mut environment = Environment::new();
    insert_stdlib(&mut environment);
    match eval::eval(&expr, &mut environment) {
        Ok(value) => println!("{}", &value),
        Err(e) => print_error(e),
    }
}
|
use crate::bit_set;
/// `Count` the number of bits in the container.
/// `Count` is a special case of `Rank`.
///
/// `count1` and `count0` have default implementation, but these are cycled.
/// So either `count1` or `count0` need to be redefined.
pub trait Count: bit_set::ops::Access {
/// Return the number of non-zero bits.
fn count1(&self) -> u64 {
self.size() - self.count0()
}
/// Return the number of zero bits.
fn count0(&self) -> u64 {
self.size() - self.count1()
}
}
// Implements `Count` for primitive integer words by delegating both methods
// to the hardware popcount intrinsics, overriding both defaults.
macro_rules! impl_Count_for_words {
    ($($ty:ty),*) => ($(
        impl Count for $ty {
            #[inline]
            fn count1(&self) -> u64 {
                self.count_ones() as u64
            }
            #[inline]
            fn count0(&self) -> u64 {
                self.count_zeros() as u64
            }
        }
    )*)
}
impl_Count_for_words!(u8, u16, u32, u64, u128, usize);
impl<T: bit_set::ops::Capacity + Count> Count for [T] {
    /// Count the number of non-zero bits.
    /// Only `count1` is overridden; `count0` falls back to the trait default
    /// `size() - count1()`.
    fn count1(&self) -> u64 {
        self.iter().map(|w| w.count1()).sum()
    }
}
|
//! Performs constant folding.
//!
//! # Optimizations
//!
//! This pass calculates constant instructions whenever possible.
use azuki_tac::{
builder::FuncEditor, optimizer::FunctionOptimizer, BinaryInst, BinaryOp, Inst, InstId,
InstKind, TacFunc, Value,
};
use smallvec::SmallVec;
use tracing::{debug, debug_span, trace};
/// Stateless optimizer pass that folds constant computations (see the module
/// docs above).
pub struct ConstFolding;
impl ConstFolding {
    pub fn new() -> Self {
        Self
    }
}
impl Default for ConstFolding {
    fn default() -> Self {
        Self::new()
    }
}
impl FunctionOptimizer for ConstFolding {
    fn name(&self) -> std::borrow::Cow<str> {
        "const-folding".into()
    }
    fn edits_program(&self) -> bool {
        true
    }
    /// Walks every instruction of every basic block, replacing binary
    /// instructions with simpler (or constant) equivalents in place.
    fn optimize_func(
        &mut self,
        _env: &mut azuki_tac::optimizer::OptimizeEnvironment,
        func: &mut azuki_tac::TacFunc,
    ) {
        let _span = debug_span!("const_folding", %func.name).entered();
        if func.first_block.is_none() {
            debug!("Empty function");
            return;
        }
        debug!("Parsing function");
        // In most cases, applying constant folding for one time is enough.
        let mut cursor = FuncEditor::new(func);
        cursor.set_current_bb(cursor.func.first_block.unwrap());
        // yeah i know, do-while pattern
        while {
            while cursor.move_forward() {
                let inst = cursor.current_inst().unwrap();
                let replaced = match &inst.kind {
                    InstKind::Binary(b) => {
                        // First, we try to simplify the instruction itself
                        match eval_binary_inst(b, &cursor.func).map(InstKind::Assign) {
                            Some(x) => Some(x),
                            None => {
                                // If there's no luck, we try to simplify
                                let ty = inst.ty.clone();
                                let v = eval_binary_deep(b, cursor.func);
                                match v {
                                    Some((mut i, n)) => {
                                        if let Some(n) = n {
                                            // A second helper instruction is needed:
                                            // insert it, then patch the u64::MAX
                                            // placeholder id (see eval_binary_deep)
                                            // with the freshly issued id.
                                            let idx = cursor
                                                .insert_before_current_place(Inst { kind: n, ty });
                                            cursor.move_forward();
                                            i.replace_dest(InstId::from_bits(u64::MAX), idx);
                                        }
                                        Some(i)
                                    }
                                    None => None,
                                }
                            }
                        }
                    }
                    // Collapse chains of assignments down to their source value.
                    InstKind::Assign(t) => Some(InstKind::Assign(eval_val(*t, &cursor.func))),
                    _ => None,
                };
                if let Some(r) = replaced {
                    trace!(
                        "replaced %{} with {:?}",
                        cursor.current_idx().unwrap().slot(),
                        r
                    );
                    cursor.current_inst_mut().unwrap().kind = r;
                }
            }
            // Advance to the next basic block; the do-while condition is
            // "there was a next block to process".
            let next = cursor.current_bb().next;
            match next {
                Some(next) => {
                    cursor.set_current_bb(next);
                    true
                }
                None => false,
            }
        } {}
    }
}
/// Evaluates a binary instruction to a simple value, if possible. Returns
/// `Some(Value)` if this instruction can be reduced into an assignment, else
/// return `None`.
fn eval_binary_inst(binary: &BinaryInst, f: &TacFunc) -> Option<Value> {
    use BinaryOp::*;
    use Value::*;
    // Resolve both operands through assignment chains first.
    let lhs = eval_val(binary.lhs, f);
    let rhs = eval_val(binary.rhs, f);
    match (binary.op, lhs, rhs) {
        // Constant op
        (op, Imm(lhs), Imm(rhs)) => eval_binary(op, lhs, rhs).map(Imm),
        // No-op
        (Add, Imm(0), v)
        | (Add, v, Imm(0))
        | (Sub, v, Imm(0))
        | (Mul, Imm(1), v)
        | (Mul, v, Imm(1))
        | (Div, v, Imm(1)) => Some(v),
        // Zero-op
        // NOTE(review): the (Div, Imm(0), _) arm folds `0 / x` to 0 even when
        // `x` is a runtime value that could itself be 0 -- confirm the source
        // language defines 0/0 (the (Div, _, Imm(0)) arm below deliberately
        // refuses to fold a *known* zero divisor).
        (Mul, Imm(0), _) | (Mul, _, Imm(0)) | (Div, Imm(0), _) => Some(Imm(0)),
        (Sub, Dest(a), Dest(b)) if a == b => Some(Imm(0)),
        // Divide by 0
        (Div, _, Imm(0)) => None,
        // Constant division (because div-0 is UB)
        (Div, Dest(a), Dest(b)) if a == b => Some(Imm(1)),
        // Others
        _ => None,
    }
}
/// Evaluate binary instructions that may reduce into simpler forms.
///
/// # Returns
///
/// - `None` if no possible reduction can be found.
/// - `Some(inst1, None)` if the operation can be reduced to one instruction.
/// - `Some(inst1, Some(inst2))` if this operation can be reduced to two instructions.
///
/// An `InstId::from_bits(u64::max_value())` refers to the id of `inst2`, if exists.
///
/// # Example
///
/// ```plaintext
/// (%3 i32 add %1 1)
/// (%4 i32 add %2 2)
/// (%5 i32 add %3 %4)
/// ```
///
/// Can be reduced into
///
/// ```plaintext
/// (%6 i32 add %1 %2)
/// (%5 i32 add %6 3)
/// ```
/// (See the doc comment above for the overall contract and the placeholder-id
/// convention.) Flattens one level of additive sub-expressions into a signed
/// operand list, folds the constant part, and rebuilds the minimal
/// instruction sequence.
///
/// Each entry of `operands`/`variables` is `(is_negated, value)`.
fn eval_binary_deep(binary: &BinaryInst, f: &TacFunc) -> Option<(InstKind, Option<InstKind>)> {
    if !is_additive(binary.op) {
        return None;
    }
    // Check if there's any possible combination that can be optimized.
    // Gets all operands of this operation, flattening additive children
    // one level deep and tracking each operand's sign.
    let mut operands = SmallVec::<[_; 4]>::new();
    match binary.lhs {
        Value::Dest(i) => match &f.inst_get(i).kind {
            InstKind::Binary(b) if is_additive(b.op) => {
                operands.push((false, b.lhs));
                operands.push((b.op == BinaryOp::Sub, b.rhs));
            }
            _ => operands.push((false, Value::Dest(i))),
        },
        i @ Value::Imm(_) => operands.push((false, i)),
    };
    let is_sub = binary.op == BinaryOp::Sub;
    match binary.rhs {
        Value::Dest(i) => match &f.inst_get(i).kind {
            InstKind::Binary(b) if is_additive(b.op) => {
                // A subtracted child flips the signs of its own operands.
                operands.push((is_sub, b.lhs));
                operands.push((is_sub ^ (b.op == BinaryOp::Sub), b.rhs));
            }
            _ => operands.push((is_sub, Value::Dest(i))),
        },
        i @ Value::Imm(_) => operands.push((is_sub, i)),
    };
    if operands.iter().all(|x| matches!(x, &(_, Value::Dest(_)))) {
        // No way to reduce operands
        return None;
    }
    // Fold all immediate operands into a single signed constant.
    let constant = operands
        .iter()
        .filter_map(|(is_neg, v)| v.get_imm().map(|x| (*is_neg, x)))
        .fold(0i64, |acc, (is_neg, v)| {
            if is_neg {
                acc.wrapping_sub(v)
            } else {
                acc.wrapping_add(v)
            }
        });
    let mut variables = operands
        .iter()
        .filter_map(|(is_neg, v)| v.get_inst().map(|x| (*is_neg, x)))
        .collect::<SmallVec<[_; 4]>>();
    if variables.len() == 4 {
        // "a + b + c + d" type, No way to reduce.
        None
    } else if variables.len() == 3 {
        // "a + b + c + constant" type. if constant == 0 and a positive term
        // exists, we can reduce that to "pos +/- (b + c)"
        let first_positive_term = variables
            .iter()
            .enumerate()
            .find_map(|x| (!x.1 .0).then(|| x.0));
        if constant == 0 && first_positive_term.is_some() {
            let (_, pos_term) = variables.remove(first_positive_term.unwrap());
            // Group the two remaining terms: if their signs differ the group
            // is a subtraction, otherwise an addition.
            let op = ((variables[0].0) ^ (variables[1].0))
                .then(|| BinaryOp::Sub)
                .unwrap_or(BinaryOp::Add);
            let second_inst = InstKind::Binary(BinaryInst {
                op,
                lhs: variables[0].1.into(),
                rhs: variables[1].1.into(),
            });
            // BUG FIX: a *negated* leading group term means the whole group is
            // subtracted from `pos_term`, so the outer operator must be Sub
            // when variables[0] is negated and Add otherwise. The original
            // mapping was inverted (e.g. it folded "a + b + c" into
            // "a - (b + c)").
            let op = variables[0]
                .0
                .then(|| BinaryOp::Sub)
                .unwrap_or(BinaryOp::Add);
            let first_inst = InstKind::Binary(BinaryInst {
                op,
                lhs: Value::Dest(pos_term),
                rhs: Value::Dest(InstId::from_bits(u64::max_value())),
            });
            Some((first_inst, Some(second_inst)))
        } else {
            None
        }
    } else if variables.len() == 2 {
        // "(a + b) + constant" type
        if constant == 0 && variables.iter().any(|x| !x.0) {
            // Constant vanished and at least one positive term: a single
            // binary instruction suffices.
            let op = ((variables[0].0) ^ (variables[1].0))
                .then(|| BinaryOp::Sub)
                .unwrap_or(BinaryOp::Add);
            let inst = InstKind::Binary(BinaryInst {
                op,
                lhs: variables[0].1.into(),
                rhs: variables[1].1.into(),
            });
            Some((inst, None))
        } else {
            let op = ((variables[0].0) ^ (variables[1].0))
                .then(|| BinaryOp::Sub)
                .unwrap_or(BinaryOp::Add);
            // Put a negated term first (if any) so the outer constant op
            // below carries the group's sign correctly.
            if !variables[0].0 {
                variables.swap(0, 1);
            }
            let second_inst = InstKind::Binary(BinaryInst {
                op,
                lhs: variables[0].1.into(),
                rhs: variables[1].1.into(),
            });
            let op = variables[0]
                .0
                .then(|| BinaryOp::Sub)
                .unwrap_or(BinaryOp::Add);
            let first_inst = InstKind::Binary(BinaryInst {
                op,
                lhs: Value::Imm(constant),
                rhs: Value::Dest(InstId::from_bits(u64::max_value())),
            });
            Some((first_inst, Some(second_inst)))
        }
    } else if variables.len() == 1 {
        // "a + constant" type
        // BUG FIX: "+a + 0" reduces to a plain assignment only when the lone
        // variable is *positive*; the original tested `variables[0].0`
        // (negated) and thus mis-folded "0 - a" into "a". A negated variable
        // with constant 0 now falls through to build "0 - a" below.
        if constant == 0 && !variables[0].0 {
            Some((InstKind::Assign(Value::Dest(variables[0].1)), None))
        } else {
            let op = variables[0]
                .0
                .then(|| BinaryOp::Sub)
                .unwrap_or(BinaryOp::Add);
            let first_inst = InstKind::Binary(BinaryInst {
                op,
                lhs: Value::Imm(constant),
                rhs: variables[0].1.into(),
            });
            Some((first_inst, None))
        }
    } else if variables.is_empty() {
        // Fully constant expression.
        Some((InstKind::Assign(Value::Imm(constant)), None))
    } else {
        unreachable!("We've covered all possible conditions")
    }
}
/// True for the operators that participate in additive re-association.
fn is_additive(op: BinaryOp) -> bool {
    matches!(op, BinaryOp::Add | BinaryOp::Sub)
}
/// Resolves `val` through assignment chains; immediates pass through as-is.
fn eval_val(val: Value, f: &TacFunc) -> Value {
    match val {
        Value::Dest(inst) => eval_inst(inst, f),
        imm => imm,
    }
}
/// Follows a chain of assignment instructions starting at `inst` until it
/// bottoms out in an immediate or a non-assignment instruction.
fn eval_inst(inst: InstId, f: &TacFunc) -> Value {
    let mut current = inst;
    loop {
        match f.inst_get(current).kind.as_assign() {
            Some(Value::Dest(d)) => current = *d,
            Some(Value::Imm(i)) => return Value::Imm(*i),
            None => return Value::Dest(current),
        }
    }
}
fn eval_binary(op: BinaryOp, lhs: i64, rhs: i64) -> Option<i64> {
Some(match op {
BinaryOp::Add => lhs.wrapping_add(rhs),
BinaryOp::Sub => lhs.wrapping_sub(rhs),
BinaryOp::Mul => lhs.wrapping_mul(rhs),
BinaryOp::Div => lhs.checked_div(rhs)?,
BinaryOp::Lt => (lhs < rhs) as i64,
BinaryOp::Gt => (lhs > rhs) as i64,
BinaryOp::Le => (lhs <= rhs) as i64,
BinaryOp::Ge => (lhs >= rhs) as i64,
BinaryOp::Eq => (lhs == rhs) as i64,
BinaryOp::Ne => (lhs != rhs) as i64,
})
}
#[cfg(test)]
mod test {
    // NOTE(review): this test is a stub -- `input` is never parsed or run
    // through the pass and there are no assertions, so it only checks that
    // the module compiles. TODO: parse the function and assert the folded
    // output.
    #[test]
    fn test_const_folding() {
        let input = r"
(fn return_three () i32
    (bb0 (
        (%0 i32 1)
        (%1 i32 mul 2 %0))
        (%2 i32 add %1 1)
        (return %2))
)
        ";
    }
}
|
/*!
```rudra-poc
[target]
crate = "gfwx"
version = "0.3.0"
[[target.peer]]
crate = "crossbeam-utils"
version = "0.8.0"
[report]
issue_url = "https://github.com/Devolutions/gfwx-rs/issues/7"
issue_date = 2020-12-08
rustsec_url = "https://github.com/RustSec/advisory-db/pull/610"
rustsec_id = "RUSTSEC-2020-0104"
[[bugs]]
analyzer = "SendSyncVariance"
bug_class = "SendSyncVariance"
bug_count = 2
rudra_report_locations = [
"src/processing/image.rs:160:1: 160:48",
"src/processing/image.rs:161:1: 161:48",
]
```
!*/
#![forbid(unsafe_code)]
use gfwx::processing::image::Image;
use crossbeam_utils::thread;
use std::cell::Cell;
// A simple tagged union used to demonstrate problems with data races in Cell.
#[derive(Debug, Clone, Copy)]
enum RefOrInt {
    // Same size/position as `Int`'s payload, so a torn racy write can turn
    // an arbitrary integer into a "reference".
    Ref(&'static u64),
    Int(u64),
}
// Target for the `Ref` variant; lives for the whole program.
static SOME_INT: u64 = 123;
// Proof of concept for the unsound Send/Sync bounds reported in
// gfwx-rs issue #7 (see the rudra-poc metadata at the top of this file):
// `Image`/`into_chunks_mut` let a non-Sync `Cell` cross a thread boundary.
// The two loops race on the smuggled cell until a torn value is observed;
// the final dereference is expected to crash. This is intentionally an
// infinite loop until the crash -- do not "fix" it.
fn main() {
    let cell = Cell::new(RefOrInt::Ref(&SOME_INT));
    let slice = &mut [&cell];
    let image = Image::from_slice(slice, (1, 1), 1);
    let mut chunk_iterator = image.into_chunks_mut(1, 1);
    let image_chunk = chunk_iterator.next().unwrap();
    thread::scope(|s| {
        s.spawn(|_| {
            // The chunk (and thus the &Cell) was moved to another thread
            // even though Cell is not Sync -- this is the soundness hole.
            let smuggled_cell = image_chunk[(0, 0)];
            loop {
                // Repeatedly write Ref(&addr) and Int(0xdeadbeef) into the cell.
                smuggled_cell.set(RefOrInt::Ref(&SOME_INT));
                smuggled_cell.set(RefOrInt::Int(0xdeadbeef));
            }
        });
        loop {
            if let RefOrInt::Ref(addr) = cell.get() {
                // Hope that between the time we pattern match the object as a
                // `Ref`, it gets written to by the other thread.
                if addr as *const u64 == &SOME_INT as *const u64 {
                    continue;
                }
                println!("Pointer is now: {:p}", addr);
                println!("Dereferencing addr will now segfault: {}", *addr);
            }
        }
    });
}
|
use error::Error;
use std::collections::HashMap;
use syntax::{Instr, Op2, Val, Printable};
// Singly linked list of free heap regions, kept sorted by base address
// (see `free`, which inserts in order and coalesces neighbors).
enum FreeList {
    Nil,
    // Node(base address, size in cells, rest of the list).
    Node(usize, usize, Box<FreeList>),
}
// Mutable machine state threaded through the evaluator.
struct State {
    // Flat word-addressed memory.
    heap: Vec<i32>,
    registers: Vec<i32>,
    // Unallocated heap regions, managed by `malloc`/`free`.
    free_list: FreeList,
    // Presumably maps an allocation's base address to its size so it can be
    // returned to the free list -- confirm against the alloc/free opcodes.
    alloc_blocks: HashMap<usize, usize>,
}
// Immutable program environment.
struct Env {
    // Presumably keyed by instruction label/address for jumps -- confirm
    // against the control-flow opcodes in `eval_rec`.
    instructions: HashMap<i32, Instr>,
}
/// Resolves an operand: an immediate is returned as-is, a register operand
/// is read from the register file.
fn eval_val(reg: &[i32], v: &Val) -> i32 {
    match v {
        Val::Imm(n) => *n,
        Val::Reg(i) => reg[*i],
    }
}
/// Applies a binary operator; comparisons encode their result as 0 or 1.
fn eval_op2(op2: &Op2, m: i32, n: i32) -> i32 {
    match op2 {
        Op2::Add => m + n,
        Op2::Sub => m - n,
        Op2::Mul => m * n,
        Op2::Div => m / n,
        Op2::Mod => m % n,
        Op2::LT => (m < n) as i32,
        Op2::Eq => (m == n) as i32,
    }
}
// Shorthand result type for the evaluator: final value or a runtime error.
type R = Result<i32, Error>;
/// Prints a `Printable` to stdout: an identifier verbatim, a value after
/// operand resolution, or a heap array given (pointer, length) operands.
fn print_printable(st: &mut State, p: &Printable) {
    match p {
        Printable::Id(s) => println!("{}", s),
        Printable::Val(v) => println!("{}", eval_val(&st.registers, &v)),
        Printable::Array(v1, v2) => {
            let ptr = eval_val(&st.registers, &v1) as usize;
            let len = eval_val(&st.registers, &v2) as usize;
            // The end index of the slice is exclusive, so ptr + len ==
            // heap.len() is still valid. BUG FIX: the original used `>=`,
            // wrongly rejecting an array that ends exactly at the heap's end.
            if ptr + len > st.heap.len() {
                println!(
                    "attempted to print invalid address {}",
                    ptr + len
                );
            } else {
                print!("[");
                // BUG FIX: print the heap *contents*; the original iterated
                // over the index range itself and printed the indices.
                for val in &st.heap[ptr..ptr + len] {
                    print!("{:?}; ", val);
                }
                println!("]");
            }
        }
    }
}
/// Carve `size` cells out of the free list (first fit).
/// Returns the updated free list and the base address of the allocation,
/// or `None` when no free block is large enough.
fn malloc(free_list: FreeList, size: usize) -> Option<(FreeList, usize)> {
    match free_list {
        FreeList::Nil => None,
        FreeList::Node(base, free_size, rest) => {
            if size == free_size {
                // Exact fit: this node disappears entirely.
                Some((*rest, base))
            } else if size < free_size {
                // Split: the tail of the block stays free. The remainder is
                // free_size - size; the old code subtracted `base` instead,
                // corrupting the free list for any base != size.
                Some((
                    FreeList::Node(base + size, free_size - size, rest),
                    base,
                ))
            } else {
                // Block too small: try the rest of the list and re-attach
                // this node in front of whatever it returns.
                match malloc(*rest, size) {
                    None => None,
                    Some((rest2, base2)) => Some((
                        FreeList::Node(base, free_size, Box::new(rest2)),
                        base2,
                    )),
                }
            }
        }
    }
}
/// Return the block [ptr, ptr + size) to the free list, keeping the list
/// sorted by base address and coalescing with adjacent free blocks.
fn free(free_list: FreeList, ptr: usize, size: usize) -> FreeList {
    match free_list {
        // Past the end of the list: append a fresh node.
        FreeList::Nil => FreeList::Node(ptr, size, Box::new(FreeList::Nil)),
        FreeList::Node(base1, size1, rest1) => {
            if ptr + size == base1 {
                // Freed block ends exactly where this node starts: merge.
                FreeList::Node(ptr, size + size1, rest1)
            } else if ptr + size < base1 {
                // Strictly before this node: insert a new node here.
                FreeList::Node(
                    ptr,
                    size,
                    Box::new(FreeList::Node(base1, size1, rest1)),
                )
            } else if ptr == base1 + size1 {
                // Freed block starts exactly where this node ends: merge and
                // re-insert the combined block into the tail so it can also
                // coalesce with the following node.
                free(*rest1, base1, size + size1)
            } else {
                // After this node: keep it and recurse into the tail.
                FreeList::Node(base1, size1, Box::new(free(*rest1, ptr, size)))
            }
        }
    }
}
fn eval_rec(st: &mut State, env: &Env, instr: &Instr) -> R {
match instr {
Instr::Copy(r, v, rest) => {
st.registers[*r] = eval_val(&st.registers, &v);
eval_rec(st, env, rest)
}
Instr::Op2(r, op, v1, v2, rest) => {
let m = eval_val(&st.registers, &v1);
let n = eval_val(&st.registers, &v2);
st.registers[*r] = eval_op2(&op, m, n);
eval_rec(st, env, rest)
}
Instr::Load(r, v, rest) => {
let ptr = eval_val(&st.registers, v) as usize;
if ptr >= st.heap.len() {
return Err(Error::Runtime(format!(
"{} = *{:?} invalid address {}",
r, v, ptr
)));
}
st.registers[*r] = st.heap[ptr];
eval_rec(st, env, rest)
}
Instr::Store(r, v, rest) => {
let ptr = st.registers[*r] as usize;
if ptr >= st.heap.len() {
return Err(Error::Runtime(format!(
"*{} = {:?} invalid address {}",
r, v, ptr
)));
}
st.heap[ptr] = eval_val(&st.registers, v);
eval_rec(st, env, rest)
}
Instr::Goto(v) => {
let code_ptr = eval_val(&st.registers, v);
match env.instructions.get(&code_ptr) {
Option::Some(instr) => eval_rec(st, env, instr),
Option::None => Err(Error::Runtime(format!(
"goto({}) invalid code address",
code_ptr
))),
}
}
Instr::Print(p, rest) => {
print_printable(st, p);
eval_rec(st, env, rest)
}
Instr::Exit(v) => Result::Ok(eval_val(&st.registers, v)),
Instr::Abort() => Result::Err(Error::Runtime("called abort".to_string())),
Instr::IfZ(v, true_part, false_part) => {
if eval_val(&st.registers, v) == 0 {
eval_rec(st, env, true_part)
} else {
eval_rec(st, env, false_part)
}
}
Instr::Malloc(r, v, rest) => {
let n = eval_val(&st.registers, v) as usize;
if n == 0 {
st.registers[*r] = 0;
}
else {
let mut nil_list = FreeList::Nil;
std::mem::swap(&mut st.free_list, &mut nil_list);
let (free_list2, ptr) = try!(
malloc(nil_list, n)
.ok_or(Error::Runtime("malloc OOM".to_string()))
);
st.free_list = free_list2;
st.registers[*r] = ptr as i32;
st.alloc_blocks.insert(ptr, n);
}
eval_rec(st, env, rest)
}
Instr::Free(r, rest) => {
let ptr = st.registers[*r] as usize;
let mut nil_list = FreeList::Nil;
let size = *try!(
st.alloc_blocks
.get(&ptr)
.ok_or(Error::Runtime("free bad ptr".to_string()))
);
std::mem::swap(&mut st.free_list, &mut nil_list);
st.free_list = free(nil_list, ptr, size);
eval_rec(st, env, rest)
}
}
}
pub fn eval(
heap_size: usize,
num_registers: usize,
blocks: HashMap<i32, Instr>,
) -> R {
let mut st = State {
heap: vec![0; heap_size],
registers: vec![0; num_registers],
free_list: FreeList::Node(1, heap_size - 1, Box::new(FreeList::Nil)),
alloc_blocks: HashMap::new(),
};
let env = Env {
instructions: blocks,
};
env.instructions
.get(&0)
.ok_or(Error::Usage("Expected block 0".to_string()))
.and_then(|instr| eval_rec(&mut st, &env, instr))
}
|
use std;
import std::fs;
// NOTE(review): this file is written in pre-1.0 Rust syntax (`import`,
// `log_err`, `assert (...)`, string `+`); kept byte-identical.
#[test]
fn test_connect() {
    // Platform-specific path separator string.
    let slash = fs::path_sep();
    // Log the joined path before asserting on it, for easier diagnosis.
    log_err fs::connect("a", "b");
    assert (fs::connect("a", "b") == "a" + slash + "b");
    // A trailing separator on the first component must not be doubled.
    assert (fs::connect("a" + slash, "b") == "a" + slash + "b");
}
// Regression test for issue #712: calling list_dir must not perform an
// invalid memory access; merely running it without crashing is the test.
#[test]
fn test_list_dir_no_invalid_memory_access() { fs::list_dir("."); }
#[test]
fn list_dir() {
    let dirs = fs::list_dir(".");
    // Just assuming that we've got some contents in the current directory
    assert (std::vec::len(dirs) > 0u);
    // Dump the entries so a failure is easier to diagnose.
    for dir in dirs { log dir; }
}
#[test]
fn file_is_dir() {
    // The current directory is a directory...
    assert (fs::file_is_dir("."));
    // ...but a regular source file is not.
    assert (!fs::file_is_dir("test/stdtest/fs.rs"));
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
// NOTE(review): AutoRust-generated wire models; field names and serde
// attributes mirror the service's JSON schema — do not hand-edit.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeviceServiceProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub notes: Option<String>,
    #[serde(rename = "startDate", default, skip_serializing_if = "Option::is_none")]
    pub start_date: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub quantity: Option<i64>,
    #[serde(rename = "billingDomainName", default, skip_serializing_if = "Option::is_none")]
    pub billing_domain_name: Option<String>,
    #[serde(rename = "adminDomainName", default, skip_serializing_if = "Option::is_none")]
    pub admin_domain_name: Option<String>,
}
// A device service: an ARM tracked resource plus service-specific properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeviceService {
    #[serde(flatten)]
    pub tracked_resource: TrackedResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub etag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DeviceServiceProperties>,
}
// Base ARM resource envelope (id / name / type).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
// Resource that additionally carries tags and a location.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrackedResource {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
}
// Resource without tags/location (proxy-only resources).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyResource {
    #[serde(flatten)]
    pub resource: Resource,
}
// Paged list of available REST operations; `nextLink` points at the next page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<OperationEntity>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationEntity {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<OperationDisplayInfo>,
}
// Human-readable description of a REST operation for portal display.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationDisplayInfo {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
}
// Standard ARM error payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDetails {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub details: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeviceServiceDescriptionListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<DeviceService>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
// Request body for the check-name-availability endpoint.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeviceServiceCheckNameAvailabilityParameters {
    pub name: String,
}
// Response of the check-name-availability endpoint.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeviceServiceNameAvailabilityInfo {
    #[serde(rename = "nameAvailable", default, skip_serializing_if = "Option::is_none")]
    pub name_available: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reason: Option<device_service_name_availability_info::Reason>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
pub mod device_service_name_availability_info {
    use super::*;
    // Why a requested name is unavailable.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Reason {
        Invalid,
        AlreadyExists,
    }
}
|
//! src/scanner.rs
//! The scanner for the haumea language
use std::str::Chars; // We need to bring the Chars struct into scope
use std::iter::Peekable;
/// The scanner struct
#[derive(Debug)]
pub struct Scanner<'a> {
    /// The source &str used to create the scanner.
    ///
    /// Scanner doesn't do anything with it currently, but it is kept in case clients
    /// want to get back the source code and, more importantly,
    /// to keep it in scope so that the source_chars iterator can work
    pub source_str: &'a str,
    /// An iterator of chars over the source str
    source_chars: Peekable<Chars<'a>>,
    /// A vector of chars that can be in operators
    operator_chars: Vec<char>,
    /// A vector of allowed operators
    operators: Vec<&'static str>,
    /// A vector of chars that can be in identifiers
    ident_chars: Vec<char>,
    /// A vector of keywords in haumea
    reserved_words: Vec<&'static str>,
    /// The look ahead char (seeded with `Some(' ')` by `new`, so the first
    /// `next_token` call skips it as whitespace)
    pub peek: Option<char>,
    /// The column the scanner is on in the source
    pub column: u32,
    /// The line the scanner is on in the source
    pub line: u32,
}
/// A structure containing the state of the scanner when it found a token
#[derive(Debug)]
#[derive(Copy, Clone)]
pub struct ScanState {
    /// The line the scanner was on
    pub line: u32,
    /// The column the scanner was on
    pub column: u32,
}
impl ScanState {
    /// Builds a ScanState recording the given line and column.
    pub fn new(line: u32, column: u32) -> ScanState {
        ScanState { line, column }
    }
    /// Builds the zero ScanState (line 0, column 0), used as a placeholder.
    pub fn empty() -> ScanState {
        Self::new(0, 0)
    }
}
/// An enum representing the various tokens that can occur
#[derive(Debug)]
#[derive(Clone)]
pub enum Token {
    /// An integer number
    ///
    /// The content is the number read as an i32
    Number(i32, ScanState),
    /// An identifier
    ///
    /// The content is the name of the identifier
    Ident(String, ScanState),
    /// A reserved word (or keyword)
    ///
    /// The content is the name of the keyword
    Keyword(String, ScanState),
    /// An operator
    ///
    /// The content is the name of the operator
    Operator(String, ScanState),
    /// Left parens
    Lp(ScanState),
    /// Right parens
    Rp(ScanState),
    /// A comma
    Comma(ScanState),
    /// An unexpected char was read
    ///
    /// The content is the char read
    Error(char, ScanState),
    /// End of input
    EOF(ScanState),
}
impl Token {
    /// Consumes the token and returns the ScanState recorded when it was scanned.
    pub fn state(self) -> ScanState {
        use self::Token::*;
        match self {
            Number(_, st)
            | Ident(_, st)
            | Keyword(_, st)
            | Operator(_, st)
            | Error(_, st) => st,
            Lp(st) | Rp(st) | Comma(st) | EOF(st) => st,
        }
    }
}
impl PartialEq for Token {
    /// Two tokens are equal when they are the same variant carrying the same
    /// payload; the recorded ScanState is deliberately ignored.
    fn eq(&self, other: &Token) -> bool {
        use self::Token::*;
        match (self, other) {
            (&Number(ref a, _), &Number(ref b, _)) => a == b,
            (&Error(ref a, _), &Error(ref b, _)) => a == b,
            (&Ident(ref a, _), &Ident(ref b, _))
            | (&Keyword(ref a, _), &Keyword(ref b, _))
            | (&Operator(ref a, _), &Operator(ref b, _)) => a == b,
            (&Lp(_), &Lp(_))
            | (&Rp(_), &Rp(_))
            | (&Comma(_), &Comma(_))
            | (&EOF(_), &EOF(_)) => true,
            _ => false,
        }
    }
}
impl<'a> Scanner<'a> {
    /// Constructs a new Scanner from a source &str
    ///
    /// # Examples
    /// ```
    /// # use haumea::scanner::{Scanner, Token};
    /// let source = "1 + 1";
    /// let scanner = Scanner::new(source);
    /// assert_eq!(scanner.source_str, source);
    /// assert_eq!(scanner.peek, Some(' '));
    /// ```
    pub fn new(source: &'a str) -> Scanner {
        let chars = source.chars().peekable();
        // Seed the look-ahead with a harmless space: the first next_token()
        // call skips it as whitespace and pulls in the first real char.
        let peek = Some(' ');
        Scanner {
            source_str: source,
            source_chars: chars,
            operator_chars: vec!['+', '=', '-', '*', '/', '<', '>', '~', '|', '&', '(', ')', '!'],
            operators: vec!["+", "=", "-", "*", "/", "<", ">", ">=", "<=",
                "~", "|", "&", "and", "or", "not", "(", ")", "!=", "modulo"],
            ident_chars: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_".chars().collect::<Vec<_>>(),
            reserved_words: vec!["to", "with", "is", "return", "do", "end",
                "if", "then", "else", "let", "be", "forever",
                "while", "for", "each", "in",
                "set", "to", "through", "change", "by", "variable"],
            peek: peek,
            column: 0,
            line: 1,
        }
    }
    /// Returns the next token in the source. Token::EOF means that all the input has been read
    ///
    /// # Examples
    /// ```
    /// # use haumea::scanner::{Scanner, Token, ScanState};
    /// let mut s = Scanner::new("1 + 1");
    /// assert_eq!(s.next_token(), Token::Number(1, ScanState::empty()));
    /// assert_eq!(s.next_token(), Token::Operator("+".to_string(), ScanState::empty()));
    /// assert_eq!(s.next_token(), Token::Number(1, ScanState::empty()));
    /// assert_eq!(s.next_token(), Token::EOF(ScanState::empty()));
    /// ```
    pub fn next_token(&mut self) -> Token {
        self.skip_white();
        // Record the position before consuming any of the token's chars.
        let state = ScanState::new(self.line, self.column);
        match self.peek {
            Some(c) => {
                if self.ident_chars.contains(&c) {
                    self.get_ident_token(state)
                } else if c.is_digit(10) {
                    Token::Number(self.get_num(), state)
                } else if c == '(' {
                    // '(' and ')' are also in operator_chars, so they must be
                    // matched before the general operator case to become Lp/Rp.
                    self.get_char();
                    Token::Lp(state)
                } else if c == ')' {
                    self.get_char();
                    Token::Rp(state)
                } else if c == ',' {
                    self.get_char();
                    Token::Comma(state)
                } else if self.operator_chars.contains(&c) {
                    Token::Operator(self.get_op(), state)
                } else {
                    // Unrecognised char: consume it and report it as an error token.
                    self.get_char();
                    Token::Error(c, state)
                }
            },
            None => Token::EOF(state),
        }
    }
    /// Sets self.peek to be the next char in self.source_chars
    fn get_char(&mut self) {
        self.peek = self.source_chars.next();
        self.column += 1;
        // NOTE(review): when a '\n' is read it is itself counted as column 1
        // of the *next* line — confirm this is the intended convention.
        if let Some('\n') = self.peek {
            self.line += 1;
            self.column = 1;
        };
    }
    /// Skips over whitespace in self.source_chars
    fn skip_white(&mut self) {
        // Pattern: whitespace, at most one (possibly nested) comment,
        // then the whitespace that follows the comment.
        loop {
            match self.peek {
                Some(c) if c.is_whitespace() => {
                    self.get_char()
                }
                _ => break,
            }
        }
        self.skip_comments();
        // NOTE(review): a second comment separated from the first only by
        // whitespace is NOT skipped here and would be scanned as operators —
        // verify consecutive comments cannot occur or are handled upstream.
        loop {
            match self.peek {
                Some(c) if c.is_whitespace() => {
                    self.get_char()
                }
                _ => break,
            }
        }
    }
    /// Skips over comments in self.source_chars
    fn skip_comments(&mut self) {
        // A comment opens with "/*": peek holds the '/', and the next,
        // not-yet-consumed char must be '*'.
        let should_skip = match self.peek {
            Some(c) if c == '/' => {
                if let Some(n) = self.source_chars.peek() {
                    n == &'*'
                } else {
                    false
                }
            },
            _ => false
        };
        if should_skip {
            self.skip_until_comment_end()
        }
    }
    /// Skips until the end of a comment
    fn skip_until_comment_end(&mut self) {
        self.get_char(); // Advance off the '/' so peek holds the opening '*'
        loop {
            self.get_char();
            match self.peek {
                Some(c) if c == '*' => {
                    // "*/" ends the comment; the two get_char calls after the
                    // loop consume the '*' and the '/'.
                    if let Some(n) = self.source_chars.peek() {
                        if n == &'/' {
                            break;
                        }
                    }
                },
                // A '/' inside the comment may start a nested comment.
                Some(c) if c == '/' => self.skip_comments(),
                // NOTE(review): if the input ends inside a comment, peek stays
                // None and this loop never breaks — confirm unterminated
                // comments are rejected before scanning.
                _ => ()
            }
        }
        self.get_char();
        self.get_char();
    }
    /// Returns the next number that can be found in self.source_chars
    fn get_num(&mut self) -> i32 {
        let mut s = String::new();
        s.push(self.peek.unwrap());
        loop {
            self.get_char();
            match self.peek {
                Some(c) if c.is_digit(10) => s.push(c),
                _ => break,
            }
        }
        // NOTE(review): parse().unwrap() panics on literals larger than
        // i32::MAX — consider reporting a scan error instead.
        s.parse::<i32>().unwrap()
    }
    /// Returns an Token that contains the next identifier in self.source_chars
    ///
    /// It can be one of three Tokens:
    /// 1. Token::Keyword (if the identifier is a reserved word)
    /// 2. Token::Operator (if the identifier is the name of an operator like `and` or `or`)
    /// 3. Token::Ident (otherwise)
    fn get_ident_token(&mut self, state: ScanState) -> Token {
        let mut s = String::new();
        s.push(self.peek.unwrap());
        loop {
            self.get_char();
            match self.peek {
                Some(c) if self.ident_chars.contains(&c) => s.push(c),
                _ => break,
            }
        };
        // Classify the word: keyword, word-operator ("and", "or", ...), or identifier.
        if self.reserved_words.contains(&&s[..]) {
            Token::Keyword(s, state)
        } else if self.operators.contains(&&s[..]) {
            Token::Operator(s, state)
        } else {
            Token::Ident(s, state)
        }
    }
    /// Returns a String containing the next symbol spelt operator
    fn get_op(&mut self) -> String {
        let mut s = String::new();
        s.push(self.peek.unwrap());
        // Greedily collect operator chars so multi-char operators like ">=" scan as one token.
        loop {
            self.get_char();
            match self.peek {
                Some(c) if self.operator_chars.contains(&c) => s.push(c),
                _ => break,
            }
        };
        s
    }
}
// Implement Iterator for Scanner
impl<'a> Iterator for Scanner<'a> {
    type Item = Token;
    /// Returns the next token as an Option<Token>
    ///
    /// Token::EOF is translated into the end of the iteration
    ///
    /// # Examples
    ///```
    /// # use haumea::scanner::{Scanner, Token, ScanState};
    /// let mut s = Scanner::new("1 + 1");
    /// assert_eq!(s.next(), Some(Token::Number(1, ScanState::empty())));
    /// assert_eq!(s.next(), Some(Token::Operator("+".to_string(), ScanState::empty())));
    /// assert_eq!(s.next(), Some(Token::Number(1, ScanState::empty())));
    /// assert_eq!(s.next(), None);
    ///```
    fn next(&mut self) -> Option<Token> {
        // EOF marks exhaustion; every other token is yielded as-is.
        let token = self.next_token();
        if let Token::EOF(_) = token {
            None
        } else {
            Some(token)
        }
    }
}
|
// NOTE(review): svd2rust-generated accessor for the SYSCFG_ITLINE11 status
// register; layout comes from the SVD file and should not be hand-edited.
#[doc = "Register `SYSCFG_ITLINE11` reader"]
pub type R = crate::R<SYSCFG_ITLINE11_SPEC>;
#[doc = "Field `DMAMUX` reader - DMAMUX interrupt request pending"]
pub type DMAMUX_R = crate::BitReader;
impl R {
    #[doc = "Bit 0 - DMAMUX interrupt request pending"]
    #[inline(always)]
    pub fn dmamux(&self) -> DMAMUX_R {
        // Extract bit 0: set when the DMAMUX interrupt request is pending.
        DMAMUX_R::new((self.bits & 1) != 0)
    }
}
#[doc = "SYSCFG interrupt line 11 status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`syscfg_itline11::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SYSCFG_ITLINE11_SPEC;
impl crate::RegisterSpec for SYSCFG_ITLINE11_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`syscfg_itline11::R`](R) reader structure"]
impl crate::Readable for SYSCFG_ITLINE11_SPEC {}
#[doc = "`reset()` method sets SYSCFG_ITLINE11 to value 0"]
impl crate::Resettable for SYSCFG_ITLINE11_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use minifb::{ Window, WindowOptions };
use super::super::window_backend::WindowBackend;
use crate::engine::domain::frame::Frame;
/// minifb-backed implementation of the WindowBackend trait.
pub struct MiniFb {
    // The underlying minifb window handle.
    window: Window
}
impl WindowBackend for MiniFb {
/// Creates a resizable minifb window with the given title and size.
/// Panics if the OS window cannot be created.
fn new(window_name: &str, width: usize, height: usize) -> MiniFb {
    let options = WindowOptions {
        resize: true,
        ..WindowOptions::default()
    };
    let window = Window::new(window_name, width, height, options)
        .unwrap_or_else(|err| panic!("Unable to create window {}", err));
    MiniFb { window }
}
// Current (width, height) of the window in pixels, as reported by minifb.
fn get_dimensions(&self) -> (usize, usize) {
    self.window.get_size()
}
// True while the native window has not been closed by the user.
fn is_open(&self) -> bool {
    self.window.is_open()
}
// Blit a frame to the window, packing each pixel's RGB channels into
// minifb's 0RGB u32 layout (R in bits 16-23, G in 8-15, B in 0-7).
fn draw(&mut self, frame: Frame) {
    let as_backend_buffer: Vec<u32> = frame.as_buffer(
        &|pixel| { ((pixel.color.r as u32) << 16) | ((pixel.color.g as u32) << 8) | (pixel.color.b as u32) }
    );
    let (width, height) = self.get_dimensions();
    // NOTE(review): unwrap() panics if the buffer update fails; acceptable
    // for a demo backend, but consider propagating the error.
    self.window.update_with_buffer(&as_backend_buffer, width, height).unwrap();
}
} |
mod dfa;
#[macro_use]
extern crate clap;
use std::fs::File;
use std::io::prelude::*;
/// Entry point: tokenize INPUT with the DFA lexer, echo tokens to stdout,
/// and write the token table (plus any error message) to OUTPUT.
fn main() {
    let matches = clap_app!(PyLexer =>
        (version: "sqrt(7)")
        (author: "GGlavan <https://github.com/glavangeorge>")
        (about: "Lexer for python")
        (@arg INPUT: +required "File to parse")
        (@arg OUTPUT: +required "File to write")
    ).get_matches();
    // Both arguments are +required, so these unwraps cannot fail.
    let in_filename = matches.value_of("INPUT").unwrap();
    let out_filename = matches.value_of("OUTPUT").unwrap();
    let mut f = File::open(in_filename).expect("input file not found");
    let mut contents = String::new();
    f.read_to_string(&mut contents)
        .expect("something went wrong reading the file");
    // `contents` is already a String; the old String::from(contents) was a no-op.
    let mut df = dfa::Tokenizer::new(contents);
    let mut err_message = String::new();
    while let Some((t, val, poz)) = df.get_token() {
        println!("Got token {} - {:?}", val, t);
        if val == "err" {
            // Only the most recent error is kept, matching the old behavior.
            err_message = format!("Got error {} at {}", t, poz);
        }
    }
    // The old expect message said "not found", but create() fails for other
    // reasons (permissions, bad path, ...) — make the message accurate.
    let mut o_f = File::create(out_filename).expect("could not create output file");
    for ((t, val), positions) in df.get_table() {
        writeln!(o_f, "{} - {:?} {:?} ", t, val, positions).unwrap();
    }
    if !err_message.is_empty() {
        write!(o_f, "{}", err_message).unwrap();
    }
}
|
use super::manager::Manager;
use gtk::prelude::*;
use gtk::{self, Builder, Window};
/// Clones the listed variables into a `move` closure, so the closure owns
/// fresh copies instead of capturing (and consuming) the originals.
///
/// This restores the standard gtk-rs `clone!` helper: the previous version
/// was syntactically mangled — its second rule was nested inside the first
/// rule's expansion, it wrote `$p::tt` instead of `$p:tt`, and `&body`
/// instead of `$body`.
macro_rules! clone {
    (@param _) => ( _ );
    (@param $x:ident) => ( $x );
    ($($n:ident),+ => move || $body:expr) => (
        {
            $( let $n = $n.clone(); )+
            move || $body
        }
    );
    ($($n:ident),+ => move |$($p:tt),+| $body:expr) => (
        {
            $( let $n = $n.clone(); )+
            move |$(clone!(@param $p),)+| $body
        }
    );
}
/// Initialize GTK, open the task file `file_name`, and build the main
/// window from the bundled Glade description.
pub fn user_interface(file_name: &'static str) {
    // Abort early if GTK cannot be initialized (e.g. no display).
    // Also fixes the "fial" typo in the old panic message.
    match gtk::init() {
        Ok(_) => {
            println!("gtk initialize complete");
        }
        Err(_) => {
            panic!("failed to initialize gtk");
        }
    }
    let show_file = Manager::open(file_name);
    let glade_src = include_str!("have_to_do.glade");
    let builder = Builder::new_from_string(glade_src);
    let window: Window = builder.get_object("window1").unwrap();
    let manage_name = builder.get_object("manage_name").unwrap();
    // NOTE(review): `show_file`, `window` and `manage_name` are built but
    // never used, and neither `window.show_all()` nor `gtk::main()` is
    // called, so no UI ever appears — presumably this function is
    // unfinished; confirm intent before relying on it.
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
// NOTE(review): AutoRust-generated wire models for the container-registry
// "connected registry" API; serde attributes mirror the JSON schema and
// should stay in sync with the service definition — do not hand-edit.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectedRegistryListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ConnectedRegistry>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectedRegistry {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ConnectedRegistryProperties>,
}
// Standard ARM audit metadata (who created/modified the resource and when).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SystemData {
    #[serde(rename = "createdBy", default, skip_serializing_if = "Option::is_none")]
    pub created_by: Option<String>,
    #[serde(rename = "createdByType", default, skip_serializing_if = "Option::is_none")]
    pub created_by_type: Option<system_data::CreatedByType>,
    #[serde(rename = "createdAt", default, skip_serializing_if = "Option::is_none")]
    pub created_at: Option<String>,
    #[serde(rename = "lastModifiedBy", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_by: Option<String>,
    #[serde(rename = "lastModifiedByType", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_by_type: Option<system_data::LastModifiedByType>,
    #[serde(rename = "lastModifiedAt", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_at: Option<String>,
}
pub mod system_data {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum CreatedByType {
        User,
        Application,
        ManagedIdentity,
        Key,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum LastModifiedByType {
        User,
        Application,
        ManagedIdentity,
        Key,
    }
}
// Full state of a connected registry; `mode` and `parent` are required by
// the schema (no skip_serializing_if), everything else is optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectedRegistryProperties {
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<connected_registry_properties::ProvisioningState>,
    pub mode: connected_registry_properties::Mode,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
    #[serde(rename = "connectionState", default, skip_serializing_if = "Option::is_none")]
    pub connection_state: Option<connected_registry_properties::ConnectionState>,
    #[serde(rename = "lastActivityTime", default, skip_serializing_if = "Option::is_none")]
    pub last_activity_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub activation: Option<ActivationProperties>,
    pub parent: ParentProperties,
    #[serde(rename = "clientTokenIds", default, skip_serializing_if = "Vec::is_empty")]
    pub client_token_ids: Vec<String>,
    #[serde(rename = "loginServer", default, skip_serializing_if = "Option::is_none")]
    pub login_server: Option<LoginServerProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub logging: Option<LoggingProperties>,
    #[serde(rename = "statusDetails", default, skip_serializing_if = "Vec::is_empty")]
    pub status_details: Vec<StatusDetailProperties>,
    #[serde(rename = "notificationsList", default, skip_serializing_if = "Vec::is_empty")]
    pub notifications_list: Vec<String>,
}
pub mod connected_registry_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Creating,
        Updating,
        Deleting,
        Succeeded,
        Failed,
        Canceled,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Mode {
        ReadWrite,
        ReadOnly,
        Registry,
        Mirror,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ConnectionState {
        Online,
        Offline,
        Syncing,
        Unhealthy,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ActivationProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<activation_properties::Status>,
}
pub mod activation_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Active,
        Inactive,
    }
}
// Link to the parent registry; `syncProperties` is required by the schema.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ParentProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "syncProperties")]
    pub sync_properties: SyncProperties,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LoginServerProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub host: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tls: Option<TlsProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LoggingProperties {
    #[serde(rename = "logLevel", default, skip_serializing_if = "Option::is_none")]
    pub log_level: Option<logging_properties::LogLevel>,
    #[serde(rename = "auditLogStatus", default, skip_serializing_if = "Option::is_none")]
    pub audit_log_status: Option<logging_properties::AuditLogStatus>,
}
pub mod logging_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum LogLevel {
        Debug,
        Information,
        Warning,
        Error,
        None,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AuditLogStatus {
        Enabled,
        Disabled,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StatusDetailProperties {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub timestamp: Option<String>,
    #[serde(rename = "correlationId", default, skip_serializing_if = "Option::is_none")]
    pub correlation_id: Option<String>,
}
// Sync configuration; `tokenId` and `messageTtl` are required by the schema.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SyncProperties {
    #[serde(rename = "tokenId")]
    pub token_id: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub schedule: Option<String>,
    #[serde(rename = "syncWindow", default, skip_serializing_if = "Option::is_none")]
    pub sync_window: Option<String>,
    #[serde(rename = "messageTtl")]
    pub message_ttl: String,
    #[serde(rename = "lastSyncTime", default, skip_serializing_if = "Option::is_none")]
    pub last_sync_time: Option<String>,
    #[serde(rename = "gatewayEndpoint", default, skip_serializing_if = "Option::is_none")]
    pub gateway_endpoint: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TlsProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<tls_properties::Status>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub certificate: Option<TlsCertificateProperties>,
}
pub mod tls_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Enabled,
        Disabled,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TlsCertificateProperties {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<tls_certificate_properties::Type>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
}
pub mod tls_certificate_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        LocalDirectory,
    }
}
// PATCH body for updating a connected registry.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectedRegistryUpdateParameters {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ConnectedRegistryUpdateProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectedRegistryUpdateProperties {
    #[serde(rename = "syncProperties", default, skip_serializing_if = "Option::is_none")]
    pub sync_properties: Option<SyncUpdateProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub logging: Option<LoggingProperties>,
    #[serde(rename = "clientTokenIds", default, skip_serializing_if = "Vec::is_empty")]
    pub client_token_ids: Vec<String>,
    #[serde(rename = "notificationsList", default, skip_serializing_if = "Vec::is_empty")]
    pub notifications_list: Vec<String>,
}
// Patch variant of SyncProperties: everything optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SyncUpdateProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub schedule: Option<String>,
    #[serde(rename = "syncWindow", default, skip_serializing_if = "Option::is_none")]
    pub sync_window: Option<String>,
    #[serde(rename = "messageTtl", default, skip_serializing_if = "Option::is_none")]
    pub message_ttl: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExportPipelineListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ExportPipeline>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExportPipeline {
#[serde(flatten)]
pub proxy_resource: ProxyResource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub location: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub identity: Option<IdentityProperties>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<ExportPipelineProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IdentityProperties {
#[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")]
pub principal_id: Option<String>,
#[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
pub tenant_id: Option<String>,
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<identity_properties::Type>,
#[serde(rename = "userAssignedIdentities", default, skip_serializing_if = "Option::is_none")]
pub user_assigned_identities: Option<serde_json::Value>,
}
pub mod identity_properties {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Type {
SystemAssigned,
UserAssigned,
#[serde(rename = "SystemAssigned, UserAssigned")]
SystemAssignedUserAssigned,
None,
}
}
/// Properties of an export pipeline; `target` is required on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExportPipelineProperties {
    pub target: ExportPipelineTargetProperties,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub options: Vec<String>,
    // Read-only server-populated state; never sent when absent.
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<export_pipeline_properties::ProvisioningState>,
}
/// Enum types scoped to [`ExportPipelineProperties`].
pub mod export_pipeline_properties {
    use super::*;
    /// Lifecycle state reported by the service.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Creating,
        Updating,
        Deleting,
        Succeeded,
        Failed,
        Canceled,
    }
}
/// Principal/client ids of a single user-assigned managed identity.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UserIdentityProperties {
    #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")]
    pub principal_id: Option<String>,
    #[serde(rename = "clientId", default, skip_serializing_if = "Option::is_none")]
    pub client_id: Option<String>,
}
/// Common envelope for a tracked ARM resource; note `location` is required, unlike [`ProxyResource`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    pub location: String,
    // Free-form tag map; kept as raw JSON.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
    pub system_data: Option<SystemData>,
}
/// Destination of an export pipeline; `key_vault_uri` (the secret holding target credentials) is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExportPipelineTargetProperties {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub uri: Option<String>,
    #[serde(rename = "keyVaultUri")]
    pub key_vault_uri: String,
}
/// Common envelope for an ARM proxy resource (no `location`/`tags`, unlike [`Resource`]).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
    pub system_data: Option<SystemData>,
}
/// Request body for importing an image into a registry; `source` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImportImageParameters {
    pub source: ImportSource,
    #[serde(rename = "targetTags", default, skip_serializing_if = "Vec::is_empty")]
    pub target_tags: Vec<String>,
    #[serde(rename = "untaggedTargetRepositories", default, skip_serializing_if = "Vec::is_empty")]
    pub untagged_target_repositories: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub mode: Option<import_image_parameters::Mode>,
}
/// Enum types scoped to [`ImportImageParameters`].
pub mod import_image_parameters {
    use super::*;
    /// Whether an existing target tag may be overwritten.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Mode {
        NoForce,
        Force,
    }
}
/// Where an imported image comes from; `source_image` is required, the registry is
/// identified by either `resource_id` or `registry_uri`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImportSource {
    #[serde(rename = "resourceId", default, skip_serializing_if = "Option::is_none")]
    pub resource_id: Option<String>,
    #[serde(rename = "registryUri", default, skip_serializing_if = "Option::is_none")]
    pub registry_uri: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub credentials: Option<ImportSourceCredentials>,
    #[serde(rename = "sourceImage")]
    pub source_image: String,
}
/// Credentials for pulling from the import source registry; `password` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImportSourceCredentials {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub username: Option<String>,
    pub password: String,
}
/// Paged list of [`ImportPipeline`] resources; `next_link` holds the URI of the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImportPipelineListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ImportPipeline>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// An import pipeline resource; common ARM fields come flattened from [`ProxyResource`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImportPipeline {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<IdentityProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ImportPipelineProperties>,
}
/// Properties of an import pipeline; `source` is required on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImportPipelineProperties {
    pub source: ImportPipelineSourceProperties,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub trigger: Option<PipelineTriggerProperties>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub options: Vec<String>,
    // Read-only server-populated state; never sent when absent.
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<import_pipeline_properties::ProvisioningState>,
}
/// Enum types scoped to [`ImportPipelineProperties`].
pub mod import_pipeline_properties {
    use super::*;
    /// Lifecycle state reported by the service.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Creating,
        Updating,
        Deleting,
        Succeeded,
        Failed,
        Canceled,
    }
}
/// Source of an import pipeline; `key_vault_uri` (secret holding source credentials) is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImportPipelineSourceProperties {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<import_pipeline_source_properties::Type>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub uri: Option<String>,
    #[serde(rename = "keyVaultUri")]
    pub key_vault_uri: String,
}
/// Enum types scoped to [`ImportPipelineSourceProperties`].
pub mod import_pipeline_source_properties {
    use super::*;
    /// Kind of import source; only blob-container sources appear here.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        AzureStorageBlobContainer,
    }
}
/// Trigger configuration for a pipeline; currently just the optional source trigger.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineTriggerProperties {
    #[serde(rename = "sourceTrigger", default, skip_serializing_if = "Option::is_none")]
    pub source_trigger: Option<PipelineSourceTriggerProperties>,
}
/// Source-trigger settings; `status` is required on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineSourceTriggerProperties {
    pub status: pipeline_source_trigger_properties::Status,
}
/// Enum types scoped to [`PipelineSourceTriggerProperties`].
pub mod pipeline_source_trigger_properties {
    use super::*;
    /// Whether the source trigger is active.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Enabled,
        Disabled,
    }
}
/// Paged list of [`OperationDefinition`] entries; `next_link` holds the URI of the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<OperationDefinition>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Definition of a REST API operation exposed by the provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationDefinition {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<OperationDisplayDefinition>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<OperationPropertiesDefinition>,
    #[serde(rename = "isDataAction", default, skip_serializing_if = "Option::is_none")]
    pub is_data_action: Option<bool>,
}
/// Human-readable display strings for an operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationDisplayDefinition {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
/// Extra operation metadata; currently only the service specification.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationPropertiesDefinition {
    #[serde(rename = "serviceSpecification", default, skip_serializing_if = "Option::is_none")]
    pub service_specification: Option<OperationServiceSpecificationDefinition>,
}
/// Metric and log specifications advertised for an operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationServiceSpecificationDefinition {
    #[serde(rename = "metricSpecifications", default, skip_serializing_if = "Vec::is_empty")]
    pub metric_specifications: Vec<OperationMetricSpecificationDefinition>,
    #[serde(rename = "logSpecifications", default, skip_serializing_if = "Vec::is_empty")]
    pub log_specifications: Vec<OperationLogSpecificationDefinition>,
}
/// Description of a single metric emitted by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationMetricSpecificationDefinition {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "displayDescription", default, skip_serializing_if = "Option::is_none")]
    pub display_description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub unit: Option<String>,
    #[serde(rename = "aggregationType", default, skip_serializing_if = "Option::is_none")]
    pub aggregation_type: Option<String>,
    #[serde(rename = "internalMetricName", default, skip_serializing_if = "Option::is_none")]
    pub internal_metric_name: Option<String>,
}
/// Description of a single log category emitted by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationLogSpecificationDefinition {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "blobDuration", default, skip_serializing_if = "Option::is_none")]
    pub blob_duration: Option<String>,
}
/// Request body for a registry name-availability check; both fields are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RegistryNameCheckRequest {
    pub name: String,
    #[serde(rename = "type")]
    pub type_: registry_name_check_request::Type,
}
/// Enum types scoped to [`RegistryNameCheckRequest`].
pub mod registry_name_check_request {
    use super::*;
    /// Resource type being checked; fixed to the registries type string.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        #[serde(rename = "Microsoft.ContainerRegistry/registries")]
        MicrosoftContainerRegistryRegistries,
    }
}
/// Result of a registry name-availability check.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RegistryNameStatus {
    #[serde(rename = "nameAvailable", default, skip_serializing_if = "Option::is_none")]
    pub name_available: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reason: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/// Paged list of [`PipelineRun`] resources; `next_link` holds the URI of the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineRunListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PipelineRun>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A pipeline-run resource; common ARM fields come flattened from [`ProxyResource`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineRun {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PipelineRunProperties>,
}
/// Properties of a pipeline run: the request that started it and the service's response.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineRunProperties {
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<pipeline_run_properties::ProvisioningState>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub request: Option<PipelineRunRequest>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub response: Option<PipelineRunResponse>,
    #[serde(rename = "forceUpdateTag", default, skip_serializing_if = "Option::is_none")]
    pub force_update_tag: Option<String>,
}
/// Enum types scoped to [`PipelineRunProperties`].
pub mod pipeline_run_properties {
    use super::*;
    /// Lifecycle state reported by the service.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Creating,
        Updating,
        Deleting,
        Succeeded,
        Failed,
        Canceled,
    }
}
/// Parameters that initiated a pipeline run.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineRunRequest {
    #[serde(rename = "pipelineResourceId", default, skip_serializing_if = "Option::is_none")]
    pub pipeline_resource_id: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub artifacts: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub source: Option<PipelineRunSourceProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<PipelineRunTargetProperties>,
    #[serde(rename = "catalogDigest", default, skip_serializing_if = "Option::is_none")]
    pub catalog_digest: Option<String>,
}
/// Service-reported outcome of a pipeline run (status, timing, progress, error text).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineRunResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(rename = "importedArtifacts", default, skip_serializing_if = "Vec::is_empty")]
    pub imported_artifacts: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub progress: Option<ProgressProperties>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "finishTime", default, skip_serializing_if = "Option::is_none")]
    pub finish_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub source: Option<ImportPipelineSourceProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<ExportPipelineTargetProperties>,
    #[serde(rename = "catalogDigest", default, skip_serializing_if = "Option::is_none")]
    pub catalog_digest: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub trigger: Option<PipelineTriggerDescriptor>,
    #[serde(rename = "pipelineRunErrorMessage", default, skip_serializing_if = "Option::is_none")]
    pub pipeline_run_error_message: Option<String>,
}
/// Source of a pipeline run.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineRunSourceProperties {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<pipeline_run_source_properties::Type>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
}
/// Enum types scoped to [`PipelineRunSourceProperties`].
pub mod pipeline_run_source_properties {
    use super::*;
    /// Kind of run source; only blob sources appear here.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        AzureStorageBlob,
    }
}
/// Target of a pipeline run.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineRunTargetProperties {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<pipeline_run_target_properties::Type>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
}
/// Enum types scoped to [`PipelineRunTargetProperties`].
pub mod pipeline_run_target_properties {
    use super::*;
    /// Kind of run target; only blob targets appear here.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        AzureStorageBlob,
    }
}
/// Progress of a long-running operation; the percentage is carried as a string by the API.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProgressProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub percentage: Option<String>,
}
/// Describes what triggered a pipeline run; currently just the optional source trigger.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineTriggerDescriptor {
    #[serde(rename = "sourceTrigger", default, skip_serializing_if = "Option::is_none")]
    pub source_trigger: Option<PipelineSourceTriggerDescriptor>,
}
/// Details of a source trigger event; timestamp is carried as a string by the API.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineSourceTriggerDescriptor {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub timestamp: Option<String>,
}
/// Paged list of [`PrivateEndpointConnection`] resources; `next_link` holds the URI of the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnectionListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PrivateEndpointConnection>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A private endpoint connection resource; common ARM fields come flattened from [`ProxyResource`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnection {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PrivateEndpointConnectionProperties>,
}
/// Properties of a private endpoint connection: the endpoint, the approval state, and provisioning state.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnectionProperties {
    #[serde(rename = "privateEndpoint", default, skip_serializing_if = "Option::is_none")]
    pub private_endpoint: Option<PrivateEndpoint>,
    #[serde(rename = "privateLinkServiceConnectionState", default, skip_serializing_if = "Option::is_none")]
    pub private_link_service_connection_state: Option<PrivateLinkServiceConnectionState>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<private_endpoint_connection_properties::ProvisioningState>,
}
/// Enum types scoped to [`PrivateEndpointConnectionProperties`].
pub mod private_endpoint_connection_properties {
    use super::*;
    /// Lifecycle state reported by the service.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Creating,
        Updating,
        Deleting,
        Succeeded,
        Failed,
        Canceled,
    }
}
/// Reference to a private endpoint by ARM resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpoint {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// Approval state of a private link service connection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkServiceConnectionState {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<private_link_service_connection_state::Status>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "actionsRequired", default, skip_serializing_if = "Option::is_none")]
    pub actions_required: Option<private_link_service_connection_state::ActionsRequired>,
}
/// Enum types scoped to [`PrivateLinkServiceConnectionState`].
pub mod private_link_service_connection_state {
    use super::*;
    /// Approval status of the connection.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Approved,
        Pending,
        Rejected,
        Disconnected,
    }
    /// Consumer action needed to keep the connection working.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ActionsRequired {
        None,
        Recreate,
    }
}
/// Paged list of [`Registry`] resources; `next_link` holds the URI of the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RegistryListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Registry>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A container registry resource; tracked-resource fields come flattened from [`Resource`],
/// and `sku` is required on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Registry {
    #[serde(flatten)]
    pub resource: Resource,
    pub sku: Sku,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<IdentityProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<RegistryProperties>,
}
/// SKU of a registry; `name` is required, `tier` is read-only on the service side.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Sku {
    pub name: sku::Name,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<sku::Tier>,
}
/// Enum types scoped to [`Sku`].
pub mod sku {
    use super::*;
    /// SKU name; variant sets mirror `Tier`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Name {
        Classic,
        Basic,
        Standard,
        Premium,
    }
    /// SKU tier; variant sets mirror `Name`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Tier {
        Classic,
        Basic,
        Standard,
        Premium,
    }
}
/// Full property bag of a registry: endpoints, policies, network rules, encryption,
/// private endpoint connections, and feature toggles.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RegistryProperties {
    #[serde(rename = "loginServer", default, skip_serializing_if = "Option::is_none")]
    pub login_server: Option<String>,
    #[serde(rename = "creationDate", default, skip_serializing_if = "Option::is_none")]
    pub creation_date: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<registry_properties::ProvisioningState>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<Status>,
    #[serde(rename = "adminUserEnabled", default, skip_serializing_if = "Option::is_none")]
    pub admin_user_enabled: Option<bool>,
    #[serde(rename = "networkRuleSet", default, skip_serializing_if = "Option::is_none")]
    pub network_rule_set: Option<NetworkRuleSet>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub policies: Option<Policies>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encryption: Option<EncryptionProperty>,
    #[serde(rename = "dataEndpointEnabled", default, skip_serializing_if = "Option::is_none")]
    pub data_endpoint_enabled: Option<bool>,
    #[serde(rename = "dataEndpointHostNames", default, skip_serializing_if = "Vec::is_empty")]
    pub data_endpoint_host_names: Vec<String>,
    #[serde(rename = "privateEndpointConnections", default, skip_serializing_if = "Vec::is_empty")]
    pub private_endpoint_connections: Vec<PrivateEndpointConnection>,
    #[serde(rename = "publicNetworkAccess", default, skip_serializing_if = "Option::is_none")]
    pub public_network_access: Option<registry_properties::PublicNetworkAccess>,
    #[serde(rename = "networkRuleBypassOptions", default, skip_serializing_if = "Option::is_none")]
    pub network_rule_bypass_options: Option<registry_properties::NetworkRuleBypassOptions>,
    #[serde(rename = "zoneRedundancy", default, skip_serializing_if = "Option::is_none")]
    pub zone_redundancy: Option<registry_properties::ZoneRedundancy>,
    #[serde(rename = "anonymousPullEnabled", default, skip_serializing_if = "Option::is_none")]
    pub anonymous_pull_enabled: Option<bool>,
}
/// Enum types scoped to [`RegistryProperties`].
pub mod registry_properties {
    use super::*;
    /// Lifecycle state reported by the service.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Creating,
        Updating,
        Deleting,
        Succeeded,
        Failed,
        Canceled,
    }
    /// Whether the registry is reachable over the public internet.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PublicNetworkAccess {
        Enabled,
        Disabled,
    }
    /// Which trusted traffic may bypass the network rules.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum NetworkRuleBypassOptions {
        AzureServices,
        None,
    }
    /// Zone-redundancy setting of the registry.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ZoneRedundancy {
        Enabled,
        Disabled,
    }
}
/// Display status of a resource at a point in time (status text, detail, timestamp).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Status {
    #[serde(rename = "displayStatus", default, skip_serializing_if = "Option::is_none")]
    pub display_status: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub timestamp: Option<String>,
}
/// Network access rules for a registry; `default_action` (applied when no rule matches) is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NetworkRuleSet {
    #[serde(rename = "defaultAction")]
    pub default_action: network_rule_set::DefaultAction,
    #[serde(rename = "virtualNetworkRules", default, skip_serializing_if = "Vec::is_empty")]
    pub virtual_network_rules: Vec<VirtualNetworkRule>,
    #[serde(rename = "ipRules", default, skip_serializing_if = "Vec::is_empty")]
    pub ip_rules: Vec<IpRule>,
}
/// Enum types scoped to [`NetworkRuleSet`].
pub mod network_rule_set {
    use super::*;
    /// Action taken when no network rule matches.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum DefaultAction {
        Allow,
        Deny,
    }
}
/// Collection of optional registry policies (quarantine, trust, retention, export).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Policies {
    #[serde(rename = "quarantinePolicy", default, skip_serializing_if = "Option::is_none")]
    pub quarantine_policy: Option<QuarantinePolicy>,
    #[serde(rename = "trustPolicy", default, skip_serializing_if = "Option::is_none")]
    pub trust_policy: Option<TrustPolicy>,
    #[serde(rename = "retentionPolicy", default, skip_serializing_if = "Option::is_none")]
    pub retention_policy: Option<RetentionPolicy>,
    #[serde(rename = "exportPolicy", default, skip_serializing_if = "Option::is_none")]
    pub export_policy: Option<ExportPolicy>,
}
/// Customer-managed key encryption settings for a registry.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncryptionProperty {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<encryption_property::Status>,
    #[serde(rename = "keyVaultProperties", default, skip_serializing_if = "Option::is_none")]
    pub key_vault_properties: Option<KeyVaultProperties>,
}
/// Enum types scoped to [`EncryptionProperty`].
pub mod encryption_property {
    use super::*;
    /// On/off switch; serialized lowercase, unlike most enums in this file.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        #[serde(rename = "enabled")]
        Enabled,
        #[serde(rename = "disabled")]
        Disabled,
    }
}
/// Network rule keyed by a subnet resource id (`id` is required); only `Allow` is modeled.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualNetworkRule {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub action: Option<virtual_network_rule::Action>,
    pub id: String,
}
/// Enum types scoped to [`VirtualNetworkRule`].
pub mod virtual_network_rule {
    use super::*;
    /// Only allow-rules exist for this rule kind.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Action {
        Allow,
    }
}
/// Network rule keyed by an IP address or range (`value` is required); only `Allow` is modeled.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IpRule {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub action: Option<ip_rule::Action>,
    pub value: String,
}
/// Enum types scoped to [`IpRule`].
pub mod ip_rule {
    use super::*;
    /// Only allow-rules exist for this rule kind.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Action {
        Allow,
    }
}
/// Quarantine policy toggle for a registry.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct QuarantinePolicy {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<quarantine_policy::Status>,
}
/// Enum types scoped to [`QuarantinePolicy`].
pub mod quarantine_policy {
    use super::*;
    /// On/off switch; serialized lowercase.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        #[serde(rename = "enabled")]
        Enabled,
        #[serde(rename = "disabled")]
        Disabled,
    }
}
/// Content-trust policy of a registry.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrustPolicy {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<trust_policy::Type>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<trust_policy::Status>,
}
/// Enum types scoped to [`TrustPolicy`].
pub mod trust_policy {
    use super::*;
    /// Trust policy kind; only Notary is modeled.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        Notary,
    }
    /// On/off switch; serialized lowercase.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        #[serde(rename = "enabled")]
        Enabled,
        #[serde(rename = "disabled")]
        Disabled,
    }
}
/// Retention policy for untagged manifests.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RetentionPolicy {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub days: Option<i32>,
    #[serde(rename = "lastUpdatedTime", default, skip_serializing_if = "Option::is_none")]
    pub last_updated_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<retention_policy::Status>,
}
/// Enum types scoped to [`RetentionPolicy`].
pub mod retention_policy {
    use super::*;
    /// On/off switch; serialized lowercase.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        #[serde(rename = "enabled")]
        Enabled,
        #[serde(rename = "disabled")]
        Disabled,
    }
}
/// Export policy toggle for a registry.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExportPolicy {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<export_policy::Status>,
}
/// Enum types scoped to [`ExportPolicy`].
pub mod export_policy {
    use super::*;
    /// On/off switch; serialized lowercase.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        #[serde(rename = "enabled")]
        Enabled,
        #[serde(rename = "disabled")]
        Disabled,
    }
}
/// Key Vault key used for customer-managed encryption, plus rotation metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyVaultProperties {
    #[serde(rename = "keyIdentifier", default, skip_serializing_if = "Option::is_none")]
    pub key_identifier: Option<String>,
    #[serde(rename = "versionedKeyIdentifier", default, skip_serializing_if = "Option::is_none")]
    pub versioned_key_identifier: Option<String>,
    // Client id of the identity used to access the key vault.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<String>,
    #[serde(rename = "keyRotationEnabled", default, skip_serializing_if = "Option::is_none")]
    pub key_rotation_enabled: Option<bool>,
    #[serde(rename = "lastKeyRotationTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub last_key_rotation_timestamp: Option<String>,
}
/// PATCH body for updating a registry; every field is optional so omitted fields are untouched.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RegistryUpdateParameters {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<IdentityProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<RegistryPropertiesUpdateParameters>,
}
/// Updatable subset of [`RegistryProperties`] for PATCH requests.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RegistryPropertiesUpdateParameters {
    #[serde(rename = "adminUserEnabled", default, skip_serializing_if = "Option::is_none")]
    pub admin_user_enabled: Option<bool>,
    #[serde(rename = "networkRuleSet", default, skip_serializing_if = "Option::is_none")]
    pub network_rule_set: Option<NetworkRuleSet>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub policies: Option<Policies>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encryption: Option<EncryptionProperty>,
    #[serde(rename = "dataEndpointEnabled", default, skip_serializing_if = "Option::is_none")]
    pub data_endpoint_enabled: Option<bool>,
    #[serde(rename = "publicNetworkAccess", default, skip_serializing_if = "Option::is_none")]
    pub public_network_access: Option<registry_properties_update_parameters::PublicNetworkAccess>,
    #[serde(rename = "networkRuleBypassOptions", default, skip_serializing_if = "Option::is_none")]
    pub network_rule_bypass_options: Option<registry_properties_update_parameters::NetworkRuleBypassOptions>,
    #[serde(rename = "anonymousPullEnabled", default, skip_serializing_if = "Option::is_none")]
    pub anonymous_pull_enabled: Option<bool>,
}
/// Enum types scoped to [`RegistryPropertiesUpdateParameters`].
pub mod registry_properties_update_parameters {
    use super::*;
    /// Whether the registry is reachable over the public internet.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PublicNetworkAccess {
        Enabled,
        Disabled,
    }
    /// Which trusted traffic may bypass the network rules.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum NetworkRuleBypassOptions {
        AzureServices,
        None,
    }
}
/// Unpaged list of [`RegistryUsage`] quota entries.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RegistryUsageListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<RegistryUsage>,
}
/// Quota usage entry for a registry (limit vs. current value, with a unit).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RegistryUsage {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub limit: Option<i64>,
    #[serde(rename = "currentValue", default, skip_serializing_if = "Option::is_none")]
    pub current_value: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub unit: Option<registry_usage::Unit>,
}
/// Enum types scoped to [`RegistryUsage`].
pub mod registry_usage {
    use super::*;
    /// Unit of measurement for the usage value.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Unit {
        Count,
        Bytes,
    }
}
/// Paged list of [`PrivateLinkResource`] entries; `next_link` holds the URI of the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResourceListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PrivateLinkResource>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A private link resource exposed by the registry.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResource {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PrivateLinkResourceProperties>,
}
/// Group id, required members, and DNS zone names of a private link resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResourceProperties {
    #[serde(rename = "groupId", default, skip_serializing_if = "Option::is_none")]
    pub group_id: Option<String>,
    #[serde(rename = "requiredMembers", default, skip_serializing_if = "Vec::is_empty")]
    pub required_members: Vec<String>,
    #[serde(rename = "requiredZoneNames", default, skip_serializing_if = "Vec::is_empty")]
    pub required_zone_names: Vec<String>,
}
/// Admin credentials of a registry: the username and its password set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RegistryListCredentialsResult {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub username: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub passwords: Vec<RegistryPassword>,
}
/// One named admin password of a registry.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RegistryPassword {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<registry_password::Name>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
}
/// Enum types scoped to [`RegistryPassword`].
pub mod registry_password {
    use super::*;
    /// Which of the two admin password slots this entry refers to; serialized lowercase.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Name {
        #[serde(rename = "password")]
        Password,
        #[serde(rename = "password2")]
        Password2,
    }
}
/// Request body selecting which admin password slot to regenerate; `name` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RegenerateCredentialParameters {
    pub name: regenerate_credential_parameters::Name,
}
/// Enum types scoped to [`RegenerateCredentialParameters`].
pub mod regenerate_credential_parameters {
    use super::*;
    /// Password slot to regenerate; serialized lowercase, mirrors `registry_password::Name`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Name {
        #[serde(rename = "password")]
        Password,
        #[serde(rename = "password2")]
        Password2,
    }
}
/// Paged list of [`Replication`] resources; `next_link` holds the URI of the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReplicationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Replication>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A registry replication resource; tracked-resource fields come flattened from [`Resource`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Replication {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ReplicationProperties>,
}
/// Properties of a replication: provisioning state, status, endpoint toggle, and zone redundancy.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReplicationProperties {
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<replication_properties::ProvisioningState>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<Status>,
    #[serde(rename = "regionEndpointEnabled", default, skip_serializing_if = "Option::is_none")]
    pub region_endpoint_enabled: Option<bool>,
    #[serde(rename = "zoneRedundancy", default, skip_serializing_if = "Option::is_none")]
    pub zone_redundancy: Option<replication_properties::ZoneRedundancy>,
}
/// Enum types scoped to [`ReplicationProperties`].
pub mod replication_properties {
    use super::*;
    /// Lifecycle state reported by the service.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Creating,
        Updating,
        Deleting,
        Succeeded,
        Failed,
        Canceled,
    }
    /// Zone-redundancy setting of the replication.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ZoneRedundancy {
        Enabled,
        Disabled,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReplicationUpdateParameters {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<ReplicationUpdateParametersProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReplicationUpdateParametersProperties {
#[serde(rename = "regionEndpointEnabled", default, skip_serializing_if = "Option::is_none")]
pub region_endpoint_enabled: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScopeMapListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<ScopeMap>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScopeMap {
#[serde(flatten)]
pub proxy_resource: ProxyResource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<ScopeMapProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScopeMapProperties {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
#[serde(rename = "creationDate", default, skip_serializing_if = "Option::is_none")]
pub creation_date: Option<String>,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<scope_map_properties::ProvisioningState>,
pub actions: Vec<String>,
}
pub mod scope_map_properties {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProvisioningState {
Creating,
Updating,
Deleting,
Succeeded,
Failed,
Canceled,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScopeMapUpdateParameters {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<ScopeMapPropertiesUpdateParameters>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScopeMapPropertiesUpdateParameters {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub actions: Vec<String>,
}
/// Paged list of `Token` resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TokenListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Token>,
    /// URI of the next result page, if any.
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A token resource (scoped credential associated with a scope map).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Token {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<TokenProperties>,
}
/// Properties of a token.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TokenProperties {
    #[serde(rename = "creationDate", default, skip_serializing_if = "Option::is_none")]
    pub creation_date: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<token_properties::ProvisioningState>,
    /// Resource id of the scope map this token is bound to.
    #[serde(rename = "scopeMapId", default, skip_serializing_if = "Option::is_none")]
    pub scope_map_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub credentials: Option<TokenCredentialsProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<token_properties::Status>,
}
pub mod token_properties {
    use super::*;
    /// Lifecycle state reported by the resource provider.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Creating,
        Updating,
        Deleting,
        Succeeded,
        Failed,
        Canceled,
    }
    /// Whether the token is usable; wire values are lowercase.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        #[serde(rename = "enabled")]
        Enabled,
        #[serde(rename = "disabled")]
        Disabled,
    }
}
/// Credentials (certificates and/or passwords) attached to a token.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TokenCredentialsProperties {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub certificates: Vec<TokenCertificate>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub passwords: Vec<TokenPassword>,
}
/// Azure Active Directory object reference.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ActiveDirectoryObject {
    #[serde(rename = "objectId", default, skip_serializing_if = "Option::is_none")]
    pub object_id: Option<String>,
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
    pub tenant_id: Option<String>,
}
/// A certificate credential attached to a token.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TokenCertificate {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<token_certificate::Name>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub expiry: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub thumbprint: Option<String>,
    /// Base64-encoded PEM certificate payload.
    #[serde(rename = "encodedPemCertificate", default, skip_serializing_if = "Option::is_none")]
    pub encoded_pem_certificate: Option<String>,
}
pub mod token_certificate {
    use super::*;
    /// Certificate slot identifier; wire values are "certificate1" / "certificate2".
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Name {
        #[serde(rename = "certificate1")]
        Certificate1,
        #[serde(rename = "certificate2")]
        Certificate2,
    }
}
/// A password credential attached to a token.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TokenPassword {
    #[serde(rename = "creationTime", default, skip_serializing_if = "Option::is_none")]
    pub creation_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub expiry: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<token_password::Name>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
}
pub mod token_password {
    use super::*;
    /// Password slot identifier; wire values are "password1" / "password2".
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Name {
        #[serde(rename = "password1")]
        Password1,
        #[serde(rename = "password2")]
        Password2,
    }
}
/// Patch body for updating a token.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TokenUpdateParameters {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<TokenUpdateProperties>,
}
/// Updatable token properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TokenUpdateProperties {
    #[serde(rename = "scopeMapId", default, skip_serializing_if = "Option::is_none")]
    pub scope_map_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<token_update_properties::Status>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub credentials: Option<TokenCredentialsProperties>,
}
pub mod token_update_properties {
    use super::*;
    /// Whether the token is usable; wire values are lowercase.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        #[serde(rename = "enabled")]
        Enabled,
        #[serde(rename = "disabled")]
        Disabled,
    }
}
/// Request body for generating token credentials.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GenerateCredentialsParameters {
    /// Resource id of the token to generate credentials for.
    #[serde(rename = "tokenId", default, skip_serializing_if = "Option::is_none")]
    pub token_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub expiry: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<generate_credentials_parameters::Name>,
}
pub mod generate_credentials_parameters {
    use super::*;
    /// Password slot identifier; wire values are "password1" / "password2".
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Name {
        #[serde(rename = "password1")]
        Password1,
        #[serde(rename = "password2")]
        Password2,
    }
}
/// Result of a generate-credentials operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GenerateCredentialsResult {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub username: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub passwords: Vec<TokenPassword>,
}
/// Paged list of `Webhook` resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebhookListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Webhook>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A webhook resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Webhook {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<WebhookProperties>,
}
/// Properties of a webhook.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebhookProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<webhook_properties::Status>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub scope: Option<String>,
    // non-Option, no default: required when deserializing
    pub actions: Vec<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<webhook_properties::ProvisioningState>,
}
pub mod webhook_properties {
    use super::*;
    /// Whether the webhook fires; wire values are lowercase.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        #[serde(rename = "enabled")]
        Enabled,
        #[serde(rename = "disabled")]
        Disabled,
    }
    /// Lifecycle state reported by the resource provider.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Creating,
        Updating,
        Deleting,
        Succeeded,
        Failed,
        Canceled,
    }
}
/// Request body for creating a webhook.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebhookCreateParameters {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    // non-Option, no default: required when deserializing
    pub location: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<WebhookPropertiesCreateParameters>,
}
/// Webhook properties supplied at creation time.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebhookPropertiesCreateParameters {
    // always serialized (no skip attribute): required on create
    #[serde(rename = "serviceUri")]
    pub service_uri: String,
    #[serde(rename = "customHeaders", default, skip_serializing_if = "Option::is_none")]
    pub custom_headers: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<webhook_properties_create_parameters::Status>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub scope: Option<String>,
    pub actions: Vec<String>,
}
pub mod webhook_properties_create_parameters {
    use super::*;
    /// Whether the webhook fires; wire values are lowercase.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        #[serde(rename = "enabled")]
        Enabled,
        #[serde(rename = "disabled")]
        Disabled,
    }
}
/// Patch body for updating a webhook.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebhookUpdateParameters {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<WebhookPropertiesUpdateParameters>,
}
/// Updatable webhook properties (all optional, unlike the create variant).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebhookPropertiesUpdateParameters {
    #[serde(rename = "serviceUri", default, skip_serializing_if = "Option::is_none")]
    pub service_uri: Option<String>,
    #[serde(rename = "customHeaders", default, skip_serializing_if = "Option::is_none")]
    pub custom_headers: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<webhook_properties_update_parameters::Status>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub scope: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub actions: Vec<String>,
}
pub mod webhook_properties_update_parameters {
    use super::*;
    /// Whether the webhook fires; wire values are lowercase.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        #[serde(rename = "enabled")]
        Enabled,
        #[serde(rename = "disabled")]
        Disabled,
    }
}
/// Basic information about an event (its identifier only).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EventInfo {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// Paged list of `Event` resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EventListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Event>,
    /// URI of the next result page, if any.
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A webhook event: the request sent and the response received.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Event {
    // flattened: the `id` field appears at the top level of the JSON
    #[serde(flatten)]
    pub event_info: EventInfo,
    #[serde(rename = "eventRequestMessage", default, skip_serializing_if = "Option::is_none")]
    pub event_request_message: Option<EventRequestMessage>,
    #[serde(rename = "eventResponseMessage", default, skip_serializing_if = "Option::is_none")]
    pub event_response_message: Option<EventResponseMessage>,
}
/// The HTTP request a webhook event was delivered with.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EventRequestMessage {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub content: Option<EventContent>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub headers: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub method: Option<String>,
    #[serde(rename = "requestUri", default, skip_serializing_if = "Option::is_none")]
    pub request_uri: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
}
/// The HTTP response received for a webhook event delivery.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EventResponseMessage {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub content: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub headers: Option<serde_json::Value>,
    #[serde(rename = "reasonPhrase", default, skip_serializing_if = "Option::is_none")]
    pub reason_phrase: Option<String>,
    // kept as a string (not a number) to match the wire format
    #[serde(rename = "statusCode", default, skip_serializing_if = "Option::is_none")]
    pub status_code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
}
/// Payload of a webhook event.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EventContent {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub timestamp: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub action: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<Target>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub request: Option<Request>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub actor: Option<Actor>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub source: Option<Source>,
}
/// The artifact an event refers to.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Target {
    #[serde(rename = "mediaType", default, skip_serializing_if = "Option::is_none")]
    pub media_type: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub size: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub digest: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub length: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub repository: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub url: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
}
/// The inbound request that triggered the event.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Request {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub addr: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub host: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub method: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub useragent: Option<String>,
}
/// The identity that initiated the event.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Actor {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
}
/// The node that generated the event.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Source {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub addr: Option<String>,
    #[serde(rename = "instanceID", default, skip_serializing_if = "Option::is_none")]
    pub instance_id: Option<String>,
}
/// Webhook callback configuration (target URI and extra headers).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CallbackConfig {
    // always serialized (no skip attribute): required field
    #[serde(rename = "serviceUri")]
    pub service_uri: String,
    #[serde(rename = "customHeaders", default, skip_serializing_if = "Option::is_none")]
    pub custom_headers: Option<serde_json::Value>,
}
/// Top-level error envelope returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponseBody>,
}
/// Error details: code and message are required on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponseBody {
    pub code: String,
    pub message: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub details: Option<InnerErrorDescription>,
}
/// Nested error detail.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InnerErrorDescription {
    pub code: String,
    pub message: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
}
|
use core::cell::RefCell;
use core::mem;
use std::{marker::PhantomData, ops::Deref};
use crate::{Bin, SBin};
std::thread_local! {
    // Per-thread pointer to the currently active `ScopedRiSetup`. It is only
    // ever `Some` while `ScopedRiSetup::scoped` is on this thread's stack;
    // `None` means no re-integration scope is active.
    static THREAD_LOCAL_BIN: RefCell<Option<*const ScopedRiSetup<'static>>> = RefCell::new(None);
}
/// This struct contains functions that give access to the re-integration scope.
pub struct RiScope {
    // pure static: never instantiated, only used for its associated functions
    _phantom: PhantomData<()>,
}
impl RiScope {
    /// Returns the re-integrated binary. See `AnyBin::try_to_re_integrate`.
    ///
    /// This returns `None` if:
    ///
    /// * There's no RI scope set up.
    /// * `ReIntegrationFn` returned `None`.
    #[inline]
    pub fn try_re_integrate(slice: &[u8]) -> Option<Bin> {
        THREAD_LOCAL_BIN.with(|value| {
            let borrowed = value.borrow();
            if let Some(value) = borrowed.deref() {
                // The raw pointer is only non-`None` while `ScopedRiSetup::scoped`
                // is on this thread's stack, so dereferencing here is sound (the
                // guard in `scoped` resets it before the setup can be dropped).
                // NOTE(review): the `RefCell` borrow is held while the callback
                // runs; a re-entrant `scoped` call from inside the callback would
                // panic on `borrow_mut` — confirm callbacks never re-enter.
                let config: &ScopedRiSetup = unsafe { &**value };
                (config.re_integration_fn)(&config.binaries, slice)
            } else {
                // no active scope on this thread
                None
            }
        })
    }
    /// Returns the re-integrated binary. See `AnyBin::try_to_re_integrate`.
    ///
    /// This returns `None` if:
    ///
    /// * There's no RI scope set up.
    /// * `SyncReIntegrationSync` returned `None`.
    #[inline]
    pub fn try_re_integrate_sync(slice: &[u8]) -> Option<SBin> {
        THREAD_LOCAL_BIN.with(|value| {
            let borrowed = value.borrow();
            if let Some(value) = borrowed.deref() {
                // Same soundness argument (and the same re-entrancy caveat) as
                // in `try_re_integrate` above.
                let config: &ScopedRiSetup = unsafe { &**value };
                (config.sync_re_integration_fn)(&config.binaries, slice)
            } else {
                None
            }
        })
    }
}
/// Callback that tries to re-integrate `slice` against the scope's binaries,
/// producing a `Bin` on success.
pub type ReIntegrationFn = fn(binaries: &Binaries, slice: &[u8]) -> Option<Bin>;
/// Like `ReIntegrationFn`, but producing a synchronized `SBin`.
pub type SyncReIntegrationSync = fn(binaries: &Binaries, slice: &[u8]) -> Option<SBin>;
/// A re-integration scope setup.
pub struct ScopedRiSetup<'a> {
    // candidate binaries the callbacks may re-integrate against
    binaries: Binaries<'a>,
    re_integration_fn: ReIntegrationFn,
    sync_re_integration_fn: SyncReIntegrationSync,
}
impl<'a> ScopedRiSetup<'a> {
pub fn new(
binaries: Binaries<'a>,
re_integration_fn: ReIntegrationFn,
sync_re_integration_fn: SyncReIntegrationSync,
) -> Self {
Self {
binaries,
re_integration_fn,
sync_re_integration_fn,
}
}
}
impl<'a> ScopedRiSetup<'a> {
    /// Runs `fun` with this setup installed as the thread's active RI scope.
    ///
    /// While `fun` executes, `RiScope::try_re_integrate` and
    /// `RiScope::try_re_integrate_sync` on this thread use this setup. The
    /// previous scope (if any) is restored afterwards, so scopes nest.
    #[inline]
    pub fn scoped<TFn, TRet>(&self, fun: TFn) -> TRet
    where
        TFn: FnOnce() -> TRet,
    {
        THREAD_LOCAL_BIN.with(|tl_value| {
            // save the previous value (in case we nest scopes).
            let previous_value = tl_value.borrow_mut().take();
            // make sure the thread-local-value is removed even if the function panics (this
            // is important, since we messed with lifetimes -> 'a to 'static ... so we have
            // to make sure it is NEVER accesses outside the scope). This is dropped at the
            // end of this scope.
            let cleanup_on_drop = CleanupOnDrop {
                cell: tl_value,
                previous_value,
            };
            // new thread-local-value
            // This erases 'a to 'static; the pointer is only reachable via the
            // thread-local, which `cleanup_on_drop` restores (on exit or panic)
            // before `self` can go out of scope.
            let this = unsafe {
                mem::transmute::<&ScopedRiSetup<'a>, *const ScopedRiSetup<'static>>(self)
            };
            tl_value.replace(Some(this));
            let result = fun();
            // i'm not 100% sure whether the rust specification allows optimization of unused
            // values (e.g eager dropping; dropping before scope ends)... but to make sure
            // it does not do this, we use it here (it's a no-op).
            cleanup_on_drop.done();
            result
        })
    }
}
/// The candidate binaries a re-integration callback may match a slice against.
pub struct Binaries<'a> {
    // non-synchronized candidate, if any
    bin: Option<&'a Bin>,
    // synchronized candidate, if any
    sync_bin: Option<&'a SBin>,
}
impl<'a> Binaries<'a> {
pub fn new(bin: Option<&'a Bin>, sync_bin: Option<&'a SBin>) -> Self {
Self { bin, sync_bin }
}
pub fn new_bin(bin: &'a Bin) -> Self {
Self::new(Some(bin), None)
}
pub fn new_sync_bin(bin: &'a SBin) -> Self {
Self::new(None, Some(bin))
}
#[inline]
pub fn bin(&self) -> Option<&'a Bin> {
self.bin
}
#[inline]
pub fn sync_bin(&self) -> Option<&'a SBin> {
self.sync_bin
}
#[inline]
pub fn both(&self) -> (Option<&'a Bin>, Option<&'a SBin>) {
(self.bin, self.sync_bin)
}
}
/// This is required to clean up the scope in case the scope-function panics.
struct CleanupOnDrop<'a> {
    // the thread-local cell to restore on drop
    cell: &'a RefCell<Option<*const ScopedRiSetup<'static>>>,
    // the value the cell held before this scope was entered
    previous_value: Option<*const ScopedRiSetup<'static>>,
}
impl<'a> CleanupOnDrop<'a> {
    /// No-op called by `scoped` at the end of the scope to keep this guard
    /// observably used until that point (guards against early drop).
    #[inline]
    fn done(&self) {
        // intentionally a no-op
    }
}
impl<'a> Drop for CleanupOnDrop<'a> {
    fn drop(&mut self) {
        // sets the previous value
        // (runs on normal exit AND during unwinding, so the thread-local never
        // keeps a lifetime-erased pointer past the scope)
        self.cell.replace(self.previous_value.take());
    }
}
|
// svd2rust-generated reader/writer plumbing for the DDRPHYC_PGCR register.
#[doc = "Reader of register DDRPHYC_PGCR"]
pub type R = crate::R<u32, super::DDRPHYC_PGCR>;
#[doc = "Writer for register DDRPHYC_PGCR"]
pub type W = crate::W<u32, super::DDRPHYC_PGCR>;
#[doc = "Register DDRPHYC_PGCR `reset()`'s with value 0x01bc_2e04"]
impl crate::ResetValue for super::DDRPHYC_PGCR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x01bc_2e04
    }
}
#[doc = "Reader of field `ITMDMD`"]
pub type ITMDMD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ITMDMD`"]
pub struct ITMDMD_W<'a> {
    w: &'a mut W,
}
impl<'a> ITMDMD_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // bit 0: clear the field, then OR in the new value
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `DQSCFG`"]
pub type DQSCFG_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DQSCFG`"]
pub struct DQSCFG_W<'a> {
    w: &'a mut W,
}
impl<'a> DQSCFG_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // bit 1
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `DFTCMP`"]
pub type DFTCMP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DFTCMP`"]
pub struct DFTCMP_W<'a> {
    w: &'a mut W,
}
impl<'a> DFTCMP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // bit 2
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `DFTLMT`"]
pub type DFTLMT_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DFTLMT`"]
pub struct DFTLMT_W<'a> {
    w: &'a mut W,
}
impl<'a> DFTLMT_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bits 3:4; `unsafe` because values are not range-checked
        self.w.bits = (self.w.bits & !(0x03 << 3)) | (((value as u32) & 0x03) << 3);
        self.w
    }
}
#[doc = "Reader of field `DTOSEL`"]
pub type DTOSEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DTOSEL`"]
pub struct DTOSEL_W<'a> {
    w: &'a mut W,
}
impl<'a> DTOSEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 4-bit field at bits 5:8
        self.w.bits = (self.w.bits & !(0x0f << 5)) | (((value as u32) & 0x0f) << 5);
        self.w
    }
}
#[doc = "Reader of field `CKEN`"]
pub type CKEN_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CKEN`"]
pub struct CKEN_W<'a> {
    w: &'a mut W,
}
impl<'a> CKEN_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 3-bit field at bits 9:11
        self.w.bits = (self.w.bits & !(0x07 << 9)) | (((value as u32) & 0x07) << 9);
        self.w
    }
}
#[doc = "Reader of field `CKDV`"]
pub type CKDV_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CKDV`"]
pub struct CKDV_W<'a> {
    w: &'a mut W,
}
impl<'a> CKDV_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bits 12:13
        self.w.bits = (self.w.bits & !(0x03 << 12)) | (((value as u32) & 0x03) << 12);
        self.w
    }
}
#[doc = "Reader of field `CKINV`"]
pub type CKINV_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CKINV`"]
pub struct CKINV_W<'a> {
    w: &'a mut W,
}
impl<'a> CKINV_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // bit 14
        self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);
        self.w
    }
}
#[doc = "Reader of field `IOLB`"]
pub type IOLB_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `IOLB`"]
pub struct IOLB_W<'a> {
    w: &'a mut W,
}
impl<'a> IOLB_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // bit 15
        self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);
        self.w
    }
}
#[doc = "Reader of field `IODDRM`"]
pub type IODDRM_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `IODDRM`"]
pub struct IODDRM_W<'a> {
    w: &'a mut W,
}
impl<'a> IODDRM_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bits 16:17
        self.w.bits = (self.w.bits & !(0x03 << 16)) | (((value as u32) & 0x03) << 16);
        self.w
    }
}
#[doc = "Reader of field `RANKEN`"]
pub type RANKEN_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `RANKEN`"]
pub struct RANKEN_W<'a> {
    w: &'a mut W,
}
impl<'a> RANKEN_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 4-bit field at bits 18:21
        self.w.bits = (self.w.bits & !(0x0f << 18)) | (((value as u32) & 0x0f) << 18);
        self.w
    }
}
#[doc = "Reader of field `ZKSEL`"]
pub type ZKSEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `ZKSEL`"]
pub struct ZKSEL_W<'a> {
    w: &'a mut W,
}
impl<'a> ZKSEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bits 22:23
        self.w.bits = (self.w.bits & !(0x03 << 22)) | (((value as u32) & 0x03) << 22);
        self.w
    }
}
#[doc = "Reader of field `PDDISDX`"]
pub type PDDISDX_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PDDISDX`"]
pub struct PDDISDX_W<'a> {
    w: &'a mut W,
}
impl<'a> PDDISDX_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // bit 24
        self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);
        self.w
    }
}
#[doc = "Reader of field `RFSHDT`"]
pub type RFSHDT_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `RFSHDT`"]
pub struct RFSHDT_W<'a> {
    w: &'a mut W,
}
impl<'a> RFSHDT_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 4-bit field at bits 25:28
        self.w.bits = (self.w.bits & !(0x0f << 25)) | (((value as u32) & 0x0f) << 25);
        self.w
    }
}
#[doc = "Reader of field `LBDQSS`"]
pub type LBDQSS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LBDQSS`"]
pub struct LBDQSS_W<'a> {
    w: &'a mut W,
}
impl<'a> LBDQSS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // bit 29
        self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29);
        self.w
    }
}
#[doc = "Reader of field `LBGDQS`"]
pub type LBGDQS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LBGDQS`"]
pub struct LBGDQS_W<'a> {
    w: &'a mut W,
}
impl<'a> LBGDQS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // bit 30
        self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);
        self.w
    }
}
#[doc = "Reader of field `LBMODE`"]
pub type LBMODE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LBMODE`"]
pub struct LBMODE_W<'a> {
    w: &'a mut W,
}
impl<'a> LBMODE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // bit 31 (top bit)
        self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
        self.w
    }
}
impl R {
    // Field accessors: each shifts the raw 32-bit value down to the field's
    // LSB and masks to the field width (bit positions in the #[doc] lines).
    #[doc = "Bit 0 - ITMDMD"]
    #[inline(always)]
    pub fn itmdmd(&self) -> ITMDMD_R {
        ITMDMD_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - DQSCFG"]
    #[inline(always)]
    pub fn dqscfg(&self) -> DQSCFG_R {
        DQSCFG_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - DFTCMP"]
    #[inline(always)]
    pub fn dftcmp(&self) -> DFTCMP_R {
        DFTCMP_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bits 3:4 - DFTLMT"]
    #[inline(always)]
    pub fn dftlmt(&self) -> DFTLMT_R {
        DFTLMT_R::new(((self.bits >> 3) & 0x03) as u8)
    }
    #[doc = "Bits 5:8 - DTOSEL"]
    #[inline(always)]
    pub fn dtosel(&self) -> DTOSEL_R {
        DTOSEL_R::new(((self.bits >> 5) & 0x0f) as u8)
    }
    #[doc = "Bits 9:11 - CKEN"]
    #[inline(always)]
    pub fn cken(&self) -> CKEN_R {
        CKEN_R::new(((self.bits >> 9) & 0x07) as u8)
    }
    #[doc = "Bits 12:13 - CKDV"]
    #[inline(always)]
    pub fn ckdv(&self) -> CKDV_R {
        CKDV_R::new(((self.bits >> 12) & 0x03) as u8)
    }
    #[doc = "Bit 14 - CKINV"]
    #[inline(always)]
    pub fn ckinv(&self) -> CKINV_R {
        CKINV_R::new(((self.bits >> 14) & 0x01) != 0)
    }
    #[doc = "Bit 15 - IOLB"]
    #[inline(always)]
    pub fn iolb(&self) -> IOLB_R {
        IOLB_R::new(((self.bits >> 15) & 0x01) != 0)
    }
    #[doc = "Bits 16:17 - IODDRM"]
    #[inline(always)]
    pub fn ioddrm(&self) -> IODDRM_R {
        IODDRM_R::new(((self.bits >> 16) & 0x03) as u8)
    }
    #[doc = "Bits 18:21 - RANKEN"]
    #[inline(always)]
    pub fn ranken(&self) -> RANKEN_R {
        RANKEN_R::new(((self.bits >> 18) & 0x0f) as u8)
    }
    #[doc = "Bits 22:23 - ZKSEL"]
    #[inline(always)]
    pub fn zksel(&self) -> ZKSEL_R {
        ZKSEL_R::new(((self.bits >> 22) & 0x03) as u8)
    }
    #[doc = "Bit 24 - PDDISDX"]
    #[inline(always)]
    pub fn pddisdx(&self) -> PDDISDX_R {
        PDDISDX_R::new(((self.bits >> 24) & 0x01) != 0)
    }
    #[doc = "Bits 25:28 - RFSHDT"]
    #[inline(always)]
    pub fn rfshdt(&self) -> RFSHDT_R {
        RFSHDT_R::new(((self.bits >> 25) & 0x0f) as u8)
    }
    #[doc = "Bit 29 - LBDQSS"]
    #[inline(always)]
    pub fn lbdqss(&self) -> LBDQSS_R {
        LBDQSS_R::new(((self.bits >> 29) & 0x01) != 0)
    }
    #[doc = "Bit 30 - LBGDQS"]
    #[inline(always)]
    pub fn lbgdqs(&self) -> LBGDQS_R {
        LBGDQS_R::new(((self.bits >> 30) & 0x01) != 0)
    }
    #[doc = "Bit 31 - LBMODE"]
    #[inline(always)]
    pub fn lbmode(&self) -> LBMODE_R {
        LBMODE_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
// NOTE(review): svd2rust-style generated writer proxies; each method returns
// a field-writer that borrows this `W` so writes can be chained.
// Generated code — do not hand-edit in the real project.
impl W {
    #[doc = "Bit 0 - ITMDMD"]
    #[inline(always)]
    pub fn itmdmd(&mut self) -> ITMDMD_W {
        ITMDMD_W { w: self }
    }
    #[doc = "Bit 1 - DQSCFG"]
    #[inline(always)]
    pub fn dqscfg(&mut self) -> DQSCFG_W {
        DQSCFG_W { w: self }
    }
    #[doc = "Bit 2 - DFTCMP"]
    #[inline(always)]
    pub fn dftcmp(&mut self) -> DFTCMP_W {
        DFTCMP_W { w: self }
    }
    #[doc = "Bits 3:4 - DFTLMT"]
    #[inline(always)]
    pub fn dftlmt(&mut self) -> DFTLMT_W {
        DFTLMT_W { w: self }
    }
    #[doc = "Bits 5:8 - DTOSEL"]
    #[inline(always)]
    pub fn dtosel(&mut self) -> DTOSEL_W {
        DTOSEL_W { w: self }
    }
    #[doc = "Bits 9:11 - CKEN"]
    #[inline(always)]
    pub fn cken(&mut self) -> CKEN_W {
        CKEN_W { w: self }
    }
    #[doc = "Bits 12:13 - CKDV"]
    #[inline(always)]
    pub fn ckdv(&mut self) -> CKDV_W {
        CKDV_W { w: self }
    }
    #[doc = "Bit 14 - CKINV"]
    #[inline(always)]
    pub fn ckinv(&mut self) -> CKINV_W {
        CKINV_W { w: self }
    }
    #[doc = "Bit 15 - IOLB"]
    #[inline(always)]
    pub fn iolb(&mut self) -> IOLB_W {
        IOLB_W { w: self }
    }
    #[doc = "Bits 16:17 - IODDRM"]
    #[inline(always)]
    pub fn ioddrm(&mut self) -> IODDRM_W {
        IODDRM_W { w: self }
    }
    #[doc = "Bits 18:21 - RANKEN"]
    #[inline(always)]
    pub fn ranken(&mut self) -> RANKEN_W {
        RANKEN_W { w: self }
    }
    #[doc = "Bits 22:23 - ZKSEL"]
    #[inline(always)]
    pub fn zksel(&mut self) -> ZKSEL_W {
        ZKSEL_W { w: self }
    }
    #[doc = "Bit 24 - PDDISDX"]
    #[inline(always)]
    pub fn pddisdx(&mut self) -> PDDISDX_W {
        PDDISDX_W { w: self }
    }
    #[doc = "Bits 25:28 - RFSHDT"]
    #[inline(always)]
    pub fn rfshdt(&mut self) -> RFSHDT_W {
        RFSHDT_W { w: self }
    }
    #[doc = "Bit 29 - LBDQSS"]
    #[inline(always)]
    pub fn lbdqss(&mut self) -> LBDQSS_W {
        LBDQSS_W { w: self }
    }
    #[doc = "Bit 30 - LBGDQS"]
    #[inline(always)]
    pub fn lbgdqs(&mut self) -> LBGDQS_W {
        LBGDQS_W { w: self }
    }
    #[doc = "Bit 31 - LBMODE"]
    #[inline(always)]
    pub fn lbmode(&mut self) -> LBMODE_W {
        LBMODE_W { w: self }
    }
}
|
use crate::properties;
use serde::Deserialize;
/// Keyframed multi-dimensional animated property.
///
/// The short JSON keys (`k`, `x`, `ix`) suggest this follows the
/// Lottie/bodymovin animation schema — TODO confirm against the spec.
#[derive(Debug, Deserialize)]
pub struct MultiDimensionalKeyframed {
    /// `"k"`: keyframes describing the value over time.
    #[serde(rename = "k")]
    pub keyframes: Vec<properties::OffsetKeyframe>,
    /// `"x"`: optional expression string attached to the property.
    #[serde(rename = "x")]
    pub expression: Option<String>,
    /// `"ix"`: property index.
    #[serde(rename = "ix")]
    pub index: i64,
}
|
/// Rectangle with integer side lengths.
#[derive(Debug)]
struct Rectangle {
    length: u32,
    width: u32,
}

impl Rectangle {
    /// Area of this rectangle (product of its two sides).
    fn area_impl(&self) -> u32 {
        self.width * self.length
    }

    /// Accessor for the `length` field.
    fn get_length(&self) -> u32 {
        self.length
    }
}
/// Demonstrates several ways of computing a rectangle's area: free function,
/// tuple, struct fields, struct reference, and an inherent method.
///
/// Fix: local bindings were `Rec1`/`Rec2`/… which triggers the compiler's
/// `non_snake_case` warning; renamed to snake_case (output text unchanged).
fn main() {
    //calculate the area of rectangle without using struct
    let tate: u32 = 5;
    let yoko: u32 = 6;
    let rectangle = area(tate, yoko);
    println!("the area of rectangle:{}", rectangle);
    //using tuple
    let rec2 = (5, 6);
    println!("the area of Rec2:{}", area_(rec2));
    //using struct
    let rec1 = Rectangle { length: 5, width: 6 };
    println!("the area of Rec1:{}", area(rec1.length, rec1.width));
    //using struct and calculating by reference of instance
    let rec3 = Rectangle { length: 5, width: 6 };
    println!("the area of Rec3:{}", area_reference(&rec3));
    //using the Rectangle's method that is declared by impl
    let rec4 = Rectangle { length: 5, width: 6 };
    println!("{}", rec4.area_impl());
    println!("length:{}", rec4.get_length());
}
/// Computes a rectangle's area from its two side lengths.
fn area(length: u32, width: u32) -> u32 {
    width * length
}
/// Computes the area from a `(length, width)` tuple.
fn area_(dimensions: (u32, u32)) -> u32 {
    let (len, wid) = dimensions;
    len * wid
}
/// Computes the area through a shared reference, leaving ownership of the
/// `Rectangle` with the caller.
fn area_reference(rectangle: &Rectangle) -> u32 {
    rectangle.width * rectangle.length
}
|
extern crate ansi_term;
/// Prints a line colored with the given `ansi_term::Colour`; remaining
/// arguments are forwarded to `format!`.
macro_rules! color_print
{
    ($color: expr, $($x: expr),*) =>
    {
        println!("{}", $color.paint(format!($($x),*)));
    };
}
/// Same as `color_print!` but without a trailing newline (`print!`).
macro_rules! color_printnoln
{
    ($color: expr, $($x: expr),*) =>
    {
        print!("{}", $color.paint(format!($($x),*)));
    };
}
/// Green `[CPU] `-prefixed log line; active only with the `cpu_log` feature.
#[cfg(feature="cpu_log")]
macro_rules! cpu_print
{
    ($y: expr) => {color_print!($crate::ansi_term::Colour::Green, concat!("[CPU] ", $y))};
    ($y: expr, $($x: expr),*) =>
    {
        color_print!($crate::ansi_term::Colour::Green, concat!("[CPU] ", $y), $($x),*)
    };
}
/// No-op stub used when `cpu_log` is disabled; swallows all arguments.
#[cfg(not(feature="cpu_log"))]
macro_rules! cpu_print {($($x: expr),*) => {()};}
/// Blue `[DISAS] `-prefixed log line; always compiled (no feature gate).
macro_rules! disas_print
{
    ($y: expr) => {color_print!($crate::ansi_term::Colour::Blue, concat!("[DISAS] ", $y))};
    ($y: expr, $($x: expr),*) =>
    {
        color_print!($crate::ansi_term::Colour::Blue, concat!("[DISAS] ", $y), $($x),*);
    };
}
/// Yellow `[BIOS] `-prefixed log line; active only with the `bios_log` feature.
#[cfg(feature="bios_log")]
macro_rules! bios_print
{
    ($y: expr) => {color_print!($crate::ansi_term::Colour::Yellow, concat!("[BIOS] ", $y))};
    ($y: expr, $($x: expr),*) =>
    {
        color_print!($crate::ansi_term::Colour::Yellow, concat!("[BIOS] ", $y), $($x),*);
    };
}
/// No-op stub used when `bios_log` is disabled.
#[cfg(not(feature="bios_log"))]
macro_rules! bios_print {($($x: expr),*) => {()};}
/// Cyan `[STORAGE] `-prefixed log line; gated by the `storage_log` feature.
#[cfg(feature="storage_log")]
macro_rules! storage_print
{
    ($y: expr) => {color_print!($crate::ansi_term::Colour::Cyan, concat!("[STORAGE] ", $y))};
    ($y: expr, $($x: expr),*) =>
    {
        color_print!($crate::ansi_term::Colour::Cyan, concat!("[STORAGE] ", $y), $($x),*);
    };
}
/// No-op stub used when `storage_log` is disabled.
#[cfg(not(feature="storage_log"))]
macro_rules! storage_print {($($x: expr),*) => {()};}
/// Purple `[DEBUG] `-prefixed log line; always compiled (no feature gate).
macro_rules! debug_print
{
    ($y: expr) => {color_print!($crate::ansi_term::Colour::Purple, concat!("[DEBUG] ", $y))};
    ($y: expr, $($x: expr),*) =>
    {
        color_print!($crate::ansi_term::Colour::Purple, concat!("[DEBUG] ", $y), $($x),*);
    };
}
/// `[SERIAL] `-prefixed log line in 256-color palette index 100; gated by the
/// `serial_log` feature.
#[cfg(feature="serial_log")]
macro_rules! serial_print
{
    ($y: expr) => {color_print!($crate::ansi_term::Colour::Fixed(100), concat!("[SERIAL] ", $y))};
    ($y: expr, $($x: expr),*) =>
    {
        color_print!($crate::ansi_term::Colour::Fixed(100), concat!("[SERIAL] ", $y), $($x),*);
    };
}
/// No-op stub used when `serial_log` is disabled.
#[cfg(not(feature="serial_log"))]
macro_rules! serial_print {($($x: expr),*) => {()};}
/// `[MOUSE] `-prefixed log line in palette index 120; gated by `mouse_log`.
#[cfg(feature="mouse_log")]
macro_rules! mouse_print
{
    ($y: expr) => {color_print!($crate::ansi_term::Colour::Fixed(120), concat!("[MOUSE] ", $y))};
    ($y: expr, $($x: expr),*) =>
    {
        color_print!($crate::ansi_term::Colour::Fixed(120), concat!("[MOUSE] ", $y), $($x),*);
    };
}
/// No-op stub used when `mouse_log` is disabled.
#[cfg(not(feature="mouse_log"))]
macro_rules! mouse_print {($($x: expr),*) => {()};}
/// `[KEYBOARD] `-prefixed log line in palette index 140; gated by `keyboard_log`.
#[cfg(feature="keyboard_log")]
macro_rules! keyboard_print
{
    ($y: expr) => {color_print!($crate::ansi_term::Colour::Fixed(140), concat!("[KEYBOARD] ", $y))};
    ($y: expr, $($x: expr),*) =>
    {
        color_print!($crate::ansi_term::Colour::Fixed(140), concat!("[KEYBOARD] ", $y), $($x),*);
    };
}
/// No-op stub used when `keyboard_log` is disabled.
#[cfg(not(feature="keyboard_log"))]
macro_rules! keyboard_print {($($x: expr),*) => {()};}
/// `[DISPLAY] `-prefixed log line in palette index 160; gated by `display_log`.
#[cfg(feature="display_log")]
macro_rules! display_print
{
    ($y: expr) => {color_print!($crate::ansi_term::Colour::Fixed(160), concat!("[DISPLAY] ", $y))};
    ($y: expr, $($x: expr),*) =>
    {
        color_print!($crate::ansi_term::Colour::Fixed(160), concat!("[DISPLAY] ", $y), $($x),*);
    };
}
/// No-op stub used when `display_log` is disabled.
#[cfg(not(feature="display_log"))]
macro_rules! display_print {($($x: expr),*) => {()};}
/// `[MACHINE] `-prefixed log line in palette index 180; gated by `machine_log`.
#[cfg(feature="machine_log")]
macro_rules! machine_print
{
    ($y: expr) => {color_print!($crate::ansi_term::Colour::Fixed(180), concat!("[MACHINE] ", $y))};
    ($y: expr, $($x: expr),*) =>
    {
        color_print!($crate::ansi_term::Colour::Fixed(180), concat!("[MACHINE] ", $y), $($x),*);
    };
}
/// No-op stub used when `machine_log` is disabled.
#[cfg(not(feature="machine_log"))]
macro_rules! machine_print {($($x: expr),*) => {()};}
use validaten::creditcard;
/// Demo binary: prints the classification returned by
/// `creditcard::which_card` for a batch of sample card numbers.
/// NOTE(review): nothing here asserts the expected network per number —
/// this is a printed demo, not a test.
fn main() {
    println!("4844161459546175 => {:?}", creditcard::which_card("4844161459546175"));
    println!("6796265520244 => {:?}", creditcard::which_card("6796265520244"));
    println!("6007221111111110 => {:?}", creditcard::which_card("6007221111111110"));
    println!("5019118545073184 => {:?}", creditcard::which_card("5019118545073184"));
    println!("4035300539804083 => {:?}", creditcard::which_card("4035300539804083"));
    println!("5463113589982388 => {:?}", creditcard::which_card("5463113589982388"));
    println!("370789709084107 => {:?}", creditcard::which_card("370789709084107"));
    println!("3022143741431999 => {:?}", creditcard::which_card("3022143741431999"));
    println!("30043277253245 => {:?}", creditcard::which_card("30043277253245"));
    println!("3860847190349 => {:?}", creditcard::which_card("3860847190349"));
    println!("6011575126600688 => {:?}", creditcard::which_card("6011575126600688"));
    println!("62600094752489242 => {:?}", creditcard::which_card("62600094752489242"));
    println!("3588337499926343 => {:?}", creditcard::which_card("3588337499926343"));
}
/// Operators that can appear in a circuit instruction token stream.
/// (Variant names are SCREAMING_CASE to mirror the input's token spelling.)
#[derive(Debug)]
enum Ops {
    AND,
    OR,
    NOT,
    LSHIFT,
    RSHIFT,
}
/// Splits an instruction line into its expression and destination parts,
/// e.g. `"x AND y -> d"` becomes `["x AND y", "d"]`; a line without the
/// arrow is returned whole as a single element.
///
/// Bug fix: the original replaced `" -> "` with `'|'` on a *local* `String`
/// and returned `&str` slices borrowed from it, which does not compile
/// (E0515: returning a value referencing data owned by the function).
/// Splitting the borrowed input directly yields the same pieces while
/// keeping the `Vec<&str>` signature valid.
fn parse(line: &str) -> Vec<&str> {
    line.split(" -> ").collect()
}
/// Debug helper: splits a line on spaces and `dbg!`-prints a classification
/// of every token (integer literal, single-character wire name, the `->`
/// arrow plus its destination, or a known operator).
///
/// NOTE(review): panics via `unwrap()`/`expect()` on any multi-character
/// token that is not a known operator, and when `->` is the last token —
/// acceptable only as exploratory scaffolding.
fn get_tokens(line: &str) {
    let tokens: Vec<&str> = line.split(' ').collect();
    for (i, token) in tokens.iter().enumerate() {
        if token.parse::<i32>().is_ok() {
            // Numeric literal (parsed twice: once to test, once to read).
            let value = token.parse::<i32>().unwrap();
            dbg!(value);
        } else if token.len() == 1 {
            // Single character: treated as a wire/variable name.
            let var = token.chars().next().unwrap();
            dbg!(var);
        } else if token == &"->" {
            // Arrow: the following token names the destination wire.
            let dest = token;
            let dest_var = &tokens.get(i + 1).expect("no destination variable");
            dbg!(dest, dest_var);
        } else {
            // Anything else must be one of the known operators.
            let op = match token {
                &"AND" => Some(Ops::AND),
                &"OR" => Some(Ops::OR),
                &"NOT" => Some(Ops::NOT),
                &"LSHIFT" => Some(Ops::LSHIFT),
                &"RSHIFT" => Some(Ops::RSHIFT),
                _ => None,
            };
            dbg!(op.unwrap());
        }
    }
}
/// Applies `op` to `a` (and `b` for the binary operators).
///
/// Bug fix: `LSHIFT`/`RSHIFT` had their operators swapped — LSHIFT performed
/// `>>` and RSHIFT performed `<<`. They now shift in the direction their
/// names state.
fn process(a: i32, b: i32, op: Ops) -> i32 {
    match op {
        Ops::AND => a & b,
        Ops::OR => a | b,
        // NOT ignores `b`; bitwise complement over the full i32 width.
        Ops::NOT => !a,
        Ops::LSHIFT => a << b,
        Ops::RSHIFT => a >> b,
    }
}
/// Parses each line of the embedded sample circuit description.
/// The parsed statements are not yet fed into an evaluator, so the binding
/// is underscore-prefixed to silence the unused-variable warning.
fn main() {
    let input = String::from("123 -> x
456 -> y
x AND y -> d
x OR y -> e
x LSHIFT 2 -> f
y RSHIFT 2 -> g
NOT x -> h
NOT y -> i");
    for line in input.lines() {
        let _statements = parse(line);
        // let tokens = get_tokens(line);
    }
}
|
use crate::gc::Gc;
use crate::rerrs::{ErrorKind, SteelErr};
use crate::rvals::{Result, SteelVal};
use crate::stop;
use im_rc::HashMap;
use crate::primitives::ListOperations;
use crate::primitives::VectorOperations;
use crate::primitives::utils::SliceExt;
/// Namespace struct bundling the hash-map primitives exposed to the Steel
/// interpreter; every method returns a `SteelVal::FuncV` built-in.
pub struct HashMapOperations {}
impl HashMapOperations {
    /// Builds a hash map from alternating key/value arguments.
    /// Errors when a key is not hashable or the argument count is odd.
    pub fn hm_construct() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            let mut hm = HashMap::new();
            let mut arg_iter = args.iter().cloned();
            loop {
                // Consume arguments two at a time: (key, value).
                match (arg_iter.next(), arg_iter.next()) {
                    (Some(key), Some(value)) => {
                        if key.is_hashable() {
                            hm.insert(key, value);
                        } else {
                            stop!(TypeMismatch => "hash key not hashable!");
                        }
                    }
                    (None, None) => break,
                    _ => {
                        stop!(ArityMismatch => "hash map must have a value for every key!");
                    }
                }
            }
            Ok(SteelVal::HashMapV(Gc::new(hm)))
        })
    }
    /// Returns a new map with `key` bound to `value`; the input map itself
    /// is unchanged (im-rc maps are persistent; `Gc::unwrap` presumably
    /// yields an owned copy — confirm against `Gc`'s contract).
    pub fn hm_insert() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            if args.len() != 3 {
                stop!(ArityMismatch => "hm insert takes 3 arguments")
            }
            let hashmap = args.get_clone(0);
            let key = args.get_clone(1);
            let value = args.get_clone(2);
            if let SteelVal::HashMapV(hm) = hashmap {
                let mut hm = hm.unwrap();
                if key.is_hashable() {
                    hm.insert(key, value);
                } else {
                    stop!(TypeMismatch => "hash key not hashable!");
                }
                Ok(SteelVal::HashMapV(Gc::new(hm)))
            } else {
                stop!(TypeMismatch => "hm insert takes a hashmap")
            }
        })
    }
    /// Looks up `key`, erroring when it is absent.
    pub fn hm_get() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            if args.len() != 2 {
                stop!(ArityMismatch => "hm get takes 2 arguments")
            }
            let hashmap = &args[0];
            let key = &args[1];
            if let SteelVal::HashMapV(hm) = hashmap {
                match hm.get(key) {
                    Some(v) => Ok(v.clone()),
                    None => stop!(Generic => "hash map key not found!"),
                }
            } else {
                // Bug fix: the message previously named the wrong primitive
                // ("hm-insert takes a hashmap").
                stop!(TypeMismatch => "hm-get takes a hashmap")
            }
        })
    }
    /// Like `hm_get`, but evaluates to `#f` instead of erroring when the
    /// key is missing.
    pub fn hm_try_get() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            if args.len() != 2 {
                stop!(ArityMismatch => "hm get takes 2 arguments")
            }
            let hashmap = &args[0];
            let key = &args[1];
            if let SteelVal::HashMapV(hm) = hashmap {
                match hm.get(key) {
                    Some(v) => Ok(v.clone()),
                    None => Ok(SteelVal::BoolV(false)),
                }
            } else {
                // Bug fix: the message previously named the wrong primitive
                // ("hm-insert takes a hashmap").
                stop!(TypeMismatch => "hm-try-get takes a hashmap")
            }
        })
    }
    /// Number of entries in the map, as a Steel integer.
    pub fn hm_length() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            if args.len() != 1 {
                stop!(ArityMismatch => "hm-length takes 1 argument")
            }
            let hashmap = &args[0];
            if let SteelVal::HashMapV(hm) = hashmap {
                Ok(SteelVal::IntV(hm.len() as isize))
            } else {
                stop!(TypeMismatch => "hm-length takes a hashmap")
            }
        })
    }
    /// Boolean membership test; errors when the key is not hashable.
    pub fn hm_contains() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            if args.len() != 2 {
                // Bug fix: message previously read "hm-contains? get takes 2
                // arguments" (stray "get").
                stop!(ArityMismatch => "hm-contains? takes 2 arguments")
            }
            let hashmap = &args[0];
            let key = &args[1];
            if let SteelVal::HashMapV(hm) = hashmap {
                if key.is_hashable() {
                    // Simplified from an if/else returning BoolV(true)/BoolV(false).
                    Ok(SteelVal::BoolV(hm.contains_key(key)))
                } else {
                    stop!(TypeMismatch => "hash key not hashable!");
                }
            } else {
                stop!(TypeMismatch => "hm-contains? takes a hashmap")
            }
        })
    }
    // keys as list
    /// Collects the map's keys into a Steel list (unspecified order).
    pub fn keys_to_list() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            if args.len() != 1 {
                stop!(ArityMismatch => "hm-keys->list takes 1 argument")
            }
            let hashmap = &args[0];
            if let SteelVal::HashMapV(hm) = hashmap {
                let keys = hm.keys().cloned().collect::<Vec<SteelVal>>();
                ListOperations::built_in_list_func_flat(&keys)
            } else {
                stop!(TypeMismatch => "hm-keys->list takes a hashmap")
            }
        })
    }
    // values as list
    /// Collects the map's values into a Steel list (unspecified order).
    pub fn values_to_list() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            if args.len() != 1 {
                stop!(ArityMismatch => "hm-values->list takes 1 argument")
            }
            let hashmap = &args[0];
            if let SteelVal::HashMapV(hm) = hashmap {
                // Renamed from `keys`: this local holds the *values*.
                let values = hm.values().cloned().collect::<Vec<SteelVal>>();
                ListOperations::built_in_list_func_flat(&values)
            } else {
                stop!(TypeMismatch => "hm-values->list takes a hashmap")
            }
        })
    }
    // keys as vectors
    /// Collects the map's keys into a Steel vector (unspecified order).
    pub fn keys_to_vector() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            if args.len() != 1 {
                stop!(ArityMismatch => "hm-keys->vector takes 1 argument")
            }
            let hashmap = &args[0];
            if let SteelVal::HashMapV(hm) = hashmap {
                VectorOperations::vec_construct_iter_normal(hm.keys().cloned())
            } else {
                stop!(TypeMismatch => "hm-keys->vector takes a hashmap")
            }
        })
    }
    /// Collects the map's values into a Steel vector (unspecified order).
    pub fn values_to_vector() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            if args.len() != 1 {
                stop!(ArityMismatch => "hm-values->vector takes 1 argument")
            }
            let hashmap = &args[0];
            if let SteelVal::HashMapV(hm) = hashmap {
                VectorOperations::vec_construct_iter_normal(hm.values().cloned())
            } else {
                stop!(TypeMismatch => "hm-values->vector takes a hashmap")
            }
        })
    }
    /// Returns an emptied copy of the map (the original value is untouched).
    pub fn clear() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            if args.len() != 1 {
                stop!(ArityMismatch => "hm-clear takes 1 argument")
            }
            let hashmap = &args[0];
            if let SteelVal::HashMapV(hm) = hashmap {
                let mut hm = hm.unwrap();
                hm.clear();
                Ok(SteelVal::HashMapV(Gc::new(hm)))
            } else {
                stop!(TypeMismatch => "hm-clear takes a hashmap")
            }
        })
    }
    /// Predicate: true when the map has no entries.
    pub fn hm_empty() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            if args.len() != 1 {
                stop!(ArityMismatch => "hash-empty? takes 1 argument")
            }
            let hashmap = &args[0];
            if let SteelVal::HashMapV(hm) = hashmap {
                Ok(SteelVal::BoolV(hm.is_empty()))
            } else {
                stop!(TypeMismatch => "hash-empty? takes a hashmap")
            }
        })
    }
}
#[cfg(test)]
mod hashmap_tests {
    use super::*;
    use crate::throw;
    use im_rc::hashmap;
    use crate::rvals::SteelVal::*;
    /// Unwraps a built-in's function pointer and applies it to `args`.
    /// (A redundant `args.into_iter().collect()` round-trip was removed —
    /// `args` is already a `Vec<SteelVal>`.)
    fn apply_function(func: SteelVal, args: Vec<SteelVal>) -> Result<SteelVal> {
        func.func_or_else(throw!(BadSyntax => "hash tests"))
            .unwrap()(&args)
    }
    #[test]
    fn hm_construct_normal() {
        let args = vec![
            StringV("foo".into()),
            StringV("bar".into()),
            StringV("foo2".into()),
            StringV("bar2".into()),
        ];
        let res = apply_function(HashMapOperations::hm_construct(), args);
        let expected = SteelVal::HashMapV(Gc::new(hashmap! {
            StringV("foo".into()) => StringV("bar".into()),
            StringV("foo2".into()) => StringV("bar2".into())
        }));
        assert_eq!(res.unwrap(), expected);
    }
    #[test]
    fn hm_construct_with_duplicates() {
        let args = vec![
            StringV("foo".into()),
            StringV("bar".into()),
            StringV("foo2".into()),
            StringV("bar2".into()),
            StringV("foo".into()),
            StringV("bar".into()),
            StringV("foo2".into()),
            StringV("bar2".into()),
        ];
        let res = apply_function(HashMapOperations::hm_construct(), args);
        let expected = SteelVal::HashMapV(Gc::new(hashmap! {
            StringV("foo".into()) => StringV("bar".into()),
            StringV("foo2".into()) => StringV("bar2".into())
        }));
        assert_eq!(res.unwrap(), expected);
    }
    #[test]
    fn hm_insert_from_empty() {
        let args = vec![
            HashMapV(Gc::new(hashmap![])),
            StringV("foo".into()),
            StringV("bar".into()),
        ];
        let res = apply_function(HashMapOperations::hm_insert(), args);
        let expected = SteelVal::HashMapV(Gc::new(hashmap! {
            StringV("foo".into()) => StringV("bar".into())
        }));
        assert_eq!(res.unwrap(), expected);
    }
    #[test]
    fn hm_get_found() {
        let args = vec![
            HashMapV(Gc::new(hashmap! {
                StringV("foo".into()) => StringV("bar".into())
            })),
            StringV("foo".into()),
        ];
        let res = apply_function(HashMapOperations::hm_get(), args);
        let expected = StringV("bar".into());
        assert_eq!(res.unwrap(), expected);
    }
    #[test]
    fn hm_get_error() {
        let args = vec![
            HashMapV(Gc::new(hashmap! {
                StringV("foo".into()) => StringV("bar".into())
            })),
            StringV("garbage".into()),
        ];
        let res = apply_function(HashMapOperations::hm_get(), args);
        assert!(res.is_err());
    }
    #[test]
    fn hm_try_get_found() {
        let args = vec![
            HashMapV(Gc::new(hashmap! {
                StringV("foo".into()) => StringV("bar".into())
            })),
            StringV("foo".into()),
        ];
        let res = apply_function(HashMapOperations::hm_try_get(), args);
        let expected = StringV("bar".into());
        assert_eq!(res.unwrap(), expected);
    }
    #[test]
    fn hm_try_get_error() {
        let args = vec![
            HashMapV(Gc::new(hashmap! {
                StringV("foo".into()) => StringV("bar".into())
            })),
            StringV("garbage".into()),
        ];
        // Bug fix: this test previously invoked `hm_contains` instead of
        // `hm_try_get`, leaving try-get's miss path untested.
        let res = apply_function(HashMapOperations::hm_try_get(), args);
        let expected = SteelVal::BoolV(false);
        assert_eq!(res.unwrap(), expected);
    }
    #[test]
    fn hm_contains_true() {
        let args = vec![
            HashMapV(Gc::new(hashmap! {
                StringV("foo".into()) => StringV("bar".into())
            })),
            StringV("foo".into()),
        ];
        let res = apply_function(HashMapOperations::hm_contains(), args);
        let expected = SteelVal::BoolV(true);
        assert_eq!(res.unwrap(), expected);
    }
    #[test]
    fn hm_contains_false() {
        let args = vec![
            HashMapV(Gc::new(hashmap! {
                StringV("foo".into()) => StringV("bar".into())
            })),
            StringV("bar".into()),
        ];
        let res = apply_function(HashMapOperations::hm_contains(), args);
        let expected = SteelVal::BoolV(false);
        assert_eq!(res.unwrap(), expected);
    }
    #[test]
    fn hm_keys_to_vector_normal() {
        let args = vec![HashMapV(Gc::new(hashmap! {
            StringV("foo".into()) => StringV("bar".into()),
            StringV("bar".into()) => StringV("baz".into()),
            StringV("baz".into()) => StringV("quux".into())
        }))];
        let res = apply_function(HashMapOperations::keys_to_vector(), args);
        let expected = SteelVal::VectorV(Gc::new(
            vec![
                SteelVal::StringV("foo".into()),
                SteelVal::StringV("bar".into()),
                SteelVal::StringV("baz".into()),
            ]
            .into_iter()
            .collect(),
        ));
        // Key order is unspecified, so pull out the strings and compare the
        // sorted vectors instead of the raw results.
        let mut res_vec_string: Vec<Gc<String>> = if let SteelVal::VectorV(v) = res.unwrap() {
            v.iter()
                .map(|x| {
                    if let SteelVal::StringV(ref s) = x {
                        s.clone()
                    } else {
                        panic!("test failed")
                    }
                })
                .collect()
        } else {
            panic!("test failed")
        };
        let mut expected_vec_string: Vec<Gc<String>> = if let SteelVal::VectorV(v) = expected {
            v.iter()
                .map(|x| {
                    if let SteelVal::StringV(ref s) = x {
                        s.clone()
                    } else {
                        panic!("test failed")
                    }
                })
                .collect()
        } else {
            panic!("test failed")
        };
        res_vec_string.sort();
        expected_vec_string.sort();
        assert_eq!(res_vec_string, expected_vec_string);
    }
    #[test]
    fn hm_values_to_vector_normal() {
        let args = vec![HashMapV(Gc::new(hashmap! {
            StringV("foo".into()) => StringV("bar".into()),
            StringV("bar".into()) => StringV("baz".into()),
            StringV("baz".into()) => StringV("quux".into())
        }))];
        let res = apply_function(HashMapOperations::values_to_vector(), args);
        let expected = SteelVal::VectorV(Gc::new(
            vec![
                SteelVal::StringV("bar".into()),
                SteelVal::StringV("baz".into()),
                SteelVal::StringV("quux".into()),
            ]
            .into_iter()
            .collect(),
        ));
        // Value order is unspecified; compare sorted string vectors.
        let mut res_vec_string: Vec<Gc<String>> = if let SteelVal::VectorV(v) = res.unwrap() {
            v.iter()
                .map(|x| {
                    if let SteelVal::StringV(ref s) = x {
                        s.clone()
                    } else {
                        panic!("test failed")
                    }
                })
                .collect()
        } else {
            panic!("test failed")
        };
        let mut expected_vec_string: Vec<Gc<String>> = if let SteelVal::VectorV(v) = expected {
            v.iter()
                .map(|x| {
                    if let SteelVal::StringV(ref s) = x {
                        s.clone()
                    } else {
                        panic!("test failed")
                    }
                })
                .collect()
        } else {
            panic!("test failed")
        };
        res_vec_string.sort();
        expected_vec_string.sort();
        assert_eq!(res_vec_string, expected_vec_string);
    }
}
|
extern crate time;
mod virtual_machine;
mod opcodes;
mod assembler;
mod disassemble;
mod result;
mod mem_iterator;
pub mod hardware;
pub use virtual_machine::*;
pub use opcodes::*;
pub use assembler::*;
pub use disassemble::*;
pub use result::*;
|
use std::fmt;
use std::cell::RefCell;
use std::ops::Deref;
use std::rc::Rc;
use std::sync;
use std::collections::HashMap;
use lru_cache::LruCache;
use crate::object;
use crate::transaction;
pub mod mock;
pub mod frontend;
pub mod backend;
pub mod manager;
pub mod simple_cache;
/// Failure modes for a read request against a data store.
#[derive(Debug, Clone, Copy)]
pub enum ReadError {
    StoreNotFound,
    ObjectNotFound
}

impl fmt::Display for ReadError {
    /// Renders the bare variant name (identical to the `Debug` output).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let label = match self {
            ReadError::StoreNotFound => "StoreNotFound",
            ReadError::ObjectNotFound => "ObjectNotFound",
        };
        f.write_str(label)
    }
}
/// Failure modes when committing object state to a data store.
#[derive(Debug, Clone, Copy)]
pub enum CommitError {
    InvalidPointer
}

impl fmt::Display for CommitError {
    /// Renders the bare variant name (identical to the `Debug` output).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let label = match self {
            CommitError::InvalidPointer => "InvalidPointer",
        };
        f.write_str(label)
    }
}
/// Failure modes when allocating space in a data store.
#[derive(Debug, Clone, Copy)]
pub enum AllocationError {
    NoSpace
}

impl fmt::Display for AllocationError {
    /// Renders the bare variant name (identical to the `Debug` output).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let label = match self {
            AllocationError::NoSpace => "NoSpace",
        };
        f.write_str(label)
    }
}
/// Uniquely identifies a data store
///
/// StoreIds are composed of the pool UUID to which the store belongs and the index of that
/// store within the pool
#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
pub struct Id {
    /// UUID of the storage pool this store belongs to
    pub pool_uuid: uuid::Uuid,
    /// Index of this store within the pool
    pub pool_index: u8
}
impl fmt::Display for Id {
    /// Renders as `StoreId(<pool uuid>, <pool index>)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "StoreId({}, {})", self.pool_uuid, self.pool_index)
    }
}
/// Optional component of an ObjectPointer that may be used to assist with locating an object
/// slice within a DataStore. For example, a flat-file store with fixed segment sizes could encode
/// the segment offset within a StorePointer
///
/// This wraps a Bytes instance to take advantage of both the API the bytes crate provides as well
/// as the support for inline embedding of small data within the bytes instance rather than always
/// allocating on the heap as a Vec<u8> would.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Pointer {
    /// No locating hint; carries only the owning store's index.
    None {
        pool_index: u8
    },
    /// Hint small enough to embed inline; only the first `nbytes` bytes of
    /// `content` are meaningful, the rest are zero.
    Short {
        pool_index: u8,
        nbytes: u8,
        content: [u8; 22]
    },
    /// Heap-allocated hint for content of 22 bytes or more.
    Long {
        pool_index: u8,
        content: Vec<u8>
    }
}
impl Pointer {
    /// Builds the most compact variant capable of holding `content`.
    ///
    /// Bug fix: the original constructed a zeroed `Short` first, then did
    /// `if let Pointer::Short { mut content, .. } = s` — `[u8; 22]` is
    /// `Copy`, so that destructured a *copy* of the array, mutated the copy
    /// and discarded it, returning an all-zero pointer. Worse,
    /// `copy_from_slice` was called with mismatched lengths (`c.len()` vs
    /// 22), which panics. Filling a prefix of a local buffer before
    /// constructing the variant fixes both.
    pub fn new(pool_index: u8, content: Option<&[u8]>) -> Pointer {
        match content {
            Some(c) => {
                if c.len() < 22 {
                    let mut buf = [0u8; 22];
                    buf[..c.len()].copy_from_slice(c);
                    Pointer::Short {
                        pool_index,
                        nbytes: c.len() as u8,
                        content: buf
                    }
                } else {
                    Pointer::Long {
                        pool_index,
                        content: c.to_vec()
                    }
                }
            }
            None => Pointer::None { pool_index }
        }
    }
    /// Serialized size in bytes: the content plus one extra byte.
    pub fn encoded_len(&self) -> usize {
        self.content_len() + 1
    }
    /// Length in bytes of the meaningful hint content.
    pub fn content_len(&self) -> usize {
        match self {
            Pointer::None{..} => 0,
            Pointer::Short{nbytes, ..} => *nbytes as usize,
            Pointer::Long{content, ..} => content.len()
        }
    }
    /// Index of the store this pointer refers to, for any variant.
    pub fn pool_index(&self) -> u8 {
        match self {
            Pointer::None{pool_index, ..} => *pool_index,
            Pointer::Short{pool_index, ..} => *pool_index,
            Pointer::Long{pool_index, ..} => *pool_index
        }
    }
}
impl fmt::Display for Pointer {
    /// Human-readable summary: variant, store index, content length, and a
    /// quick hash of the content (keeps logs short for long pointers).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Pointer::None{pool_index} => write!(f, "NoPointer(store:{})", pool_index),
            // Only the first `nbytes` bytes of the inline buffer are hashed.
            Pointer::Short{pool_index, nbytes, content} => write!(f, "ShortPointer(store:{}, len:{}, hash:{})", pool_index, nbytes,
                crate::util::quick_hash(&content[0 .. *nbytes as usize])),
            Pointer::Long{pool_index, content} => write!(f, "LongPointer(store:{}, len:{}, hash:{})", pool_index, content.len(),
                crate::util::quick_hash(&content))
        }?;
        Ok(())
    }
}
/// Pair of the object Id and optional store pointer
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Locater {
    /// Identity of the object being located.
    pub object_id: object::Id,
    /// Store-specific locating hint (may be `Pointer::None`).
    pub pointer: Pointer
}
/// Newtype wrapper around a CRC32 checksum value.
#[derive(Debug, Clone, Copy)]
pub struct Crc32(pub u32);

impl fmt::Display for Crc32 {
    /// Renders as `Crc32(<value>)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let rendered = format!("Crc32({})", self.0);
        f.write_str(&rendered)
    }
}
/// Represents the current state of an object
#[derive(Debug)]
pub struct State {
    pub id: object::Id,
    // Store-specific hint for locating this object's data.
    pub store_pointer: Pointer,
    pub metadata: object::Metadata,
    pub object_kind: object::Kind,
    /// Used to track the number of references currently working on this object
    /// the object is not allowed to exit the cache until this number drops to
    /// zero. The TxStateRef smart-pointer wrapper is used to increment/decrement
    /// this value.
    pub transaction_references: u32,
    // NOTE(review): presumably restricts modification to the named
    // transaction while set — confirm against the transaction manager.
    pub locked_to_transaction: Option<transaction::Id>,
    // Object payload; Arc so readers can share it without copying.
    pub data: sync::Arc<Vec<u8>>,
    pub max_size: Option<u32>,
    // Present only for KV-kind objects — TODO confirm.
    pub kv_state: Option<Box<object::KVObjectState>>,
}
impl State {
    /// Mutable access to the optional KV state, when present.
    pub fn kv_state(&mut self) -> Option<&mut Box<object::KVObjectState>> {
        self.kv_state.as_mut()
    }
    /// Snapshot of the fields the backend needs to persist this object.
    pub fn commit_state(&self) -> backend::CommitState {
        backend::CommitState {
            id: self.id,
            store_pointer: self.store_pointer.clone(),
            metadata: self.metadata.clone(),
            object_kind: self.object_kind,
            // Cheap: Arc clone bumps the refcount, no data copy.
            data: self.data.clone()
        }
    }
}
/// Smart pointer for State objects that increment/decrement the state's
/// transaction_references attribute when they are created/deleted. Each
/// transaction holding a reference to the object will do so through
/// an instance of this class
pub struct TxStateRef {
    state: Rc<RefCell<State>>
}
impl TxStateRef {
    /// Takes a counted reference to `state`, bumping `transaction_references`.
    pub fn new(state: &Rc<RefCell<State>>) -> TxStateRef {
        state.borrow_mut().transaction_references += 1;
        TxStateRef {
            state: state.clone()
        }
    }
}
impl Drop for TxStateRef {
    /// Releases the count taken in `new`.
    fn drop(&mut self) {
        self.state.borrow_mut().transaction_references -= 1;
    }
}
impl Deref for TxStateRef {
    type Target = Rc<RefCell<State>>;
    /// Transparent access to the wrapped `Rc`.
    fn deref(&self) -> &Rc<RefCell<State>> {
        &self.state
    }
}
/// Public interface for object cache implementations
pub trait ObjectCache {
    /// Clears the cache. Primarily intended for testing
    fn clear(&mut self);
    /// Looks up an object's cached state. Takes `&mut self` so
    /// implementations may update bookkeeping (e.g. LRU recency).
    fn get(&mut self, object_id: &object::Id) -> Option<&Rc<RefCell<State>>>;
    /// Inserts the given State object and optionally displaces one from
    /// the cache
    fn insert(&mut self, state: Rc<RefCell<State>>) -> Option<Rc<RefCell<State>>>;
    /// Used only for aborted allocations
    fn remove(&mut self, object_id: &object::Id);
}
/// `ObjectCache` backed by a plain `HashMap`; never evicts entries.
pub struct UnboundedObjectCache {
    cache: HashMap<object::Id, Rc<RefCell<State>>>
}
impl UnboundedObjectCache {
    /// Boxed constructor matching the trait-object API used by callers.
    fn new() -> Box<dyn ObjectCache> {
        Box::new( UnboundedObjectCache { cache: HashMap::new() } )
    }
}
impl ObjectCache for UnboundedObjectCache {
    fn clear(&mut self) {
        self.cache.clear();
    }
    fn get(&mut self, object_id: &object::Id) -> Option<&Rc<RefCell<State>>> {
        self.cache.get(object_id)
    }
    fn insert(&mut self, state: Rc<RefCell<State>>) -> Option<Rc<RefCell<State>>> {
        // The key is read out of the state itself; any displaced entry is
        // returned to the caller.
        self.cache.insert(state.borrow().id, state.clone())
    }
    fn remove(&mut self, object_id: &object::Id) {
        self.cache.remove(object_id);
    }
}
/// `ObjectCache` with bounded capacity and LRU eviction.
pub struct LruObjectCache {
    cache: LruCache<object::Id, Rc<RefCell<State>>>
}
impl LruObjectCache {
    /// Boxed constructor; `size` is the maximum number of cached entries.
    fn new(size: usize) -> Box<dyn ObjectCache> {
        Box::new(LruObjectCache{
            cache: LruCache::new(size)
        })
    }
}
impl ObjectCache for LruObjectCache {
    fn clear(&mut self) {
        self.cache.clear();
    }
    fn get(&mut self, object_id: &object::Id) -> Option<&Rc<RefCell<State>>> {
        // get_mut is used (presumably to refresh the entry's recency —
        // confirm against the lru_cache crate); the match only downgrades
        // the &mut borrow to the shared borrow the trait returns.
        match self.cache.get_mut(object_id) {
            None => None,
            Some(r) => Some(r)
        }
    }
    fn insert(&mut self, state: Rc<RefCell<State>>) -> Option<Rc<RefCell<State>>> {
        self.cache.insert(state.borrow().id, state.clone())
    }
    fn remove(&mut self, object_id: &object::Id) {
        self.cache.remove(object_id);
    }
}
/// Result of a successful object read: identity, metadata, kind and payload.
/// `Clone` is cheap for the payload (`Arc` refcount bump).
#[derive(Debug, Clone)]
pub struct ReadState {
    pub id: object::Id,
    pub metadata: object::Metadata,
    pub object_kind: object::Kind,
    pub data: sync::Arc<Vec<u8>>,
}
|
use std::net::{TcpListener, TcpStream};
use std::thread;
use server::connection::Connection;
/// Simple blocking TCP server used for testing: accepts connections and
/// hands each one to a `Connection` on its own thread.
pub struct TestServer {
    listener: TcpListener,
    // Address/port retained only for the startup log message.
    listen_address: String,
    listen_port: u16
}
impl TestServer {
pub fn new(port: u16, address: &str) -> TestServer {
TestServer {
listener: TcpListener::bind((address as &str, port)).unwrap(),
listen_address: address.to_string(),
listen_port: port
}
}
pub fn listen(self) {
info!("Listening on host: {} port {}", self.listen_address, self.listen_port);
for stream in self.listener.incoming() {
self.new_connection(stream.unwrap());
}
}
fn new_connection(&self, stream: TcpStream) {
match stream.peer_addr() {
Ok(addr) => {
info!("Incoming connection from {}", addr);
let addr_ = addr.clone();
thread::spawn(move || {
let mut con = Connection::new(stream);
match con.handle() {
Ok(_) => info!("Connection from {} closed", addr_),
Err(x) => error!("Error while reading from connection from {}: {}", addr_, x)
};
});
},
Err(x) => {
error!("Could not retrieve peer address: {}", x)
}
}
}
} |
/// Demonstrates extracting the first word of a string.
///
/// Bug fix: the original kept a `&str` borrow of `s` alive across
/// `s.clear()`, which fails the borrow checker (E0502), and then sliced
/// `&s[0..2]` on the emptied string, which would panic at runtime. The
/// first word is now copied into its own `String` before mutation, and the
/// slice uses the checked `get` accessor.
fn main() {
    let mut s = String::from("Hello, World");
    // Own the word so the borrow of `s` ends here.
    let first_word = get_first_word(&s).to_string();
    println!("{}", first_word);
    s.clear();
    // `s` is empty now; `get(0..2)` returns None instead of panicking.
    println!("{}", s.get(0..2).unwrap_or(""));
    println!("{}", first_word);
}
/// Returns the prefix of `s` up to (not including) the first space, or the
/// whole string when it contains no space.
///
/// Improvements: the parameter is `&str` instead of `&String` (the call
/// site `get_first_word(&s)` still works via deref coercion, so this is
/// backward compatible), the binding no longer shadows the primitive type
/// name `str`, and the byte loop is replaced by `str::find`.
fn get_first_word(s: &str) -> &str {
    match s.find(' ') {
        Some(idx) => &s[..idx],
        None => s,
    }
}
|
/// Chains up to five `Option<Ordering>` comparisons: the result is the first
/// comparison that is not `Some(Equal)`; later tie-breakers are reached (and
/// evaluated — the match arms keep them lazy) only on equality.
#[macro_export]
macro_rules! chain_cmp {
    ($cmp:expr) => {
        $cmp
    };
    // Two-expression base case: fall through to `$res` only on `Some(Equal)`.
    ($cmp:expr, $res:expr) => {{
        use std::cmp::Ordering::Equal;
        match $cmp {
            Some(Equal) => $res,
            e => e,
        }
    }};
    // 3–5 expressions: reassociate into nested two-expression invocations.
    ($cmp:expr, $res:expr, $resb:expr) => {
        chain_cmp!($cmp, chain_cmp!($res, $resb))
    };
    ($cmp:expr, $res:expr, $resb:expr, $resc:expr) => {
        chain_cmp!($cmp, $res, chain_cmp!($resb, $resc))
    };
    ($cmp:expr, $res:expr, $resb:expr, $resc:expr, $resd:expr) => {
        chain_cmp!($cmp, $res, chain_cmp!($resb, $resc, $resd))
    };
}
|
use async_trait::async_trait;
use common::result::Result;
use crate::domain::catalogue::Collection;
/// Async lookup service for catalogue collections (`async_trait` makes the
/// async method object-safe; `Sync + Send` allows sharing across threads).
#[async_trait]
pub trait CollectionService: Sync + Send {
    /// Fetches a collection by its identifier.
    async fn get_by_id(&self, id: &str) -> Result<Collection>;
}
|
pub mod dummy;
pub mod flowanalysis;
/// Errors a subcommand can report while validating its inputs.
#[derive(Clone, Debug, PartialEq)]
pub enum SubCommandError {
    /// A user-supplied regular expression failed to compile.
    RegularExpressionInvalid,
    /// The configured input folder is missing on disk.
    InputFolderDoesNotExist,
}
/// Common interface for CLI subcommands.
pub trait SubCommand {
    /// Runs the subcommand; returns `true` on success.
    fn execute(&self) -> bool;
}
// NOTE(review): svd2rust-style generated register API for APB1RSTR2
// (peripheral reset register); do not hand-edit in the real project.
#[doc = "Register `APB1RSTR2` reader"]
pub type R = crate::R<APB1RSTR2_SPEC>;
#[doc = "Register `APB1RSTR2` writer"]
pub type W = crate::W<APB1RSTR2_SPEC>;
#[doc = "Field `LPUART1RST` reader - Low-power UART 1 reset"]
pub type LPUART1RST_R = crate::BitReader<LPUART1RST_A>;
#[doc = "Low-power UART 1 reset\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LPUART1RST_A {
    #[doc = "1: Reset the selected module"]
    Reset = 1,
}
// Only the set state (1) has a variant; the cleared state is unnamed.
impl From<LPUART1RST_A> for bool {
    #[inline(always)]
    fn from(variant: LPUART1RST_A) -> Self {
        variant as u8 != 0
    }
}
impl LPUART1RST_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<LPUART1RST_A> {
        // `None` represents the cleared bit, which has no enum variant.
        match self.bits {
            true => Some(LPUART1RST_A::Reset),
            _ => None,
        }
    }
    #[doc = "Reset the selected module"]
    #[inline(always)]
    pub fn is_reset(&self) -> bool {
        *self == LPUART1RST_A::Reset
    }
}
#[doc = "Field `LPUART1RST` writer - Low-power UART 1 reset"]
pub type LPUART1RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, LPUART1RST_A>;
// Convenience writer: `reset()` sets the field to its only enumerated value.
impl<'a, REG, const O: u8> LPUART1RST_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Reset the selected module"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut crate::W<REG> {
        self.variant(LPUART1RST_A::Reset)
    }
}
#[doc = "Field `I2C4RST` reader - I2C4 reset"]
pub use LPUART1RST_R as I2C4RST_R;
#[doc = "Field `LPTIM2RST` reader - Low-power timer 2 reset"]
pub use LPUART1RST_R as LPTIM2RST_R;
#[doc = "Field `LPTIM3RST` reader - LPTIM3RST"]
pub use LPUART1RST_R as LPTIM3RST_R;
#[doc = "Field `FDCAN1RST` reader - FDCAN1RST"]
pub use LPUART1RST_R as FDCAN1RST_R;
#[doc = "Field `USBFSRST` reader - USBFSRST"]
pub use LPUART1RST_R as USBFSRST_R;
#[doc = "Field `UCPD1RST` reader - UCPD1RST"]
pub use LPUART1RST_R as UCPD1RST_R;
#[doc = "Field `I2C4RST` writer - I2C4 reset"]
pub use LPUART1RST_W as I2C4RST_W;
#[doc = "Field `LPTIM2RST` writer - Low-power timer 2 reset"]
pub use LPUART1RST_W as LPTIM2RST_W;
#[doc = "Field `LPTIM3RST` writer - LPTIM3RST"]
pub use LPUART1RST_W as LPTIM3RST_W;
#[doc = "Field `FDCAN1RST` writer - FDCAN1RST"]
pub use LPUART1RST_W as FDCAN1RST_W;
#[doc = "Field `USBFSRST` writer - USBFSRST"]
pub use LPUART1RST_W as USBFSRST_W;
#[doc = "Field `UCPD1RST` writer - UCPD1RST"]
pub use LPUART1RST_W as UCPD1RST_W;
impl R {
    // Field accessors: each tests one bit of the raw register value.
    /// Bit 0 - Low-power UART 1 reset
    #[inline(always)]
    pub fn lpuart1rst(&self) -> LPUART1RST_R {
        LPUART1RST_R::new(self.bits & (1 << 0) != 0)
    }
    /// Bit 1 - I2C4 reset
    #[inline(always)]
    pub fn i2c4rst(&self) -> I2C4RST_R {
        I2C4RST_R::new(self.bits & (1 << 1) != 0)
    }
    /// Bit 5 - Low-power timer 2 reset
    #[inline(always)]
    pub fn lptim2rst(&self) -> LPTIM2RST_R {
        LPTIM2RST_R::new(self.bits & (1 << 5) != 0)
    }
    /// Bit 6 - LPTIM3RST
    #[inline(always)]
    pub fn lptim3rst(&self) -> LPTIM3RST_R {
        LPTIM3RST_R::new(self.bits & (1 << 6) != 0)
    }
    /// Bit 9 - FDCAN1RST
    #[inline(always)]
    pub fn fdcan1rst(&self) -> FDCAN1RST_R {
        FDCAN1RST_R::new(self.bits & (1 << 9) != 0)
    }
    /// Bit 21 - USBFSRST
    #[inline(always)]
    pub fn usbfsrst(&self) -> USBFSRST_R {
        USBFSRST_R::new(self.bits & (1 << 21) != 0)
    }
    /// Bit 23 - UCPD1RST
    #[inline(always)]
    pub fn ucpd1rst(&self) -> UCPD1RST_R {
        UCPD1RST_R::new(self.bits & (1 << 23) != 0)
    }
}
impl W {
    // Each method returns a field-writer proxy whose const generic pins it to
    // that field's bit offset in APB1RSTR2; the proxy mutates `self.bits`.
    #[doc = "Bit 0 - Low-power UART 1 reset"]
    #[inline(always)]
    #[must_use]
    pub fn lpuart1rst(&mut self) -> LPUART1RST_W<APB1RSTR2_SPEC, 0> {
        LPUART1RST_W::new(self)
    }
    #[doc = "Bit 1 - I2C4 reset"]
    #[inline(always)]
    #[must_use]
    pub fn i2c4rst(&mut self) -> I2C4RST_W<APB1RSTR2_SPEC, 1> {
        I2C4RST_W::new(self)
    }
    #[doc = "Bit 5 - Low-power timer 2 reset"]
    #[inline(always)]
    #[must_use]
    pub fn lptim2rst(&mut self) -> LPTIM2RST_W<APB1RSTR2_SPEC, 5> {
        LPTIM2RST_W::new(self)
    }
    #[doc = "Bit 6 - LPTIM3RST"]
    #[inline(always)]
    #[must_use]
    pub fn lptim3rst(&mut self) -> LPTIM3RST_W<APB1RSTR2_SPEC, 6> {
        LPTIM3RST_W::new(self)
    }
    #[doc = "Bit 9 - FDCAN1RST"]
    #[inline(always)]
    #[must_use]
    pub fn fdcan1rst(&mut self) -> FDCAN1RST_W<APB1RSTR2_SPEC, 9> {
        FDCAN1RST_W::new(self)
    }
    #[doc = "Bit 21 - USBFSRST"]
    #[inline(always)]
    #[must_use]
    pub fn usbfsrst(&mut self) -> USBFSRST_W<APB1RSTR2_SPEC, 21> {
        USBFSRST_W::new(self)
    }
    #[doc = "Bit 23 - UCPD1RST"]
    #[inline(always)]
    #[must_use]
    pub fn ucpd1rst(&mut self) -> UCPD1RST_W<APB1RSTR2_SPEC, 23> {
        UCPD1RST_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe escape hatch: bypasses the field proxies, so it can also set
    // reserved bits of the register.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "APB1 peripheral reset register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb1rstr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb1rstr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Zero-sized marker type carrying the register's trait-level metadata.
pub struct APB1RSTR2_SPEC;
impl crate::RegisterSpec for APB1RSTR2_SPEC {
    // The register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`apb1rstr2::R`](R) reader structure"]
impl crate::Readable for APB1RSTR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`apb1rstr2::W`](W) writer structure"]
impl crate::Writable for APB1RSTR2_SPEC {
    // No write-0-to-clear / write-1-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets APB1RSTR2 to value 0"]
impl crate::Resettable for APB1RSTR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// https://doc.rust-lang.org/std/vec/struct.Vec.html#method.reverse
use std::iter;
use std::char;
/// Reads `n` integers, sorts them in descending order, and prints the
/// difference between the sum of even-indexed values ("alice") and the sum
/// of odd-indexed values ("bob") in that order.
fn main() {
    proconio::input! {
        n: usize,
        mut a: [i32; n],
    }
    // Descending order: sort ascending, then reverse.
    a.sort();
    a.reverse();
    let a = a;
    // `step_by(2)` replaces the original enumerate/filter/map chains, which
    // left unused closure bindings (`x` in filter, `i` in map) and produced
    // compiler warnings. Behavior is unchanged.
    let alice_sum: i32 = a.iter().step_by(2).sum();
    let bob_sum: i32 = a.iter().skip(1).step_by(2).sum();
    println!("{}", alice_sum - bob_sum);
}
|
//! Online[<sup>1</sup>] variant of the EAX mode.
//!
//! # Authentication
//! Due to *AE* (authenticated encryption) nature of EAX, it is vital to verify
//! that both public (also called *associated*) and privacy-protected
//! (encrypted) data has not been tampered with.
//!
//! Because of this, it is required for the consumers to explicitly call
//! [`finish`] after the encryption/decryption operation is complete.
//! This will either return a *tag* (when encrypting) used to authenticate data
//! or a `Result` (when decrypting) that signifies whether the data is authentic,
//! which is when the resulting tag is equal to the one created during encryption.
//!
//! ## Example
//! ```
//! use eax::{Error, online::{Eax, Decrypt, Encrypt}, cipher::generic_array::GenericArray};
//! use aes::Aes256;
//!
//! let key = GenericArray::from_slice(b"an example very very secret key.");
//! let nonce = GenericArray::from_slice(b"my unique nonces"); // 128-bits; unique per message
//! let assoc = b"my associated data";
//! let plaintext = b"plaintext message";
//! let mut buffer: [u8; 17] = *plaintext;
//!
//!// Encrypt a simple message
//! let mut cipher = Eax::<Aes256, Encrypt>::with_key_and_nonce(key, nonce);
//! cipher.update_assoc(&assoc[..]);
//! cipher.encrypt(&mut buffer[..9]);
//! cipher.encrypt(&mut buffer[9..]);
//! let tag = cipher.finish();
//!
//! assert_ne!(buffer, *plaintext);
//!
//! let mut cloned = buffer;
//!
//! // Now decrypt it, using the same key and nonce
//! let mut cipher = Eax::<Aes256, Decrypt>::with_key_and_nonce(key, nonce);
//! cipher.update_assoc(&assoc[..]);
//! cipher.decrypt_unauthenticated_hazmat(&mut buffer[..5]);
//! cipher.decrypt_unauthenticated_hazmat(&mut buffer[5..10]);
//! cipher.decrypt_unauthenticated_hazmat(&mut buffer[10..]);
//! let res = cipher.finish(&tag);
//!
//! assert_eq!(res, Ok(()));
//! assert_eq!(buffer, *plaintext);
//!
//! // Decrypting the ciphertext with tampered associated data should fail
//! let mut cipher = Eax::<Aes256, Decrypt>::with_key_and_nonce(key, nonce);
//! cipher.update_assoc(b"tampered");
//! cipher.decrypt_unauthenticated_hazmat(&mut cloned);
//! let res = cipher.finish(&tag);
//!
//! assert_eq!(res, Err(Error));
//! ```
//! [<sup>1</sup>]: https://en.wikipedia.org/wiki/Online_algorithm
//! [`Eax`]: struct.Eax.html
//! [`Decrypt`]: struct.Decrypt.html
//! [`finish`]: #method.finish
use crate::{Cmac, Error, Nonce, Tag, TagSize};
use aead::consts::U16;
use cipher::{
generic_array::functional::FunctionalSequence, BlockCipher, BlockEncrypt, Key, KeyInit,
KeyIvInit, StreamCipher,
};
use cmac::Mac;
use core::marker::PhantomData;
pub use Eax as EaxOnline;
/// Marker trait denoting whether the EAX stream is used for encryption/decryption.
// Type-state pattern: `Eax<_, Op, _>` is parameterized over these markers so
// that `encrypt` only exists on `Eax<_, Encrypt, _>` and
// `decrypt_unauthenticated_hazmat`/tag verification only on `Eax<_, Decrypt, _>`.
pub trait CipherOp {}
/// Marker struct for EAX stream used in encryption mode.
pub struct Encrypt;
impl CipherOp for Encrypt {}
/// Marker struct for EAX stream used in decryption mode.
pub struct Decrypt;
impl CipherOp for Decrypt {}
/// Online[<sup>1</sup>] variant of the EAX mode.
///
/// This type is generic to support substituting alternative cipher
/// implementations.
///
/// In contrast to [`Eax`], can be used in an online[<sup>1</sup>] fashion and
/// operates in-place.
///
/// # Authentication
/// Due to *AE* (authenticated encryption) nature of EAX, it is vital to verify
/// that both public (also called *associated*) and privacy-protected
/// (encrypted) data has not been tampered with.
///
/// Because of this, it is required for the consumers to explicitly call
/// [`finish`] after the encryption/decryption operation is complete.
/// This will either return a *tag* (when encrypting) used to authenticate data
/// or a `Result` (when decrypting) that signifies whether the data is authentic,
/// which is when the resulting tag is equal to the one created during encryption.
///
/// ## Example
/// ```
/// use eax::{Error, online::{Eax, Decrypt, Encrypt}, cipher::generic_array::GenericArray};
/// use aes::Aes256;
///
/// let key = GenericArray::from_slice(b"an example very very secret key.");
///
/// let nonce = GenericArray::from_slice(b"my unique nonces"); // 128-bits; unique per message
///
/// let assoc = b"my associated data";
/// let plaintext = b"plaintext message";
///
/// let mut buffer: [u8; 17] = *plaintext;
///
/// // Encrypt a simple message
/// let mut cipher = Eax::<Aes256, Encrypt>::with_key_and_nonce(key, nonce);
/// cipher.update_assoc(&assoc[..]);
/// cipher.encrypt(&mut buffer[..9]);
/// cipher.encrypt(&mut buffer[9..]);
/// let tag = cipher.finish();
///
/// assert_ne!(buffer, *plaintext);
///
/// let mut cloned = buffer;
///
/// // Now decrypt it, using the same key and nonce
/// let mut cipher = Eax::<Aes256, Decrypt>::with_key_and_nonce(key, nonce);
/// cipher.update_assoc(&assoc[..]);
/// cipher.decrypt_unauthenticated_hazmat(&mut buffer[..5]);
/// cipher.decrypt_unauthenticated_hazmat(&mut buffer[5..10]);
/// cipher.decrypt_unauthenticated_hazmat(&mut buffer[10..]);
/// let res = cipher.finish(&tag);
///
/// assert_eq!(res, Ok(()));
/// assert_eq!(buffer, *plaintext);
///
/// // Decrypting the ciphertext with tampered associated data should fail
/// let mut cipher = Eax::<Aes256, Decrypt>::with_key_and_nonce(key, nonce);
///
/// cipher.update_assoc(b"tampered");
/// cipher.decrypt_unauthenticated_hazmat(&mut cloned);
/// let res = cipher.finish(&tag);
///
/// assert_eq!(res, Err(Error));
/// ```
///
/// [<sup>1</sup>]: https://en.wikipedia.org/wiki/Online_algorithm
/// [`Eax`]: ../struct.Eax.html
/// [`Decrypt`]: struct.Decrypt.html
/// [`finish`]: #method.finish
pub struct Eax<Cipher, Op, M = U16>
where
    Cipher: BlockCipher<BlockSize = U16> + BlockEncrypt + Clone + KeyInit,
    Op: CipherOp,
    M: TagSize,
{
    /// Shared implementation that drives both encryption and decryption;
    /// the mode-specific impls below only forward to it.
    imp: EaxImpl<Cipher, M>,
    /// Denotes whether this stream is used for encryption or decryption.
    marker: PhantomData<Op>,
}
impl<Cipher, Op, M> Eax<Cipher, Op, M>
where
    Cipher: BlockCipher<BlockSize = U16> + BlockEncrypt + Clone + KeyInit,
    Op: CipherOp,
    M: TagSize,
{
    /// Creates a stateful EAX instance that can consume both the associated
    /// data and the message in an "on-line" (chunked) fashion.
    pub fn with_key_and_nonce(key: &Key<Cipher>, nonce: &Nonce<Cipher::BlockSize>) -> Self {
        Self {
            imp: EaxImpl::<Cipher, M>::with_key_and_nonce(key, nonce),
            marker: PhantomData,
        }
    }

    /// Feeds a chunk of associated data (AD) into the running MAC.
    #[inline]
    pub fn update_assoc(&mut self, aad: &[u8]) {
        self.imp.update_assoc(aad);
    }

    /// Derives the tag from the data processed so far without consuming
    /// the stream.
    ///
    /// Once the operation is complete, call [`finish`] instead.
    ///
    ///[`finish`]: #method.finish
    #[inline]
    pub fn tag_clone(&self) -> Tag<M> {
        self.imp.tag_clone()
    }
}
impl<Cipher, M> Eax<Cipher, Encrypt, M>
where
    Cipher: BlockCipher<BlockSize = U16> + BlockEncrypt + Clone + KeyInit,
    M: TagSize,
{
    /// Encrypts the given chunk in place.
    #[inline]
    pub fn encrypt(&mut self, msg: &mut [u8]) {
        self.imp.encrypt(msg);
    }

    /// Consumes the encryption stream and returns the derived tag.
    ///
    /// This *must* be called after the stream encryption is finished.
    #[must_use = "tag must be saved to later verify decrypted data"]
    #[inline]
    pub fn finish(self) -> Tag<M> {
        self.imp.tag()
    }
}
impl<Cipher, M> Eax<Cipher, Decrypt, M>
where
    Cipher: BlockCipher<BlockSize = U16> + BlockEncrypt + Clone + KeyInit,
    M: TagSize,
{
    /// Decrypts the given chunk in place **without** verifying the
    /// authenticity of the decrypted message.
    ///
    /// Authenticity is only established by calling the [`finish`]
    /// associated function afterwards.
    ///
    /// # ☣️ BEWARE! ☣️
    /// This is a low-level building block: it decrypts the chunk and folds
    /// it into an intermediate tag that is only checked when the online
    /// decryption is finished.
    ///
    /// Misuse can open a chosen-ciphertext (IND-CCA) attack surface: because
    /// decryption and partial tag calculation happen per chunk, an attacker
    /// may feed chosen ciphertexts and observe the (ultimately unauthenticated)
    /// plaintexts, which may leak information about the decryption scheme
    /// (e.g. parts of the secret key), even though the final [`finish`] call
    /// will reject the forgery.
    ///
    /// [`finish`]: #method.finish
    #[inline]
    pub fn decrypt_unauthenticated_hazmat(&mut self, msg: &mut [u8]) {
        self.imp.decrypt(msg);
    }

    /// Consumes the decryption stream and verifies that the associated and
    /// decrypted data have not been tampered with.
    ///
    /// This *must* be called after the stream decryption is finished.
    #[must_use = "decrypted data stream must be verified for authenticity"]
    pub fn finish(self, expected: &Tag<M>) -> Result<(), Error> {
        self.imp.verify_ct(expected)
    }
}
/// Implementation of the raw EAX operations.
///
/// Main reason behind extracting the logic to a single, separate type is to
/// facilitate testing of the internal logic.
#[doc(hidden)]
struct EaxImpl<Cipher, M>
where
    Cipher: BlockCipher<BlockSize = U16> + BlockEncrypt + Clone + KeyInit,
    M: TagSize,
{
    /// N = OMAC(0 || nonce) — the OMAC'd nonce, XORed into the final tag
    /// (see `with_key_and_nonce` / `tag`).
    nonce: Nonce<Cipher::BlockSize>,
    /// Running OMAC(1 || associated data).
    data: Cmac<Cipher>,
    /// Running OMAC(2 || ciphertext).
    message: Cmac<Cipher>,
    /// CTR keystream, seeded with N.
    ctr: ctr::Ctr128BE<Cipher>,
    // HACK: Needed for the test harness due to AEAD trait online/offline interface mismatch
    #[cfg(test)]
    key: Key<Cipher>,
    _tag_size: PhantomData<M>,
}
impl<Cipher, M> EaxImpl<Cipher, M>
where
    Cipher: BlockCipher<BlockSize = U16> + BlockEncrypt + Clone + KeyInit,
    M: TagSize,
{
    /// Creates a stateful EAX instance that is capable of processing both
    /// the associated data and the plaintext in an "on-line" fashion.
    fn with_key_and_nonce(key: &Key<Cipher>, nonce: &Nonce<Cipher::BlockSize>) -> Self {
        // OMAC^t(m) here is CMAC over (t encoded as one full 16-byte block) || m:
        // 15 zero bytes followed by the tweak byte, then the data.
        let prepend_cmac = |key, init_val, data| {
            let mut cmac = <Cmac<Cipher> as Mac>::new(key);
            cmac.update(&[0; 15]);
            cmac.update(&[init_val]);
            cmac.update(data);
            cmac
        };
        // https://crypto.stackexchange.com/questions/26948/eax-cipher-mode-with-nonce-equal-header
        // has an explanation of eax.
        // l = block cipher size = 128 (for AES-128) = 16 byte
        // 1. n ← OMAC(0 || Nonce)
        // (the 0 means the number zero in l bits)
        let n = prepend_cmac(key, 0, nonce);
        let n = n.finalize().into_bytes();
        // NOTE: These can be updated online later
        // 2. h ← OMAC(1 || associated data)
        let h = prepend_cmac(key, 1, &[]);
        // 3. c ← OMAC(2 || enc)
        let c = prepend_cmac(key, 2, &[]);
        // CTR keystream is seeded with N (the OMAC'd nonce), not the raw nonce.
        let cipher = ctr::Ctr128BE::<Cipher>::new(key, &n);
        Self {
            // `nonce` stores N = OMAC(0 || nonce) — needed again for the tag.
            nonce: n,
            data: h,
            message: c,
            ctr: cipher,
            #[cfg(test)]
            key: key.clone(),
            _tag_size: Default::default(),
        }
    }
    /// Process the associated data (AD).
    #[inline]
    pub fn update_assoc(&mut self, aad: &[u8]) {
        self.data.update(aad);
    }
    /// Applies encryption to the plaintext.
    #[inline]
    fn encrypt(&mut self, msg: &mut [u8]) {
        // Encrypt-then-MAC: transform `msg` to ciphertext first, then fold
        // the ciphertext into the message OMAC. Order is load-bearing because
        // the buffer is modified in place.
        self.ctr.apply_keystream(msg);
        self.message.update(msg);
    }
    /// Applies decryption to the ciphertext.
    #[inline]
    fn decrypt(&mut self, msg: &mut [u8]) {
        // Mirror of `encrypt`: MAC the ciphertext *before* decrypting it in
        // place, so both directions authenticate the same (cipher)text.
        self.message.update(msg);
        self.ctr.apply_keystream(msg);
    }
    /// Derives the tag from the encrypted/decrypted message so far.
    #[inline]
    fn tag(self) -> Tag<M> {
        // tag = (N xor H xor C), truncated to M bytes.
        let h = self.data.finalize().into_bytes();
        let c = self.message.finalize().into_bytes();
        let full_tag = self.nonce.zip(h, |a, b| a ^ b).zip(c, |a, b| a ^ b);
        Tag::<M>::clone_from_slice(&full_tag[..M::to_usize()])
    }
    /// Derives the tag from the encrypted/decrypted message so far.
    #[inline]
    fn tag_clone(&self) -> Tag<M> {
        // Same as `tag`, but clones the running MACs so the stream stays usable.
        let h = self.data.clone().finalize().into_bytes();
        let c = self.message.clone().finalize().into_bytes();
        let full_tag = self.nonce.zip(h, |a, b| a ^ b).zip(c, |a, b| a ^ b);
        Tag::<M>::clone_from_slice(&full_tag[..M::to_usize()])
    }
    /// Finishes the decryption stream, verifying whether the associated and
    /// decrypted data stream has not been tampered with.
    fn verify_ct(self, expected: &Tag<M>) -> Result<(), Error> {
        // Check MAC using secure comparison
        use subtle::ConstantTimeEq;
        let resulting_tag = &self.tag()[..expected.len()];
        if resulting_tag.ct_eq(expected).into() {
            Ok(())
        } else {
            Err(Error)
        }
    }
}
// Because the current AEAD test harness expects the types to implement both
// `KeyInit` and `AeadMutInPlace` traits, do so here so that we can test the
// internal logic used by the public interface for the online EAX variant.
// These are not publicly implemented in general, because the traits are
// designed for offline usage and are somewhat wasteful when used in online mode.
#[cfg(test)]
mod test_impl {
    use super::*;
    use aead::{
        consts::U0, generic_array::GenericArray, AeadCore, AeadMutInPlace, KeyInit, KeySizeUser,
    };
    impl<Cipher, M> KeySizeUser for EaxImpl<Cipher, M>
    where
        Cipher: BlockCipher<BlockSize = U16> + BlockEncrypt + Clone + KeyInit,
        M: TagSize,
    {
        type KeySize = Cipher::KeySize;
    }
    impl<Cipher, M> KeyInit for EaxImpl<Cipher, M>
    where
        Cipher: BlockCipher<BlockSize = U16> + BlockEncrypt + Clone + KeyInit,
        M: TagSize,
    {
        fn new(key: &Key<Cipher>) -> Self {
            // HACK: The nonce will be initialized by the appropriate
            // decrypt/encrypt functions from `AeadMutInPlace` implementation.
            // This is currently done so because that trait only implements
            // offline operations and thus need to re-initialize the `EaxImpl`
            // instance.
            let nonce = GenericArray::default();
            Self::with_key_and_nonce(key, &nonce)
        }
    }
    impl<Cipher, M> AeadCore for super::EaxImpl<Cipher, M>
    where
        Cipher: BlockCipher<BlockSize = U16> + BlockEncrypt + Clone + KeyInit,
        M: TagSize,
    {
        type NonceSize = Cipher::BlockSize;
        type TagSize = M;
        type CiphertextOverhead = U0;
    }
    impl<Cipher, M> AeadMutInPlace for super::EaxImpl<Cipher, M>
    where
        Cipher: BlockCipher<BlockSize = U16> + BlockEncrypt + Clone + KeyInit,
        M: TagSize,
    {
        fn encrypt_in_place_detached(
            &mut self,
            nonce: &Nonce<Self::NonceSize>,
            associated_data: &[u8],
            buffer: &mut [u8],
        ) -> Result<Tag<M>, Error> {
            // HACK: Reinitialize the instance
            *self = Self::with_key_and_nonce(&self.key.clone(), nonce);
            self.update_assoc(associated_data);
            self.encrypt(buffer);
            // `tag_clone` (not `tag`) because we only hold `&mut self` here.
            Ok(self.tag_clone())
        }
        fn decrypt_in_place_detached(
            &mut self,
            nonce: &Nonce<Self::NonceSize>,
            associated_data: &[u8],
            buffer: &mut [u8],
            expected_tag: &Tag<M>,
        ) -> Result<(), Error> {
            // HACK: Reinitialize the instance
            *self = Self::with_key_and_nonce(&self.key.clone(), nonce);
            self.update_assoc(associated_data);
            self.decrypt(buffer);
            let tag = self.tag_clone();
            // Check mac using secure comparison
            use subtle::ConstantTimeEq;
            if expected_tag.ct_eq(&tag).into() {
                Ok(())
            } else {
                Err(Error)
            }
        }
    }
}
|
extern crate portmidi;
#[test]
// End-to-end smoke test of the portmidi bindings: init, enumerate devices,
// open the default input/output ports, poll/read, send a note-on/note-off
// pair, then close everything and terminate.
// NOTE(review): `unwrap()` on the default device ids means this test panics
// on machines without configured MIDI devices — it needs real hardware/ports.
fn test_midiin() {
    let result = portmidi::initialize();
    assert_eq!(result, Ok(()));
    let nbdevice = portmidi::count_devices();
    println!("portmidi nb device {:?}", nbdevice);
    let defdevin = portmidi::get_default_input_device_id().unwrap();
    println!("portmidi default input device {:?}", defdevin);
    let defdevout = portmidi::get_default_output_device_id().unwrap();
    println!("portmidi default output device {:?}", defdevout);
    let ininfo = portmidi::get_device_info(defdevin);
    println!("portmidi default input device info {:?}", ininfo);
    let outinfo = portmidi::get_device_info(defdevout);
    println!("portmidi default output device info {:?}", outinfo);
    // Input buffer size 0, output with 100 ms latency.
    let mut inport = portmidi::InputPort::new(defdevin, 0);
    let result = inport.open();
    assert_eq!(result, Ok(()));
    let mut outport = portmidi::OutputPort::new(defdevout, 100);
    let result = outport.open();
    assert_eq!(result, Ok(()));
    let read_midi = inport.read();
    println!("portmidi input note {:?}", read_midi);
    match read_midi {
        Ok(Some(notes)) => println!("portmidi read midi note {:?}", notes),
        Ok(None) => println!("portmidi read midi no note"),
        Err(err) => println!("portmidi read midi error {:?}", err)
    }
    let result = inport.poll();
    assert_eq!(result, Ok(false));
    //send note
    let note1 = portmidi::MidiEvent {
        message : portmidi::MidiMessage {
            status : 1 | 0x90, // channel and note on
            data1 : 36, // note number
            data2 : 90, // velocity
        },
        timestamp : 0
    };
    let result = outport.write_event(note1);
    assert_eq!(result, Ok(()));
    let note2 = portmidi::MidiMessage {
        status : 1 | 0x80, // channel and note off
        data1 : 36, // note number
        data2 : 0, // velocity
    };
    let result = outport.write_message(note2);
    assert_eq!(result, Ok(()));
    //close out port
    let result = outport.close();
    assert_eq!(result, Ok(()));
    //close in port
    let result = inport.close();
    assert_eq!(result, Ok(()));
    //terminate midi
    let result = portmidi::terminate();
    assert_eq!(result, Ok(()));
}
|
#[doc = "Register `DINR9` reader"]
pub type R = crate::R<DINR9_SPEC>;
#[doc = "Field `DIN9` reader - Input data received from MDIO Master during write frames"]
pub type DIN9_R = crate::FieldReader<u16>;
impl R {
    #[doc = "Bits 0:15 - Input data received from MDIO Master during write frames"]
    #[inline(always)]
    pub fn din9(&self) -> DIN9_R {
        // DIN9 occupies the low half-word of the 32-bit register.
        DIN9_R::new((self.bits & 0xffff) as u16)
    }
}
#[doc = "MDIOS input data register 9\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr9::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Read-only register: only `Readable` is implemented, no `Writable`.
pub struct DINR9_SPEC;
impl crate::RegisterSpec for DINR9_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`dinr9::R`](R) reader structure"]
impl crate::Readable for DINR9_SPEC {}
#[doc = "`reset()` method sets DINR9 to value 0"]
impl crate::Resettable for DINR9_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
//! Pointers to higher order structures.
use std::marker::PhantomData;
/// Points to bool.
// Non-generic pointer: gets hand-written From impls below instead of the
// `from_impl!` macro (which only emits `impl<T>`).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct Bool(usize);
/// Points to a scalar.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct Point1<T>(pub(crate) usize, PhantomData<T>);
/// Points to a 2D point.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct Point2<T>(pub(crate) usize, PhantomData<T>);
/// Points to a 3D point.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct Point3<T>(pub(crate) usize, PhantomData<T>);
/// Points to a 4D point.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct Point4<T>(pub(crate) usize, PhantomData<T>);
/// Points to a spline.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct Spline<T>(usize, PhantomData<T>);
/// Points to a surface.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct Surface<T>(usize, PhantomData<T>);
/// Points to a color.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct Color(usize);
/// Points to a color spline.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct ColorSpline(usize);
/// Points to a bone.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct Bone<T>(usize, PhantomData<T>);
// Generates the usize <-> pointer conversions for each generic
// (PhantomData-tagged) pointer type. The non-generic types (Bool, Color,
// ColorSpline) cannot use this macro — it always emits `impl<T>` — so they
// get hand-written impls below.
macro_rules! from_impl {
    ($point:ident) => {
        impl<T> From<usize> for $point<T> {
            fn from(val: usize) -> $point<T> {$point(val, PhantomData)}
        }
        impl<T> From<$point<T>> for usize {
            fn from(val: $point<T>) -> usize {val.0}
        }
    }
}
from_impl!{Point1}
from_impl!{Point2}
from_impl!{Point3}
from_impl!{Point4}
from_impl!{Spline}
from_impl!{Surface}
from_impl!{Bone}
impl From<usize> for Bool {
fn from(val: usize) -> Bool {Bool(val)}
}
impl From<Bool> for usize {
fn from(val: Bool) -> usize {val.0}
}
impl From<usize> for Color {
fn from(val: usize) -> Color {Color(val)}
}
impl From<Color> for usize {
fn from(val: Color) -> usize {val.0}
}
impl From<usize> for ColorSpline {
fn from(val: usize) -> ColorSpline {ColorSpline(val)}
}
impl From<ColorSpline> for usize {
fn from(val: ColorSpline) -> usize {val.0}
}
|
extern crate itertools;
use std::io::Read;
use itertools::Itertools;
fn main() {
let mut file = std::fs::File::open("./input").unwrap();
let mut contents = String::new();
file.read_to_string(&mut contents).unwrap();
let lines = contents.split("\n");
let checksum1: u32 = lines.clone().map(|line| calc_line_difference(line)).sum();
let checksum2: u32 = lines.map(|line| calc_div(line)).sum();
println!("The checksum of the lines is: {}", checksum1);
println!("The ccheksum of the lines is: {}", checksum2);
}
/// AoC 2017 day 2 part 2: finds the pair in the row where one value evenly
/// divides the other and returns the quotient; 0 if no such pair exists.
///
/// Fixes over the original:
/// - Pairs were filtered with `x != y` (value inequality), so a valid pair of
///   *equal* numbers (e.g. "4 4 5" -> 4/4 = 1) was skipped. Positions are now
///   compared instead of values.
/// - `x % y` panicked when the input contained a 0; zero divisors are skipped.
fn calc_div(line: &str) -> u32 {
    let elements: Vec<i32> = line
        .split_whitespace()
        .map(|x| x.parse::<i32>().expect("Input was not an i32 number"))
        .collect();
    // Same row-major pair order as the original cartesian product, so the
    // first matching pair (and thus the result) is unchanged for valid input.
    for (i, &x) in elements.iter().enumerate() {
        for (j, &y) in elements.iter().enumerate() {
            if i != j && y != 0 && x % y == 0 {
                return (x / y) as u32;
            }
        }
    }
    0
}
/// AoC 2017 day 2 part 1: difference between the largest and smallest
/// number in the row; 0 for an empty row.
fn calc_line_difference(line: &str) -> u32 {
    let values: Vec<i32> = line
        .split_whitespace()
        .map(|tok| tok.parse::<i32>().expect("Input was not an i32 number"))
        .collect();
    let largest = values.iter().max().unwrap_or(&0);
    let smallest = values.iter().min().unwrap_or(&0);
    (largest - smallest) as u32
}
|
#[doc = "Register `APB1SECSR1` reader"]
pub type R = crate::R<APB1SECSR1_SPEC>;
// All fields below are single-bit security-status flags sharing the generic
// BitReader; the bit offsets are applied by the accessors in `impl R`.
#[doc = "Field `TIM2SECF` reader - TIM2SECF"]
pub type TIM2SECF_R = crate::BitReader;
#[doc = "Field `TIM3SECF` reader - TIM3SECF"]
pub type TIM3SECF_R = crate::BitReader;
#[doc = "Field `TIM4SECF` reader - TIM4SECF"]
pub type TIM4SECF_R = crate::BitReader;
#[doc = "Field `TIM5SECF` reader - TIM5SECF"]
pub type TIM5SECF_R = crate::BitReader;
#[doc = "Field `TIM6SECF` reader - TIM6SECF"]
pub type TIM6SECF_R = crate::BitReader;
#[doc = "Field `TIM7SECF` reader - TIM7SECF"]
pub type TIM7SECF_R = crate::BitReader;
#[doc = "Field `RTCAPBSECF` reader - RTCAPBSECF"]
pub type RTCAPBSECF_R = crate::BitReader;
#[doc = "Field `WWDGSECF` reader - WWDGSECF"]
pub type WWDGSECF_R = crate::BitReader;
#[doc = "Field `SPI2SECF` reader - SPI2SECF"]
pub type SPI2SECF_R = crate::BitReader;
#[doc = "Field `SPI3SECF` reader - SPI3SECF"]
pub type SPI3SECF_R = crate::BitReader;
#[doc = "Field `UART2SECF` reader - UART2SECF"]
pub type UART2SECF_R = crate::BitReader;
#[doc = "Field `UART3SECF` reader - UART3SECF"]
pub type UART3SECF_R = crate::BitReader;
#[doc = "Field `UART4SECF` reader - UART4SECF"]
pub type UART4SECF_R = crate::BitReader;
#[doc = "Field `UART5SECF` reader - UART5SECF"]
pub type UART5SECF_R = crate::BitReader;
#[doc = "Field `I2C1SECF` reader - I2C1SECF"]
pub type I2C1SECF_R = crate::BitReader;
#[doc = "Field `I2C2SECF` reader - I2C2SECF"]
pub type I2C2SECF_R = crate::BitReader;
#[doc = "Field `I2C3SECF` reader - I2C3SECF"]
pub type I2C3SECF_R = crate::BitReader;
#[doc = "Field `CRSSECF` reader - CRSSECF"]
pub type CRSSECF_R = crate::BitReader;
#[doc = "Field `PWRSECF` reader - PWRSECF"]
pub type PWRSECF_R = crate::BitReader;
#[doc = "Field `DACSECF` reader - DACSECF"]
pub type DACSECF_R = crate::BitReader;
#[doc = "Field `OPAMPSECF` reader - OPAMPSECF"]
pub type OPAMPSECF_R = crate::BitReader;
#[doc = "Field `LPTIM1SECF` reader - LPTIM1SECF"]
pub type LPTIM1SECF_R = crate::BitReader;
impl R {
    // Field accessors: each extracts a single status bit from the raw value.
    /// Bit 0 - TIM2SECF
    #[inline(always)]
    pub fn tim2secf(&self) -> TIM2SECF_R {
        TIM2SECF_R::new(self.bits & 1 == 1)
    }
    /// Bit 1 - TIM3SECF
    #[inline(always)]
    pub fn tim3secf(&self) -> TIM3SECF_R {
        TIM3SECF_R::new((self.bits >> 1) & 1 == 1)
    }
    /// Bit 2 - TIM4SECF
    #[inline(always)]
    pub fn tim4secf(&self) -> TIM4SECF_R {
        TIM4SECF_R::new((self.bits >> 2) & 1 == 1)
    }
    /// Bit 3 - TIM5SECF
    #[inline(always)]
    pub fn tim5secf(&self) -> TIM5SECF_R {
        TIM5SECF_R::new((self.bits >> 3) & 1 == 1)
    }
    /// Bit 4 - TIM6SECF
    #[inline(always)]
    pub fn tim6secf(&self) -> TIM6SECF_R {
        TIM6SECF_R::new((self.bits >> 4) & 1 == 1)
    }
    /// Bit 5 - TIM7SECF
    #[inline(always)]
    pub fn tim7secf(&self) -> TIM7SECF_R {
        TIM7SECF_R::new((self.bits >> 5) & 1 == 1)
    }
    /// Bit 10 - RTCAPBSECF
    #[inline(always)]
    pub fn rtcapbsecf(&self) -> RTCAPBSECF_R {
        RTCAPBSECF_R::new((self.bits >> 10) & 1 == 1)
    }
    /// Bit 11 - WWDGSECF
    #[inline(always)]
    pub fn wwdgsecf(&self) -> WWDGSECF_R {
        WWDGSECF_R::new((self.bits >> 11) & 1 == 1)
    }
    /// Bit 14 - SPI2SECF
    #[inline(always)]
    pub fn spi2secf(&self) -> SPI2SECF_R {
        SPI2SECF_R::new((self.bits >> 14) & 1 == 1)
    }
    /// Bit 15 - SPI3SECF
    #[inline(always)]
    pub fn spi3secf(&self) -> SPI3SECF_R {
        SPI3SECF_R::new((self.bits >> 15) & 1 == 1)
    }
    /// Bit 17 - UART2SECF
    #[inline(always)]
    pub fn uart2secf(&self) -> UART2SECF_R {
        UART2SECF_R::new((self.bits >> 17) & 1 == 1)
    }
    /// Bit 18 - UART3SECF
    #[inline(always)]
    pub fn uart3secf(&self) -> UART3SECF_R {
        UART3SECF_R::new((self.bits >> 18) & 1 == 1)
    }
    /// Bit 19 - UART4SECF
    #[inline(always)]
    pub fn uart4secf(&self) -> UART4SECF_R {
        UART4SECF_R::new((self.bits >> 19) & 1 == 1)
    }
    /// Bit 20 - UART5SECF
    #[inline(always)]
    pub fn uart5secf(&self) -> UART5SECF_R {
        UART5SECF_R::new((self.bits >> 20) & 1 == 1)
    }
    /// Bit 21 - I2C1SECF
    #[inline(always)]
    pub fn i2c1secf(&self) -> I2C1SECF_R {
        I2C1SECF_R::new((self.bits >> 21) & 1 == 1)
    }
    /// Bit 22 - I2C2SECF
    #[inline(always)]
    pub fn i2c2secf(&self) -> I2C2SECF_R {
        I2C2SECF_R::new((self.bits >> 22) & 1 == 1)
    }
    /// Bit 23 - I2C3SECF
    #[inline(always)]
    pub fn i2c3secf(&self) -> I2C3SECF_R {
        I2C3SECF_R::new((self.bits >> 23) & 1 == 1)
    }
    /// Bit 24 - CRSSECF
    #[inline(always)]
    pub fn crssecf(&self) -> CRSSECF_R {
        CRSSECF_R::new((self.bits >> 24) & 1 == 1)
    }
    /// Bit 28 - PWRSECF
    #[inline(always)]
    pub fn pwrsecf(&self) -> PWRSECF_R {
        PWRSECF_R::new((self.bits >> 28) & 1 == 1)
    }
    /// Bit 29 - DACSECF
    #[inline(always)]
    pub fn dacsecf(&self) -> DACSECF_R {
        DACSECF_R::new((self.bits >> 29) & 1 == 1)
    }
    /// Bit 30 - OPAMPSECF
    #[inline(always)]
    pub fn opampsecf(&self) -> OPAMPSECF_R {
        OPAMPSECF_R::new((self.bits >> 30) & 1 == 1)
    }
    /// Bit 31 - LPTIM1SECF
    #[inline(always)]
    pub fn lptim1secf(&self) -> LPTIM1SECF_R {
        LPTIM1SECF_R::new((self.bits >> 31) & 1 == 1)
    }
}
#[doc = "RCC APB1 security status register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb1secsr1::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Read-only status register: only `Readable` is implemented.
pub struct APB1SECSR1_SPEC;
impl crate::RegisterSpec for APB1SECSR1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`apb1secsr1::R`](R) reader structure"]
impl crate::Readable for APB1SECSR1_SPEC {}
#[doc = "`reset()` method sets APB1SECSR1 to value 0x0400"]
// Reset value 0x0400 = bit 10 (RTCAPBSECF) set.
impl crate::Resettable for APB1SECSR1_SPEC {
    const RESET_VALUE: Self::Ux = 0x0400;
}
|
#![no_std]
//! # serial_packet
//!
//! ## Features
//!
//! This crate defines a SerialPacketParser that will receive a byte at a time and
//! parse serial packets from the stream
// Incoming Packet Bytes
// Header
// |0|1|1|1|0|0|1|1| = 's'
// |0|1|1|0|1|1|1|0| = 'n'
// |0|1|1|1|0|0|0|0| = 'p'
// |HD|B|DL3|DL2|DL1|DL0|x|x| = packet type
// HD = set if packet has data to write
// B = set if packet is a batch read/write
// DL = batch size / 4
// datalength is 0 if HD clear
// datalength is 4 if HD set and B clear (DL field ignored)
// datalength = 4*DL if HD and B set, a write request with bytes to write
// datalength = 4*DL if HD clear and B set, a read request
// |0|0|0|0|0|0|0|0| = packet address
// Data
// |0|0|0|0|0|0|0|0| = first byte of data (up to 60 bytes)
// ....
// Checksum
// |0|0|0|0|0|0|0|0| = first byte of checksum
// |0|0|0|0|0|0|0|0| = second byte of checksum
#[macro_use]
extern crate machine;
// MCU memory rep of a serial packet
// real packets don't have a datalen member
#[derive(Copy, Clone)]
pub struct USARTPacket {
    pub pt: u8,
    pub address: u8,
    pub checksum: u16,
    pub datalen: u8,
    pub data: [u8; 64],
}
impl USARTPacket {
    /// Creates an empty packet with every field zeroed.
    pub fn new() -> USARTPacket {
        USARTPacket {
            pt: 0,
            address: 0,
            checksum: 0,
            datalen: 0,
            data: [0; 64],
        }
    }
    /// Data length implied by the packet-type byte: HD (bit 7) set without
    /// B (bit 6) means a fixed 4 bytes; any batch (B set) means 4 * DL where
    /// DL is bits 2..=5; otherwise no data.
    pub fn specified_data_size(&self) -> u8 {
        let has_data = self.pt & 0x80 != 0;
        let is_batch = self.pt & 0x40 != 0;
        if is_batch {
            4 * ((self.pt >> 2) & 0x0F)
        } else if has_data {
            4
        } else {
            0
        }
    }
    // given a packet, calculate the checksum for it:
    // the 'snp' header bytes plus packet type, address, and all data bytes.
    pub fn compute_checksum(&self) -> u16 {
        let header_sum: u16 =
            0x0073 + 0x006E + 0x0070 + u16::from(self.pt) + u16::from(self.address);
        self.data
            .iter()
            .fold(header_sum, |acc, &byte| acc + u16::from(byte))
    }
    // given a packet compare stored checksum with calculated checksum
    pub fn compare_checksum(&self) -> bool {
        self.compute_checksum() == self.checksum
    }
}
// Tracks progress through the 's' 'n' 'p' packet header: W means still
// waiting for 's'; S/N/P name the last header byte successfully matched
// (see `Wait::on_advance`).
#[derive(Clone, Debug, PartialEq)]
pub enum PacketHeaderBytes {
    W,
    S,
    N,
    P,
}
// Define the machine states
// The `machine!` macro generates a `PacketParser` enum plus one struct per
// state and snake_case constructor functions (e.g. `PacketParser::wait(..)`).
machine!(
    #[derive(Clone, Debug, PartialEq)]
    enum PacketParser {
        // Scanning for the 's' 'n' 'p' header prefix.
        Wait { got: PacketHeaderBytes },
        // Header matched; next byte is the packet-type byte.
        PacketType,
        // Next byte is the packet address.
        Address { ty: u8 },
        // Type and address captured; data length can now be derived from `ty`.
        HaveHeader { ty: u8, addr: u8 },
        // Receiving `len` data bytes; `offset` counts bytes consumed so far.
        Data { len: u8, offset: u8 },
        // Receiving the two checksum bytes; `first` marks which one is next.
        Checksum { first: bool, sum: u16 },
        // Both checksum bytes received.
        ChecksumComplete { sum: u16 },
        // A full packet has been parsed.
        HavePacket,
    }
);
// The transition types
/// Input event for the state machine: one byte received on the wire.
#[derive(Clone, Debug, PartialEq)]
pub struct Advance {
    ch: u8,
}
// the allowed state, transition pairs
// `transitions!` generates `PacketParser::on_advance(self, Advance)`,
// dispatching each listed (state, transition) pair to that state's own
// `on_advance` method below; the bracketed right-hand sides enumerate
// every state that method may return.
transitions!(PacketParser,
    [
        (Wait, Advance) => [PacketType, Wait],
        (PacketType, Advance) => Address,
        (Address, Advance) => HaveHeader,
        (HaveHeader, Advance) => [Data, Checksum],
        (Data, Advance) => [Data, Checksum],
        (Checksum, Advance) => [ChecksumComplete, Checksum],
        (ChecksumComplete, Advance) => HavePacket
    ]
);
// additional methods to add to the parser for retrieving data
// `methods!` adds Option-returning accessors on `PacketParser`: each
// `get` line produces a getter (e.g. `ty()` returning `Option<&u8>`)
// that is `Some` only while the parser is in the named state, and each
// `fn` line forwards to the state's own method, likewise returning
// `Some` only in that state (see their use in `parse_received_byte`).
methods!(PacketParser,
    [
        HaveHeader => get ty: u8,
        HaveHeader => get addr: u8,
        HaveHeader => fn can_collect_header(&self) -> bool,
        Data => get len: u8,
        Data => get offset: u8,
        Data => fn can_collect_data(&self) -> bool,
        ChecksumComplete => get sum: u16,
        ChecksumComplete => fn can_collect_checksum(&self) -> bool,
        HavePacket => fn have_complete_packet(&self) -> bool
    ]
);
// wait state looking for packet header of 'snp'
impl Wait {
    /// Track the 's' 'n' 'p' header sequence one byte at a time; any
    /// unexpected byte resets the search to the beginning. Once the
    /// full header has been seen, move on to the packet-type state.
    pub fn on_advance(self, input: Advance) -> PacketParser {
        let progress = match (self.got, input.ch) {
            (PacketHeaderBytes::W, b's') => PacketHeaderBytes::S,
            (PacketHeaderBytes::S, b'n') => PacketHeaderBytes::N,
            (PacketHeaderBytes::N, b'p') => PacketHeaderBytes::P,
            _ => PacketHeaderBytes::W,
        };
        match progress {
            PacketHeaderBytes::P => PacketParser::packet_type(),
            partial => PacketParser::wait(partial),
        }
    }
}
// After the header, we receive the packet type byte
impl PacketType {
pub fn on_advance(self, input: Advance) -> Address {
Address { ty: input.ch }
}
}
// After the packet type byte, we receive the address
impl Address {
pub fn on_advance(self, input: Advance) -> HaveHeader {
HaveHeader {
ty: self.ty,
addr: input.ch,
}
}
}
// once we have the header, type, and address, go to a state where
// outside code can retrieve the type and address, calculate the packet
// data length
impl HaveHeader {
    /// Decide whether payload bytes follow. Only packets with HD set
    /// (bit 7) carry data on the wire; everything else goes straight
    /// to checksum collection.
    pub fn on_advance(self, _: Advance) -> PacketParser {
        match self.ty & 0xC0 {
            // HD set, B clear: a single 4-byte register write.
            0b1000_0000 => PacketParser::data(4, 0),
            // HD and B set: batch write of DL registers.
            0b1100_0000 => PacketParser::data(4 * ((self.ty >> 2) & 0x0F), 0),
            // Reads and commands carry no payload bytes.
            _ => PacketParser::checksum(true, 0),
        }
    }

    /// Exposed through `methods!` so callers can detect this state.
    pub fn can_collect_header(&self) -> bool {
        true
    }
}
// get the packet data (if any)
impl Data {
    /// Consume one payload byte: stay in `Data` until `len` bytes have
    /// been seen, then switch to checksum collection.
    /// NOTE(review): `len - 1` assumes `len > 0`; a batch header with
    /// DL == 0 would reach here with `len == 0` via
    /// `HaveHeader::on_advance` — confirm upstream validation.
    pub fn on_advance(self, _input: Advance) -> PacketParser {
        if self.offset < self.len - 1 {
            return PacketParser::data(self.len, self.offset + 1);
        }
        PacketParser::checksum(true, 0)
    }

    /// Exposed through `methods!` so callers can detect this state.
    pub fn can_collect_data(&self) -> bool {
        true
    }
}
// receive the transmitted checksum bytes
impl Checksum {
    /// Assemble the big-endian 16-bit checksum: the first byte becomes
    /// the high byte, the second is OR-ed in as the low byte.
    pub fn on_advance(self, input: Advance) -> PacketParser {
        let byte = u16::from(input.ch);
        if self.first {
            PacketParser::checksum(false, byte << 8)
        } else {
            PacketParser::checksum_complete(byte | self.sum)
        }
    }
}
// allow outside code to collect checksum
impl ChecksumComplete {
    /// The caller has had a chance to read the checksum; one more
    /// advance (re-fed with the same byte by `parse_received_byte`)
    /// finishes the packet.
    pub fn on_advance(self, _: Advance) -> HavePacket {
        HavePacket {}
    }

    /// Exposed through `methods!` so callers can detect this state.
    pub fn can_collect_checksum(&self) -> bool {
        true
    }
}
// end of state machine
impl HavePacket {
    /// Exposed through `methods!`; queried by callers to detect that a
    /// full, checksummed packet has been consumed.
    pub fn have_complete_packet(&self) -> bool {
        true
    }
}
impl PacketParser {
    /// Fresh parser, waiting for the first header byte.
    pub fn new() -> PacketParser {
        PacketParser::Wait(Wait {
            got: PacketHeaderBytes::W,
        })
    }

    /// Feed one received byte through the state machine, filling in
    /// `pkt` (payload bytes, then checksum or header fields) as the
    /// relevant states are traversed. Returns the parser's next state.
    pub fn parse_received_byte(self, byte: u8, pkt: &mut USARTPacket) -> PacketParser {
        // While in the Data state, stash the byte at the current offset.
        if self.can_collect_data().is_some() {
            let idx = usize::from(*unwrap_u8(self.offset()));
            pkt.data[idx] = byte;
        }
        let mut next = self.on_advance(Advance { ch: byte });
        if next.can_collect_checksum().is_some() {
            // Both checksum bytes have arrived: record the value, then
            // advance once more so the machine lands in HavePacket.
            pkt.checksum = *unwrap_u16(next.sum());
            next = next.on_advance(Advance { ch: byte });
        } else if next.can_collect_header().is_some() {
            // Header complete: capture type/address, derive the payload
            // length, then advance to either Data or Checksum.
            pkt.pt = *unwrap_u8(next.ty());
            pkt.address = *unwrap_u8(next.addr());
            pkt.datalen = pkt.specified_data_size();
            next = next.on_advance(Advance { ch: byte });
        }
        next
    }
}
/// Unwrap an optional borrowed byte returned by a state-machine accessor.
///
/// # Panics
/// Panics with a descriptive message if the accessor was queried in a
/// state that doesn't carry the value; callers guard with the
/// `can_collect_*` queries first. (Replaces a hand-rolled match that
/// panicked with an empty message.)
fn unwrap_u8(optional: Option<&u8>) -> &u8 {
    optional.expect("u8 accessor queried in a state without that field")
}
/// Unwrap an optional borrowed `u16` returned by a state-machine accessor.
///
/// # Panics
/// Panics with a descriptive message if the accessor was queried in a
/// state that doesn't carry the value; callers guard with the
/// `can_collect_*` queries first. (Replaces a hand-rolled match that
/// panicked with an empty message.)
fn unwrap_u16(optional: Option<&u16>) -> &u16 {
    optional.expect("u16 accessor queried in a state without that field")
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Drive a fresh parser over `bytes`, returning the final parser
    /// state and the packet that was filled in along the way.
    /// (Deduplicates the construction boilerplate the tests repeated.)
    fn parse_bytes(bytes: &[u8]) -> (PacketParser, USARTPacket) {
        let mut pkt = USARTPacket::new();
        let mut parser = PacketParser::new();
        for byte in bytes {
            parser = parser.parse_received_byte(*byte, &mut pkt);
        }
        (parser, pkt)
    }

    #[test]
    fn command_pkt() {
        // a command test packet: no payload, header + checksum only
        let pktdata: [u8; 7] = [b's', b'n', b'p', 0b0000_0000, 0xCD, 0x02, 0x1e];
        let (parser, pkt) = parse_bytes(&pktdata);
        assert_eq!(parser.have_complete_packet(), Some(true));
        assert_eq!(pkt.pt, 0b0000_0000);
        assert_eq!(pkt.address, 0xCD);
        assert_eq!(pkt.datalen, 0);
        assert_eq!(pkt.checksum, 0x021E);
        assert!(pkt.compare_checksum());
    }

    #[test]
    fn multi_reg_read_pkt() {
        // a multiple register read test packet: B set, HD clear, so no
        // payload bytes travel on the wire even though datalen is 24
        let pktdata: [u8; 7] = [b's', b'n', b'p', 0b0101_1000, 0x45, 0x01, 0xEE];
        let (parser, pkt) = parse_bytes(&pktdata);
        assert_eq!(parser.have_complete_packet(), Some(true));
        assert_eq!(pkt.pt, 0b0101_1000);
        assert_eq!(pkt.address, 0x45);
        assert_eq!(pkt.datalen, 24);
        assert_eq!(pkt.checksum, 0x01EE);
        assert!(pkt.compare_checksum());
    }

    #[test]
    fn single_reg_write_data_pkt() {
        // a single register data test packet: HD set, 4 payload bytes
        let pktdata: [u8; 11] = [
            b's', b'n', b'p', 0b1000_0000, 0x01, 0xAB, 0xCD, 0xEF, 0x12, 0x04, 0x4B,
        ];
        let (parser, pkt) = parse_bytes(&pktdata);
        assert_eq!(parser.have_complete_packet(), Some(true));
        assert_eq!(pkt.pt, 0b1000_0000);
        assert_eq!(pkt.address, 0x01);
        assert_eq!(pkt.datalen, 4);
        assert_eq!(&pkt.data[..4], &[0xAB, 0xCD, 0xEF, 0x12]);
        assert_eq!(pkt.checksum, 0x044B);
        assert!(pkt.compare_checksum());
    }

    #[test]
    fn mult_reg_write_data_pkt() {
        // a multiple register data test packet: HD and B set, DL = 2
        // registers (8 payload bytes)
        let pktdata: [u8; 15] = [
            b's', b'n', b'p', 0b1100_1000, 0x03, 0xAB, 0xCD, 0xEF, 0x12, 0xAB, 0xCD, 0xEF,
            0x12, 0x07, 0x0E,
        ];
        let (parser, pkt) = parse_bytes(&pktdata);
        assert_eq!(parser.have_complete_packet(), Some(true));
        assert_eq!(pkt.pt, 0b1100_1000);
        assert_eq!(pkt.address, 0x03);
        assert_eq!(pkt.datalen, 8);
        assert_eq!(&pkt.data[..8], &[0xAB, 0xCD, 0xEF, 0x12, 0xAB, 0xCD, 0xEF, 0x12]);
        assert_eq!(pkt.checksum, 0x070E);
        assert!(pkt.compare_checksum());
    }

    #[test]
    fn corrupted() {
        // a corrupted data test packet ('b' instead of 'p' in the
        // header): the parser must never leave the Wait state, so the
        // packet stays untouched
        let pktdata: [u8; 15] = [
            b's', b'n', b'b', 0xc8, 0x03, 0xAB, 0xCD, 0xEF, 0x12, 0xAB, 0xCD, 0xEF, 0x12,
            0x07, 0x0E,
        ];
        let (parser, pkt) = parse_bytes(&pktdata);
        assert_eq!(parser.have_complete_packet(), None);
        assert_eq!(pkt.pt, 0);
        assert_eq!(pkt.address, 0);
        assert_eq!(pkt.datalen, 0);
        assert_eq!(pkt.checksum, 0);
    }
}
|
use std::ffi::CString;
use std::{ptr, slice, usize};
use crate::table_properties::TableProperties;
use crocksdb_ffi::CompactionFilterDecision as RawCompactionFilterDecision;
pub use crocksdb_ffi::CompactionFilterValueType;
pub use crocksdb_ffi::DBCompactionFilter;
use crocksdb_ffi::{
self, DBCompactionFilterContext, DBCompactionFilterFactory, DBTableFileCreationReason,
};
use libc::{c_char, c_int, c_void, malloc, memcpy, size_t};
/// Decision used in `CompactionFilter::filter`.
/// Mirrors `crocksdb_ffi::CompactionFilterDecision`; the owned byte
/// payloads are copied into `malloc`ed buffers at the FFI boundary
/// (see the `filter` trampoline below).
pub enum CompactionFilterDecision {
    /// The record will be kept instead of filtered.
    Keep,
    /// The record will be filtered, and a tombstone will be left.
    Remove,
    /// The record will be kept but the value will be replaced.
    ChangeValue(Vec<u8>),
    /// All records between [current, `until`) will be filtered without any tombstones left.
    RemoveAndSkipUntil(Vec<u8>),
}
/// `CompactionFilter` allows an application to modify/delete a key-value at
/// the time of compaction.
/// For more details, Please checkout rocksdb's documentation.
pub trait CompactionFilter {
    /// Legacy per-record hook invoked during compaction. Returning
    /// `true` removes the key-value from the compaction output;
    /// returning `false` keeps it. An implementation may also keep the
    /// key with a new value by filling `_new_value` and setting
    /// `_value_changed` to `true`. The default keeps everything.
    fn filter(
        &mut self,
        _level: usize,
        _key: &[u8],
        _value: &[u8],
        _new_value: &mut Vec<u8>,
        _value_changed: &mut bool,
    ) -> bool {
        false
    }

    /// Richer hook returning a `CompactionFilterDecision`. This method
    /// will overwrite `filter` if a `CompactionFilter` implements both
    /// of them; the default forwards plain values to `filter` and keeps
    /// everything else.
    fn featured_filter(
        &mut self,
        level: usize,
        key: &[u8],
        _seqno: u64,
        value: &[u8],
        value_type: CompactionFilterValueType,
    ) -> CompactionFilterDecision {
        if value_type != CompactionFilterValueType::Value {
            // Currently `MergeOperand` and `BlobIndex` will always be kept.
            return CompactionFilterDecision::Keep;
        }
        let mut replacement = Vec::new();
        let mut replaced = false;
        if self.filter(level, key, value, &mut replacement, &mut replaced) {
            CompactionFilterDecision::Remove
        } else if replaced {
            CompactionFilterDecision::ChangeValue(replacement)
        } else {
            CompactionFilterDecision::Keep
        }
    }

    /// This API accepts deletion marks. Deletion marks can only be filtered if
    /// the consistency of the key doesn't matter.
    /// This override `featured_filter` and `filter`.
    fn unsafe_filter(
        &mut self,
        level: usize,
        key: &[u8],
        seqno: u64,
        value: &[u8],
        value_type: CompactionFilterValueType,
    ) -> CompactionFilterDecision {
        if value_type == CompactionFilterValueType::Deletion {
            // Deletion marks are always kept by default.
            return CompactionFilterDecision::Keep;
        }
        self.featured_filter(level, key, seqno, value, value_type)
    }
}
/// Heap-allocated pairing of a Rust filter with its C-string name; a
/// raw pointer to this struct is handed to the C side as the filter's
/// opaque state (see `new_compaction_filter_raw`).
#[repr(C)]
struct CompactionFilterProxy<C: CompactionFilter> {
    name: CString,
    filter: C,
}
/// FFI callback: return the filter's registered name as a C string.
extern "C" fn name<C: CompactionFilter>(filter: *mut c_void) -> *const c_char {
    // SAFETY: `filter` is the `CompactionFilterProxy<C>` pointer that was
    // registered in `new_compaction_filter_raw`.
    unsafe {
        let proxy = &*(filter as *mut CompactionFilterProxy<C>);
        proxy.name.as_ptr()
    }
}
/// FFI callback: reclaim and drop the boxed proxy.
extern "C" fn destructor<C: CompactionFilter>(filter: *mut c_void) {
    // SAFETY: `filter` originates from `Box::into_raw` in
    // `new_compaction_filter_raw`; reclaiming it here drops both the
    // name and the user's filter.
    unsafe {
        drop(Box::from_raw(filter as *mut CompactionFilterProxy<C>));
    }
}
/// FFI trampoline invoked for each key-value during compaction:
/// reconstructs the Rust filter from the opaque pointer, runs
/// `unsafe_filter`, and marshals the decision (plus any replacement /
/// skip-until buffers) back through the out-parameters.
extern "C" fn filter<C: CompactionFilter>(
    filter: *mut c_void,
    level: c_int,
    key: *const u8,
    key_len: size_t,
    seqno: u64,
    value_type: CompactionFilterValueType,
    value: *const u8,
    value_len: size_t,
    new_value: *mut *mut u8,
    new_value_len: *mut size_t,
    skip_until: *mut *mut u8,
    skip_until_length: *mut size_t,
) -> RawCompactionFilterDecision {
    unsafe {
        // Zero every out-parameter first so the C side never observes
        // stale pointers/lengths on the Keep/Remove paths.
        *new_value = ptr::null_mut();
        *new_value_len = 0;
        *skip_until = ptr::null_mut();
        *skip_until_length = 0;
        // SAFETY: `filter` is the proxy registered via
        // `new_compaction_filter_raw`.
        let filter = &mut (*(filter as *mut CompactionFilterProxy<C>)).filter;
        let key = slice::from_raw_parts(key, key_len);
        let value = slice::from_raw_parts(value, value_len);
        match filter.unsafe_filter(level as usize, key, seqno, value, value_type) {
            CompactionFilterDecision::Keep => RawCompactionFilterDecision::Keep,
            CompactionFilterDecision::Remove => RawCompactionFilterDecision::Remove,
            CompactionFilterDecision::ChangeValue(new_v) => {
                // Copy the replacement into a malloc'ed buffer; presumably
                // the C side takes ownership and frees it — confirm
                // against crocksdb's contract.
                *new_value_len = new_v.len();
                *new_value = malloc(*new_value_len) as *mut u8;
                memcpy(*new_value as _, new_v.as_ptr() as _, *new_value_len);
                RawCompactionFilterDecision::ChangeValue
            }
            CompactionFilterDecision::RemoveAndSkipUntil(until) => {
                // Same malloc-and-copy hand-off for the skip-until key.
                *skip_until_length = until.len();
                *skip_until = malloc(*skip_until_length) as *mut u8;
                memcpy(*skip_until as _, until.as_ptr() as _, *skip_until_length);
                RawCompactionFilterDecision::RemoveAndSkipUntil
            }
        }
    }
}
/// RAII owner of a C-side `DBCompactionFilter`; the underlying object
/// is destroyed when the handle is dropped.
pub struct CompactionFilterHandle {
    pub(crate) inner: *mut DBCompactionFilter,
}
impl Drop for CompactionFilterHandle {
    fn drop(&mut self) {
        // Release the C-side filter object.
        unsafe {
            crocksdb_ffi::crocksdb_compactionfilter_destroy(self.inner);
        }
    }
}
/// Wrap `f` in a C compaction filter and return an RAII handle.
///
/// # Safety
/// The returned handle owns a raw FFI object; it is assumed it must
/// outlive any database configured with it — confirm against
/// crocksdb's ownership rules.
pub unsafe fn new_compaction_filter<C: CompactionFilter>(
    c_name: CString,
    f: C,
) -> CompactionFilterHandle {
    CompactionFilterHandle {
        inner: new_compaction_filter_raw(c_name, f),
    }
}
/// Just like `new_compaction_filter`, but returns a raw pointer instead of a RAII struct.
/// Generally used in `CompactionFilterFactory::create_compaction_filter`.
unsafe fn new_compaction_filter_raw<C: CompactionFilter>(
    c_name: CString,
    f: C,
) -> *mut DBCompactionFilter {
    // Ownership of the proxy passes to the C filter object; it is
    // reclaimed in `destructor::<C>` via `Box::from_raw`.
    let proxy = Box::into_raw(Box::new(CompactionFilterProxy {
        name: c_name,
        filter: f,
    }));
    crocksdb_ffi::crocksdb_compactionfilter_create(
        proxy as *mut c_void,
        destructor::<C>,
        filter::<C>,
        name::<C>,
    )
}
/// Safe wrapper around the FFI compaction-filter context; obtained by
/// casting from `*const DBCompactionFilterContext` (see
/// `factory::create_compaction_filter`).
pub struct CompactionFilterContext(DBCompactionFilterContext);
impl CompactionFilterContext {
    /// Whether this is a full compaction.
    pub fn is_full_compaction(&self) -> bool {
        let ctx = &self.0 as *const DBCompactionFilterContext;
        unsafe { crocksdb_ffi::crocksdb_compactionfiltercontext_is_full_compaction(ctx) }
    }

    /// Whether this compaction was triggered manually.
    pub fn is_manual_compaction(&self) -> bool {
        let ctx = &self.0 as *const DBCompactionFilterContext;
        unsafe { crocksdb_ffi::crocksdb_compactionfiltercontext_is_manual_compaction(ctx) }
    }

    /// Whether the compaction output goes to the bottommost level.
    pub fn is_bottommost_level(&self) -> bool {
        let ctx = &self.0 as *const DBCompactionFilterContext;
        unsafe { crocksdb_ffi::crocksdb_compactionfiltercontext_is_bottommost_level(ctx) }
    }

    /// File numbers of this compaction's input files.
    /// NOTE(review): the returned slice borrows a buffer owned by the C
    /// context; it is assumed to stay valid for `&self`'s lifetime —
    /// confirm against crocksdb.
    pub fn file_numbers(&self) -> &[u64] {
        let ctx = &self.0 as *const DBCompactionFilterContext;
        // `null_mut` coerces to `*const u64`; both out-params are
        // filled in by the FFI call below.
        let (mut buffer, mut len): (*const u64, usize) = (ptr::null_mut(), 0);
        unsafe {
            crocksdb_ffi::crocksdb_compactionfiltercontext_file_numbers(
                ctx,
                &mut buffer as *mut *const u64,
                &mut len as *mut usize,
            );
            slice::from_raw_parts(buffer, len)
        }
    }

    /// Table properties of the `offset`-th input file.
    /// NOTE(review): `offset` is forwarded to C unchecked — confirm
    /// callers keep it within `file_numbers().len()`.
    pub fn table_properties(&self, offset: usize) -> &TableProperties {
        let ctx = &self.0 as *const DBCompactionFilterContext;
        unsafe {
            let raw = crocksdb_ffi::crocksdb_compactionfiltercontext_table_properties(ctx, offset);
            TableProperties::from_ptr(raw)
        }
    }

    /// Start key of the compaction's input range (see the
    /// `test_compaction_filter_factory_context_keys` test).
    pub fn start_key(&self) -> &[u8] {
        let ctx = &self.0 as *const DBCompactionFilterContext;
        unsafe {
            let mut start_key_len: usize = 0;
            let start_key_ptr =
                crocksdb_ffi::crocksdb_compactionfiltercontext_start_key(ctx, &mut start_key_len)
                    as *const u8;
            slice::from_raw_parts(start_key_ptr, start_key_len)
        }
    }

    /// End key of the compaction's input range.
    pub fn end_key(&self) -> &[u8] {
        let ctx = &self.0 as *const DBCompactionFilterContext;
        unsafe {
            let mut end_key_len: usize = 0;
            let end_key_ptr =
                crocksdb_ffi::crocksdb_compactionfiltercontext_end_key(ctx, &mut end_key_len)
                    as *const u8;
            slice::from_raw_parts(end_key_ptr, end_key_len)
        }
    }

    /// Why the output table file is being created (flush, compaction, ...).
    pub fn reason(&self) -> DBTableFileCreationReason {
        let ctx = &self.0 as *const DBCompactionFilterContext;
        unsafe { crocksdb_ffi::crocksdb_compactionfiltercontext_reason(ctx) }
    }
}
/// Factory interface: asked for a fresh `CompactionFilter` per table
/// file creation it elects to filter.
pub trait CompactionFilterFactory {
    type Filter: CompactionFilter;

    /// Build a filter (and its name) for the given compaction context.
    /// Returning `None` yields a null filter pointer on the C side (see
    /// `factory::create_compaction_filter`), i.e. no filtering.
    fn create_compaction_filter(
        &self,
        context: &CompactionFilterContext,
    ) -> Option<(CString, Self::Filter)>;

    /// Returns whether a thread creating table files for the specified `reason`
    /// should have invoke `create_compaction_filter` and pass KVs through the returned
    /// filter.
    fn should_filter_table_file_creation(&self, reason: DBTableFileCreationReason) -> bool {
        // For compatibility, `CompactionFilter`s by default apply during compaction.
        matches!(reason, DBTableFileCreationReason::Compaction)
    }
}
/// Heap-allocated pairing of a Rust factory with its C-string name;
/// handed to C as the factory's opaque state (see
/// `new_compaction_filter_factory`).
#[repr(C)]
struct CompactionFilterFactoryProxy<C: CompactionFilterFactory> {
    name: CString,
    factory: C,
}
// C ABI callbacks for the factory side, kept in a private module so the
// raw `extern "C"` functions don't leak into the crate namespace.
mod factory {
    use super::{CompactionFilterContext, CompactionFilterFactory, CompactionFilterFactoryProxy};
    use crocksdb_ffi::{DBCompactionFilter, DBCompactionFilterContext};
    use libc::{c_char, c_uchar, c_void};
    use librocksdb_sys::DBTableFileCreationReason;

    /// FFI callback: the factory's registered name.
    pub(super) extern "C" fn name<C: CompactionFilterFactory>(
        factory: *mut c_void,
    ) -> *const c_char {
        unsafe {
            let proxy = &*(factory as *mut CompactionFilterFactoryProxy<C>);
            proxy.name.as_ptr()
        }
    }

    /// FFI callback: drop the boxed proxy created in
    /// `new_compaction_filter_factory`.
    pub(super) extern "C" fn destructor<C: CompactionFilterFactory>(factory: *mut c_void) {
        unsafe {
            let _ = Box::from_raw(factory as *mut CompactionFilterFactoryProxy<C>);
        }
    }

    /// FFI callback: ask the Rust factory for a new filter; `None` is
    /// mapped to a null pointer (no filtering for this table file).
    pub(super) extern "C" fn create_compaction_filter<C: CompactionFilterFactory>(
        factory: *mut c_void,
        context: *const DBCompactionFilterContext,
    ) -> *mut DBCompactionFilter {
        unsafe {
            let factory = &mut *(factory as *mut CompactionFilterFactoryProxy<C>);
            // NOTE(review): this cast treats `CompactionFilterContext`
            // as layout-identical to `DBCompactionFilterContext`, but
            // the newtype is not `#[repr(transparent)]` — confirm the
            // cast is sound.
            let context: &CompactionFilterContext = &*(context as *const CompactionFilterContext);
            if let Some((name, filter)) = factory.factory.create_compaction_filter(context) {
                super::new_compaction_filter_raw(name, filter)
            } else {
                std::ptr::null_mut()
            }
        }
    }

    /// FFI callback: forward the should-filter query, widening `bool`
    /// to `c_uchar` for C.
    pub(super) extern "C" fn should_filter_table_file_creation<C: CompactionFilterFactory>(
        factory: *const c_void,
        reason: DBTableFileCreationReason,
    ) -> c_uchar {
        unsafe {
            let factory = &*(factory as *const CompactionFilterFactoryProxy<C>);
            factory.factory.should_filter_table_file_creation(reason) as c_uchar
        }
    }
}
/// RAII owner of a C-side `DBCompactionFilterFactory`; destroyed on drop.
pub struct CompactionFilterFactoryHandle {
    pub(crate) inner: *mut DBCompactionFilterFactory,
}
impl Drop for CompactionFilterFactoryHandle {
    fn drop(&mut self) {
        // Release the C-side factory object.
        unsafe {
            crocksdb_ffi::crocksdb_compactionfilterfactory_destroy(self.inner);
        }
    }
}
/// Register `f` as a compaction-filter factory and return an RAII
/// handle. Currently infallible — it always returns `Ok` — but the
/// `Result` signature is kept for API stability.
///
/// # Safety
/// A raw pointer to the boxed proxy crosses the FFI boundary;
/// presumably the C side passes that same pointer back to the
/// callbacks — confirm against crocksdb's contract.
pub unsafe fn new_compaction_filter_factory<C: CompactionFilterFactory>(
    c_name: CString,
    f: C,
) -> Result<CompactionFilterFactoryHandle, String> {
    // Ownership of the proxy passes to the C factory object; it is
    // reclaimed in `factory::destructor::<C>`.
    let proxy = Box::into_raw(Box::new(CompactionFilterFactoryProxy {
        name: c_name,
        factory: f,
    }));
    let factory = crocksdb_ffi::crocksdb_compactionfilterfactory_create(
        proxy as *mut c_void,
        self::factory::destructor::<C>,
        self::factory::create_compaction_filter::<C>,
        self::factory::should_filter_table_file_creation::<C>,
        self::factory::name::<C>,
    );
    Ok(CompactionFilterFactoryHandle { inner: factory })
}
#[cfg(test)]
mod tests {
    use std::ffi::CString;
    use std::str;
    use std::sync::mpsc::{self, SyncSender};
    use std::time::Duration;
    use librocksdb_sys::DBTableFileCreationReason;
    use crate::{
        ColumnFamilyOptions, CompactionFilter, CompactionFilterContext, CompactionFilterFactory,
        DBOptions, FlushOptions, Writable, DB,
    };

    // Filter with default (keep-everything) behavior.
    struct NoopFilter;
    impl CompactionFilter for NoopFilter {}

    // Signals on its channel when dropped, so tests can observe the
    // destructor being run through the FFI layer.
    struct Filter(SyncSender<()>);
    impl Drop for Filter {
        fn drop(&mut self) {
            self.0.send(()).unwrap();
        }
    }
    impl CompactionFilter for Filter {}

    // Factory counterpart of `Filter`: signals on drop.
    struct Factory(SyncSender<()>);
    impl Drop for Factory {
        fn drop(&mut self) {
            self.0.send(()).unwrap();
        }
    }
    impl CompactionFilterFactory for Factory {
        type Filter = NoopFilter;
        fn create_compaction_filter(
            &self,
            _: &CompactionFilterContext,
        ) -> Option<(CString, Self::Filter)> {
            None
        }
    }

    struct KeyRangeFilter;
    impl CompactionFilter for KeyRangeFilter {}

    // Forwards the compaction context's start/end keys to the test.
    struct KeyRangeFactory(SyncSender<Vec<u8>>);
    impl CompactionFilterFactory for KeyRangeFactory {
        type Filter = KeyRangeFilter;
        fn create_compaction_filter(
            &self,
            context: &CompactionFilterContext,
        ) -> Option<(CString, Self::Filter)> {
            let start_key = context.start_key();
            let end_key = context.end_key();
            self.0.send(start_key.to_owned()).unwrap();
            self.0.send(end_key.to_owned()).unwrap();
            Some((CString::new("key_range_filter").unwrap(), KeyRangeFilter))
        }
    }

    // Factory/filter pair that removes every record, but only applies
    // during flush (not compaction).
    struct FlushFactory {}
    struct FlushFilter;
    impl CompactionFilter for FlushFilter {
        fn filter(&mut self, _: usize, _: &[u8], _: &[u8], _: &mut Vec<u8>, _: &mut bool) -> bool {
            true
        }
    }
    impl CompactionFilterFactory for FlushFactory {
        type Filter = FlushFilter;
        fn should_filter_table_file_creation(&self, reason: DBTableFileCreationReason) -> bool {
            matches!(reason, DBTableFileCreationReason::Flush)
        }
        fn create_compaction_filter(
            &self,
            _context: &CompactionFilterContext,
        ) -> Option<(CString, Self::Filter)> {
            let name = CString::new("flush_compaction_filter").unwrap();
            Some((name, FlushFilter))
        }
    }

    #[test]
    fn test_factory_destructor() {
        // Dropping the options alone must run the factory destructor.
        let (tx, rx) = mpsc::sync_channel(1);
        let mut cf_opts = ColumnFamilyOptions::default();
        let name = CString::new("compaction filter factory").unwrap();
        let factory = Factory(tx);
        cf_opts
            .set_compaction_filter_factory::<CString, Factory>(name, factory)
            .unwrap();
        drop(cf_opts);
        assert!(rx.recv_timeout(Duration::from_secs(1)).is_ok());
        // ... and so must dropping a DB that took ownership of them.
        let dir = tempfile::Builder::new()
            .prefix("compaction_filter")
            .tempdir()
            .unwrap();
        let path = dir.path().to_str().unwrap();
        let (tx, rx) = mpsc::sync_channel(1);
        let mut db_opts = DBOptions::default();
        db_opts.create_if_missing(true);
        let mut cfds = Vec::new();
        cfds.push(("default", {
            let mut cf_opts = ColumnFamilyOptions::default();
            let name = CString::new("compaction filter factory").unwrap();
            let factory = Factory(tx);
            cf_opts
                .set_compaction_filter_factory::<CString, Factory>(name, factory)
                .unwrap();
            cf_opts
        }));
        let db = DB::open_cf(db_opts, path, cfds);
        drop(db);
        assert!(rx.recv_timeout(Duration::from_secs(1)).is_ok());
    }

    #[test]
    fn test_filter_destructor() {
        // Same as above, but for a plain filter instead of a factory.
        let (tx, rx) = mpsc::sync_channel(1);
        let mut cf_opts = ColumnFamilyOptions::default();
        let name = CString::new("compaction filter factory").unwrap();
        let filter = Filter(tx);
        cf_opts
            .set_compaction_filter::<CString, Filter>(name, filter)
            .unwrap();
        drop(cf_opts);
        assert!(rx.recv_timeout(Duration::from_secs(1)).is_ok());
        let dir = tempfile::Builder::new()
            .prefix("compaction_filter")
            .tempdir()
            .unwrap();
        let path = dir.path().to_str().unwrap();
        let (tx, rx) = mpsc::sync_channel(1);
        let mut db_opts = DBOptions::default();
        db_opts.create_if_missing(true);
        let mut cfds = Vec::new();
        cfds.push(("default", {
            let mut cf_opts = ColumnFamilyOptions::default();
            let name = CString::new("compaction filter factory").unwrap();
            let filter = Filter(tx);
            cf_opts
                .set_compaction_filter::<CString, Filter>(name, filter)
                .unwrap();
            cf_opts
        }));
        let db = DB::open_cf(db_opts, path, cfds);
        drop(db);
        assert!(rx.recv_timeout(Duration::from_secs(1)).is_ok());
    }

    #[test]
    fn test_compaction_filter_factory_context_keys() {
        // The factory must see the compaction's start/end keys.
        let mut cf_opts = ColumnFamilyOptions::default();
        let name = CString::new("compaction filter factory").unwrap();
        let (tx, rx) = mpsc::sync_channel(2);
        let factory = KeyRangeFactory(tx);
        cf_opts
            .set_compaction_filter_factory::<CString, KeyRangeFactory>(name, factory)
            .unwrap();
        let mut opts = DBOptions::new();
        opts.create_if_missing(true);
        let path = tempfile::Builder::new()
            .prefix("test_factory_context_keys")
            .tempdir()
            .unwrap();
        let mut db = DB::open(opts, path.path().to_str().unwrap()).unwrap();
        db.create_cf(("test", cf_opts)).unwrap();
        let cfh = db.cf_handle("test").unwrap();
        for i in 0..10 {
            db.put_cf(
                cfh,
                format!("key{}", i).as_bytes(),
                format!("value{}", i).as_bytes(),
            )
            .unwrap();
        }
        // Full-range compaction: the context should report the extreme keys.
        db.compact_range_cf(cfh, None, None);
        let sk = rx.recv().unwrap();
        let ek = rx.recv().unwrap();
        let sk = str::from_utf8(&sk).unwrap();
        let ek = str::from_utf8(&ek).unwrap();
        assert_eq!("key0", sk);
        assert_eq!("key9", ek);
    }

    #[test]
    fn test_flush_filter() {
        // cf with filter
        let name = CString::new("test_flush_filter_factory").unwrap();
        let factory = FlushFactory {};
        let mut cf_opts_wf = ColumnFamilyOptions::default();
        cf_opts_wf
            .set_compaction_filter_factory::<CString, FlushFactory>(name, factory)
            .unwrap();
        cf_opts_wf.set_disable_auto_compactions(true);
        // cf without filter
        let mut cf_opts_of = ColumnFamilyOptions::default();
        cf_opts_of.set_disable_auto_compactions(true);
        // db
        let mut opts = DBOptions::new();
        opts.create_if_missing(true);
        // NOTE(review): tempdir prefix looks copy-pasted from the
        // previous test; harmless, but consider renaming.
        let path = tempfile::Builder::new()
            .prefix("test_factory_context_keys")
            .tempdir()
            .unwrap();
        let mut db = DB::open(opts, path.path().to_str().unwrap()).unwrap();
        db.create_cf(("wf", cf_opts_wf)).unwrap();
        db.create_cf(("of", cf_opts_of)).unwrap();
        let cfh_wf = db.cf_handle("wf").unwrap();
        let cfh_of = db.cf_handle("of").unwrap();
        // put data
        db.put_cf(cfh_wf, b"k", b"v").unwrap();
        db.put_cf(cfh_of, b"k", b"v").unwrap();
        let mut fopts = FlushOptions::default();
        fopts.set_wait(true);
        db.flush_cf(cfh_wf, &fopts).unwrap();
        db.flush_cf(cfh_of, &fopts).unwrap();
        // assert: the flush filter dropped the key only in the filtered cf
        assert!(db.get_cf(cfh_wf, b"k").unwrap().is_none());
        assert!(db.get_cf(cfh_of, b"k").unwrap().is_some());
    }
}
|
use std::cmp::Ordering;
// Sort paragraphs by their first word (until the first space character!).
// Also note that the search is case sensitive (upper case is "smaller" than lower case).
pub fn paragraph_sort(raw: &str) -> String {
    // Peel off (and remember) the surrounding whitespace so it can be
    // restored verbatim around the sorted body.
    let without_leading = raw.trim_start();
    let leading = &raw[..raw.len() - without_leading.len()];
    let body = without_leading.trim_end();
    let trailing = &without_leading[body.len()..];
    // Paragraphs are separated by a blank line; the stable sort keeps
    // paragraphs with equal first words in their original order.
    let mut paragraphs: Vec<&str> = body.split("\n\n").collect();
    paragraphs.sort_by(compare_first_word);
    let sorted = paragraphs.join("\n\n");
    // Reassemble: leading whitespace + sorted body + trailing whitespace.
    let mut result = String::with_capacity(leading.len() + sorted.len() + trailing.len());
    result.push_str(leading);
    result.push_str(&sorted);
    result.push_str(trailing);
    result
}

/// Order two paragraphs by their first whitespace-trimmed word
/// (everything up to the first ' '); the comparison is byte-wise,
/// hence case sensitive.
fn compare_first_word(first: &&str, second: &&str) -> Ordering {
    let lhs = first.trim_start().split(" ").next();
    let rhs = second.trim_start().split(" ").next();
    lhs.cmp(&rhs)
}
#[cfg(test)]
mod tests {
    // NOTE(review): the whitespace inside TEST_INPUT1 (blank lines
    // between paragraphs, leading/trailing spaces) appears mangled by a
    // formatting pass; the whitespace-preservation asserts below depend
    // on the exact original bytes — confirm against version control.
    static TEST_INPUT1: &'static str = "
Hello
you x
how x
are x
";
    #[test]
    fn start_whitespace_is_preserved() {
        // Leading whitespace must survive sorting verbatim.
        assert_eq!(&crate::paragraph_sort(TEST_INPUT1)[0..2], "\n\n");
    }
    #[test]
    fn end_whitespace_is_preserved() {
        // Trailing whitespace must survive as well.
        let result = &crate::paragraph_sort(TEST_INPUT1);
        assert_eq!(&result[result.len() - 3..], "   ");
    }
    #[test]
    fn result_is_sorted() {
        // 'H' (0x48) sorts before lowercase letters, so "Hello" stays first.
        let result = &crate::paragraph_sort(TEST_INPUT1);
        assert_eq!(
            &result
                .split("\n")
                .map(|x| x.trim())
                .filter(|x| x.len() > 0)
                .collect::<Vec<_>>()
                .join(" "),
            "Hello are x how x you x"
        );
    }
}
|
/// An enum to represent all characters in the LatinExtendedE block.
/// One variant per code point, in code-point order, covering U+AB30
/// through U+AB67.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum LatinExtendedE {
    /// \u{ab30}: 'ꬰ'
    LatinSmallLetterBarredAlpha,
    /// \u{ab31}: 'ꬱ'
    LatinSmallLetterAReversedDashSchwa,
    /// \u{ab32}: 'ꬲ'
    LatinSmallLetterBlackletterE,
    /// \u{ab33}: 'ꬳ'
    LatinSmallLetterBarredE,
    /// \u{ab34}: 'ꬴ'
    LatinSmallLetterEWithFlourish,
    /// \u{ab35}: 'ꬵ'
    LatinSmallLetterLenisF,
    /// \u{ab36}: 'ꬶ'
    LatinSmallLetterScriptGWithCrossedDashTail,
    /// \u{ab37}: 'ꬷ'
    LatinSmallLetterLWithInvertedLazyS,
    /// \u{ab38}: 'ꬸ'
    LatinSmallLetterLWithDoubleMiddleTilde,
    /// \u{ab39}: 'ꬹ'
    LatinSmallLetterLWithMiddleRing,
    /// \u{ab3a}: 'ꬺ'
    LatinSmallLetterMWithCrossedDashTail,
    /// \u{ab3b}: 'ꬻ'
    LatinSmallLetterNWithCrossedDashTail,
    /// \u{ab3c}: 'ꬼ'
    LatinSmallLetterEngWithCrossedDashTail,
    /// \u{ab3d}: 'ꬽ'
    LatinSmallLetterBlackletterO,
    /// \u{ab3e}: 'ꬾ'
    LatinSmallLetterBlackletterOWithStroke,
    /// \u{ab3f}: 'ꬿ'
    LatinSmallLetterOpenOWithStroke,
    /// \u{ab40}: 'ꭀ'
    LatinSmallLetterInvertedOe,
    /// \u{ab41}: 'ꭁ'
    LatinSmallLetterTurnedOeWithStroke,
    /// \u{ab42}: 'ꭂ'
    LatinSmallLetterTurnedOeWithHorizontalStroke,
    /// \u{ab43}: 'ꭃ'
    LatinSmallLetterTurnedOOpenDashO,
    /// \u{ab44}: 'ꭄ'
    LatinSmallLetterTurnedOOpenDashOWithStroke,
    /// \u{ab45}: 'ꭅ'
    LatinSmallLetterStirrupR,
    /// \u{ab46}: 'ꭆ'
    LatinLetterSmallCapitalRWithRightLeg,
    /// \u{ab47}: 'ꭇ'
    LatinSmallLetterRWithoutHandle,
    /// \u{ab48}: 'ꭈ'
    LatinSmallLetterDoubleR,
    /// \u{ab49}: 'ꭉ'
    LatinSmallLetterRWithCrossedDashTail,
    /// \u{ab4a}: 'ꭊ'
    LatinSmallLetterDoubleRWithCrossedDashTail,
    /// \u{ab4b}: 'ꭋ'
    LatinSmallLetterScriptR,
    /// \u{ab4c}: 'ꭌ'
    LatinSmallLetterScriptRWithRing,
    /// \u{ab4d}: 'ꭍ'
    LatinSmallLetterBaselineEsh,
    /// \u{ab4e}: 'ꭎ'
    LatinSmallLetterUWithShortRightLeg,
    /// \u{ab4f}: 'ꭏ'
    LatinSmallLetterUBarWithShortRightLeg,
    /// \u{ab50}: 'ꭐ'
    LatinSmallLetterUi,
    /// \u{ab51}: 'ꭑ'
    LatinSmallLetterTurnedUi,
    /// \u{ab52}: 'ꭒ'
    LatinSmallLetterUWithLeftHook,
    /// \u{ab53}: 'ꭓ'
    LatinSmallLetterChi,
    /// \u{ab54}: 'ꭔ'
    LatinSmallLetterChiWithLowRightRing,
    /// \u{ab55}: 'ꭕ'
    LatinSmallLetterChiWithLowLeftSerif,
    /// \u{ab56}: 'ꭖ'
    LatinSmallLetterXWithLowRightRing,
    /// \u{ab57}: 'ꭗ'
    LatinSmallLetterXWithLongLeftLeg,
    /// \u{ab58}: 'ꭘ'
    LatinSmallLetterXWithLongLeftLegAndLowRightRing,
    /// \u{ab59}: 'ꭙ'
    LatinSmallLetterXWithLongLeftLegWithSerif,
    /// \u{ab5a}: 'ꭚ'
    LatinSmallLetterYWithShortRightLeg,
    /// \u{ab5b}: '꭛'
    ModifierBreveWithInvertedBreve,
    /// \u{ab5c}: 'ꭜ'
    ModifierLetterSmallHeng,
    /// \u{ab5d}: 'ꭝ'
    ModifierLetterSmallLWithInvertedLazyS,
    /// \u{ab5e}: 'ꭞ'
    ModifierLetterSmallLWithMiddleTilde,
    /// \u{ab5f}: 'ꭟ'
    ModifierLetterSmallUWithLeftHook,
    /// \u{ab60}: 'ꭠ'
    LatinSmallLetterSakhaYat,
    /// \u{ab61}: 'ꭡ'
    LatinSmallLetterIotifiedE,
    /// \u{ab62}: 'ꭢ'
    LatinSmallLetterOpenOe,
    /// \u{ab63}: 'ꭣ'
    LatinSmallLetterUo,
    /// \u{ab64}: 'ꭤ'
    LatinSmallLetterInvertedAlpha,
    /// \u{ab65}: 'ꭥ'
    GreekLetterSmallCapitalOmega,
    /// \u{ab66}: 'ꭦ'
    LatinSmallLetterDzDigraphWithRetroflexHook,
    /// \u{ab67}: 'ꭧ'
    LatinSmallLetterTsDigraphWithRetroflexHook,
}
impl Into<char> for LatinExtendedE {
fn into(self) -> char {
match self {
LatinExtendedE::LatinSmallLetterBarredAlpha => 'ꬰ',
LatinExtendedE::LatinSmallLetterAReversedDashSchwa => 'ꬱ',
LatinExtendedE::LatinSmallLetterBlackletterE => 'ꬲ',
LatinExtendedE::LatinSmallLetterBarredE => 'ꬳ',
LatinExtendedE::LatinSmallLetterEWithFlourish => 'ꬴ',
LatinExtendedE::LatinSmallLetterLenisF => 'ꬵ',
LatinExtendedE::LatinSmallLetterScriptGWithCrossedDashTail => 'ꬶ',
LatinExtendedE::LatinSmallLetterLWithInvertedLazyS => 'ꬷ',
LatinExtendedE::LatinSmallLetterLWithDoubleMiddleTilde => 'ꬸ',
LatinExtendedE::LatinSmallLetterLWithMiddleRing => 'ꬹ',
LatinExtendedE::LatinSmallLetterMWithCrossedDashTail => 'ꬺ',
LatinExtendedE::LatinSmallLetterNWithCrossedDashTail => 'ꬻ',
LatinExtendedE::LatinSmallLetterEngWithCrossedDashTail => 'ꬼ',
LatinExtendedE::LatinSmallLetterBlackletterO => 'ꬽ',
LatinExtendedE::LatinSmallLetterBlackletterOWithStroke => 'ꬾ',
LatinExtendedE::LatinSmallLetterOpenOWithStroke => 'ꬿ',
LatinExtendedE::LatinSmallLetterInvertedOe => 'ꭀ',
LatinExtendedE::LatinSmallLetterTurnedOeWithStroke => 'ꭁ',
LatinExtendedE::LatinSmallLetterTurnedOeWithHorizontalStroke => 'ꭂ',
LatinExtendedE::LatinSmallLetterTurnedOOpenDashO => 'ꭃ',
LatinExtendedE::LatinSmallLetterTurnedOOpenDashOWithStroke => 'ꭄ',
LatinExtendedE::LatinSmallLetterStirrupR => 'ꭅ',
LatinExtendedE::LatinLetterSmallCapitalRWithRightLeg => 'ꭆ',
LatinExtendedE::LatinSmallLetterRWithoutHandle => 'ꭇ',
LatinExtendedE::LatinSmallLetterDoubleR => 'ꭈ',
LatinExtendedE::LatinSmallLetterRWithCrossedDashTail => 'ꭉ',
LatinExtendedE::LatinSmallLetterDoubleRWithCrossedDashTail => 'ꭊ',
LatinExtendedE::LatinSmallLetterScriptR => 'ꭋ',
LatinExtendedE::LatinSmallLetterScriptRWithRing => 'ꭌ',
LatinExtendedE::LatinSmallLetterBaselineEsh => 'ꭍ',
LatinExtendedE::LatinSmallLetterUWithShortRightLeg => 'ꭎ',
LatinExtendedE::LatinSmallLetterUBarWithShortRightLeg => 'ꭏ',
LatinExtendedE::LatinSmallLetterUi => 'ꭐ',
LatinExtendedE::LatinSmallLetterTurnedUi => 'ꭑ',
LatinExtendedE::LatinSmallLetterUWithLeftHook => 'ꭒ',
LatinExtendedE::LatinSmallLetterChi => 'ꭓ',
LatinExtendedE::LatinSmallLetterChiWithLowRightRing => 'ꭔ',
LatinExtendedE::LatinSmallLetterChiWithLowLeftSerif => 'ꭕ',
LatinExtendedE::LatinSmallLetterXWithLowRightRing => 'ꭖ',
LatinExtendedE::LatinSmallLetterXWithLongLeftLeg => 'ꭗ',
LatinExtendedE::LatinSmallLetterXWithLongLeftLegAndLowRightRing => 'ꭘ',
LatinExtendedE::LatinSmallLetterXWithLongLeftLegWithSerif => 'ꭙ',
LatinExtendedE::LatinSmallLetterYWithShortRightLeg => 'ꭚ',
LatinExtendedE::ModifierBreveWithInvertedBreve => '꭛',
LatinExtendedE::ModifierLetterSmallHeng => 'ꭜ',
LatinExtendedE::ModifierLetterSmallLWithInvertedLazyS => 'ꭝ',
LatinExtendedE::ModifierLetterSmallLWithMiddleTilde => 'ꭞ',
LatinExtendedE::ModifierLetterSmallUWithLeftHook => 'ꭟ',
LatinExtendedE::LatinSmallLetterSakhaYat => 'ꭠ',
LatinExtendedE::LatinSmallLetterIotifiedE => 'ꭡ',
LatinExtendedE::LatinSmallLetterOpenOe => 'ꭢ',
LatinExtendedE::LatinSmallLetterUo => 'ꭣ',
LatinExtendedE::LatinSmallLetterInvertedAlpha => 'ꭤ',
LatinExtendedE::GreekLetterSmallCapitalOmega => 'ꭥ',
LatinExtendedE::LatinSmallLetterDzDigraphWithRetroflexHook => 'ꭦ',
LatinExtendedE::LatinSmallLetterTsDigraphWithRetroflexHook => 'ꭧ',
}
}
}
impl std::convert::TryFrom<char> for LatinExtendedE {
    // A failed conversion carries no extra information.
    type Error = ();
    /// Maps `c` to its `LatinExtendedE` variant, or returns `Err(())`
    /// when `c` is not one of the characters covered by this block.
    fn try_from(c: char) -> Result<Self, Self::Error> {
        match c {
            'ꬰ' => Ok(LatinExtendedE::LatinSmallLetterBarredAlpha),
            'ꬱ' => Ok(LatinExtendedE::LatinSmallLetterAReversedDashSchwa),
            'ꬲ' => Ok(LatinExtendedE::LatinSmallLetterBlackletterE),
            'ꬳ' => Ok(LatinExtendedE::LatinSmallLetterBarredE),
            'ꬴ' => Ok(LatinExtendedE::LatinSmallLetterEWithFlourish),
            'ꬵ' => Ok(LatinExtendedE::LatinSmallLetterLenisF),
            'ꬶ' => Ok(LatinExtendedE::LatinSmallLetterScriptGWithCrossedDashTail),
            'ꬷ' => Ok(LatinExtendedE::LatinSmallLetterLWithInvertedLazyS),
            'ꬸ' => Ok(LatinExtendedE::LatinSmallLetterLWithDoubleMiddleTilde),
            'ꬹ' => Ok(LatinExtendedE::LatinSmallLetterLWithMiddleRing),
            'ꬺ' => Ok(LatinExtendedE::LatinSmallLetterMWithCrossedDashTail),
            'ꬻ' => Ok(LatinExtendedE::LatinSmallLetterNWithCrossedDashTail),
            'ꬼ' => Ok(LatinExtendedE::LatinSmallLetterEngWithCrossedDashTail),
            'ꬽ' => Ok(LatinExtendedE::LatinSmallLetterBlackletterO),
            'ꬾ' => Ok(LatinExtendedE::LatinSmallLetterBlackletterOWithStroke),
            'ꬿ' => Ok(LatinExtendedE::LatinSmallLetterOpenOWithStroke),
            'ꭀ' => Ok(LatinExtendedE::LatinSmallLetterInvertedOe),
            'ꭁ' => Ok(LatinExtendedE::LatinSmallLetterTurnedOeWithStroke),
            'ꭂ' => Ok(LatinExtendedE::LatinSmallLetterTurnedOeWithHorizontalStroke),
            'ꭃ' => Ok(LatinExtendedE::LatinSmallLetterTurnedOOpenDashO),
            'ꭄ' => Ok(LatinExtendedE::LatinSmallLetterTurnedOOpenDashOWithStroke),
            'ꭅ' => Ok(LatinExtendedE::LatinSmallLetterStirrupR),
            'ꭆ' => Ok(LatinExtendedE::LatinLetterSmallCapitalRWithRightLeg),
            'ꭇ' => Ok(LatinExtendedE::LatinSmallLetterRWithoutHandle),
            'ꭈ' => Ok(LatinExtendedE::LatinSmallLetterDoubleR),
            'ꭉ' => Ok(LatinExtendedE::LatinSmallLetterRWithCrossedDashTail),
            'ꭊ' => Ok(LatinExtendedE::LatinSmallLetterDoubleRWithCrossedDashTail),
            'ꭋ' => Ok(LatinExtendedE::LatinSmallLetterScriptR),
            'ꭌ' => Ok(LatinExtendedE::LatinSmallLetterScriptRWithRing),
            'ꭍ' => Ok(LatinExtendedE::LatinSmallLetterBaselineEsh),
            'ꭎ' => Ok(LatinExtendedE::LatinSmallLetterUWithShortRightLeg),
            'ꭏ' => Ok(LatinExtendedE::LatinSmallLetterUBarWithShortRightLeg),
            'ꭐ' => Ok(LatinExtendedE::LatinSmallLetterUi),
            'ꭑ' => Ok(LatinExtendedE::LatinSmallLetterTurnedUi),
            'ꭒ' => Ok(LatinExtendedE::LatinSmallLetterUWithLeftHook),
            'ꭓ' => Ok(LatinExtendedE::LatinSmallLetterChi),
            'ꭔ' => Ok(LatinExtendedE::LatinSmallLetterChiWithLowRightRing),
            'ꭕ' => Ok(LatinExtendedE::LatinSmallLetterChiWithLowLeftSerif),
            'ꭖ' => Ok(LatinExtendedE::LatinSmallLetterXWithLowRightRing),
            'ꭗ' => Ok(LatinExtendedE::LatinSmallLetterXWithLongLeftLeg),
            'ꭘ' => Ok(LatinExtendedE::LatinSmallLetterXWithLongLeftLegAndLowRightRing),
            'ꭙ' => Ok(LatinExtendedE::LatinSmallLetterXWithLongLeftLegWithSerif),
            'ꭚ' => Ok(LatinExtendedE::LatinSmallLetterYWithShortRightLeg),
            '꭛' => Ok(LatinExtendedE::ModifierBreveWithInvertedBreve),
            'ꭜ' => Ok(LatinExtendedE::ModifierLetterSmallHeng),
            'ꭝ' => Ok(LatinExtendedE::ModifierLetterSmallLWithInvertedLazyS),
            'ꭞ' => Ok(LatinExtendedE::ModifierLetterSmallLWithMiddleTilde),
            'ꭟ' => Ok(LatinExtendedE::ModifierLetterSmallUWithLeftHook),
            'ꭠ' => Ok(LatinExtendedE::LatinSmallLetterSakhaYat),
            'ꭡ' => Ok(LatinExtendedE::LatinSmallLetterIotifiedE),
            'ꭢ' => Ok(LatinExtendedE::LatinSmallLetterOpenOe),
            'ꭣ' => Ok(LatinExtendedE::LatinSmallLetterUo),
            'ꭤ' => Ok(LatinExtendedE::LatinSmallLetterInvertedAlpha),
            'ꭥ' => Ok(LatinExtendedE::GreekLetterSmallCapitalOmega),
            'ꭦ' => Ok(LatinExtendedE::LatinSmallLetterDzDigraphWithRetroflexHook),
            'ꭧ' => Ok(LatinExtendedE::LatinSmallLetterTsDigraphWithRetroflexHook),
            // Anything outside this block is rejected.
            _ => Err(()),
        }
    }
}
impl Into<u32> for LatinExtendedE {
    /// Returns the Unicode scalar value of this character.
    fn into(self) -> u32 {
        // `u32::from(char)` yields the scalar value directly. The previous
        // implementation round-tripped through `escape_unicode`, string
        // replacement, and hex re-parsing — two heap allocations and a
        // fallible parse for a conversion the language guarantees.
        let c: char = self.into();
        u32::from(c)
    }
}
impl std::convert::TryFrom<u32> for LatinExtendedE {
    type Error = ();
    /// Interprets `u` as a Unicode scalar value and tries to map it to a
    /// character in this block; invalid scalars and out-of-block
    /// characters both yield `Err(())`.
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        char::try_from(u).map_err(|_| ()).and_then(Self::try_from)
    }
}
impl Iterator for LatinExtendedE {
    type Item = Self;
    /// Steps to the character with the next scalar value, ending with
    /// `None` once the value falls outside this block.
    fn next(&mut self) -> Option<Self> {
        use std::convert::TryFrom;
        let code: u32 = (*self).into();
        match Self::try_from(code + 1) {
            Ok(succ) => Some(succ),
            Err(_) => None,
        }
    }
}
impl LatinExtendedE {
    /// The character with the lowest index in this unicode block
    pub fn new() -> Self {
        Self::LatinSmallLetterBarredAlpha
    }
    /// The character's name, in sentence case
    pub fn name(&self) -> String {
        let debug_repr = std::format!("LatinExtendedE{:#?}", self);
        string_morph::to_sentence_case(&debug_repr)
    }
}
|
// Generated reader/writer aliases for the CH1_TOP register.
#[doc = "Reader of register CH1_TOP"]
pub type R = crate::R<u32, super::CH1_TOP>;
#[doc = "Writer for register CH1_TOP"]
pub type W = crate::W<u32, super::CH1_TOP>;
#[doc = "Register CH1_TOP `reset()`'s with value 0xffff"]
impl crate::ResetValue for super::CH1_TOP {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Value the register holds after reset (all 16 field bits set).
        0xffff
    }
}
#[doc = "Reader of field `CH1_TOP`"]
pub type CH1_TOP_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `CH1_TOP`"]
pub struct CH1_TOP_W<'a> {
    // Borrow of the register writer this proxy mutates.
    w: &'a mut W,
}
impl<'a> CH1_TOP_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Clear the low 16 bits, then splice in the new field value.
        let field = u32::from(value) & 0xffff;
        self.w.bits = (self.w.bits & !0xffff) | field;
        self.w
    }
}
impl R {
    #[doc = "Bits 0:15"]
    #[inline(always)]
    pub fn ch1_top(&self) -> CH1_TOP_R {
        // Extract the low 16 bits and hand them to the field reader.
        let field_bits = (self.bits & 0xffff) as u16;
        CH1_TOP_R::new(field_bits)
    }
}
impl W {
    #[doc = "Bits 0:15"]
    #[inline(always)]
    // Returns a write proxy for the CH1_TOP field.
    pub fn ch1_top(&mut self) -> CH1_TOP_W {
        CH1_TOP_W { w: self }
    }
}
|
use crate::utils::{download, download_kml, osmconvert};
use map_model::Map;
use sim::Scenario;
/// Downloads the external datasets the Seattle import pipeline needs:
/// an SRTM elevation tile, the statewide OSM extract, Soundcast parcel
/// data, and two city GIS layers converted from KML.
fn input() {
    // SRTM elevation tile covering the Seattle area.
    download(
        "input/seattle/N47W122.hgt",
        "https://dds.cr.usgs.gov/srtm/version2_1/SRTM1/Region_01/N47W122.hgt.zip",
    );
    // Statewide OpenStreetMap extract; clipped per-map later.
    download(
        "input/seattle/osm/washington-latest.osm.pbf",
        "http://download.geofabrik.de/north-america/us/washington-latest.osm.pbf",
    );
    // Soundcast data comes from https://github.com/psrc/soundcast/releases
    download(
        "input/seattle/parcels_urbansim.txt",
        "https://www.dropbox.com/s/t9oug9lwhdwfc04/psrc_2014.zip?dl=0",
    );
    // GPS bounds of the huge_seattle clipping polygon, used below when
    // converting the KML layers.
    let bounds = geom::GPSBounds::from(
        geom::LonLat::read_osmosis_polygon(abstutil::path(
            "input/seattle/polygons/huge_seattle.poly",
        ))
        .unwrap(),
    );
    // From http://data-seattlecitygis.opendata.arcgis.com/datasets/blockface
    download_kml(
        "input/seattle/blockface.bin",
        "https://opendata.arcgis.com/datasets/a1458ad1abca41869b81f7c0db0cd777_0.kml",
        &bounds,
        true,
    );
    // From https://data-seattlecitygis.opendata.arcgis.com/datasets/public-garages-or-parking-lots
    download_kml(
        "input/seattle/offstreet_parking.bin",
        "http://data-seattlecitygis.opendata.arcgis.com/datasets/8e52dfde6d5d45948f7a90654c8d50cd_0.kml",
        &bounds,
        true,
    );
}
/// Clips the statewide OSM extract to the polygon named `name` and runs
/// `convert_osm` over it, writing the result to `input/raw_maps/{name}.bin`.
pub fn osm_to_raw(name: &str) {
    // Make sure all upstream datasets are present first.
    input();
    // Each of these paths was previously rebuilt with a duplicated
    // `format!` call; compute each once and reuse.
    let osm_path = format!("input/seattle/osm/{}.osm", name);
    let polygon_path = format!("input/seattle/polygons/{}.poly", name);
    // Clip the statewide extract down to this map's polygon.
    osmconvert(
        "input/seattle/osm/washington-latest.osm.pbf",
        polygon_path.clone(),
        osm_path.clone(),
    );
    println!("- Running convert_osm");
    let map = convert_osm::convert(
        convert_osm::Options {
            osm_input: abstutil::path(osm_path),
            city_name: "seattle".to_string(),
            name: name.to_string(),
            clip: Some(abstutil::path(polygon_path)),
            map_config: map_model::MapConfig {
                driving_side: map_model::raw::DrivingSide::Right,
                bikes_can_use_bus_lanes: true,
            },
            onstreet_parking: convert_osm::OnstreetParking::Blockface(abstutil::path(
                "input/seattle/blockface.bin",
            )),
            public_offstreet_parking: convert_osm::PublicOffstreetParking::GIS(abstutil::path(
                "input/seattle/offstreet_parking.bin",
            )),
            private_offstreet_parking: convert_osm::PrivateOffstreetParking::FixedPerBldg(
                // TODO Utter guesses
                match name {
                    "downtown" => 5,
                    "lakeslice" => 3,
                    "south_seattle" => 5,
                    "udistrict" => 5,
                    _ => 1,
                },
            ),
            elevation: Some(abstutil::path("input/seattle/N47W122.hgt")),
        },
        &mut abstutil::Timer::throwaway(),
    );
    let output = abstutil::path(format!("input/raw_maps/{}.bin", name));
    println!("- Saving {}", output);
    abstutil::write_binary(output, &map);
}
/// Download and pre-process data needed to generate Seattle scenarios.
/// Returns the Soundcast population data along with the huge_seattle map,
/// regenerating whichever of the two is missing on disk.
#[cfg(feature = "scenarios")]
pub fn ensure_popdat_exists(
    timer: &mut abstutil::Timer,
) -> (crate::soundcast::PopDat, map_model::Map) {
    // Fast path: popdat already exists; just load it and the map.
    if abstutil::file_exists(abstutil::path_popdat()) {
        println!("- {} exists, not regenerating it", abstutil::path_popdat());
        return (
            abstutil::read_binary(abstutil::path_popdat(), timer),
            map_model::Map::new(abstutil::path_map("huge_seattle"), timer),
        );
    }
    // Build the raw huge_seattle map if it has never been imported.
    if !abstutil::file_exists(abstutil::path_raw_map("huge_seattle")) {
        osm_to_raw("huge_seattle");
    }
    // Load the converted map if present; otherwise convert the raw map now.
    let huge_map = if abstutil::file_exists(abstutil::path_map("huge_seattle")) {
        map_model::Map::new(abstutil::path_map("huge_seattle"), timer)
    } else {
        crate::utils::raw_to_map("huge_seattle", true, timer)
    };
    (crate::soundcast::import_data(&huge_map), huge_map)
}
/// Overrides each building's off-street parking capacity with the number
/// of cars the scenario actually parks there, then saves the map.
pub fn adjust_private_parking(map: &mut Map, scenario: &Scenario) {
    let per_building = scenario.count_parked_cars_per_bldg().consume();
    for (bldg, spots) in per_building {
        map.hack_override_offstreet_spots_individ(bldg, spots);
    }
    map.save();
}
|
use crate::wii_memory::WiiMemory;
use fancy_slice::FancySlice;
/// Stub parser for item data: all inputs are currently ignored and an
/// empty `ArcItemData` is returned. The underscore-prefixed parameters
/// keep the signature ready for when decoding is implemented.
pub(crate) fn arc_item_data(_parent_data: FancySlice, _data: FancySlice, _wii_memory: &WiiMemory) -> ArcItemData {
    ArcItemData {
    }
}
/// Placeholder item-data record; no fields have been decoded yet.
#[derive(Clone, Debug)]
pub struct ArcItemData {
}
|
use std::fs::{self};
use std::path::{Path, PathBuf};
use tempdir::TempDir;
use crate::db::Database;
/// Sets up a directory in which files can be dumped. This directory can be loaded as database,
/// modified and then asserted over.
#[derive(Debug)]
pub struct DatabaseTest {
    // Temporary directory holding the fixture's files; removed on drop.
    dir: TempDir,
    // Lazily-loaded database handle, populated by `read_database`.
    database: Option<Database>,
}
impl DatabaseTest {
    /// Creates a fresh fixture backed by a new temporary directory.
    pub fn new() -> Self {
        let dir = TempDir::new("taskpaper_db_test").expect("Could not create tempdir.");
        DatabaseTest {
            dir,
            database: None,
        }
    }
    /// Writes `content` to `path` (relative to the temp dir) and returns
    /// the absolute path of the created file.
    pub fn write_file(&self, path: impl AsRef<Path>, content: &str) -> PathBuf {
        let file_path = self.dir.path().join(path);
        fs::write(&file_path, content.as_bytes()).expect("Could not write file");
        file_path
    }
    /// Reads the file at `path` (relative to the temp dir) as UTF-8.
    pub fn read_file(&self, path: impl AsRef<Path>) -> String {
        let file_path = self.dir.path().join(path);
        fs::read_to_string(&file_path).expect("Could not read file")
    }
    /// (Re-)loads the temp dir as a `Database` and returns a mutable handle.
    pub fn read_database(&mut self) -> &mut Database {
        let db = Database::from_dir(self.dir.path()).expect("Could not read database.");
        self.database = Some(db);
        self.database.as_mut().unwrap()
    }
    /// Compares the file at `path` against the golden file. On mismatch,
    /// writes the actual output under /tmp for inspection and panics.
    pub fn assert_eq_to_golden(&self, golden: impl AsRef<Path>, path: impl AsRef<Path>) {
        let golden_data = fs::read_to_string(golden.as_ref()).expect("Could not read golden.");
        // Bug fix: this expect previously said "Could not read golden.",
        // which mislabeled a failure to read the *output* file.
        let out = fs::read_to_string(self.dir.path().join(path.as_ref()))
            .expect("Could not read output.");
        if golden_data == out {
            return;
        }
        // Keep the mismatching output around for manual diffing.
        let tmp_path = PathBuf::from("/tmp").join(path.as_ref());
        fs::write(&tmp_path, &out).expect("Could not write output.");
        panic!(
            "{} != {}.\n\nWrote output into {}",
            golden.as_ref().display(),
            path.as_ref().display(),
            tmp_path.display()
        );
    }
}
|
/**
* 代码生成器
* by shaipe 20190727
*/
#[warn(unused_parens)]
// #[macro_use]
extern crate mysql;
// use mysql::from_row;
use std::env;
mod config;
mod lang;
mod dbase;
mod table;
mod column;
// mod xx;
mod mod_x;
// use mod_x::test::xte;
use dbase::Database;
// use column::{get_databases, get_columns};
/// 启动入口
/// Entry point: loads the config, connects to MySQL, and dumps column
/// metadata for the sample table.
fn main() {
    // The config file path may be passed as the first CLI argument;
    // default to "config.json" in the working directory.
    let args: Vec<String> = env::args().collect();
    let conf_path = args.get(1).map(String::as_str).unwrap_or("config.json");
    // Load the configuration.
    let c = config::Config::new(conf_path);
    println!("{:?}", c);
    let dbc = c.database;
    // Build the MySQL connection string.
    let conn_str = format!("mysql://{}:{}@{}:{}/{}", dbc.user, dbc.password, dbc.server, dbc.port, dbc.db_name);
    let db = Database::new(&dbc.db_name, &conn_str);
    // NOTE(review): `get_talbe` is a typo for "table" in the `dbase` API;
    // kept as-is because the definition lives in another module.
    let tb = db.get_talbe("ehr_category");
    let cols = tb.get_columns();
    println!("{:?}", cols);
    println!("Hello, world!");
}
|
/// Prints a greeting to stdout.
fn main() {
    let message = greet("world");
    println!("{}", message);
}
/// Builds a "Hello, {who}!" greeting.
fn greet(who: &str) -> String {
    let mut message = String::from("Hello, ");
    message.push_str(who);
    message.push('!');
    message
}
|
/// Conceptual implementation of interned weak string pool.
///
/// The pool interns &str, and returns Rc<PooledStr>>, and PooledStr derefs to &str.
///
/// This simple implementation uses very dumb "hashset" that just uses
/// vector.
// depends on derivative = "1"
use derivative::Derivative;
use std::rc::{Rc, Weak};
use std::boxed::Box;
use std::marker::PhantomData;
use std::ops::Deref;
use std::hash::{BuildHasher, Hash, Hasher};
use std::collections::hash_map::RandomState;
// I wish it was in std
// Alias for the 64-bit value produced by `Hasher::finish`.
type HashValue = u64;
/// An interned string together with its cached hash. Equality ignores
/// the phantom hasher marker (via `derivative`).
#[derive(Clone)]
#[derive(Debug)]
#[derive(Derivative)]
#[derivative(PartialEq)]
pub struct PooledStr<BH: BuildHasher> {
    // Hash of `value`, computed with a `BH`-built hasher at intern time.
    hash: HashValue,
    value: String, // just use a ready container
    // Ties the type to a hasher family without storing a hasher.
    #[derivative(PartialEq="ignore")]
    _build_hasher: PhantomData<BH>,
}
impl<BH: BuildHasher> PartialEq<str> for PooledStr<BH> {
    /// A pooled string equals a plain `str` when their contents match.
    fn eq(&self, other: &str) -> bool {
        self.value.as_str() == other
    }
}
impl<BH: BuildHasher> Deref for PooledStr<BH> {
    type Target = str;
    /// Exposes the interned contents as `&str`.
    fn deref(&self) -> &str {
        &self.value
    }
}
/// Construction of pooled values from a hashable source type `V`.
pub trait FromHashableSource<V: ?Sized, BH: BuildHasher> {
    /// Hashes `v` with `hasher` and builds the pooled value.
    fn from_ref(v: &V, hasher: &mut BH::Hasher) -> Self;
    /// Builds the pooled value from an already-computed hash of `v`.
    fn from_hashed(hash: HashValue, v: &V) -> Self;
    /// Returns the hash cached at construction time.
    fn get_hash(&self) -> HashValue;
}
impl<BH: BuildHasher> FromHashableSource<str, BH> for PooledStr<BH> {
    /// Hashes `val` with the supplied hasher, then interns a copy.
    fn from_ref(val: &str, hasher: &mut BH::Hasher) -> PooledStr<BH> {
        val.hash(hasher);
        Self::from_hashed(hasher.finish(), val)
    }
    /// Interns a copy of `val` under a hash computed elsewhere.
    fn from_hashed(hash: HashValue, val: &str) -> PooledStr<BH> {
        PooledStr {
            hash,
            value: val.to_owned(),
            _build_hasher: PhantomData,
        }
    }
    fn get_hash(&self) -> HashValue {
        self.hash
    }
}
// Dumb HashSet that has only linear probing
pub struct DumbSet<T, K: ?Sized, BH: BuildHasher> {
    // Weak handles: an entry dies once the last external `Rc` is dropped.
    bins: Vec<Weak<T>>,
    // Hasher factory; sharing it across pools keeps hashes comparable.
    builder_hash: BH,
    // Marks the key type `K` without storing one.
    _phantom: PhantomData<K>,
}
impl<T, K: ?Sized, BH> DumbSet<T, K, BH>
where T: PartialEq<K> + PartialEq + Clone + FromHashableSource<K, BH>,
K: Hash,
BH: BuildHasher{
/// Create new hash; use same builder_hash if you want to
/// reuse interned values from one pool in another.
pub fn from_builder(builder_hash: BH) -> Self {
Self {
bins: Vec::new(),
builder_hash,
_phantom: PhantomData,
}
}
/// Intern key value (e.g. &str), returning new or old interned object
pub fn intern(&mut self, key: &K) -> Rc<T> {
let mut hasher = self.builder_hash.build_hasher();
key.hash(&mut hasher);
let hash = hasher.finish();
// Yep, it is very dumb.
for wc in self.bins.iter() {
if let Some(rc) = wc.upgrade() {
if rc.get_hash() == hash && rc.deref() == key {
return rc;
}
}
}
// Not found
let newval = Rc::new(T::from_hashed(hash, key));
self.bins.push(Rc::downgrade(&newval));
newval
}
/// Add another interned value, e.g. from another pool. Or even
/// reintern string from same pool.
pub fn implant(&mut self, val: &Rc<T>) {
for wc in self.bins.iter() {
if let Some(rc) = wc.upgrade() {
if rc.deref() == val.deref() {
return;
}
}
}
// Not found. Very dumb implementation: we do not even look
// for free week cell.
self.bins.push(Rc::downgrade(val));
}
// So far I have no idea how to do better without Box<dyn ...>
pub fn iter<'a>(&'a self) -> Box<dyn Iterator<Item=Rc<T>> + 'a> {
Box::new(self.bins.iter().filter_map(|wc| wc.upgrade()))
}
}
/// A pool of interned strings keyed by a chosen hash-builder family.
pub type Pool<BH> = DumbSet<PooledStr<BH>, str, BH>;
fn main() {
    // Intern every whitespace-separated word; duplicates collapse to one entry.
    let random_builder = RandomState::new();
    let mut pool = Pool::from_builder(random_builder);
    let sentence = String::from("Mary had a little lamb, a little lamb, a little lamb.");
    let mut interns = Vec::new();
    for word in sentence.split_whitespace() {
        interns.push(pool.intern(word));
    }
    // `interns` keeps the Rc values alive, so iteration sees every entry.
    for entry in pool.iter() {
        println!("{:?}", entry.deref());
    }
}
/*
Output:
Mary
had
a
little
lamb,
lamb.
*/
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.