text stringlengths 8 4.13M |
|---|
use crate::distribution::Discrete;
use crate::function::factorial;
use crate::statistics::*;
use crate::{Result, StatsError};
use ::nalgebra::{DMatrix, DVector};
use rand::Rng;
/// Implements the
/// [Multinomial](https://en.wikipedia.org/wiki/Multinomial_distribution)
/// distribution which is a generalization of the
/// [Binomial](https://en.wikipedia.org/wiki/Binomial_distribution)
/// distribution
///
/// # Examples
///
/// ```
/// use statrs::distribution::Multinomial;
/// use statrs::statistics::MeanN;
/// use nalgebra::DVector;
///
/// let n = Multinomial::new(&[0.3, 0.7], 5).unwrap();
/// assert_eq!(n.mean().unwrap(), DVector::from_vec(vec![1.5, 3.5]));
/// ```
#[derive(Debug, Clone, PartialEq)]
pub struct Multinomial {
    // Per-category event weights, stored exactly as supplied by the caller;
    // `new` validates them but does NOT normalize to sum to 1.
    p: Vec<f64>,
    // Number of trials.
    n: u64,
}
impl Multinomial {
    /// Constructs a new multinomial distribution from the event
    /// probabilities `p` and the number of trials `n`.
    ///
    /// # Errors
    ///
    /// Returns an error if `p` is empty, sums to zero, or contains an
    /// element that is negative or `f64::NAN`
    ///
    /// # Note
    ///
    /// The elements in `p` do not need to be normalized
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::Multinomial;
    ///
    /// let mut result = Multinomial::new(&[0.0, 1.0, 2.0], 3);
    /// assert!(result.is_ok());
    ///
    /// result = Multinomial::new(&[0.0, -1.0, 2.0], 3);
    /// assert!(result.is_err());
    /// ```
    pub fn new(p: &[f64], n: u64) -> Result<Multinomial> {
        match super::internal::is_valid_multinomial(p, true) {
            true => Ok(Multinomial { p: p.to_vec(), n }),
            false => Err(StatsError::BadParams),
        }
    }

    /// Returns the probabilities of the multinomial
    /// distribution as a slice
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::Multinomial;
    ///
    /// let n = Multinomial::new(&[0.0, 1.0, 2.0], 3).unwrap();
    /// assert_eq!(n.p(), [0.0, 1.0, 2.0]);
    /// ```
    pub fn p(&self) -> &[f64] {
        self.p.as_slice()
    }

    /// Returns the number of trials of the multinomial
    /// distribution
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::Multinomial;
    ///
    /// let n = Multinomial::new(&[0.0, 1.0, 2.0], 3).unwrap();
    /// assert_eq!(n.n(), 3);
    /// ```
    pub fn n(&self) -> u64 {
        self.n
    }
}
impl ::rand::distributions::Distribution<Vec<f64>> for Multinomial {
    /// Draws one multinomial sample: `n` independent categorical draws
    /// tallied into per-category counts (returned as `f64`s).
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Vec<f64> {
        // Build the categorical CDF once and reuse it for all n draws.
        let cdf = super::categorical::prob_mass_to_cdf(self.p());
        let mut counts = vec![0.0; self.p.len()];
        (0..self.n).for_each(|_| {
            let idx = super::categorical::sample_unchecked(rng, &cdf) as usize;
            counts[idx] += 1.0;
        });
        counts
    }
}
impl MeanN<DVector<f64>> for Multinomial {
    /// Returns the mean of the multinomial distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// n * p_i for i in 1...k
    /// ```
    ///
    /// where `n` is the number of trials, `p_i` is the `i`th probability,
    /// and `k` is the total number of probabilities
    fn mean(&self) -> Option<DVector<f64>> {
        let n = self.n as f64;
        let scaled: Vec<f64> = self.p.iter().map(|&pi| pi * n).collect();
        Some(DVector::from_vec(scaled))
    }
}
impl VarianceN<DMatrix<f64>> for Multinomial {
    /// Returns the covariance matrix of the multinomial distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// v_(i,i) = n * p_i * (1 - p_i)
    /// v_(i,j) = -n * p_i * p_j    for i != j
    /// ```
    ///
    /// where `n` is the number of trials, `p_i` is the `i`th probability,
    /// and `k` is the total number of probabilities
    ///
    /// NOTE(review): the previous implementation returned only the diagonal,
    /// but the multinomial covariance matrix also has the off-diagonal terms
    /// `-n * p_i * p_j`; a matrix-valued return implies the full covariance.
    /// The formula assumes the stored `p` is normalized — TODO confirm, since
    /// `new` accepts unnormalized weights.
    fn variance(&self) -> Option<DMatrix<f64>> {
        let n = self.n as f64;
        let k = self.p.len();
        let cov = DMatrix::from_fn(k, k, |i, j| {
            if i == j {
                n * self.p[i] * (1.0 - self.p[i])
            } else {
                -n * self.p[i] * self.p[j]
            }
        });
        Some(cov)
    }
}
// impl Skewness<Vec<f64>> for Multinomial {
// /// Returns the skewness of the multinomial distribution
// ///
// /// # Formula
// ///
// /// ```ignore
// /// (1 - 2 * p_i) / (n * p_i * (1 - p_i)) for i in 1...k
// /// ```
// ///
// /// where `n` is the number of trials, `p_i` is the `i`th probability,
// /// and `k` is the total number of probabilities
// fn skewness(&self) -> Option<Vec<f64>> {
// Some(
// self.p
// .iter()
// .map(|x| (1.0 - 2.0 * x) / (self.n as f64 * (1.0 - x) * x).sqrt())
// .collect(),
// )
// }
// }
impl<'a> Discrete<&'a [u64], f64> for Multinomial {
    /// Calculates the probability mass function for the multinomial
    /// distribution with the given `x`'s corresponding to the counts
    /// observed for each category
    ///
    /// # Panics
    ///
    /// If the length of `x` is not equal to the length of `p`
    ///
    /// # Note
    ///
    /// Returns `0.0` (rather than panicking) if the elements in `x` do not
    /// sum to `n`: such counts describe an impossible outcome, not a
    /// programming error
    ///
    /// # Formula
    ///
    /// ```ignore
    /// (n! / x_1!...x_k!) * p_i^x_i for i in 1...k
    /// ```
    ///
    /// where `n` is the number of trials, `p_i` is the `i`th probability,
    /// `x_i` is the `i`th `x` value, and `k` is the total number of
    /// probabilities
    fn pmf(&self, x: &[u64]) -> f64 {
        if self.p.len() != x.len() {
            panic!("Expected x and p to have equal lengths.");
        }
        // A count vector that does not use exactly n trials has probability 0.
        if x.iter().sum::<u64>() != self.n {
            return 0.0;
        }
        // Multinomial coefficient n! / (x_1! ... x_k!).
        let coeff = factorial::multinomial(self.n, x);
        // Product of p_i^{x_i} over all categories.
        let val = coeff
            * self
                .p
                .iter()
                .zip(x.iter())
                .fold(1.0, |acc, (pi, xi)| acc * pi.powf(*xi as f64));
        val
    }
    /// Calculates the log probability mass function for the multinomial
    /// distribution with the given `x`'s corresponding to the counts
    /// observed for each category
    ///
    /// # Panics
    ///
    /// If the length of `x` is not equal to the length of `p`
    ///
    /// # Note
    ///
    /// Returns `f64::NEG_INFINITY` (i.e. `ln(0)`, rather than panicking)
    /// if the elements in `x` do not sum to `n`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// ln((n! / x_1!...x_k!) * p_i^x_i) for i in 1...k
    /// ```
    ///
    /// where `n` is the number of trials, `p_i` is the `i`th probability,
    /// `x_i` is the `i`th `x` value, and `k` is the total number of
    /// probabilities
    fn ln_pmf(&self, x: &[u64]) -> f64 {
        if self.p.len() != x.len() {
            panic!("Expected x and p to have equal lengths.");
        }
        // ln(0) for impossible count vectors.
        if x.iter().sum::<u64>() != self.n {
            return f64::NEG_INFINITY;
        }
        // Work in log space to avoid overflow of the coefficient/powers.
        let coeff = factorial::multinomial(self.n, x).ln();
        let val = coeff
            + self
                .p
                .iter()
                .zip(x.iter())
                .map(|(pi, xi)| *xi as f64 * pi.ln())
                .fold(0.0, |acc, x| acc + x);
        val
    }
}
// TODO: fix tests
// #[rustfmt::skip]
// #[cfg(test)]
// mod tests {
// use crate::statistics::*;
// use crate::distribution::{Discrete, Multinomial};
// use crate::consts::ACC;
// fn try_create(p: &[f64], n: u64) -> Multinomial {
// let dist = Multinomial::new(p, n);
// assert!(dist.is_ok());
// dist.unwrap()
// }
// fn create_case(p: &[f64], n: u64) {
// let dist = try_create(p, n);
// assert_eq!(dist.p(), p);
// assert_eq!(dist.n(), n);
// }
// fn bad_create_case(p: &[f64], n: u64) {
// let dist = Multinomial::new(p, n);
// assert!(dist.is_err());
// }
// fn test_case<F>(p: &[f64], n: u64, expected: &[f64], eval: F)
// where F: Fn(Multinomial) -> Vec<f64>
// {
// let dist = try_create(p, n);
// let x = eval(dist);
// assert_eq!(*expected, *x);
// }
// fn test_almost<F>(p: &[f64], n: u64, expected: &[f64], acc: f64, eval: F)
// where F: Fn(Multinomial) -> Vec<f64>
// {
// let dist = try_create(p, n);
// let x = eval(dist);
// assert_eq!(expected.len(), x.len());
// for i in 0..expected.len() {
// assert_almost_eq!(expected[i], x[i], acc);
// }
// }
// fn test_almost_sr<F>(p: &[f64], n: u64, expected: f64, acc:f64, eval: F)
// where F: Fn(Multinomial) -> f64
// {
// let dist = try_create(p, n);
// let x = eval(dist);
// assert_almost_eq!(expected, x, acc);
// }
// #[test]
// fn test_create() {
// create_case(&[1.0, 1.0, 1.0], 4);
// create_case(&[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0], 4);
// }
// #[test]
// fn test_bad_create() {
// bad_create_case(&[-1.0, 1.0], 4);
// bad_create_case(&[0.0, 0.0], 4);
// }
// #[test]
// fn test_mean() {
// let mean = |x: Multinomial| x.mean().unwrap();
// test_case(&[0.3, 0.7], 5, &[1.5, 3.5], mean);
// test_case(&[0.1, 0.3, 0.6], 10, &[1.0, 3.0, 6.0], mean);
// test_case(&[0.15, 0.35, 0.3, 0.2], 20, &[3.0, 7.0, 6.0, 4.0], mean);
// }
// #[test]
// fn test_variance() {
// let variance = |x: Multinomial| x.variance().unwrap();
// test_almost(&[0.3, 0.7], 5, &[1.05, 1.05], 1e-15, variance);
// test_almost(&[0.1, 0.3, 0.6], 10, &[0.9, 2.1, 2.4], 1e-15, variance);
// test_almost(&[0.15, 0.35, 0.3, 0.2], 20, &[2.55, 4.55, 4.2, 3.2], 1e-15, variance);
// }
// // #[test]
// // fn test_skewness() {
// // let skewness = |x: Multinomial| x.skewness().unwrap();
// // test_almost(&[0.3, 0.7], 5, &[0.390360029179413, -0.390360029179413], 1e-15, skewness);
// // test_almost(&[0.1, 0.3, 0.6], 10, &[0.843274042711568, 0.276026223736942, -0.12909944487358], 1e-15, skewness);
// // test_almost(&[0.15, 0.35, 0.3, 0.2], 20, &[0.438357003759605, 0.140642169281549, 0.195180014589707, 0.335410196624968], 1e-15, skewness);
// // }
// #[test]
// fn test_pmf() {
// let pmf = |arg: &[u64]| move |x: Multinomial| x.pmf(arg);
// test_almost_sr(&[0.3, 0.7], 10, 0.121060821, 1e-15, pmf(&[1, 9]));
// test_almost_sr(&[0.1, 0.3, 0.6], 10, 0.105815808, 1e-15, pmf(&[1, 3, 6]));
// test_almost_sr(&[0.15, 0.35, 0.3, 0.2], 10, 0.000145152, 1e-15, pmf(&[1, 1, 1, 7]));
// }
// #[test]
// #[should_panic]
// fn test_pmf_x_wrong_length() {
// let pmf = |arg: &[u64]| move |x: Multinomial| x.pmf(arg);
// let n = Multinomial::new(&[0.3, 0.7], 10).unwrap();
// n.pmf(&[1]);
// }
// #[test]
// #[should_panic]
// fn test_pmf_x_wrong_sum() {
// let pmf = |arg: &[u64]| move |x: Multinomial| x.pmf(arg);
// let n = Multinomial::new(&[0.3, 0.7], 10).unwrap();
// n.pmf(&[1, 3]);
// }
// #[test]
// fn test_ln_pmf() {
// let large_p = &[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0];
// let n = Multinomial::new(large_p, 45).unwrap();
// let x = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
// assert_almost_eq!(n.pmf(x).ln(), n.ln_pmf(x), 1e-13);
// let n2 = Multinomial::new(large_p, 18).unwrap();
// let x2 = &[1, 1, 1, 2, 2, 2, 3, 3, 3];
// assert_almost_eq!(n2.pmf(x2).ln(), n2.ln_pmf(x2), 1e-13);
// let n3 = Multinomial::new(large_p, 51).unwrap();
// let x3 = &[5, 6, 7, 8, 7, 6, 5, 4, 3];
// assert_almost_eq!(n3.pmf(x3).ln(), n3.ln_pmf(x3), 1e-13);
// }
// #[test]
// #[should_panic]
// fn test_ln_pmf_x_wrong_length() {
// let n = Multinomial::new(&[0.3, 0.7], 10).unwrap();
// n.ln_pmf(&[1]);
// }
// #[test]
// #[should_panic]
// fn test_ln_pmf_x_wrong_sum() {
// let n = Multinomial::new(&[0.3, 0.7], 10).unwrap();
// n.ln_pmf(&[1, 3]);
// }
// }
|
use std::boxed::Box;
use diesel::prelude::*;
use diesel::pg::PgConnection;
use rocket::request::{self, FromRequest, Request};
use rocket::response::{Failure, status};
use rocket::http::Status;
use rocket::Outcome;
use dbtools::s3;
use fields::Authentication;
use models::traits::passwordable::Passwordable;
use DbConn;
/// The list of resource types that can be password-protected, derived from
/// the `horus-resource-type` request header (see the `FromRequest` impl).
pub enum PasswordableResource {
    /// An image resource (backed by the `horus_images` table).
    Image,
    /// A video resource (backed by the `horus_videos` table).
    Video,
    /// A generic file resource (backed by the `horus_files` table).
    File
}
/// Returns a signed S3 link to the resource if the password is correct.
#[post("/<res_id>", format="text/plain", data="<submitted_password>")]
pub fn check(
    res_type: PasswordableResource,
    res_id: String,
    submitted_password: String,
    conn: DbConn)
    -> Result<status::Custom<String>, Failure>
{
    // Look the resource up; unknown ids are a 404.
    let resource = match get_passwordable_resource_by_id(res_id, res_type, &*conn) {
        Some(found) => found,
        None => return Err(Failure(Status::NotFound)),
    };
    // Wrong password: reject before ever touching S3.
    if !resource.check_password(submitted_password, &*conn) {
        return Err(Failure(Status::Unauthorized));
    }
    // Correct password: hand back a presigned (time-limited) S3 URL.
    s3::get_s3_presigned_url(resource.get_s3_location())
        .map(|link| status::Custom(Status::Ok, link))
        .map_err(|_| Failure(Status::InternalServerError))
}
/// Sets, changes, or clears (via an empty body) the password on a resource
/// owned by the authenticated user, then adjusts the S3 ACL to match:
/// passwordless resources are public, passworded ones private.
#[put("/<res_id>", format="text/plain", data="<submitted_password>")]
pub fn set(
    res_type: PasswordableResource,
    res_id: String,
    submitted_password: String,
    auth: Authentication,
    conn: DbConn)
    -> Result<status::Accepted<()>, Failure>
{
    let resource = get_passwordable_resource_by_id(res_id.clone(), res_type, &*conn);
    let mut resource = match resource {
        Some(r) => r,
        None => return Err(Failure(Status::NotFound))
    };
    // Only the owner may change the password.
    if resource.owner() != auth.get_userid() {
        return Err(Failure(Status::Unauthorized));
    }
    // An empty request body means "remove the password".
    let submitted_password: Option<String> = match submitted_password.as_str() {
        "" => None,
        other => Some(other.to_string())
    };
    // Remember intent before handing ownership to set_password (avoids the
    // previous needless clone and the `== None` comparison).
    let is_clearing = submitted_password.is_none();
    // set_password returns Some(error) on failure.
    if let Some(err) = resource.set_password(submitted_password, &*conn) {
        eprintln!("Error changing password for {}: {}", res_id, err);
        return Err(Failure(Status::InternalServerError));
    }
    let s3_result = if is_clearing {
        s3::publicize_s3_resource(&resource.get_s3_location())
    } else {
        s3::privatize_s3_resource(&resource.get_s3_location())
    };
    match s3_result {
        Ok(()) => Ok(status::Accepted(None)),
        Err(_) => Err(Failure(Status::InternalServerError))
    }
}
/// Loads the resource with the given id from the table selected by
/// `res_type`, returning it behind the shared `Passwordable` trait, or
/// `None` when the lookup fails.
fn get_passwordable_resource_by_id(
    res_id: String,
    res_type: PasswordableResource,
    conn: &PgConnection)
    -> Option<Box<dyn Passwordable>>
{
    match res_type {
        PasswordableResource::Image => {
            use ::schema::horus_images::dsl::*;
            use ::models::HImage;
            horus_images.find(res_id).get_result::<HImage>(conn)
                .ok()
                .map(|img| Box::new(img) as Box<dyn Passwordable>)
        },
        PasswordableResource::Video => {
            use ::schema::horus_videos::dsl::*;
            use ::models::HVideo;
            horus_videos.find(res_id).get_result::<HVideo>(conn)
                .ok()
                .map(|vid| Box::new(vid) as Box<dyn Passwordable>)
        },
        PasswordableResource::File => {
            use ::schema::horus_files::dsl::*;
            use ::models::HFile;
            horus_files.find(res_id).get_result::<HFile>(conn)
                .ok()
                .map(|file| Box::new(file) as Box<dyn Passwordable>)
        }
    }
}
impl<'a, 'r> FromRequest<'a, 'r> for PasswordableResource {
    type Error = ();
    /// Derives the resource type from the `horus-resource-type` header
    /// (case-insensitive, surrounding whitespace ignored); a missing or
    /// unrecognized header is a 400 Bad Request.
    fn from_request(request: &'a Request<'r>)
        -> request::Outcome<PasswordableResource, Self::Error>
    {
        match request.headers().get_one("horus-resource-type") {
            None => Outcome::Failure((Status::BadRequest, ())),
            Some(raw) => match raw.to_lowercase().trim() {
                "image" => Outcome::Success(PasswordableResource::Image),
                "video" => Outcome::Success(PasswordableResource::Video),
                "file" => Outcome::Success(PasswordableResource::File),
                _ => Outcome::Failure((Status::BadRequest, ()))
            }
        }
    }
}
|
// variables5.rs
// Make me compile! Execute the command `rustlings hint variables5` if you want a hint :)
fn main() {
    let number = "holy molly this is crazy"; // don't change this line
    println!("Number {}", number);
    // Shadowing rebinds `number` to a new type (usize), which `let` permits.
    let number = str::len(number);
    println!("Number {}", number);
}
|
// RGB Rust Library
// Written in 2019 by
// Dr. Maxim Orlovsky <dr.orlovsky@gmail.com>
// basing on ideas from the original RGB rust library by
// Alekos Filini <alekos.filini@gmail.com>
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the MIT License
// along with this software.
// If not, see <https://opensource.org/licenses/MIT>.
//! RGB asset abstractions
use crate::*;
use bitcoin::consensus::encode::*;
use std::io::Cursor;
/// RGB asset data structure for in-memory representation of bundled asset issuance contracts and
/// chain of proofs for each of the known assets
#[derive(Clone, Debug)]
pub struct Asset<B: ContractBody> {
    /// Original asset issue contract
    pub contract: Contract<B>,
    /// Set of asset reissue contracts (if any)
    pub reissues: Vec<Contract<ReissueContractBody>>,
    /// Set of all known unspent RGB proofs for a given asset (i.e. heads of the proof chains)
    pub proof_chains: Vec<Proof<B>>,
}
impl<B: ContractBody> Asset<B>
where
    B: Encodable<Cursor<Vec<u8>>>,
    Contract<B>: OnChain<B>,
{
    /// Provides unique asset_id, which is computed as a SHA256d-hash from the consensus-serialized
    /// contract data. Delegates to the issue contract's identity hash, so the
    /// id is stable for the asset's whole lifetime regardless of reissues.
    pub fn get_asset_id(&self) -> IdentityHash {
        self.contract.get_identity_hash()
    }
}
impl<S: Encoder, T: Encodable<S> + ContractBody> Encodable<S> for Asset<T> {
    /// Consensus-encodes the asset as contract, then reissues, then proof
    /// chains. The field order is consensus-critical and must mirror
    /// `consensus_decode` exactly.
    fn consensus_encode(&self, s: &mut S) -> Result<(), Error> {
        self.contract.consensus_encode(s)?;
        self.reissues.consensus_encode(s)?;
        self.proof_chains.consensus_encode(s)
    }
}
impl<D: Decoder, T: Decodable<D> + ContractBody> Decodable<D> for Asset<T> {
    /// Consensus-decodes an asset. Struct-literal fields are evaluated in
    /// source order (contract, reissues, proof_chains), which matches the
    /// order written by `consensus_encode`.
    fn consensus_decode(d: &mut D) -> Result<Asset<T>, Error> {
        Ok(Asset {
            contract: Decodable::consensus_decode(d)?,
            reissues: Decodable::consensus_decode(d)?,
            proof_chains: Decodable::consensus_decode(d)?,
        })
    }
}
|
//! Predicate existential types.
//!
//! Provides newtypes `Pet` and `PetRef`
//! for providing predicate accepted
//! values and references respectively.
#![warn(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
extern crate no_std_compat as std;
#[macro_use]
// Expands to an explicit import of the std prelude. `#![no_std]` builds get
// no implicit prelude, so modules presumably invoke `std_prelude!()` to
// restore it via `no_std_compat` — TODO confirm against the submodules.
macro_rules! std_prelude {
    () => {
        #[allow(unused_imports)]
        use std::prelude::v1::*;
    };
}
// Predicate traits (names suggest unary and binary predicates — see module).
mod pred;
pub use pred::{Pred, Pred2};
// Owned predicate-existential wrapper.
mod pet;
pub use pet::Pet;
// Borrowed counterpart of `Pet`.
mod petref;
pub use petref::PetRef;
// Extension methods over predicates.
mod extension;
pub use extension::PredExt;
/// Recommended prelude for this crate.
pub mod prelude {
    pub use crate::{Pet, PetRef, Pred, PredExt};
}
/// Predicate implementations shipped with the crate.
pub mod preds;
#[cfg(test)]
mod tests;
|
//! The `NAME LIST (NLST)` command
//
// This command causes a directory listing to be sent from
// server to user site. The pathname should specify a
// directory or other system-specific file group descriptor; a
// null argument implies the current directory. The server
// will return a stream of names of files and no other
// information. The data will be transferred in ASCII or
// EBCDIC type over the data connection as valid pathname
// strings separated by <CRLF> or <NL>. (Again the user must
// ensure that the TYPE is correct.) This command is intended
// to return information that can be used by a program to
// further process the files automatically. For example, in
// the implementation of a "multiple get" function.
use crate::server::chancomms::DataChanCmd;
use crate::{
auth::UserDetail,
server::controlchan::{
command::Command,
error::ControlChanError,
handler::{CommandContext, CommandHandler},
Reply, ReplyCode,
},
storage::{Metadata, StorageBackend},
};
use async_trait::async_trait;
/// Handler for the `NLST` command: requests a bare stream of file names to
/// be sent over the data connection.
#[derive(Debug)]
pub struct Nlst;
#[async_trait]
impl<Storage, User> CommandHandler<Storage, User> for Nlst
where
    User: UserDetail + 'static,
    Storage: StorageBackend<User> + 'static,
    Storage::Metadata: Metadata,
{
    /// Forwards the NLST request to the data channel. Replies 150
    /// (FileStatusOkay) when a data connection is pending, or 425
    /// (CantOpenDataConnection) when none has been established.
    #[tracing_attributes::instrument]
    async fn handle(&self, args: CommandContext<Storage, User>) -> Result<Reply, ControlChanError> {
        let mut session = args.session.lock().await;
        // Translate the parsed control-channel command into its data-channel
        // form, keeping a copy of the optional path purely for logging.
        let (cmd, path_opt): (DataChanCmd, Option<String>) = match args.parsed_command.clone() {
            Command::Nlst { path } => {
                let path_clone = path.clone();
                (DataChanCmd::Nlst { path }, path_clone)
            }
            _ => panic!("Programmer error, expected command to be NLST"),
        };
        let logger = args.logger;
        // `take()` consumes the sender: each data connection serves one command.
        match session.data_cmd_tx.take() {
            Some(tx) => {
                // Notify the data channel asynchronously; a delivery failure is
                // only logged because the control reply is already decided.
                tokio::spawn(async move {
                    if let Err(err) = tx.send(cmd).await {
                        slog::warn!(logger, "NLST: could not notify data channel to respond with NLST. {}", err);
                    }
                });
                Ok(Reply::new(ReplyCode::FileStatusOkay, "Sending directory list"))
            }
            None => {
                if let Some(path) = path_opt {
                    slog::warn!(logger, "NLST: no data connection established for NLSTing {:?}", path);
                } else {
                    slog::warn!(logger, "NLST: no data connection established for NLST");
                }
                Ok(Reply::new(ReplyCode::CantOpenDataConnection, "No data connection established"))
            }
        }
    }
}
|
//! Handle the color theme
use crate::options::SkimOptions;
use tuikit::prelude::*;
// Theme applied when the user supplies no color overrides (see
// `ColorTheme::init_from_options`).
#[rustfmt::skip]
lazy_static! {
    pub static ref DEFAULT_THEME: ColorTheme = ColorTheme::dark256();
}
/// The color scheme of skim's UI
///
/// <pre>
/// +----------------+
/// | >selected line | --> selected & normal(fg/bg) & matched
/// |> current line  | --> cursor & current & current_match
/// |  normal line   |
/// |\ 8/10          | --> spinner & info
/// |> query         | --> prompt & query
/// +----------------+
/// </pre>
#[rustfmt::skip]
#[derive(Copy, Clone, Debug)]
pub struct ColorTheme {
    // Base foreground/background for ordinary lines.
    fg: Color,
    bg: Color,
    normal_effect: Effect,
    // Matched portions of non-current lines.
    matched: Color,
    matched_bg: Color,
    matched_effect: Effect,
    // The line under the cursor.
    current: Color,
    current_bg: Color,
    current_effect: Effect,
    // Matched portions of the current line.
    current_match: Color,
    current_match_bg: Color,
    current_match_effect: Effect,
    // The query input line.
    query_fg: Color,
    query_bg: Color,
    query_effect: Effect,
    // Miscellaneous UI elements.
    spinner: Color,
    info: Color,
    prompt: Color,
    cursor: Color,
    selected: Color,
    header: Color,
    border: Color,
}
#[rustfmt::skip]
#[allow(dead_code)]
impl ColorTheme {
    /// Builds the theme from the `--color` option when present, otherwise
    /// falls back to the `dark256` default.
    pub fn init_from_options(options: &SkimOptions) -> ColorTheme {
        if let Some(color) = options.color {
            ColorTheme::from_options(color)
        } else {
            ColorTheme::dark256()
        }
    }
    /// All colors default, all effects off — the base other themes build on.
    fn empty() -> Self {
        ColorTheme {
            fg: Color::Default,
            bg: Color::Default,
            normal_effect: Effect::empty(),
            matched: Color::Default,
            matched_bg: Color::Default,
            matched_effect: Effect::empty(),
            current: Color::Default,
            current_bg: Color::Default,
            current_effect: Effect::empty(),
            current_match: Color::Default,
            current_match_bg: Color::Default,
            current_match_effect: Effect::empty(),
            query_fg: Color::Default,
            query_bg: Color::Default,
            query_effect: Effect::empty(),
            spinner: Color::Default,
            info: Color::Default,
            prompt: Color::Default,
            cursor: Color::Default,
            selected: Color::Default,
            header: Color::Default,
            border: Color::Default,
        }
    }
    /// Black-and-white theme: effects only (underline/reverse), no colors.
    fn bw() -> Self {
        ColorTheme {
            matched_effect: Effect::UNDERLINE,
            current_effect: Effect::REVERSE,
            current_match_effect: Effect::UNDERLINE | Effect::REVERSE,
            ..ColorTheme::empty()
        }
    }
    /// Theme restricted to the 16 standard ANSI colors.
    fn default16() -> Self {
        ColorTheme {
            matched: Color::GREEN,
            matched_bg: Color::BLACK,
            current: Color::YELLOW,
            current_bg: Color::BLACK,
            current_match: Color::GREEN,
            current_match_bg: Color::BLACK,
            spinner: Color::GREEN,
            info: Color::WHITE,
            prompt: Color::BLUE,
            cursor: Color::RED,
            selected: Color::MAGENTA,
            header: Color::CYAN,
            border: Color::LIGHT_BLACK,
            ..ColorTheme::empty()
        }
    }
    /// Default dark theme using the 256-color palette.
    fn dark256() -> Self {
        ColorTheme {
            matched: Color::AnsiValue(108),
            matched_bg: Color::AnsiValue(0),
            current: Color::AnsiValue(254),
            current_bg: Color::AnsiValue(236),
            current_match: Color::AnsiValue(151),
            current_match_bg: Color::AnsiValue(236),
            spinner: Color::AnsiValue(148),
            info: Color::AnsiValue(144),
            prompt: Color::AnsiValue(110),
            cursor: Color::AnsiValue(161),
            selected: Color::AnsiValue(168),
            header: Color::AnsiValue(109),
            border: Color::AnsiValue(59),
            ..ColorTheme::empty()
        }
    }
    /// Molokai-inspired theme using the 256-color palette.
    fn molokai256() -> Self {
        ColorTheme {
            matched: Color::AnsiValue(234),
            matched_bg: Color::AnsiValue(186),
            current: Color::AnsiValue(254),
            current_bg: Color::AnsiValue(236),
            current_match: Color::AnsiValue(234),
            current_match_bg: Color::AnsiValue(186),
            spinner: Color::AnsiValue(148),
            info: Color::AnsiValue(144),
            prompt: Color::AnsiValue(110),
            cursor: Color::AnsiValue(161),
            selected: Color::AnsiValue(168),
            header: Color::AnsiValue(109),
            border: Color::AnsiValue(59),
            ..ColorTheme::empty()
        }
    }
    /// Light-background theme using the 256-color palette.
    fn light256() -> Self {
        ColorTheme {
            matched: Color::AnsiValue(0),
            matched_bg: Color::AnsiValue(220),
            current: Color::AnsiValue(237),
            current_bg: Color::AnsiValue(251),
            current_match: Color::AnsiValue(66),
            current_match_bg: Color::AnsiValue(251),
            spinner: Color::AnsiValue(65),
            info: Color::AnsiValue(101),
            prompt: Color::AnsiValue(25),
            cursor: Color::AnsiValue(161),
            selected: Color::AnsiValue(168),
            header: Color::AnsiValue(31),
            border: Color::AnsiValue(145),
            ..ColorTheme::empty()
        }
    }
    /// Parses a comma-separated `--color` spec. Bare names ("molokai",
    /// "light", "16", "bw", "empty", "dark"/"default") select a base theme;
    /// `field:value` pairs then override single colors, where `value` is
    /// either `#rrggbb` or a 0-255 ANSI palette index.
    #[allow(clippy::wildcard_in_or_patterns)]
    fn from_options(color: &str) -> Self {
        let mut theme = ColorTheme::dark256();
        for pair in color.split(',') {
            let color: Vec<&str> = pair.split(':').collect();
            // No ':' present — this entry names a base theme, not an override.
            if color.len() < 2 {
                theme = match color[0] {
                    "molokai" => ColorTheme::molokai256(),
                    "light" => ColorTheme::light256(),
                    "16" => ColorTheme::default16(),
                    "bw" => ColorTheme::bw(),
                    "empty" => ColorTheme::empty(),
                    "dark" | "default" | _ => ColorTheme::dark256(),
                };
                continue;
            }
            let new_color = if color[1].len() == 7 {
                // `#rrggbb` hex triplet; malformed components fall back to 255.
                // NOTE(review): byte-range slicing would panic on a 7-byte
                // value containing multi-byte UTF-8 — TODO confirm acceptable.
                let r = u8::from_str_radix(&color[1][1..3], 16).unwrap_or(255);
                let g = u8::from_str_radix(&color[1][3..5], 16).unwrap_or(255);
                let b = u8::from_str_radix(&color[1][5..7], 16).unwrap_or(255);
                Color::Rgb(r, g, b)
            } else {
                // Otherwise a 256-palette index; unparsable values mean Default.
                color[1].parse::<u8>()
                    .map(Color::AnsiValue)
                    .unwrap_or(Color::Default)
            };
            // fzf-compatible aliases (hl, fg+, bg+, hl+, pointer, marker)
            // are accepted alongside skim's own field names.
            match color[0] {
                "fg" => theme.fg = new_color,
                "bg" => theme.bg = new_color,
                "matched" | "hl" => theme.matched = new_color,
                "matched_bg" => theme.matched_bg = new_color,
                "current" | "fg+" => theme.current = new_color,
                "current_bg" | "bg+" => theme.current_bg = new_color,
                "current_match" | "hl+" => theme.current_match = new_color,
                "current_match_bg" => theme.current_match_bg = new_color,
                "query" => theme.query_fg = new_color,
                "query_bg" => theme.query_bg = new_color,
                "spinner" => theme.spinner = new_color,
                "info" => theme.info = new_color,
                "prompt" => theme.prompt = new_color,
                "cursor" | "pointer" => theme.cursor = new_color,
                "selected" | "marker" => theme.selected = new_color,
                "header" => theme.header = new_color,
                "border" => theme.border = new_color,
                _ => {}
            }
        }
        theme
    }
    /// Attribute for ordinary (non-current, non-matched) text.
    pub fn normal(&self) -> Attr {
        Attr {
            fg: self.fg,
            bg: self.bg,
            effect: self.normal_effect,
        }
    }
    /// Attribute for matched fragments on non-current lines.
    pub fn matched(&self) -> Attr {
        Attr {
            fg: self.matched,
            bg: self.matched_bg,
            effect: self.matched_effect,
        }
    }
    /// Attribute for the line under the cursor.
    pub fn current(&self) -> Attr {
        Attr {
            fg: self.current,
            bg: self.current_bg,
            effect: self.current_effect,
        }
    }
    /// Attribute for matched fragments on the current line.
    pub fn current_match(&self) -> Attr {
        Attr {
            fg: self.current_match,
            bg: self.current_match_bg,
            effect: self.current_match_effect,
        }
    }
    /// Attribute for the query input text.
    pub fn query(&self) -> Attr {
        Attr {
            fg: self.query_fg,
            bg: self.query_bg,
            effect: self.query_effect,
        }
    }
    /// Attribute for the progress spinner (always bold).
    pub fn spinner(&self) -> Attr {
        Attr {
            fg: self.spinner,
            bg: self.bg,
            effect: Effect::BOLD,
        }
    }
    /// Attribute for the match-count info line.
    pub fn info(&self) -> Attr {
        Attr {
            fg: self.info,
            bg: self.bg,
            effect: Effect::empty(),
        }
    }
    /// Attribute for the prompt characters.
    pub fn prompt(&self) -> Attr {
        Attr {
            fg: self.prompt,
            bg: self.bg,
            effect: Effect::empty(),
        }
    }
    /// Attribute for the cursor/pointer column (uses the current line's bg).
    pub fn cursor(&self) -> Attr {
        Attr {
            fg: self.cursor,
            bg: self.current_bg,
            effect: Effect::empty(),
        }
    }
    /// Attribute for the multi-select marker (uses the current line's bg).
    pub fn selected(&self) -> Attr {
        Attr {
            fg: self.selected,
            bg: self.current_bg,
            effect: Effect::empty(),
        }
    }
    /// Attribute for the header lines.
    pub fn header(&self) -> Attr {
        Attr {
            fg: self.header,
            bg: self.bg,
            effect: Effect::empty(),
        }
    }
    /// Attribute for the preview/window border.
    pub fn border(&self) -> Attr {
        Attr {
            fg: self.border,
            bg: self.bg,
            effect: Effect::empty(),
        }
    }
}
|
//! Provides functionality of adding inherent extrinsics to the Domain.
//! Unlike the primary chain, where inherent data is first derived by the block author
//! and then verified by the primary runtime, domain inherents
//! short circuit the derivation and verification of inherent data
//! as the inherent data is directly taken from the primary block from which
//! domain block is being built.
//!
//! One of the first use case for this is passing Timestamp data. Before building a
//! domain block using a primary block, we take the current time from the primary runtime
//! and then create an unsigned extrinsic that is put on top of the bundle extrinsics.
//!
//! Deriving these extrinsics during fraud proof verification should be possible since
//! verification environment will have access to consensus chain.
use crate::runtime_api::InherentExtrinsicConstructor;
use sp_api::ProvideRuntimeApi;
use sp_domains::DomainsApi;
use sp_runtime::traits::{Block as BlockT, NumberFor};
use std::sync::Arc;
/// Returns required inherent extrinsics for the domain block based on the primary block.
/// Note: consensus block hash must be used to construct domain block.
pub fn construct_inherent_extrinsics<Block, DomainRuntimeApi, CBlock, CClient>(
    consensus_client: &Arc<CClient>,
    domain_runtime_api: &DomainRuntimeApi,
    consensus_block_hash: CBlock::Hash,
    domain_parent_hash: Block::Hash,
) -> Result<Vec<Block::Extrinsic>, sp_blockchain::Error>
where
    Block: BlockT,
    CBlock: BlockT,
    CClient: ProvideRuntimeApi<CBlock>,
    CClient::Api: DomainsApi<CBlock, NumberFor<Block>, Block::Hash>,
    DomainRuntimeApi: InherentExtrinsicConstructor<Block>,
{
    // The timestamp is taken from the consensus (primary) runtime rather than
    // derived locally, so fraud-proof verification can reproduce it exactly.
    let timestamp = consensus_client
        .runtime_api()
        .timestamp(consensus_block_hash)?;
    let maybe_timestamp_inherent = domain_runtime_api
        .construct_timestamp_inherent_extrinsic(domain_parent_hash, timestamp)?;
    // Timestamp is currently the only inherent: the Option collects into a
    // zero- or one-element Vec.
    Ok(maybe_timestamp_inherent.into_iter().collect())
}
|
use crate::{DomainId, SealedBundleHeader};
use parity_scale_codec::{Decode, Encode};
use scale_info::TypeInfo;
use sp_consensus_slots::Slot;
use sp_core::H256;
use sp_runtime::traits::{BlakeTwo256, Hash as HashT, Header as HeaderT};
use sp_std::vec::Vec;
use sp_trie::StorageProof;
use subspace_core_primitives::BlockNumber;
use subspace_runtime_primitives::{AccountId, Balance};
/// A phase of a block's execution, carrying necessary information needed for verifying the
/// invalid state transition proof.
#[derive(Debug, Decode, Encode, TypeInfo, PartialEq, Eq, Clone)]
pub enum ExecutionPhase {
    /// Executes the `initialize_block` hook, starting from the given domain parent block hash.
    InitializeBlock { domain_parent_hash: H256 },
    /// Executes the extrinsic at the given index within the block.
    ApplyExtrinsic(u32),
    /// Executes the `finalize_block` hook after `total_extrinsics` extrinsics have been applied.
    FinalizeBlock { total_extrinsics: u32 },
}
impl ExecutionPhase {
    /// Returns the runtime-API method name used for generating the proof.
    /// These strings are runtime call identifiers and must match the
    /// runtime's exported method names exactly.
    pub fn proving_method(&self) -> &'static str {
        match self {
            // TODO: Replace `DomainCoreApi_initialize_block_with_post_state_root` with `Core_initalize_block`
            // Should be a same issue with https://github.com/paritytech/substrate/pull/10922#issuecomment-1068997467
            Self::InitializeBlock { .. } => "DomainCoreApi_initialize_block_with_post_state_root",
            Self::ApplyExtrinsic(_) => "BlockBuilder_apply_extrinsic",
            Self::FinalizeBlock { .. } => "BlockBuilder_finalize_block",
        }
    }
    /// Returns the method for verifying the proof.
    ///
    /// The difference with [`Self::proving_method`] is that the return value of verifying method
    /// must contain the post state root info so that it can be used to compare whether the
    /// result of execution reported in [`FraudProof`] is expected or not.
    pub fn verifying_method(&self) -> &'static str {
        match self {
            Self::InitializeBlock { .. } => "DomainCoreApi_initialize_block_with_post_state_root",
            Self::ApplyExtrinsic(_) => "DomainCoreApi_apply_extrinsic_with_post_state_root",
            Self::FinalizeBlock { .. } => "BlockBuilder_finalize_block",
        }
    }
    /// Returns the post state root for the given execution result.
    ///
    /// For `InitializeBlock`/`ApplyExtrinsic` the result is a SCALE-encoded
    /// `Vec<u8>` wrapping the encoded storage root; for `FinalizeBlock` the
    /// result is the encoded header, whose state root is extracted.
    pub fn decode_execution_result<Header: HeaderT>(
        &self,
        execution_result: Vec<u8>,
    ) -> Result<Header::Hash, VerificationError> {
        match self {
            Self::InitializeBlock { .. } | Self::ApplyExtrinsic(_) => {
                // Two decode layers: outer Vec<u8>, then the hash itself.
                let encoded_storage_root = Vec::<u8>::decode(&mut execution_result.as_slice())
                    .map_err(VerificationError::InitializeBlockOrApplyExtrinsicDecode)?;
                Header::Hash::decode(&mut encoded_storage_root.as_slice())
                    .map_err(VerificationError::StorageRootDecode)
            }
            Self::FinalizeBlock { .. } => {
                let new_header = Header::decode(&mut execution_result.as_slice())
                    .map_err(VerificationError::HeaderDecode)?;
                Ok(*new_header.state_root())
            }
        }
    }
}
/// Error type of fraud proof verification on consensus node.
#[derive(Debug)]
#[cfg_attr(feature = "thiserror", derive(thiserror::Error))]
pub enum VerificationError {
    /// `pre_state_root` in the invalid state transition proof is invalid.
    #[cfg_attr(feature = "thiserror", error("invalid `pre_state_root`"))]
    InvalidPreStateRoot,
    /// Hash of the consensus block being challenged not found.
    #[cfg_attr(feature = "thiserror", error("consensus block hash not found"))]
    ConsensusBlockHashNotFound,
    /// `post_state_root` not found in the state.
    #[cfg_attr(feature = "thiserror", error("`post_state_root` not found"))]
    PostStateRootNotFound,
    /// `post_state_root` is same as the one stored on chain.
    #[cfg_attr(
        feature = "thiserror",
        error("`post_state_root` is same as the one on chain")
    )]
    SamePostStateRoot,
    /// Domain extrinsic at given index not found.
    #[cfg_attr(
        feature = "thiserror",
        error("Domain extrinsic at index {0} not found")
    )]
    DomainExtrinsicNotFound(u32),
    /// Error occurred while building the domain extrinsics.
    #[cfg_attr(
        feature = "thiserror",
        error("Failed to rebuild the domain extrinsic list")
    )]
    FailedToBuildDomainExtrinsics,
    /// Failed to pass the execution proof check.
    #[cfg_attr(
        feature = "thiserror",
        error("Failed to pass the execution proof check")
    )]
    BadProof(sp_std::boxed::Box<dyn sp_state_machine::Error>),
    /// The `post_state_root` calculated by farmer does not match the one declared in [`FraudProof`].
    #[cfg_attr(
        feature = "thiserror",
        error("`post_state_root` mismatches, expected: {expected}, got: {got}")
    )]
    BadPostStateRoot { expected: H256, got: H256 },
    /// Failed to decode the return value of `initialize_block` and `apply_extrinsic`.
    #[cfg_attr(
        feature = "thiserror",
        error(
            "Failed to decode the return value of `initialize_block` and `apply_extrinsic`: {0}"
        )
    )]
    InitializeBlockOrApplyExtrinsicDecode(parity_scale_codec::Error),
    /// Failed to decode the storage root produced by verifying `initialize_block` or `apply_extrinsic`.
    #[cfg_attr(
        feature = "thiserror",
        error(
            "Failed to decode the storage root from verifying `initialize_block` and `apply_extrinsic`: {0}"
        )
    )]
    StorageRootDecode(parity_scale_codec::Error),
    /// Failed to decode the header produced by `finalize_block`.
    #[cfg_attr(
        feature = "thiserror",
        error("Failed to decode the header from verifying `finalize_block`: {0}")
    )]
    HeaderDecode(parity_scale_codec::Error),
    /// Transaction validity check passes.
    #[cfg_attr(feature = "thiserror", error("Valid transaction"))]
    ValidTransaction,
    /// State not found in the storage proof.
    #[cfg_attr(
        feature = "thiserror",
        error("State under storage key ({0:?}) not found in the storage proof")
    )]
    StateNotFound(Vec<u8>),
    /// Decode error.
    #[cfg(feature = "std")]
    #[cfg_attr(feature = "thiserror", error("Decode error: {0}"))]
    Decode(#[from] parity_scale_codec::Error),
    /// Runtime api error.
    #[cfg(feature = "std")]
    #[cfg_attr(feature = "thiserror", error("Runtime api error: {0}"))]
    RuntimeApi(#[from] sp_api::ApiError),
    /// Client error.
    #[cfg(feature = "std")]
    #[cfg_attr(feature = "thiserror", error("Client error: {0}"))]
    Client(#[from] sp_blockchain::Error),
    /// Invalid storage proof.
    #[cfg(feature = "std")]
    #[cfg_attr(feature = "thiserror", error("Invalid storage proof"))]
    InvalidStorageProof,
    /// Can not find signer from the domain extrinsic.
    #[cfg_attr(
        feature = "thiserror",
        error("Can not find signer from the domain extrinsic")
    )]
    SignerNotFound,
    /// Domain state root not found.
    #[cfg_attr(feature = "thiserror", error("Domain state root not found"))]
    DomainStateRootNotFound,
    /// Fail to get runtime code.
    // The `String` here actually represents the `sc_executor_common::error::WasmError`
    // error, but it will be improper to use `WasmError` directly here since it will make
    // `sp-domain` (a runtime crate) depend on `sc_executor_common` (a client crate).
    #[cfg(feature = "std")]
    #[cfg_attr(feature = "thiserror", error("Failed to get runtime code: {0}"))]
    RuntimeCode(String),
    /// Oneshot channel error while verifying a fraud proof in the transaction pool.
    #[cfg(feature = "std")]
    #[cfg_attr(
        feature = "thiserror",
        error("Oneshot error when verifying fraud proof in tx pool: {0}")
    )]
    Oneshot(String),
}
/// Fraud proof.
///
/// Each variant wraps the proof type for one class of misbehavior; see the
/// individual proof structs for details.
// NOTE: the derived SCALE `Encode`/`Decode` impls index variants by their
// declaration order, so reordering variants is a breaking encoding change.
// TODO: Revisit when fraud proof v2 is implemented.
#[allow(clippy::large_enum_variant)]
#[derive(Debug, Decode, Encode, TypeInfo, PartialEq, Eq, Clone)]
pub enum FraudProof<Number, Hash> {
    /// See [`InvalidStateTransitionProof`].
    InvalidStateTransition(InvalidStateTransitionProof),
    /// See [`InvalidTransactionProof`].
    InvalidTransaction(InvalidTransactionProof),
    /// See [`BundleEquivocationProof`].
    BundleEquivocation(BundleEquivocationProof<Number, Hash>),
    /// See [`ImproperTransactionSortitionProof`].
    ImproperTransactionSortition(ImproperTransactionSortitionProof),
}
impl<Number, Hash> FraudProof<Number, Hash> {
pub fn domain_id(&self) -> DomainId {
match self {
Self::InvalidStateTransition(proof) => proof.domain_id,
Self::InvalidTransaction(proof) => proof.domain_id,
Self::BundleEquivocation(proof) => proof.domain_id,
Self::ImproperTransactionSortition(proof) => proof.domain_id,
}
}
}
impl<Number, Hash> FraudProof<Number, Hash>
where
    Number: Encode,
    Hash: Encode,
{
    /// Hashes the SCALE encoding of this proof with `BlakeTwo256`.
    pub fn hash(&self) -> H256 {
        let encoded = self.encode();
        BlakeTwo256::hash(&encoded)
    }
}
/// Proves an invalid state transition by challenging the trace at specific index in a bad receipt.
// NOTE: the derived SCALE `Encode`/`Decode` impls serialize fields in
// declaration order, so reordering fields is a breaking encoding change.
#[derive(Debug, Decode, Encode, TypeInfo, PartialEq, Eq, Clone)]
pub struct InvalidStateTransitionProof {
    /// The id of the domain this fraud proof targets.
    pub domain_id: DomainId,
    /// Hash of the bad receipt in which an invalid trace occurred.
    pub bad_receipt_hash: H256,
    /// Parent number.
    pub parent_number: BlockNumber,
    /// Hash of the consensus block corresponding to `parent_number`.
    ///
    /// Runtime code for the execution of the domain block that is being challenged
    /// is retrieved on top of the consensus parent block from the consensus chain.
    pub consensus_parent_hash: H256,
    /// State root before the fraudulent transaction.
    pub pre_state_root: H256,
    /// State root after the fraudulent transaction.
    pub post_state_root: H256,
    /// Proof recorded during the computation.
    pub proof: StorageProof,
    /// Execution phase.
    pub execution_phase: ExecutionPhase,
}
/// Builds an [`InvalidStateTransitionProof`] with placeholder contents:
/// all hashes zeroed, an empty storage proof, and the `ApplyExtrinsic(0)`
/// execution phase. Presumably used as a test/benchmark fixture — only
/// `domain_id` and `parent_number` are caller-supplied.
pub fn dummy_invalid_state_transition_proof(
    domain_id: DomainId,
    parent_number: u32,
) -> InvalidStateTransitionProof {
    let zero_hash = H256::default();
    InvalidStateTransitionProof {
        domain_id,
        parent_number,
        bad_receipt_hash: zero_hash,
        consensus_parent_hash: zero_hash,
        pre_state_root: zero_hash,
        post_state_root: zero_hash,
        proof: StorageProof::empty(),
        execution_phase: ExecutionPhase::ApplyExtrinsic(0),
    }
}
/// Represents a bundle equivocation proof. An equivocation happens when an executor
/// produces more than one bundle on the same slot. The proof of equivocation
/// are the given distinct bundle headers that were signed by the validator and which
/// include the slot number.
// NOTE: the derived SCALE `Encode`/`Decode` impls serialize fields in
// declaration order, so reordering fields is a breaking encoding change.
#[derive(Debug, Decode, Encode, TypeInfo, PartialEq, Eq, Clone)]
pub struct BundleEquivocationProof<Number, Hash> {
    /// The id of the domain this fraud proof targets.
    pub domain_id: DomainId,
    /// The authority id of the equivocator.
    pub offender: AccountId,
    /// The slot at which the equivocation happened.
    pub slot: Slot,
    // TODO: The generic type should be `<Number, Hash, DomainNumber, DomainHash, Balance>`
    // TODO: `SealedBundleHeader` contains `ExecutionReceipt` which make the size of the proof
    // large, revisit when proceeding to fraud proof v2.
    /// The first header involved in the equivocation.
    pub first_header: SealedBundleHeader<Number, Hash, Number, H256, Balance>,
    /// The second header involved in the equivocation.
    pub second_header: SealedBundleHeader<Number, Hash, Number, H256, Balance>,
}
impl<Number: Clone + From<u32> + Encode, Hash: Clone + Default + Encode>
    BundleEquivocationProof<Number, Hash>
{
    /// Returns the hash of this bundle equivocation proof.
    ///
    /// `hash_of` hashes the SCALE encoding of `self` with `BlakeTwo256`.
    pub fn hash(&self) -> H256 {
        BlakeTwo256::hash_of(&self)
    }
}
/// Represents an invalid transaction proof.
// NOTE: the derived SCALE `Encode`/`Decode` impls serialize fields in
// declaration order, so reordering fields is a breaking encoding change.
#[derive(Clone, Debug, Decode, Encode, Eq, PartialEq, TypeInfo)]
pub struct InvalidTransactionProof {
    /// The id of the domain this fraud proof targets.
    pub domain_id: DomainId,
    /// Number of the block at which the invalid transaction occurred.
    pub block_number: u32,
    /// Hash of the domain block corresponding to `block_number`.
    pub domain_block_hash: H256,
    /// The raw (SCALE-encoded, presumably — confirm against the producer) bytes
    /// of the offending extrinsic.
    // TODO: Verifiable invalid extrinsic.
    pub invalid_extrinsic: Vec<u8>,
    /// Storage witness needed for verifying this proof.
    pub storage_proof: StorageProof,
}
/// Represents an improper transaction sortition proof.
///
/// Carries only the target domain id; no witness data is attached (yet).
#[derive(Clone, Debug, Decode, Encode, Eq, PartialEq, TypeInfo)]
pub struct ImproperTransactionSortitionProof {
    /// The id of the domain this fraud proof targets.
    pub domain_id: DomainId,
}
// ----- file boundary: AutoRust-generated Azure Container Registry client follows -----
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
/// Aggregate error type for this generated client module.
///
/// Wraps (via `#[from]`/`#[error(transparent)]`) the per-operation error enum
/// of every API operation, so callers can funnel any operation's failure into
/// a single type. Variant names follow the generated `Group_Operation` scheme,
/// hence the `allow(non_camel_case_types)`.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    Registries_ImportImage(#[from] registries::import_image::Error),
    #[error(transparent)]
    Registries_CheckNameAvailability(#[from] registries::check_name_availability::Error),
    #[error(transparent)]
    Operations_List(#[from] operations::list::Error),
    #[error(transparent)]
    Registries_Get(#[from] registries::get::Error),
    #[error(transparent)]
    Registries_Create(#[from] registries::create::Error),
    #[error(transparent)]
    Registries_Update(#[from] registries::update::Error),
    #[error(transparent)]
    Registries_Delete(#[from] registries::delete::Error),
    #[error(transparent)]
    Registries_ListByResourceGroup(#[from] registries::list_by_resource_group::Error),
    #[error(transparent)]
    Registries_List(#[from] registries::list::Error),
    #[error(transparent)]
    Registries_ListCredentials(#[from] registries::list_credentials::Error),
    #[error(transparent)]
    Registries_RegenerateCredential(#[from] registries::regenerate_credential::Error),
    #[error(transparent)]
    Registries_ListUsages(#[from] registries::list_usages::Error),
    #[error(transparent)]
    Registries_ListPolicies(#[from] registries::list_policies::Error),
    #[error(transparent)]
    Registries_UpdatePolicies(#[from] registries::update_policies::Error),
    #[error(transparent)]
    Replications_Get(#[from] replications::get::Error),
    #[error(transparent)]
    Replications_Create(#[from] replications::create::Error),
    #[error(transparent)]
    Replications_Update(#[from] replications::update::Error),
    #[error(transparent)]
    Replications_Delete(#[from] replications::delete::Error),
    #[error(transparent)]
    Replications_List(#[from] replications::list::Error),
    #[error(transparent)]
    Webhooks_Get(#[from] webhooks::get::Error),
    #[error(transparent)]
    Webhooks_Create(#[from] webhooks::create::Error),
    #[error(transparent)]
    Webhooks_Update(#[from] webhooks::update::Error),
    #[error(transparent)]
    Webhooks_Delete(#[from] webhooks::delete::Error),
    #[error(transparent)]
    Webhooks_List(#[from] webhooks::list::Error),
    #[error(transparent)]
    Webhooks_Ping(#[from] webhooks::ping::Error),
    #[error(transparent)]
    Webhooks_GetCallbackConfig(#[from] webhooks::get_callback_config::Error),
    #[error(transparent)]
    Webhooks_ListEvents(#[from] webhooks::list_events::Error),
    #[error(transparent)]
    Builds_List(#[from] builds::list::Error),
    #[error(transparent)]
    Builds_Get(#[from] builds::get::Error),
    #[error(transparent)]
    Builds_Update(#[from] builds::update::Error),
    #[error(transparent)]
    Builds_GetLogLink(#[from] builds::get_log_link::Error),
    #[error(transparent)]
    Builds_Cancel(#[from] builds::cancel::Error),
    #[error(transparent)]
    BuildSteps_List(#[from] build_steps::list::Error),
    #[error(transparent)]
    BuildSteps_Get(#[from] build_steps::get::Error),
    #[error(transparent)]
    BuildSteps_Create(#[from] build_steps::create::Error),
    #[error(transparent)]
    BuildSteps_Update(#[from] build_steps::update::Error),
    #[error(transparent)]
    BuildSteps_Delete(#[from] build_steps::delete::Error),
    #[error(transparent)]
    BuildSteps_ListBuildArguments(#[from] build_steps::list_build_arguments::Error),
    #[error(transparent)]
    BuildTasks_List(#[from] build_tasks::list::Error),
    #[error(transparent)]
    BuildTasks_Get(#[from] build_tasks::get::Error),
    #[error(transparent)]
    BuildTasks_Create(#[from] build_tasks::create::Error),
    #[error(transparent)]
    BuildTasks_Update(#[from] build_tasks::update::Error),
    #[error(transparent)]
    BuildTasks_Delete(#[from] build_tasks::delete::Error),
    #[error(transparent)]
    BuildTasks_ListSourceRepositoryProperties(#[from] build_tasks::list_source_repository_properties::Error),
    #[error(transparent)]
    Registries_QueueBuild(#[from] registries::queue_build::Error),
    #[error(transparent)]
    Registries_GetBuildSourceUploadUrl(#[from] registries::get_build_source_upload_url::Error),
}
pub mod registries {
use super::{models, API_VERSION};
pub async fn import_image(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
parameters: &models::ImportImageParameters,
) -> std::result::Result<import_image::Response, import_image::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/importImage",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(import_image::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(import_image::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(import_image::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(import_image::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(import_image::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(import_image::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(import_image::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(import_image::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod import_image {
        use super::{models, API_VERSION};
        /// Success responses of the `import_image` operation; the service
        /// returns no body for either status.
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
        }
        /// Failure modes of `import_image`, one variant per pipeline stage
        /// (URL parse, auth, build, transport, (de)serialization) plus a
        /// catch-all for HTTP statuses the operation does not treat as success.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            // Generated for every operation, even ones that never deserialize.
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn check_name_availability(
operation_config: &crate::OperationConfig,
subscription_id: &str,
registry_name_check_request: &models::RegistryNameCheckRequest,
) -> std::result::Result<models::RegistryNameStatus, check_name_availability::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.ContainerRegistry/checkNameAvailability",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(check_name_availability::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(check_name_availability::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(registry_name_check_request).map_err(check_name_availability::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(check_name_availability::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(check_name_availability::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::RegistryNameStatus = serde_json::from_slice(rsp_body)
.map_err(|source| check_name_availability::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(check_name_availability::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod check_name_availability {
        use super::{models, API_VERSION};
        /// Failure modes of `check_name_availability`, one variant per pipeline
        /// stage plus a catch-all for HTTP statuses other than 200.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
) -> std::result::Result<models::Registry, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Registry =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod get {
        use super::{models, API_VERSION};
        /// Failure modes of `get`, one variant per pipeline stage plus a
        /// catch-all for HTTP statuses other than 200.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            // Generated for every operation, even body-less GETs.
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn create(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
registry: &models::Registry,
) -> std::result::Result<create::Response, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(registry).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Registry =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::Registry =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
Err(create::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod create {
        use super::{models, API_VERSION};
        /// Success responses of `create`; both statuses carry the created or
        /// updated registry returned by the service.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::Registry),
            Created201(models::Registry),
        }
        /// Failure modes of `create`, one variant per pipeline stage plus a
        /// catch-all for HTTP statuses other than 200/201.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
registry_update_parameters: &models::RegistryUpdateParameters,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(registry_update_parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Registry =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::Registry =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
Err(update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod update {
        use super::{models, API_VERSION};
        /// Success responses of `update`; both statuses carry the updated
        /// registry returned by the service.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::Registry),
            Created201(models::Registry),
        }
        /// Failure modes of `update`, one variant per pipeline stage plus a
        /// catch-all for HTTP statuses other than 200/201.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
Err(delete::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod delete {
        use super::{models, API_VERSION};
        /// Success responses of the `delete` operation; the service returns no
        /// body for any of them.
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
            NoContent204,
        }
        /// Failure modes of `delete`, one variant per pipeline stage plus a
        /// catch-all for HTTP statuses the operation does not treat as success.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
) -> std::result::Result<models::RegistryListResult, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_resource_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_resource_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_resource_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::RegistryListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_by_resource_group::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod list_by_resource_group {
        use super::{models, API_VERSION};
        /// Failure modes of `list_by_resource_group`, one variant per pipeline
        /// stage plus a catch-all for HTTP statuses other than 200.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::RegistryListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.ContainerRegistry/registries",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::RegistryListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod list {
        use super::{models, API_VERSION};
        /// Failure modes of `list`, one variant per pipeline stage plus a
        /// catch-all for HTTP statuses other than 200.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn list_credentials(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
) -> std::result::Result<models::RegistryListCredentialsResult, list_credentials::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/listCredentials",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(list_credentials::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_credentials::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_credentials::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_credentials::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::RegistryListCredentialsResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_credentials::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_credentials::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `list_credentials` operation.
pub mod list_credentials {
    // NOTE(review): imports kept for generator symmetry; some may be unused here.
    use super::{models, API_VERSION};
    /// Failure modes across the request pipeline: URL parsing, token
    /// acquisition, request building, transport, and (de)serialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn regenerate_credential(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
regenerate_credential_parameters: &models::RegenerateCredentialParameters,
) -> std::result::Result<models::RegistryListCredentialsResult, regenerate_credential::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/regenerateCredential",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(regenerate_credential::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(regenerate_credential::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(regenerate_credential_parameters).map_err(regenerate_credential::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(regenerate_credential::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(regenerate_credential::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::RegistryListCredentialsResult = serde_json::from_slice(rsp_body)
.map_err(|source| regenerate_credential::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(regenerate_credential::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `regenerate_credential` operation.
pub mod regenerate_credential {
    // NOTE(review): imports kept for generator symmetry; some may be unused here.
    use super::{models, API_VERSION};
    /// Failure modes across the request pipeline: URL parsing, token
    /// acquisition, request building, transport, and (de)serialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn list_usages(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
) -> std::result::Result<models::RegistryUsageListResult, list_usages::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/listUsages",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(list_usages::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_usages::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_usages::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_usages::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::RegistryUsageListResult =
serde_json::from_slice(rsp_body).map_err(|source| list_usages::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_usages::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `list_usages` operation.
pub mod list_usages {
    // NOTE(review): imports kept for generator symmetry; some may be unused here.
    use super::{models, API_VERSION};
    /// Failure modes across the request pipeline: URL parsing, token
    /// acquisition, request building, transport, and (de)serialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn list_policies(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
) -> std::result::Result<models::RegistryPolicies, list_policies::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/listPolicies",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(list_policies::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_policies::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_policies::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_policies::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::RegistryPolicies =
serde_json::from_slice(rsp_body).map_err(|source| list_policies::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_policies::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `list_policies` operation.
pub mod list_policies {
    // NOTE(review): imports kept for generator symmetry; some may be unused here.
    use super::{models, API_VERSION};
    /// Failure modes across the request pipeline: URL parsing, token
    /// acquisition, request building, transport, and (de)serialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn update_policies(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
registry_policies_update_parameters: &models::RegistryPolicies,
) -> std::result::Result<update_policies::Response, update_policies::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/updatePolicies",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(update_policies::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_policies::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(registry_policies_update_parameters).map_err(update_policies::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update_policies::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_policies::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::RegistryPolicies = serde_json::from_slice(rsp_body)
.map_err(|source| update_policies::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update_policies::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(update_policies::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(update_policies::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Response and error types for the `update_policies` operation.
pub mod update_policies {
    // NOTE(review): imports kept for generator symmetry; some may be unused here.
    use super::{models, API_VERSION};
    /// Success responses: 200 carries the updated policies; 202 means the
    /// update was accepted for asynchronous processing.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::RegistryPolicies),
        Accepted202,
    }
    /// Failure modes across the request pipeline: URL parsing, token
    /// acquisition, request building, transport, and (de)serialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn queue_build(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
build_request: &models::QueueBuildRequest,
) -> std::result::Result<queue_build::Response, queue_build::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/queueBuild",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(queue_build::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(queue_build::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(build_request).map_err(queue_build::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(queue_build::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(queue_build::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Build =
serde_json::from_slice(rsp_body).map_err(|source| queue_build::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(queue_build::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(queue_build::Response::Accepted202),
status_code => Err(queue_build::Error::DefaultResponse { status_code }),
}
}
/// Response and error types for the `queue_build` operation.
pub mod queue_build {
    // NOTE(review): imports kept for generator symmetry; some may be unused here.
    use super::{models, API_VERSION};
    /// Success responses: 200 carries the queued build; 202 means the build
    /// was accepted for asynchronous processing.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::Build),
        Accepted202,
    }
    /// Failure modes across the request pipeline. Unlike the operations that
    /// use `UnexpectedResponse`, `DefaultResponse` carries only the status
    /// code, not the response body.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_build_source_upload_url(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
) -> std::result::Result<models::SourceUploadDefinition, get_build_source_upload_url::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/getBuildSourceUploadUrl",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(get_build_source_upload_url::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_build_source_upload_url::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_build_source_upload_url::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_build_source_upload_url::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SourceUploadDefinition = serde_json::from_slice(rsp_body)
.map_err(|source| get_build_source_upload_url::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => Err(get_build_source_upload_url::Error::DefaultResponse { status_code }),
}
}
/// Error types for the `get_build_source_upload_url` operation.
pub mod get_build_source_upload_url {
    // NOTE(review): imports kept for generator symmetry; some may be unused here.
    use super::{models, API_VERSION};
    /// Failure modes across the request pipeline. `DefaultResponse` carries
    /// only the status code, not the response body.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
/// Client for the provider-level `Microsoft.ContainerRegistry/operations` endpoint.
pub mod operations {
    use super::{models, API_VERSION};
    /// Lists all available Container Registry REST API operations.
    ///
    /// Sends `GET {base_path}/providers/Microsoft.ContainerRegistry/operations`
    /// and deserializes a 200 response into `models::OperationListResult`; any
    /// other status is returned as `list::Error::UnexpectedResponse` with the body.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::OperationListResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/providers/Microsoft.ContainerRegistry/operations", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer authentication is optional: attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::OperationListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for the `list` operation.
    pub mod list {
        // NOTE(review): imports kept for generator symmetry; some may be unused here.
        use super::{models, API_VERSION};
        /// Failure modes across the request pipeline: URL parsing, token
        /// acquisition, request building, transport, and (de)serialization.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod replications {
use super::{models, API_VERSION};
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
replication_name: &str,
) -> std::result::Result<models::Replication, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/replications/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
replication_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Replication =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `get` operation.
pub mod get {
    // NOTE(review): imports kept for generator symmetry; some may be unused here.
    use super::{models, API_VERSION};
    /// Failure modes across the request pipeline: URL parsing, token
    /// acquisition, request building, transport, and (de)serialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn create(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
replication_name: &str,
replication: &models::Replication,
) -> std::result::Result<create::Response, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/replications/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
replication_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(replication).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Replication =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::Replication =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
Err(create::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Response and error types for the `create` operation.
pub mod create {
    // NOTE(review): imports kept for generator symmetry; some may be unused here.
    use super::{models, API_VERSION};
    /// Success responses: both 200 and 201 carry the resulting replication.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::Replication),
        Created201(models::Replication),
    }
    /// Failure modes across the request pipeline: URL parsing, token
    /// acquisition, request building, transport, and (de)serialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
replication_name: &str,
replication_update_parameters: &models::ReplicationUpdateParameters,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/replications/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
replication_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(replication_update_parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Replication =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::Replication =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
Err(update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Response and error types for the `update` operation.
pub mod update {
    // NOTE(review): imports kept for generator symmetry; some may be unused here.
    use super::{models, API_VERSION};
    /// Success responses: both 200 and 201 carry the resulting replication.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::Replication),
        Created201(models::Replication),
    }
    /// Failure modes across the request pipeline: URL parsing, token
    /// acquisition, request building, transport, and (de)serialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
replication_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/replications/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
replication_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
Err(delete::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Response and error types for the `delete` operation.
pub mod delete {
    // NOTE(review): imports kept for generator symmetry; some may be unused here.
    use super::{models, API_VERSION};
    /// Success responses: 200 deleted, 202 accepted for asynchronous
    /// processing, 204 nothing to delete.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    /// Failure modes across the request pipeline: URL parsing, token
    /// acquisition, request building, transport, and (de)serialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
) -> std::result::Result<models::ReplicationListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/replications",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ReplicationListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list {
// Error types for the sibling `list` operation.
use super::{models, API_VERSION};
/// Failure modes covering every fallible step of the request pipeline:
/// URL parsing, token acquisition, request build, transport, and body
/// (de)serialization. `UnexpectedResponse` keeps the raw body for diagnosis.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod webhooks {
use super::{models, API_VERSION};
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
webhook_name: &str,
) -> std::result::Result<models::Webhook, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
webhook_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Webhook =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get {
// Error types for the sibling `get` operation.
use super::{models, API_VERSION};
/// Failure modes covering every fallible step of the request pipeline:
/// URL parsing, token acquisition, request build, transport, and body
/// (de)serialization. `UnexpectedResponse` keeps the raw body for diagnosis.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
webhook_name: &str,
webhook_create_parameters: &models::WebhookCreateParameters,
) -> std::result::Result<create::Response, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
webhook_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(webhook_create_parameters).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Webhook =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::Webhook =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
Err(create::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod create {
// Response and error types for the sibling `create` operation.
use super::{models, API_VERSION};
/// Success responses: 200 when an existing webhook was replaced,
/// 201 when a new one was created; both carry the resulting `Webhook`.
#[derive(Debug)]
pub enum Response {
Ok200(models::Webhook),
Created201(models::Webhook),
}
/// Failure modes covering every fallible step of the request pipeline:
/// URL parsing, token acquisition, request build, transport, and body
/// (de)serialization. `UnexpectedResponse` keeps the raw body for diagnosis.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
webhook_name: &str,
webhook_update_parameters: &models::WebhookUpdateParameters,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
webhook_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(webhook_update_parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Webhook =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::Webhook =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
Err(update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod update {
// Response and error types for the sibling `update` operation.
use super::{models, API_VERSION};
/// Success responses: 200 (updated) and 201 (created) both carry
/// the resulting `Webhook`.
#[derive(Debug)]
pub enum Response {
Ok200(models::Webhook),
Created201(models::Webhook),
}
/// Failure modes covering every fallible step of the request pipeline:
/// URL parsing, token acquisition, request build, transport, and body
/// (de)serialization. `UnexpectedResponse` keeps the raw body for diagnosis.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
webhook_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
webhook_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
Err(delete::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod delete {
// Response and error types for the sibling `delete` operation.
use super::{models, API_VERSION};
/// Success responses: synchronous delete (200), accepted for asynchronous
/// completion (202), or already absent (204). None carries a body.
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
/// Failure modes covering every fallible step of the request pipeline:
/// URL parsing, token acquisition, request build, transport, and body
/// (de)serialization. `UnexpectedResponse` keeps the raw body for diagnosis.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
) -> std::result::Result<models::WebhookListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::WebhookListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list {
// Error types for the sibling `list` operation.
use super::{models, API_VERSION};
/// Failure modes covering every fallible step of the request pipeline:
/// URL parsing, token acquisition, request build, transport, and body
/// (de)serialization. `UnexpectedResponse` keeps the raw body for diagnosis.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn ping(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
webhook_name: &str,
) -> std::result::Result<models::EventInfo, ping::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks/{}/ping",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
webhook_name
);
let mut url = url::Url::parse(url_str).map_err(ping::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(ping::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(ping::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(ping::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::EventInfo =
serde_json::from_slice(rsp_body).map_err(|source| ping::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(ping::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod ping {
// Error types for the sibling `ping` operation.
use super::{models, API_VERSION};
/// Failure modes covering every fallible step of the request pipeline:
/// URL parsing, token acquisition, request build, transport, and body
/// (de)serialization. `UnexpectedResponse` keeps the raw body for diagnosis.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_callback_config(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
webhook_name: &str,
) -> std::result::Result<models::CallbackConfig, get_callback_config::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks/{}/getCallbackConfig",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
webhook_name
);
let mut url = url::Url::parse(url_str).map_err(get_callback_config::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_callback_config::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_callback_config::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_callback_config::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::CallbackConfig = serde_json::from_slice(rsp_body)
.map_err(|source| get_callback_config::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get_callback_config::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get_callback_config {
// Error types for the sibling `get_callback_config` operation.
use super::{models, API_VERSION};
/// Failure modes covering every fallible step of the request pipeline:
/// URL parsing, token acquisition, request build, transport, and body
/// (de)serialization. `UnexpectedResponse` keeps the raw body for diagnosis.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_events(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
webhook_name: &str,
) -> std::result::Result<models::EventListResult, list_events::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/webhooks/{}/listEvents",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
webhook_name
);
let mut url = url::Url::parse(url_str).map_err(list_events::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_events::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_events::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_events::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::EventListResult =
serde_json::from_slice(rsp_body).map_err(|source| list_events::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_events::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list_events {
// Error types for the sibling `list_events` operation.
use super::{models, API_VERSION};
/// Failure modes covering every fallible step of the request pipeline:
/// URL parsing, token acquisition, request build, transport, and body
/// (de)serialization. `UnexpectedResponse` keeps the raw body for diagnosis.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod builds {
use super::{models, API_VERSION};
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
filter: Option<&str>,
top: Option<i32>,
skip_token: Option<&str>,
) -> std::result::Result<models::BuildListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/builds",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(top) = top {
url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
}
if let Some(skip_token) = skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::BuildListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => Err(list::Error::DefaultResponse { status_code }),
}
}
pub mod list {
// Error types for the sibling `list` operation.
use super::{models, API_VERSION};
/// Failure modes for the builds `list` operation. Unlike the webhook
/// operations, an unexpected status is reported as `DefaultResponse`
/// (status only, no captured body).
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
build_id: &str,
) -> std::result::Result<models::Build, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/builds/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
build_id
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Build =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => Err(get::Error::DefaultResponse { status_code }),
}
}
pub mod get {
// Error types for the sibling `get` operation.
use super::{models, API_VERSION};
/// Failure modes for the builds `get` operation. Unexpected statuses are
/// reported as `DefaultResponse` (status only, no captured body).
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
build_id: &str,
build_update_parameters: &models::BuildUpdateParameters,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/builds/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
build_id
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(build_update_parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Build =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::Build =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Created201(rsp_value))
}
status_code => Err(update::Error::DefaultResponse { status_code }),
}
}
pub mod update {
// Response and error types for the sibling `update` operation.
use super::{models, API_VERSION};
/// Success responses: 200 (updated) and 201 (created) both carry
/// the resulting `Build`.
#[derive(Debug)]
pub enum Response {
Ok200(models::Build),
Created201(models::Build),
}
/// Failure modes for the builds `update` operation. Unexpected statuses are
/// reported as `DefaultResponse` (status only, no captured body).
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_log_link(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
build_id: &str,
) -> std::result::Result<models::BuildGetLogResult, get_log_link::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/builds/{}/getLogLink",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
build_id
);
let mut url = url::Url::parse(url_str).map_err(get_log_link::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_log_link::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_log_link::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_log_link::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::BuildGetLogResult =
serde_json::from_slice(rsp_body).map_err(|source| get_log_link::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => Err(get_log_link::Error::DefaultResponse { status_code }),
}
}
pub mod get_log_link {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn cancel(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
build_id: &str,
) -> std::result::Result<cancel::Response, cancel::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/builds/{}/cancel",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
build_id
);
let mut url = url::Url::parse(url_str).map_err(cancel::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(cancel::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(cancel::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(cancel::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(cancel::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(cancel::Response::Accepted202),
status_code => Err(cancel::Error::DefaultResponse { status_code }),
}
}
pub mod cancel {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Operations on the build steps of a container registry build task:
/// list, get, create, update, delete, and listBuildArguments.
///
/// NOTE(review): this module is machine-generated client code; the code is
/// left byte-identical and only documentation has been added.
pub mod build_steps {
    use super::{models, API_VERSION};
    /// Lists all build steps of the given build task (`GET .../buildTasks/{name}/steps`).
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        registry_name: &str,
        build_task_name: &str,
    ) -> std::result::Result<models::BuildStepList, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/buildTasks/{}/steps",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            registry_name,
            build_task_name
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            // Authenticate with a bearer token when a credential is configured.
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::BuildStepList =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(list::Error::DefaultResponse { status_code }),
        }
    }
    /// Error types for the `list` operation.
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Gets a single build step by name (`GET .../steps/{step_name}`).
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        registry_name: &str,
        build_task_name: &str,
        step_name: &str,
    ) -> std::result::Result<models::BuildStep, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/buildTasks/{}/steps/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            registry_name,
            build_task_name,
            step_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            // Authenticate with a bearer token when a credential is configured.
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::BuildStep =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(get::Error::DefaultResponse { status_code }),
        }
    }
    /// Error types for the `get` operation.
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates (upserts) a build step via `PUT .../steps/{step_name}` with a
    /// JSON body; `200` and `201` are both success responses.
    pub async fn create(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        registry_name: &str,
        build_task_name: &str,
        step_name: &str,
        build_step_create_parameters: &models::BuildStep,
    ) -> std::result::Result<create::Response, create::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/buildTasks/{}/steps/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            registry_name,
            build_task_name,
            step_name
        );
        let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        if let Some(token_credential) = operation_config.token_credential() {
            // Authenticate with a bearer token when a credential is configured.
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        // Serialize the creation parameters into the JSON request body.
        let req_body = azure_core::to_json(build_step_create_parameters).map_err(create::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::BuildStep =
                    serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::BuildStep =
                    serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create::Response::Created201(rsp_value))
            }
            status_code => Err(create::Error::DefaultResponse { status_code }),
        }
    }
    /// Response and error types for the `create` operation.
    pub mod create {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::BuildStep),
            Created201(models::BuildStep),
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Patches a build step via `PATCH .../steps/{step_name}` with a JSON
    /// body; `200` and `201` are both success responses.
    pub async fn update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        registry_name: &str,
        build_task_name: &str,
        step_name: &str,
        build_step_update_parameters: &models::BuildStepUpdateParameters,
    ) -> std::result::Result<update::Response, update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/buildTasks/{}/steps/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            registry_name,
            build_task_name,
            step_name
        );
        let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PATCH);
        if let Some(token_credential) = operation_config.token_credential() {
            // Authenticate with a bearer token when a credential is configured.
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        // Serialize the patch parameters into the JSON request body.
        let req_body = azure_core::to_json(build_step_update_parameters).map_err(update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::BuildStep =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::BuildStep =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(update::Response::Created201(rsp_value))
            }
            status_code => Err(update::Error::DefaultResponse { status_code }),
        }
    }
    /// Response and error types for the `update` operation.
    pub mod update {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::BuildStep),
            Created201(models::BuildStep),
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Deletes a build step (`DELETE .../steps/{step_name}`); `200` and `202`
    /// are both success responses.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        registry_name: &str,
        build_task_name: &str,
        step_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/buildTasks/{}/steps/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            registry_name,
            build_task_name,
            step_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        if let Some(token_credential) = operation_config.token_credential() {
            // Authenticate with a bearer token when a credential is configured.
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(delete::Response::Ok200),
            http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
            status_code => Err(delete::Error::DefaultResponse { status_code }),
        }
    }
    /// Response and error types for the `delete` operation.
    pub mod delete {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists the build arguments of a step via an empty-body `POST` to
    /// `.../steps/{step_name}/listBuildArguments`.
    pub async fn list_build_arguments(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        registry_name: &str,
        build_task_name: &str,
        step_name: &str,
    ) -> std::result::Result<models::BuildArgumentList, list_build_arguments::Error> {
        let http_client = operation_config.http_client();
        let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/buildTasks/{}/steps/{}/listBuildArguments" , operation_config . base_path () , subscription_id , resource_group_name , registry_name , build_task_name , step_name) ;
        let mut url = url::Url::parse(url_str).map_err(list_build_arguments::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            // Authenticate with a bearer token when a credential is configured.
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_build_arguments::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        // Empty POST body, so an explicit zero content length is set.
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_build_arguments::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_build_arguments::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::BuildArgumentList = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_build_arguments::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(list_build_arguments::Error::DefaultResponse { status_code }),
        }
    }
    /// Error types for the `list_build_arguments` operation.
    pub mod list_build_arguments {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod build_tasks {
use super::{models, API_VERSION};
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
filter: Option<&str>,
skip_token: Option<&str>,
) -> std::result::Result<models::BuildTaskListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/buildTasks",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(skip_token) = skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::BuildTaskListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => Err(list::Error::DefaultResponse { status_code }),
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
build_task_name: &str,
) -> std::result::Result<models::BuildTask, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/buildTasks/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
build_task_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::BuildTask =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => Err(get::Error::DefaultResponse { status_code }),
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
build_task_name: &str,
build_task_create_parameters: &models::BuildTask,
) -> std::result::Result<create::Response, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/buildTasks/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
build_task_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(build_task_create_parameters).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::BuildTask =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::BuildTask =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Created201(rsp_value))
}
status_code => Err(create::Error::DefaultResponse { status_code }),
}
}
pub mod create {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::BuildTask),
Created201(models::BuildTask),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
registry_name: &str,
build_task_name: &str,
build_task_update_parameters: &models::BuildTaskUpdateParameters,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/buildTasks/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
registry_name,
build_task_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(build_task_update_parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::BuildTask =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::BuildTask =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Created201(rsp_value))
}
status_code => Err(update::Error::DefaultResponse { status_code }),
}
}
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::BuildTask),
Created201(models::BuildTask),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Deletes a build task from a container registry.
///
/// Issues `DELETE .../buildTasks/{build_task_name}`. Status 200/202/204 map
/// to the corresponding `delete::Response` variants; any other status becomes
/// `delete::Error::DefaultResponse`.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    registry_name: &str,
    build_task_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/buildTasks/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        registry_name,
        build_task_name
    );
    let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    // Bearer auth is only attached when a token credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(delete::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // DELETE carries no payload.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => Ok(delete::Response::Ok200),
        http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
        http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        status_code => Err(delete::Error::DefaultResponse { status_code }),
    }
}
/// Response and error types for the build-task `delete` operation.
pub mod delete {
    use super::{models, API_VERSION};
    /// Success variants; delete returns no body, only a status.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    /// Everything that can go wrong while performing `delete`.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Retrieves the source-repository properties of a build task.
///
/// Issues `POST .../buildTasks/{build_task_name}/listSourceRepositoryProperties`
/// with an empty body and deserializes the 200 response.
pub async fn list_source_repository_properties(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    registry_name: &str,
    build_task_name: &str,
) -> std::result::Result<models::SourceRepositoryProperties, list_source_repository_properties::Error> {
    let http_client = operation_config.http_client();
    let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerRegistry/registries/{}/buildTasks/{}/listSourceRepositoryProperties" , operation_config . base_path () , subscription_id , resource_group_name , registry_name , build_task_name) ;
    let mut url = url::Url::parse(url_str).map_err(list_source_repository_properties::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::POST);
    // Bearer auth is only attached when a token credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_source_repository_properties::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    // Explicit zero Content-Length for the body-less POST.
    req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(list_source_repository_properties::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_source_repository_properties::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::SourceRepositoryProperties = serde_json::from_slice(rsp_body)
                .map_err(|source| list_source_repository_properties::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(list_source_repository_properties::Error::DefaultResponse { status_code }),
    }
}
/// Error types for `list_source_repository_properties` (success is returned
/// directly as the model, so no `Response` enum is needed).
pub mod list_source_repository_properties {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
|
use std::io::{self, Read, Write};
// mod rsa;
mod ed25519;
// pub use self::rsa::RSA;
pub use self::ed25519::ED25519;
pub trait KeyPair: Sync + Send {
fn system(&self) -> &'static CryptoSystem;
fn has_private(&self) -> bool;
fn verify(&self, data: &[u8], signature: &[u8]) -> Result<bool, ()>;
fn sign(&self, data: &[u8]) -> Result<Vec<u8>, ()>;
fn write_public(&self, w: &mut Write) -> io::Result<()>;
fn export(&self, w: &mut Write) -> io::Result<()>;
}
pub struct CryptoSystem {
pub id: &'static str,
pub generate_key_pair: fn(bits: Option<u32>) -> Box<KeyPair>,
pub import: fn(r: &mut Read) -> io::Result<Box<KeyPair>>,
pub read_public: fn(r: &mut Read) -> io::Result<Box<KeyPair>>,
}
|
extern crate postgres;
extern crate serde;
extern crate serde_json;
extern crate curl;
extern crate rand;
extern crate chrono;
extern crate num;
extern crate toml;
extern crate clap;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate prettytable;
use std::fs::File;
use std::io::prelude::*;
use std::{io, env, process, fmt};
use std::string::ToString;
use postgres::{Connection, TlsMode};
use curl::easy::{Easy, List};
use rand::{thread_rng, Rng};
use chrono::prelude::*;
use num::Float;
use prettytable::Table;
use prettytable::row::Row;
use prettytable::cell::Cell;
use clap::{App, Arg};
/// Entry point: parses CLI arguments, loads the TOML config, optionally seeds
/// the database (`--init`), then runs an interactive read-query-print loop
/// until the user types `exit`.
fn main() {
    // ASCII-art banner.
    println!("
_ __
___ ___ ___ __(_)__________ / /
(_-</ _ `/ // / / __/ __/ -_) /
/___/\\_, /\\_,_/_/_/ /_/ \\__/_/
/_/
");
    // args
    let matches = App::new("squirrel")
        .version("0.1.0")
        .author("code_thief <yaojiach@gmail.com>")
        .about("SQL Bot")
        .arg(Arg::with_name("config")
            .short("c")
            .long("config")
            .value_name("CONFIG")
            .help("Path to config file")
            .takes_value(true))
        .arg(Arg::with_name("init")
            .long("init")
            .help("Initialize database with default data"))
        .get_matches();
    // NOTE(review): `unwrap` panics when --config is omitted; the flag is
    // effectively required even though clap does not mark it as such.
    let conf_file = matches.value_of("config").unwrap();
    let mut f = File::open(conf_file).expect("Config file not found");
    let mut conf_content = String::new();
    f.read_to_string(&mut conf_content).expect("Error reading config file");
    let conf: Config = toml::from_str(conf_content.as_str()).unwrap();
    if matches.is_present("init") {
        create_data(&conf);
    }
    // db config: build the connection URI from the [pg] config section.
    let db_uri = format!("postgresql://{}:{}@{}:{}/{}",
                         conf.pg.username, conf.pg.password, conf.pg.host,
                         conf.pg.port, conf.pg.database);
    let conn = Connection::connect(db_uri, TlsMode::None).unwrap();
    // wit config: URL prefix and authorization header for the Wit.ai API.
    let req_pre = format!("{}v={}&q=", conf.wit.baseurl, conf.wit.version);
    let req_h = format!("{} {}", conf.wit.baseheader, conf.wit.token);
    // main app: REPL over stdin.
    loop {
        println!("Tell me what you want: ");
        let mut q = String::new();
        io::stdin().read_line(&mut q).expect("Failed to read cmd line input");
        let q: &str = q.trim();
        match q {
            "exit" => {
                println!("Shutting down!");
                process::exit(0);
            },
            _ => {
                // TODO
                // translate NL to query (definetly NO clone!!!!)
                // NOTE(review): the translated query `e` is only printed;
                // the raw user input `q` is what actually gets executed below.
                let e = translate("show%20all%20customer", req_pre.clone(), req_h.clone());
                println!("Compiled query: {}", e);
                let rows = &conn.query(q, &[]).unwrap();
                let cols = rows.columns();
                // Header rows: column names and their Postgres type names.
                let colnames = cols.iter().map(|x| Cell::new(x.name())).collect::<Vec<_>>();
                let coltys = cols.iter().map(|x| Cell::new(x.type_().name())).collect::<Vec<_>>();
                let coltynms = cols.iter().map(|x| x.type_().name()).collect::<Vec<_>>();
                let mut table = Table::new();
                table.add_row(Row::new(colnames));
                table.add_row(Row::new(coltys));
                for row in rows {
                    let l = row.len();
                    let mut r: Vec<String> = Vec::new();
                    // Render each cell according to its column type; NULLs
                    // are shown as text. Only int4 and varchar are handled.
                    for ll in 0..l {
                        let rr = match coltynms[ll] {
                            "int4" => {
                                let unpack: Option<i32> = row.get(ll);
                                let ret = match unpack {
                                    Some(unpack) => unpack.to_string(),
                                    None => "Null".to_string(),
                                };
                                ret
                            },
                            "varchar" => {
                                let unpack: Option<String> = row.get(ll);
                                unpack.unwrap_or("NULL".to_string())
                            },
                            _ => {
                                "Not Implemented".to_string()
                            },
                        };
                        r.push(rr);
                    }
                    let c = r.iter().map(|x| Cell::new(x)).collect::<Vec<_>>();
                    table.add_row(Row::new(c));
                }
                table.printstd();
            },
        }
    }
}
// *****************
// configurations
// *****************
// Top-level configuration deserialized from the TOML config file.
#[derive(Deserialize)]
struct Config {
    wit: Wit,
    pg: Postgres,
}
// Wit.ai API settings ([wit] section).
#[derive(Deserialize)]
struct Wit {
    token: String,
    version: String,
    baseurl: String,
    baseheader: String,
}
// PostgreSQL connection settings ([pg] section).
#[derive(Deserialize)]
struct Postgres {
    username: String,
    password: String,
    host: String,
    // Kept as a string; it is only interpolated into the connection URI.
    port: String,
    database: String,
}
// *****************
// sql builder
// *****************
// TODO
// Placeholder for a structured SQL statement to be built from Wit.ai
// intents; not implemented yet (see the TODO above).
struct SqlBody {
}
/// Sends the (URL-encoded) natural-language query `q` to the Wit.ai API and
/// returns the extracted `value` entity as the compiled query text.
///
/// `req_pre` is the URL prefix (base URL + API version + `q=`) and `req_h`
/// is the pre-built authorization header line.
///
/// # Panics
/// Panics if the HTTP transfer fails, the response is not valid UTF-8/JSON,
/// or it does not match the expected Wit.ai entity schema.
fn translate(q: &str, req_pre: String, req_h: String) -> String {
    let url = format!("{}{}", req_pre, q);
    let mut resp: String = String::new();
    let mut handle = Easy::new();
    let mut extra = List::new();
    extra.append(req_h.as_str()).unwrap();
    handle.http_headers(extra).unwrap();
    handle.url(url.as_str()).unwrap();
    {
        // Scope the transfer so its mutable borrow of `resp` ends before parsing.
        let mut transfer = handle.transfer();
        transfer.write_function(|data| {
            resp.push_str(std::str::from_utf8(data).unwrap());
            Ok(data.len())
        }).unwrap();
        transfer.perform().unwrap();
    }
    let j: WitRv = serde_json::from_str(resp.as_str()).unwrap();
    let we: WitEntity = serde_json::from_value(j.entities).unwrap();
    // Deserialized only to validate the schema; the action is not used yet,
    // so the binding is underscored to silence the unused-variable warning.
    let _act: WitData = serde_json::from_value(we.action[0].clone()).unwrap();
    let val: WitData = serde_json::from_value(we.value[0].clone()).unwrap();
    val.value
}
// *****************
// wit api
// *****************
// Top-level Wit.ai response envelope; `entities` is kept as raw JSON and
// decoded in a second step (see `translate`).
#[derive(Serialize, Deserialize)]
struct WitRv {
    msg_id: String,
    _text: String,
    entities: serde_json::Value,
}
// The `entities` object: parallel lists of candidate actions and values.
// NOTE(review): shape assumed from usage in `translate`; verify against the
// Wit.ai API response schema.
#[derive(Serialize, Deserialize)]
struct WitEntity {
    action: Vec<serde_json::Value>,
    value: Vec<serde_json::Value>,
}
// A single Wit.ai entity candidate.
#[derive(Serialize, Deserialize)]
struct WitData {
    confidence: f32,
    value: String,
    // Serialized as "type", which is a Rust keyword.
    #[serde(rename = "type")]
    data_type: String,
}
// *****************
// postgres
// *****************
// Row model mirroring the `customer` table created in `create_data`.
// `id` mirrors the SERIAL primary key and is not sent on insert.
struct Customer {
    id: i32,
    name: String,
    age: Option<i32>,
    gender: Option<String>,
}
// Row model mirroring the `product` table.
struct Product {
    id: i32,
    sku: i32,
    price: f32,
    description: Option<String>,
}
// Row model mirroring the `transaction` table.
struct Transaction {
    id: i32,
    datetime: DateTime<UTC>,
    product_id: i32,
    customer_id: i32,
    amount: i32,
}
// *****************
// utils
// *****************
/// Rounds `p` to a number of decimal places selected by `d`.
///
/// # Arguments
/// * `p` - The float to round
/// * `d` - A power of ten (1.0, 10.0, 100.0) indicating the digits to keep
fn pricer<T: Float>(p: T, d: T) -> T {
    (p * d).round() / d
}
/// (Re)creates the demo tables and fills them with random sample data.
///
/// Drops any existing `customer`, `product` and `transaction` tables, then
/// recreates them and inserts randomly generated rows. Safe to run on a
/// fresh database thanks to `IF EXISTS`.
///
/// # Panics
/// Panics if the database connection or any statement fails.
fn create_data(conf: &Config) {
    let db_uri = format!("postgresql://{}:{}@{}:{}/{}",
                         conf.pg.username, conf.pg.password, conf.pg.host,
                         conf.pg.port, conf.pg.database);
    let conn = Connection::connect(db_uri, TlsMode::None).unwrap();
    let custname = vec!["Jamie", "Morgan", "Taylor", "Alex", "Chris", "Sam"];
    let custsex: Vec<Option<String>> = vec![Some("M".to_string()), Some("F".to_string()), None];
    let proddesc = vec!["Rust", "Python", "R", "Scala", "Java", "Go", "Erlang", "Elixir",
                        "C", "C++", "Swift"];
    // Bug fix: plain DROP TABLE aborted initialization on a fresh database;
    // IF EXISTS makes the reset idempotent.
    conn.execute("DROP TABLE IF EXISTS customer", &[]).unwrap();
    conn.execute("DROP TABLE IF EXISTS product", &[]).unwrap();
    conn.execute("DROP TABLE IF EXISTS transaction", &[]).unwrap();
    conn.batch_execute("
        CREATE TABLE customer (
            id SERIAL PRIMARY KEY,
            name VARCHAR NOT NULL,
            age INT,
            gender VARCHAR
        );
        CREATE TABLE product (
            id SERIAL PRIMARY KEY,
            sku INT NOT NULL,
            price REAL NOT NULL,
            description VARCHAR
        );
        CREATE TABLE transaction (
            id SERIAL PRIMARY KEY,
            datetime TIMESTAMP WITH TIME ZONE NOT NULL,
            product_id INT NOT NULL,
            customer_id INT NOT NULL,
            amount INT NOT NULL
        );
    ").unwrap();
    // One RNG for all loops (previously re-created per iteration).
    let mut rng = thread_rng();
    for i in 0..10 {
        let c = Customer {
            // `id` mirrors the SERIAL column and is not part of the insert.
            id: i,
            name: rng.choose(&custname).unwrap().to_string(),
            age: Some(rng.gen_range(18, 55)),
            gender: rng.choose(&custsex).unwrap().to_owned()
        };
        conn.execute("INSERT INTO customer (name, age, gender) VALUES ($1, $2, $3)",
                     &[&c.name, &c.age, &c.gender]).unwrap();
    }
    for i in 0..15 {
        let c = Product {
            id: i,
            sku: rng.gen_range(100, 999),
            // Random price rounded to one decimal place.
            price: pricer(rng.gen_range(10.0f32, 100.0f32), 10.0),
            description: Some(rng.choose(&proddesc).unwrap().to_string())
        };
        conn.execute("INSERT INTO product (sku, price, description) VALUES ($1, $2, $3)",
                     &[&c.sku, &c.price, &c.description]).unwrap();
    }
    for i in 0u32..25 {
        // Day of month 1..=25 (July has 31 days, so always valid).
        let dd = i + 1;
        let c = Transaction {
            id: i as i32,
            datetime: UTC.ymd(2014, 7, dd).and_hms(12, 31, 15),
            product_id: rng.gen_range(1, 10),
            customer_id: rng.gen_range(1, 15),
            amount: rng.gen_range(1, 10)
        };
        conn.execute("INSERT INTO transaction (datetime, product_id, customer_id, amount) VALUES ($1, $2, $3, $4)",
                     &[&c.datetime, &c.product_id, &c.customer_id, &c.amount]).unwrap();
    }
}
|
use super::{Ipv4Addr, Ipv4Net, OsRng};
use rand::seq::SliceRandom;
/// Parses `val` as either a single port (`"80"`) or an inclusive range
/// (`"LPORT-HPORT"`) and returns the expanded list of ports.
///
/// When `randomize` is true the returned ports are shuffled.
///
/// # Errors
/// Returns a descriptive message when either side fails to parse as `u16`
/// or the low port exceeds the high port.
pub fn enum_port_range(val: &str, randomize: bool) -> Result<Vec<u16>, String> {
    let ports: Vec<&str> = val.split('-').collect();
    if ports.len() == 2 {
        let low_port: u16 = match ports[0].parse() {
            Ok(p) => p,
            Err(_) => return Err("Error parsing first port in the range.".to_string()),
        };
        let high_port: u16 = match ports[1].parse() {
            Ok(p) => p,
            Err(_) => return Err("Error parsing second port in the range.".to_string()),
        };
        if low_port > high_port {
            return Err("Error: low port is higher than upper port.".to_string());
        }
        let mut port_vec: Vec<u16> = (low_port..=high_port).collect();
        if randomize {
            port_vec.shuffle(&mut OsRng);
        }
        Ok(port_vec)
    } else if ports.len() == 1 {
        if let Ok(port) = ports[0].parse() {
            Ok(vec![port])
        } else {
            Err("Error parsing port.".to_string())
        }
    } else {
        // Fix: the message previously had an unbalanced opening parenthesis.
        Err("Error parsing range (verify format matches 'LPORT-HPORT')".to_string())
    }
}
/// Returns the first `host_count` host addresses of `subnet`.
///
/// # Errors
/// Errors when the subnet contains fewer than `host_count` hosts.
pub fn enum_subnet(host_count: usize, subnet: Ipv4Net) -> Result<Vec<Ipv4Addr>, String> {
    // Take only what is needed instead of materializing the whole subnet
    // first (a /8 would otherwise allocate ~16M addresses before truncating).
    let ip_addresses: Vec<Ipv4Addr> = subnet.hosts().take(host_count).collect();
    if ip_addresses.len() < host_count {
        return Err("Error: too few IPs for hosts.".to_string());
    }
    Ok(ip_addresses)
}
/// Removes every address listed in `exclude` from `subnet`.
///
/// `subnet` must be sorted ascending (as produced by `enum_subnet`), since
/// each address is located via binary search; addresses that are not present
/// are silently skipped.
pub fn exclude_addresses(exclude: Vec<Ipv4Addr>, subnet: &mut Vec<Ipv4Addr>) {
    for address in exclude {
        if let Ok(index) = subnet.binary_search(&address) {
            subnet.remove(index);
        }
    }
}
// Number of ways to choose `r` peers from a population of `n`.
//
// Factorials overflow quickly, so the binomial coefficient is built up
// incrementally instead (https://stackoverflow.com/a/12130280): after
// step i the accumulator holds C(n, i), which is always an integer, so
// the running division never truncates.
pub fn peer_combos(n: usize, r: usize) -> Result<usize, String> {
    if r > n {
        return Err(String::from("Error calculating peer combinations."));
    }
    let mut combos: usize = 1;
    for step in 1..=r {
        // Multiply by the next descending factor of n, then divide by step.
        combos = combos * (n + 1 - step) / step;
    }
    Ok(combos)
}
#[cfg(test)]
mod tests {
    use super::*;
    // Generates one #[test] per (population, sample, expected) tuple
    // exercising `peer_combos`.
    macro_rules! comb_tests {
        ($($name:ident: $value:expr,)*) => {
        $(
            #[test]
            fn $name() {
                let (n, r, q) = $value;
                assert_eq!(q, peer_combos(n, r).unwrap());
            }
        )*
        }
    }
    // Generates one #[test] per (input string, expected Result) pair
    // exercising `enum_port_range` with randomization disabled.
    macro_rules! enum_port_range_correctly {
        ($($name:ident: $value:expr,)*) => {
        $(
            #[test]
            fn $name() {
                let (q, r) = $value;
                assert_eq!(r, enum_port_range(&String::from(q), false));
            }
        )*
        }
    }
    // Generates one #[test] per (expected host count, CIDR string) pair
    // exercising `enum_subnet`.
    macro_rules! expected_return_amounts_enum_subnet {
        ($($name:ident: $value:expr,)*) => {
        $(
            #[test]
            fn $name() {
                let (q, r) = $value;
                assert_eq!(q, enum_subnet(q, r.parse::<Ipv4Net>().unwrap()).unwrap().len());
            }
        )*
        }
    }
    // Ensure calc_combinations() is correct.
    // Tuple: (population, sample, expected combinations)
    comb_tests! {
        fn_calc_combos_0_and_0: (0, 0, 1),
        fn_calc_combos_3_and_2: (3, 2, 3),
        fn_calc_combos_7_and_2: (7, 2, 21),
        fn_calc_combos_12_and_2: (12, 2, 66),
        fn_calc_combos_20_and_2: (20, 2, 190),
    }
    // Both valid ranges and each error message are pinned exactly.
    enum_port_range_correctly! {
        fn_enum_port_range_5_to_10: (("5-7"), Ok(vec![5,6,7]) ),
        fn_enum_port_range_500_to_600: (("500-502"), Ok(vec![500,501,502]) ),
        fn_enum_port_range_50k_to_51k: (("50000-50002"), Ok(vec![50000,50001,50002]) ),
        fn_enum_port_range_backwards: (("51002-50000"), Err(String::from("Error: low port is higher than upper port.")) ),
        fn_enum_port_range_first_invalid: (("80a-800"), Err(String::from("Error parsing first port in the range.")) ),
        fn_enum_port_range_second_invalid: (("800-80b"), Err(String::from("Error parsing second port in the range.")) ),
    }
    // Ensure that enumerating a subnet returns the expected
    // number of hosts
    expected_return_amounts_enum_subnet! {
        fn_enum_subnet_slash24_is_254: (254, "10.0.0.0/24"),
        fn_enum_subnet_slash25_is_126: (126, "10.0.0.0/25"),
        fn_enum_subnet_slash27_is_30: (30, "10.0.0.0/27"),
        fn_enum_subnet_slash28_is_14: (14, "10.0.0.0/28"),
        fn_enum_subnet_slash29_is_6: (6, "10.0.0.0/29"),
    }
}
|
#[cfg(test)]
use crate::model::Group;
use crate::solutions::i;
use crate::tests::samples;
// Empty input: no group can be returned.
#[test]
fn should_return_none_if_groups_is_empty() {
    let groups = Vec::new();
    let result = i::find_largest_group(&groups);
    assert!(result.is_none());
}
// A single group is trivially the largest, even with no members.
#[test]
fn should_return_group_if_groups_has_only_one_element() {
    let group = Group {
        name: "The Misfits of Science",
        members: Vec::new(),
    };
    let groups = vec![group];
    let result = i::find_largest_group(&groups);
    assert!(result.is_some());
    assert_eq!(result, groups.first());
}
// With several groups the largest one wins — presumably by member count;
// the sample data places the expected winner last in the vector.
#[test]
fn should_return_largest_group() {
    let groups = vec![samples::sinister_six(), samples::justice_league()];
    let result = i::find_largest_group(&groups);
    assert!(result.is_some());
    assert_eq!(result, groups.last());
}
|
pub mod json;
pub mod md;
use std::io;
use super::engine::CheckSuiteResult;
use self::json::JsonReport;
use self::md::MarkdownReport;
/// Output format of a generated report.
pub enum ReportType {
    Json,
    Markdown,
}
/// Factory for concrete `Report` implementations over a check-suite result.
pub struct Reporter<'a> {
    // The result being reported on.
    check_suite_result: &'a CheckSuiteResult<'a>,
    // Chosen output format.
    report_type: ReportType,
    // Output file; must be set via `with_filename` before `create` is called.
    filename: Option<&'a str>,
}
impl<'a> Reporter<'a> {
    /// Creates a reporter for `check_suite_result`.
    ///
    /// # Panics
    /// Panics when `report_type_str` is neither `"json"` nor `"markdown"`.
    pub fn new(check_suite_result: &'a CheckSuiteResult, report_type_str: &'a str) -> Reporter<'a> {
        let report_type = match report_type_str {
            "json" => ReportType::Json,
            "markdown" => ReportType::Markdown,
            // Fix: replaced the unhelpful "Mööp" panic with a message that
            // names the offending input.
            other => panic!("unsupported report type: {}", other),
        };
        Reporter {
            check_suite_result,
            report_type,
            filename: None,
        }
    }
    /// Sets the output filename; required before calling `create`.
    pub fn with_filename(&'a mut self, filename: &'a str) -> &'a mut Reporter<'a> {
        self.filename = Some(filename);
        self
    }
    /// Instantiates the concrete report for the configured type.
    ///
    /// # Panics
    /// Panics when no filename has been set.
    pub fn create(&self) -> Box<dyn Report<'a> + 'a> {
        // Single descriptive check instead of a bare unwrap per branch.
        let filename = self.filename.expect("Reporter::create called without a filename");
        match self.report_type {
            ReportType::Json => Box::new(JsonReport::new(self.check_suite_result, filename)),
            ReportType::Markdown => Box::new(MarkdownReport::new(self.check_suite_result, filename)),
        }
    }
}
/// A rendered report over a check-suite result.
pub trait Report<'a> {
    /// Renders the report to a string.
    fn as_string(&self) -> String;
    /// Writes the rendered report to its configured file.
    fn write_to_file(&self) -> io::Result<()>;
}
|
use nom::IResult;
use std::time::SystemTime;
use crypto::SessionKey;
use data::{Certificate, Hash, I2PDate, LeaseSet, RouterInfo, SessionTag, TunnelId};
pub mod frame;
//
// Common structures
//
/// One record of a tunnel build request.
/// NOTE(review): field meanings follow the I2NP tunnel-build message layout —
/// confirm against the I2P specification.
pub struct BuildRequestRecord {
    to_peer: Hash,
    receive_tid: TunnelId,
    our_ident: Hash,
    next_tid: TunnelId,
    next_ident: Hash,
    layer_key: SessionKey,
    iv_key: SessionKey,
    reply_key: SessionKey,
    reply_iv: [u8; 16],
    flag: u8,
    request_time: u32,
    send_msg_id: u32,
}
/// A peer's one-byte reply to a tunnel build request.
pub struct BuildResponseRecord {
    reply: u8,
}
//
// Messages
//
// Where to send a reply: a tunnel gateway identified by token, tunnel id and
// gateway hash.
struct ReplyPath {
    token: u32,
    tid: TunnelId,
    gateway: Hash,
}
// Payload of a DatabaseStore: either router info or a lease set.
enum DatabaseStoreData {
    RI(RouterInfo),
    LS(LeaseSet),
}
// Stores an entry in the network database, optionally requesting a reply.
struct DatabaseStore {
    key: Hash,
    ds_type: u8,
    reply: Option<ReplyPath>,
    data: DatabaseStoreData,
}
// Option flags of a DatabaseLookup.
struct DatabaseLookupFlags {
    delivery: bool,
    encryption: bool,
    lookup_type: u8,
}
// Looks up `key` in the network database on behalf of `from`.
struct DatabaseLookup {
    key: Hash,
    from: Hash,
    flags: DatabaseLookupFlags,
    reply_tid: Option<TunnelId>,
    excluded_peers: Vec<Hash>,
    reply_key: Option<SessionKey>,
    tags: Option<Vec<SessionTag>>,
}
// Negative lookup answer: closest known peers to the requested key.
struct DatabaseSearchReply {
    key: Hash,
    peers: Vec<Hash>,
    from: Hash,
}
// Acknowledgement carrying the id and timestamp of a delivered message.
struct DeliveryStatus {
    msg_id: u32,
    time_stamp: I2PDate,
}
/// How a single garlic clove should be delivered; optional fields are only
/// present for the matching delivery type / flags.
pub struct GarlicCloveDeliveryInstructions {
    encrypted: bool,
    delivery_type: u8,
    delay_set: bool,
    session_key: Option<SessionKey>,
    to_hash: Option<Hash>,
    tid: Option<TunnelId>,
    delay: Option<u32>,
}
/// One wrapped message inside a garlic message.
pub struct GarlicClove {
    delivery_instructions: GarlicCloveDeliveryInstructions,
    msg: Message,
    clove_id: u32,
    expiration: I2PDate,
    cert: Certificate,
}
// A bundle of cloves delivered together.
struct Garlic {
    cloves: Vec<GarlicClove>,
    cert: Certificate,
    msg_id: u32,
    expiration: I2PDate,
}
// A fixed-size (1024-byte) data frame travelling through tunnel `tid`.
struct TunnelData {
    tid: TunnelId,
    data: [u8; 1024],
}
impl TunnelData {
    /// Builds a `TunnelData` for tunnel `tid`, copying the fixed-size payload.
    fn from(tid: TunnelId, data: &[u8; 1024]) -> Self {
        let mut payload = [0u8; 1024];
        payload.copy_from_slice(data);
        TunnelData { tid, data: payload }
    }
}
// A variable-length frame handed to a tunnel gateway.
struct TunnelGateway {
    tid: TunnelId,
    data: Vec<u8>,
}
// All message payload kinds distinguished by this module.
enum MessagePayload {
    DatabaseStore(DatabaseStore),
    DatabaseLookup(DatabaseLookup),
    DatabaseSearchReply(DatabaseSearchReply),
    DeliveryStatus(DeliveryStatus),
    Garlic(Garlic),
    TunnelData(TunnelData),
    TunnelGateway(TunnelGateway),
    Data(Vec<u8>),
    // Tunnel build (reply) messages carry exactly 8 records of 528 bytes;
    // the variable variants carry any number of records.
    TunnelBuild([[u8; 528]; 8]),
    TunnelBuildReply([[u8; 528]; 8]),
    VariableTunnelBuild(Vec<[u8; 528]>),
    VariableTunnelBuildReply(Vec<[u8; 528]>),
}
/// A message envelope: id, expiration and typed payload.
pub struct Message {
    id: u32,
    expiration: I2PDate,
    payload: MessagePayload,
}
impl Message {
    /// Builds a throwaway `Data` message (id 0, expiring "now") with a
    /// 10-byte ascending payload — handy for tests and experiments.
    pub fn dummy_data() -> Self {
        let payload: Vec<u8> = (0u8..10).collect();
        Message {
            id: 0,
            expiration: I2PDate::from_system_time(SystemTime::now()),
            payload: MessagePayload::Data(payload),
        }
    }
}
|
use crate::matrix::{Matrix, MatrixOps};
/// A fully connected neural-network layer backed by a weights matrix.
/// `new_by_rand` and `call` treat the weights as (output_size x input_size),
/// i.e. rows = outputs, cols = inputs.
#[derive(Debug)]
pub struct Layer {
    // Number of inputs the layer accepts.
    input_size: usize,
    // Number of outputs the layer produces.
    output_size: usize,
    pub(crate) weights_matrix: Matrix,
}
impl Layer {
    /// Wraps an existing weights matrix of shape (output_size x input_size).
    ///
    /// Bug fix: the sizes were previously swapped (input_size was taken from
    /// rows). `new_by_rand` builds the weights as rows = output_size,
    /// cols = input_size, and `call` left-multiplies the input by the
    /// weights, so rows must be the output dimension.
    pub fn new(data: Matrix) -> Layer {
        Layer {
            input_size: data.cols,
            output_size: data.rows,
            weights_matrix: data,
        }
    }
    /// Creates a layer with randomly initialized weights of shape
    /// (output_size x input_size).
    pub fn new_by_rand(input_size: usize, output_size: usize) -> Layer {
        Layer {
            input_size,
            output_size,
            weights_matrix: Matrix::new_by_rand(output_size, input_size),
        }
    }
    /// Prints the layer dimensions (debugging aid).
    pub fn show(&self) {
        println!("[Layer] input size: {}", self.input_size);
        println!("[Layer] output size: {}", self.output_size);
        println!(
            "[Layer] weights matrix: {}x{}",
            self.weights_matrix.rows, self.weights_matrix.cols
        );
        // self.weights_matrix.show();
    }
    /// Forward pass: weights · input followed by an element-wise sigmoid.
    pub fn call(&self, input: &Matrix) -> Matrix {
        let mut res = self.weights_matrix.product(input);
        res.activate_sigmoid();
        res
    }
}
#[cfg(test)]
mod layer_tests {
    use crate::layer::Layer;
    use crate::matrix::{Matrix, MatrixOps};
    // Smoke test: printing a random layer must not panic.
    #[test]
    fn test_show() {
        let layer = Layer::new_by_rand(3, 3);
        layer.show();
    }
    // Forward pass on a 3x3 layer with a 3x1 input column vector.
    #[test]
    fn test_call() {
        let weights = Matrix::new(vec![
            vec![0.9, 0.3, 0.4],
            vec![0.2, 0.8, 0.2],
            vec![0.1, 0.5, 0.6],
        ]);
        let layer = Layer::new(weights);
        let inputs = Matrix::new(vec![vec![0.9, 0.1, 0.8]]);
        // Transpose the row vector into the column shape `call` expects.
        let inputs = inputs.transpose();
        println!("Inputs:");
        let result = layer.call(&inputs);
        result.show();
    }
}
|
#![allow(missing_docs)]
use currency::assets::{Fee, Fees};
use decimal::UFract64;
/// Builder for a complete `Fees` schedule; all three fee kinds (trade,
/// exchange, transfer) must be set before `build` is called.
pub struct Builder {
    trade: Option<Fee>,
    exchange: Option<Fee>,
    transfer: Option<Fee>,
}
impl Builder {
    /// Creates a builder with no fees configured.
    pub fn new() -> Self {
        Builder {
            trade: None,
            exchange: None,
            transfer: None,
        }
    }
    /// Sets the trade fee (fixed amount plus fraction).
    pub fn trade(self, fixed: u64, fraction: UFract64) -> Self {
        Builder {
            trade: Some(Fee::new(fixed, fraction)),
            ..self
        }
    }
    /// Sets the exchange fee (fixed amount plus fraction).
    pub fn exchange(self, fixed: u64, fraction: UFract64) -> Self {
        Builder {
            exchange: Some(Fee::new(fixed, fraction)),
            ..self
        }
    }
    /// Sets the transfer fee (fixed amount plus fraction).
    pub fn transfer(self, fixed: u64, fraction: UFract64) -> Self {
        Builder {
            transfer: Some(Fee::new(fixed, fraction)),
            ..self
        }
    }
    /// Finalizes the fee schedule.
    ///
    /// # Panics
    /// Panics with a message naming the missing fee when any of the three
    /// fees has not been set (previously a bare assert + unwrap gave no hint
    /// which one was missing).
    pub fn build(self) -> Fees {
        Fees::new(
            self.trade.expect("trade fee is not set"),
            self.exchange.expect("exchange fee is not set"),
            self.transfer.expect("transfer fee is not set"),
        )
    }
}
|
use std::collections::HashMap;
use std::fs;
use std::io;
// A 2-D grid coordinate; signed so offsets/differences are representable.
#[derive(Eq, PartialEq, Hash, Clone, Copy, Debug)]
struct Point {
    x: isize,
    y: isize,
}
impl std::ops::Sub for Point {
    type Output = Point;
    /// Component-wise subtraction.
    fn sub(self, rhs: Point) -> Point {
        Point {
            x: self.x - rhs.x,
            y: self.y - rhs.y,
        }
    }
}
impl std::ops::Add for Point {
    type Output = Point;
    /// Component-wise addition.
    fn add(self, rhs: Point) -> Point {
        Point {
            x: self.x + rhs.x,
            y: self.y + rhs.y,
        }
    }
}
impl std::ops::AddAssign for Point {
    /// In-place component-wise addition.
    fn add_assign(&mut self, rhs: Self) {
        self.x += rhs.x;
        self.y += rhs.y;
    }
}
// Sparse asteroid map: a key is present (value `true`) iff an asteroid
// occupies that point.
type Map = HashMap<Point, bool>;
// A parsed map together with its grid dimensions.
struct MapMeta {
    width: isize,
    height: isize,
    map: Map,
}
/// Prints `view` as an ASCII grid ('#' = asteroid present, '.' = empty).
/// Debugging aid only; output goes to stdout.
fn display_view(view: &Map, width: isize, height: isize) {
    for y in 0..height {
        for x in 0..width {
            match view.get(&Point { x, y }) {
                Some(_) => print!("#"),
                None => print!("."),
            }
        }
        // Idiom fix: `println!()` instead of `println!("")`.
        println!();
    }
    println!();
}
/// Parses an ASCII asteroid grid ('#' marks an asteroid) into a `MapMeta`.
/// Width is taken from the first line; height from the line count.
fn string_to_map(input: &str) -> MapMeta {
    let height = input.lines().count() as isize;
    let width = input.lines().next().unwrap().len() as isize;
    let mut map = HashMap::new();
    for (y, line) in input.lines().enumerate() {
        for (x, ch) in line.chars().enumerate() {
            if ch == '#' {
                map.insert(Point { x: x as isize, y: y as isize }, true);
            }
        }
    }
    MapMeta { width, height, map }
}
/// True when `point` lies outside the `width` x `height` grid.
fn out_of_bounds(width: isize, height: isize, point: Point) -> bool {
    // Bug fix: the y upper bound used `>` (instead of `>=` as for x),
    // wrongly treating the row y == height as in-bounds.
    point.x < 0 || point.x >= width || point.y < 0 || point.y >= height
}
/// Casts a shadow from the asteroid candidate at `to_check`, as seen from
/// `origin`: walking outward from `to_check` in steps of (to_check - origin),
/// the first occupied cell stays visible and every occupied cell beyond it is
/// removed from `view`.
fn do_coord(map: &MapMeta, view: &mut Map, origin: Point, to_check: Point) {
    // Coordinates outside the grid are ignored.
    if out_of_bounds(map.width, map.height, to_check) {
        return;
    }
    let offset = to_check - origin;
    // Robustness: a zero step (to_check == origin) would loop forever below.
    // Callers never pass the origin itself, but guard anyway.
    if offset.x == 0 && offset.y == 0 {
        return;
    }
    let mut shadow = to_check;
    let mut blocked = false;
    while !out_of_bounds(map.width, map.height, shadow) {
        if view.contains_key(&shadow) {
            if blocked {
                // Behind the first hit: this asteroid is hidden.
                view.remove(&shadow);
            } else {
                // First asteroid along the ray stays visible and blocks the rest.
                blocked = true;
            }
        }
        shadow += offset;
    }
}
// Computes the set of asteroids visible from `origin` by walking expanding
// square rings outward and casting shadows from each ring cell via `do_coord`.
fn calc_view(map: &MapMeta, origin: Point) -> Map {
    // Start from a copy of the full map and carve out everything hidden.
    let mut view = map.map.clone();
    view.remove(&origin);
    // Bounds of the current ring around the origin.
    let mut left = origin.x - 1;
    let mut right = origin.x + 1;
    let mut top = origin.y - 1;
    let mut bottom = origin.y + 1;
    // Expand ring by ring until it lies entirely outside the grid.
    while left >= 0 || right < map.width
        || top >= 0 || bottom < map.height {
        // top and bottom edge
        for x in left..=right {
            do_coord(&map, &mut view, origin, Point{x, y: top});
            do_coord(&map, &mut view, origin, Point{x, y: bottom});
        }
        // left and right edge (corners already handled above)
        for y in (top + 1)..bottom {
            do_coord(&map, &mut view, origin, Point{x: right, y});
            do_coord(&map, &mut view, origin, Point{x: left, y});
        }
        left -= 1;
        right += 1;
        top -= 1;
        bottom += 1;
    }
    view
}
/// Returns the highest number of other asteroids visible from any asteroid
/// on the map (0 for an empty map).
fn find_best_roid(map: MapMeta) -> usize {
    // `len()` replaces the O(n) `iter().count()`, and the manual
    // running-maximum loop becomes an iterator max.
    map.map
        .keys()
        .map(|point| calc_view(&map, *point).len())
        .max()
        .unwrap_or(0)
}
/// Reads the puzzle input and prints the best asteroid's visibility count.
fn main() -> io::Result<()> {
    let contents = fs::read_to_string("input.txt")?;
    let best_count = find_best_roid(string_to_map(&contents));
    println!("{:?}", best_count);
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    extern crate test_case;
    use test_case::test_case;
    // Fixed-point check from the AoC day-10 example: from (2,2) exactly 7
    // asteroids are visible.
    #[test]
    fn calc_view_test() {
        let map = string_to_map(".#..#
.....
#####
....#
...##");
        let view = calc_view(&map, Point{x: 2, y: 2});
        assert_eq!(view.iter().count(), 7);
    }
    // Whole-map best-visibility counts from the published examples.
    #[test_case(".#..#
.....
#####
....#
...##" => 8; "example 1")]
    #[test_case("......#.#.
#..#.#....
..#######.
.#.#.###..
.#..#.....
..#....#.#
#..#....#.
.##.#..###
##...#..#.
.#....####" => 33; "example 2")]
    #[test_case(".#..##.###...#######
##.############..##.
.#.######.########.#
.###.#######.####.#.
#####.##.#.##.###.##
..#####..#.#########
####################
#.####....###.#.#.##
##.#################
#####.##.###..####..
..######..##.#######
####.##.####...##..#
.#####..#.######.###
##...#.##########...
#.##########.#######
.####.#.###.###.#.##
....##.##.###..#####
.#.#.###########.###
#.#.#.#####.####.###
###.##.####.##.#..##" => 210; "example 5")]
    fn test_find(input: &str) -> usize {
        find_best_roid(string_to_map(input))
    }
}
|
use log::{SetLoggerError, LevelFilter, Record, Level, Metadata, Log, set_logger, set_max_level, max_level};
use std::env;
// Single global logger instance registered by `init`.
static LOGGER: SimpleLogger = SimpleLogger;
/// Minimal logger that prints every enabled record to stdout.
pub struct SimpleLogger;
impl Log for SimpleLogger {
    /// A record is enabled when its level does not exceed the configured
    /// global maximum.
    fn enabled(&self, metadata: &Metadata) -> bool {
        metadata.level() <= max_level()
    }
    /// Prints the record to stdout when it is enabled.
    fn log(&self, record: &Record) {
        if !self.enabled(record.metadata()) {
            return;
        }
        println!("[{}][JDBC-rs] {}", record.level(), record.args());
    }
    /// Stdout needs no explicit flushing.
    fn flush(&self) {}
}
pub fn init() {
let level = match env::var("LOG_LEVEL") {
Ok(x) => x.to_uppercase(),
Err(e) => String::from("INFO"),
};
let level_filter = match level.as_ref() {
"TRACE" => LevelFilter::Trace,
"DEBUG" => LevelFilter::Debug,
"INFO" => LevelFilter::Info,
"ERROR" => LevelFilter::Error,
"WARNING" => LevelFilter::Warn,
_ => LevelFilter::Info,
};
set_logger(&LOGGER).map(|()| set_max_level(level_filter)).unwrap();
} |
#[doc = r" Value read from the register"]
pub struct R {
    // Raw 32-bit snapshot of the register.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw 32-bit value staged for the write.
    bits: u32,
}
impl super::SUBMODCTRL {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: snapshot the register, let the closure edit a
        // writable copy seeded with the same bits, then store the result.
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // The closure starts from the reset value, not the current contents.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = "Possible values of the field `SMOD1TYPE`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SMOD1TYPER {
    #[doc = "SPI Master submodule value."]
    MSPI,
    #[doc = "MI2C submodule value."]
    I2C_MASTER,
    #[doc = "SPI Slave submodule value."]
    SSPI,
    #[doc = "I2C Slave submodule value."]
    SI2C,
    #[doc = "NOT INSTALLED value."]
    NA,
    #[doc = r" Reserved"]
    _Reserved(u8),
}
impl SMOD1TYPER {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        match *self {
            SMOD1TYPER::MSPI => 0,
            SMOD1TYPER::I2C_MASTER => 1,
            SMOD1TYPER::SSPI => 2,
            SMOD1TYPER::SI2C => 3,
            SMOD1TYPER::NA => 7,
            SMOD1TYPER::_Reserved(bits) => bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> SMOD1TYPER {
        // Inverse of `bits`; values 4-6 and >7 fall into `_Reserved`.
        match value {
            0 => SMOD1TYPER::MSPI,
            1 => SMOD1TYPER::I2C_MASTER,
            2 => SMOD1TYPER::SSPI,
            3 => SMOD1TYPER::SI2C,
            7 => SMOD1TYPER::NA,
            i => SMOD1TYPER::_Reserved(i),
        }
    }
    #[doc = "Checks if the value of the field is `MSPI`"]
    #[inline]
    pub fn is_mspi(&self) -> bool {
        *self == SMOD1TYPER::MSPI
    }
    #[doc = "Checks if the value of the field is `I2C_MASTER`"]
    #[inline]
    pub fn is_i2c_master(&self) -> bool {
        *self == SMOD1TYPER::I2C_MASTER
    }
    #[doc = "Checks if the value of the field is `SSPI`"]
    #[inline]
    pub fn is_sspi(&self) -> bool {
        *self == SMOD1TYPER::SSPI
    }
    #[doc = "Checks if the value of the field is `SI2C`"]
    #[inline]
    pub fn is_si2c(&self) -> bool {
        *self == SMOD1TYPER::SI2C
    }
    #[doc = "Checks if the value of the field is `NA`"]
    #[inline]
    pub fn is_na(&self) -> bool {
        *self == SMOD1TYPER::NA
    }
}
#[doc = r" Value of the field"]
pub struct SMOD1ENR {
    // Single-bit field value.
    bits: bool,
}
impl SMOD1ENR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = "Possible values of the field `SMOD0TYPE`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SMOD0TYPER {
    #[doc = "MSPI submodule value."]
    SPI_MASTER,
    #[doc = "I2C Master submodule value."]
    I2C_MASTER,
    #[doc = "SPI Slave submodule value."]
    SSPI,
    #[doc = "I2C Slave submodule value."]
    SI2C,
    #[doc = "NOT INSTALLED value."]
    NA,
    #[doc = r" Reserved"]
    _Reserved(u8),
}
impl SMOD0TYPER {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        match *self {
            SMOD0TYPER::SPI_MASTER => 0,
            SMOD0TYPER::I2C_MASTER => 1,
            SMOD0TYPER::SSPI => 2,
            SMOD0TYPER::SI2C => 3,
            SMOD0TYPER::NA => 7,
            SMOD0TYPER::_Reserved(bits) => bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> SMOD0TYPER {
        // Inverse of `bits`; values 4-6 and >7 fall into `_Reserved`.
        match value {
            0 => SMOD0TYPER::SPI_MASTER,
            1 => SMOD0TYPER::I2C_MASTER,
            2 => SMOD0TYPER::SSPI,
            3 => SMOD0TYPER::SI2C,
            7 => SMOD0TYPER::NA,
            i => SMOD0TYPER::_Reserved(i),
        }
    }
    #[doc = "Checks if the value of the field is `SPI_MASTER`"]
    #[inline]
    pub fn is_spi_master(&self) -> bool {
        *self == SMOD0TYPER::SPI_MASTER
    }
    #[doc = "Checks if the value of the field is `I2C_MASTER`"]
    #[inline]
    pub fn is_i2c_master(&self) -> bool {
        *self == SMOD0TYPER::I2C_MASTER
    }
    #[doc = "Checks if the value of the field is `SSPI`"]
    #[inline]
    pub fn is_sspi(&self) -> bool {
        *self == SMOD0TYPER::SSPI
    }
    #[doc = "Checks if the value of the field is `SI2C`"]
    #[inline]
    pub fn is_si2c(&self) -> bool {
        *self == SMOD0TYPER::SI2C
    }
    #[doc = "Checks if the value of the field is `NA`"]
    #[inline]
    pub fn is_na(&self) -> bool {
        *self == SMOD0TYPER::NA
    }
}
// svd2rust-generated read proxy for the single-bit SMOD0EN field.
#[doc = r" Value of the field"]
pub struct SMOD0ENR {
    bits: bool,
}
impl SMOD0ENR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
// svd2rust-generated write-side enum for SMOD1TYPE; no `_Reserved` variant
// because only the documented encodings may be written.
#[doc = "Values that can be written to the field `SMOD1TYPE`"]
pub enum SMOD1TYPEW {
    #[doc = "SPI Master submodule value."]
    MSPI,
    #[doc = "MI2C submodule value."]
    I2C_MASTER,
    #[doc = "SPI Slave submodule value."]
    SSPI,
    #[doc = "I2C Slave submodule value."]
    SI2C,
    #[doc = "NOT INSTALLED value."]
    NA,
}
impl SMOD1TYPEW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        match *self {
            SMOD1TYPEW::MSPI => 0,
            SMOD1TYPEW::I2C_MASTER => 1,
            SMOD1TYPEW::SSPI => 2,
            SMOD1TYPEW::SI2C => 3,
            SMOD1TYPEW::NA => 7,
        }
    }
}
// svd2rust-generated write proxy for SMOD1TYPE (bits 5:7 of the register).
#[doc = r" Proxy"]
pub struct _SMOD1TYPEW<'a> {
    w: &'a mut W,
}
impl<'a> _SMOD1TYPEW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: SMOD1TYPEW) -> &'a mut W {
        // Safe because the enum only yields the documented encodings.
        unsafe { self.bits(variant._bits()) }
    }
    #[doc = "SPI Master submodule value."]
    #[inline]
    pub fn mspi(self) -> &'a mut W {
        self.variant(SMOD1TYPEW::MSPI)
    }
    #[doc = "MI2C submodule value."]
    #[inline]
    pub fn i2c_master(self) -> &'a mut W {
        self.variant(SMOD1TYPEW::I2C_MASTER)
    }
    #[doc = "SPI Slave submodule value."]
    #[inline]
    pub fn sspi(self) -> &'a mut W {
        self.variant(SMOD1TYPEW::SSPI)
    }
    #[doc = "I2C Slave submodule value."]
    #[inline]
    pub fn si2c(self) -> &'a mut W {
        self.variant(SMOD1TYPEW::SI2C)
    }
    #[doc = "NOT INSTALLED value."]
    #[inline]
    pub fn na(self) -> &'a mut W {
        self.variant(SMOD1TYPEW::NA)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear bits 5:7 then OR in the masked value.
        const MASK: u8 = 7;
        const OFFSET: u8 = 5;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// svd2rust-generated write proxy for SMOD1EN (bit 4 of the register).
#[doc = r" Proxy"]
pub struct _SMOD1ENW<'a> {
    w: &'a mut W,
}
impl<'a> _SMOD1ENW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 4;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// svd2rust-generated write-side enum for SMOD0TYPE.
#[doc = "Values that can be written to the field `SMOD0TYPE`"]
pub enum SMOD0TYPEW {
    #[doc = "MSPI submodule value."]
    SPI_MASTER,
    #[doc = "I2C Master submodule value."]
    I2C_MASTER,
    #[doc = "SPI Slave submodule value."]
    SSPI,
    #[doc = "I2C Slave submodule value."]
    SI2C,
    #[doc = "NOT INSTALLED value."]
    NA,
}
impl SMOD0TYPEW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        match *self {
            SMOD0TYPEW::SPI_MASTER => 0,
            SMOD0TYPEW::I2C_MASTER => 1,
            SMOD0TYPEW::SSPI => 2,
            SMOD0TYPEW::SI2C => 3,
            SMOD0TYPEW::NA => 7,
        }
    }
}
// svd2rust-generated write proxy for SMOD0TYPE (bits 1:3 of the register).
#[doc = r" Proxy"]
pub struct _SMOD0TYPEW<'a> {
    w: &'a mut W,
}
impl<'a> _SMOD0TYPEW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: SMOD0TYPEW) -> &'a mut W {
        // Safe because the enum only yields the documented encodings.
        unsafe { self.bits(variant._bits()) }
    }
    #[doc = "MSPI submodule value."]
    #[inline]
    pub fn spi_master(self) -> &'a mut W {
        self.variant(SMOD0TYPEW::SPI_MASTER)
    }
    #[doc = "I2C Master submodule value."]
    #[inline]
    pub fn i2c_master(self) -> &'a mut W {
        self.variant(SMOD0TYPEW::I2C_MASTER)
    }
    #[doc = "SPI Slave submodule value."]
    #[inline]
    pub fn sspi(self) -> &'a mut W {
        self.variant(SMOD0TYPEW::SSPI)
    }
    #[doc = "I2C Slave submodule value."]
    #[inline]
    pub fn si2c(self) -> &'a mut W {
        self.variant(SMOD0TYPEW::SI2C)
    }
    #[doc = "NOT INSTALLED value."]
    #[inline]
    pub fn na(self) -> &'a mut W {
        self.variant(SMOD0TYPEW::NA)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear bits 1:3 then OR in the masked value.
        const MASK: u8 = 7;
        const OFFSET: u8 = 1;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// svd2rust-generated write proxy for SMOD0EN (bit 0 of the register).
#[doc = r" Proxy"]
pub struct _SMOD0ENW<'a> {
    w: &'a mut W,
}
impl<'a> _SMOD0ENW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 0;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// Read accessors for the module-type register.
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    // Doc fix: this field describes submodule 1 (bits 4-7 belong to
    // submodule 1, as the SMOD1EN doc below confirms); the generated text
    // said "Submodule 0", a copy-paste of the SMOD0TYPE description.
    #[doc = "Bits 5:7 - Submodule 1 module type. This is the I2C Master interface"]
    #[inline]
    pub fn smod1type(&self) -> SMOD1TYPER {
        SMOD1TYPER::_from({
            const MASK: u8 = 7;
            const OFFSET: u8 = 5;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        })
    }
    #[doc = "Bit 4 - Submodule 1 enable (1) or disable (0)"]
    #[inline]
    pub fn smod1en(&self) -> SMOD1ENR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 4;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        SMOD1ENR { bits }
    }
    #[doc = "Bits 1:3 - Submodule 0 module type. This is the SPI Master interface."]
    #[inline]
    pub fn smod0type(&self) -> SMOD0TYPER {
        SMOD0TYPER::_from({
            const MASK: u8 = 7;
            const OFFSET: u8 = 1;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        })
    }
    #[doc = "Bit 0 - Submodule 0 enable (1) or disable (0)"]
    #[inline]
    pub fn smod0en(&self) -> SMOD0ENR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        SMOD0ENR { bits }
    }
}
// Write accessors for the module-type register.
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        // 32 = 0x20: SMOD1TYPE (bits 5:7) resets to 1, all other fields to 0.
        W { bits: 32 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    // Doc fix: bits 5:7 configure submodule 1 (see SMOD1EN below); the
    // generated text said "Submodule 0" by copy-paste.
    #[doc = "Bits 5:7 - Submodule 1 module type. This is the I2C Master interface"]
    #[inline]
    pub fn smod1type(&mut self) -> _SMOD1TYPEW {
        _SMOD1TYPEW { w: self }
    }
    #[doc = "Bit 4 - Submodule 1 enable (1) or disable (0)"]
    #[inline]
    pub fn smod1en(&mut self) -> _SMOD1ENW {
        _SMOD1ENW { w: self }
    }
    #[doc = "Bits 1:3 - Submodule 0 module type. This is the SPI Master interface."]
    #[inline]
    pub fn smod0type(&mut self) -> _SMOD0TYPEW {
        _SMOD0TYPEW { w: self }
    }
    #[doc = "Bit 0 - Submodule 0 enable (1) or disable (0)"]
    #[inline]
    pub fn smod0en(&mut self) -> _SMOD0ENW {
        _SMOD0ENW { w: self }
    }
}
|
use crate::color::{color, Color};
use crate::hittable::HitRecord;
use crate::material::{Material, MaterialType};
use crate::ray::Ray;
use crate::texture::{Texture, TextureColor};
use crate::vec::Vec3;
use std::sync::Arc;
// Diffuse
#[derive(Debug, Clone)]
// Diffuse light material: emits the color of `emit` from front-facing hits.
pub struct Diffuse {
    // Emission texture sampled in `Material::emitted`.
    pub emit: Texture,
}
impl Diffuse {
    /// Wraps `emit` in a diffuse-light material, shared via `Arc` so it can
    /// be attached to multiple hittables.
    pub fn new(emit: Texture) -> Arc<MaterialType> {
        // Field-init shorthand (was the redundant `emit: emit`).
        Arc::new(MaterialType::from(Diffuse { emit }))
    }
}
impl Material for Diffuse {
    /// Front faces sample the emission texture at `(u, v, p)`; back faces
    /// emit black.
    fn emitted(&self, _ray: &Ray, hit: &HitRecord, u: f64, v: f64, p: Vec3) -> Color {
        if hit.front_face {
            self.emit.value(u, v, p)
        } else {
            color(0.0, 0.0, 0.0)
        }
    }
}
|
use std::env;
use std::fs;
use std::process;
/// Minimal `rm`-like utility: deletes every path given on the command line.
/// Exits 1 when invoked without arguments or when any removal fails.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        eprintln!("{}: no arguments", args[0]);
        process::exit(1);
    }
    let mut failed = false;
    for path in &args[1..] {
        if let Err(why) = fs::remove_file(path) {
            // BUG FIX: the original printed `&args[1]` (the first *file*
            // argument) for every failure; report the program name and the
            // actual failing path, without Debug quoting.
            eprintln!("{}: {}: {}", args[0], path, why);
            failed = true;
        }
    }
    process::exit(if failed { 1 } else { 0 });
}
|
use crate::{
order::{output::OutputStream, parameters::ParameterValue},
tools,
};
use ffmpeg_sys_next::*;
#[derive(Debug)]
// Owns a raw FFmpeg codec context for one subtitle output stream; freed in
// the `Drop` impl below.
pub struct SubtitleEncoder {
    pub identifier: String,
    pub stream_index: isize,
    // Allocated by avcodec_alloc_context3 in `new`; may be dereferenced only
    // while non-null.
    pub codec_context: *mut AVCodecContext,
}
impl SubtitleEncoder {
    /// Allocates an FFmpeg codec context for the codec named by
    /// `output_stream.codec`, applying the optional `frame_rate` parameter
    /// as the context time base (inverted frame rate).
    ///
    /// # Errors
    /// Returns a message if the codec cannot be found or the context cannot
    /// be allocated.
    pub fn new(
        identifier: String,
        stream_index: isize,
        output_stream: &OutputStream,
    ) -> Result<Self, String> {
        unsafe {
            let codec = tools::get_codec(&output_stream.codec);
            if codec.is_null() {
                // Fixed grammar in the error string ("found" -> "find").
                return Err(format!("Unable to find codec {}", output_stream.codec));
            }
            let codec_context = avcodec_alloc_context3(codec);
            // avcodec_alloc_context3 returns NULL on allocation failure; the
            // original dereferenced the pointer unconditionally below.
            if codec_context.is_null() {
                return Err(format!(
                    "Unable to allocate codec context for {}",
                    output_stream.codec
                ));
            }
            if let Some(ParameterValue::Rational(data)) =
                output_stream.parameters.get("frame_rate")
            {
                (*codec_context).time_base = data.clone().invert().into();
            }
            Ok(SubtitleEncoder {
                identifier,
                stream_index,
                codec_context,
            })
        }
    }
}
impl Drop for SubtitleEncoder {
    // Releases the FFmpeg codec context exactly once when the encoder drops.
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `codec_context` is either null or a pointer obtained
            // from avcodec_alloc_context3 in `new`; avcodec_free_context also
            // nulls the pointer, preventing a double free.
            if !self.codec_context.is_null() {
                // NOTE(review): newer FFmpeg deprecates avcodec_close in
                // favour of avcodec_free_context alone — confirm against the
                // ffmpeg-sys-next version pinned by this crate.
                avcodec_close(self.codec_context);
                avcodec_free_context(&mut self.codec_context);
            }
        }
    }
}
|
// Copyright 2021 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use oauth2_types::requests::AuthorizationRequest;
use tide::{Body, Request, Response};
use crate::state::State;
/// Echoes the parsed OAuth2 authorization request back as JSON with a 200.
pub async fn get(req: Request<State>) -> tide::Result {
    let params: AuthorizationRequest = req.query()?;
    let response = Response::builder(200)
        .body(Body::from_json(&params)?)
        .build();
    Ok(response)
}
|
extern crate serial;
mod msg_gen;
use cm_com::serial::prelude::*;
use std::io::prelude::*;
use self::msg_gen::OpCodes;
use std::{thread, time};
/// Opens `port`, configures it for the device protocol (2400 baud, 8 data
/// bits, odd parity, 1 stop bit, no flow control), raises DTR / drops RTS,
/// then runs a scripted exchange: alive check, firmware unlock, and playing
/// sound 0.  Each step waits one second and dumps the raw reply buffer.
///
/// # Errors
/// Propagates any serial open/configure/I/O error.
pub fn connect(port: String) -> Result<(), serial::Error> {
    // One request/response round trip: write `msg`, wait `delay`, read into
    // `buf`, and print both the read result and the buffer (the original
    // repeated this sequence verbatim three times).
    fn exchange<P: Read + Write>(
        sp: &mut P,
        msg: &[u8],
        buf: &mut [u8],
        delay: time::Duration,
    ) -> std::io::Result<()> {
        sp.write_all(msg)?;
        thread::sleep(delay);
        match sp.read(buf) {
            Ok(n) => println!("{:?}", n),
            Err(e) => eprintln!("{:?}", e),
        }
        println!("{:?}", &buf);
        Ok(())
    }
    // Try to open a connection to the serial port (was a manual match).
    let mut sp = serial::open(&port)?;
    // Connection opened successfully --> reconfigure the port.
    sp.reconfigure(&|settings| {
        settings.set_baud_rate(serial::Baud2400)?;
        settings.set_char_size(serial::Bits8);
        settings.set_parity(serial::ParityOdd);
        settings.set_stop_bits(serial::Stop1);
        settings.set_flow_control(serial::FlowNone);
        Ok(())
    })?;
    // Set signal pins.
    sp.set_dtr(true)?;
    sp.set_rts(false)?;
    // Connection test.
    let one_second = time::Duration::from_millis(1000);
    let mut serial_buf: Vec<u8> = vec![0; 100];
    exchange(&mut sp, &msg_gen::gen_msg(OpCodes::Alive, false), &mut serial_buf, one_second)?;
    exchange(&mut sp, &msg_gen::gen_msg(OpCodes::UnlockFirmware, false), &mut serial_buf, one_second)?;
    exchange(&mut sp, &msg_gen::gen_msg(OpCodes::PlaySound(0), false), &mut serial_buf, one_second)?;
    Ok(())
}
|
mod actions;
mod checker;
mod keys;
mod operator;
mod splitter;
pub use self::keys::Primitive;
pub(crate) use self::{
actions::{Action, AssignmentActions, AssignmentError}, checker::{is_array, value_check},
keys::{Key, KeyBuf, KeyIterator, TypeError}, operator::Operator, splitter::split_assignment,
};
use types::{Array, Value};
#[derive(Debug, PartialEq)]
// Result of evaluating an assignment's right-hand side: either a single
// string value or an array of values.
pub(crate) enum ReturnValue {
    Str(Value),
    Vector(Array),
}
|
//! Solutions to the Cryptopals crypto challenges.
//!
//! <https://cryptopals.com/>
extern crate ansi_term;
extern crate rand;
pub mod attacks;
pub mod challenges;
pub mod utils;
pub mod victims;
// Runs each completed Cryptopals challenge in order and prints its result.
// NOTE(review): the `challengeNN` functions come from the project's
// `challenges` module; their return types are only known to be Display-able.
fn main() {
    // Run the challenges in Set 1.
    println!("{}", challenges::set1::challenge01());
    println!("{}", challenges::set1::challenge02());
    println!("{}", challenges::set1::challenge03());
    println!("{}", challenges::set1::challenge04());
    println!("{}", challenges::set1::challenge05());
    println!("{}", challenges::set1::challenge06());
    println!("{}", challenges::set1::challenge07());
    println!("{}", challenges::set1::challenge08());
    // Run the challenges in Set 2.
    println!("{}", challenges::set2::challenge09());
    println!("{}", challenges::set2::challenge10());
    println!("{}", challenges::set2::challenge11());
    println!("{}", challenges::set2::challenge12());
    println!("{}", challenges::set2::challenge13());
    println!("{}", challenges::set2::challenge14());
    println!("{}", challenges::set2::challenge15());
    println!("{}", challenges::set2::challenge16());
}
|
#![feature(lang_items, asm)]
#![no_std]
#[macro_use]
extern crate lazy_static;
pub mod vga; |
use std::io::BufRead;
use std::io::BufReader;
use std::io::BufWriter;
use std::io::Write;
use std::net::TcpStream;
use output;
use output::Output;
use ::zbackup::repository::*;
use ::misc::*;
/// Serves one client connection: logs connect/disconnect and reports any
/// error produced while handling it.
pub fn handle_client(repository: &Repository, stream: TcpStream) {
    let peer_address = stream.peer_addr().unwrap();
    println!("Connection from: {}", peer_address);
    match handle_client_real(repository, stream) {
        Ok(_) => println!("Disconnection from: {}", peer_address),
        Err(error) => println!("Error from: {}: {}", peer_address, error),
    }
}
/// Command loop for one connected client.
///
/// Reads newline-terminated `<command> [args]` lines and dispatches them.
/// `exit`, `restore` and `status` end the session; `reindex` and
/// unrecognised commands keep the loop running.  An empty read (EOF) is a
/// clean disconnect.
fn handle_client_real(repository: &Repository, stream: TcpStream) -> Result<(), String> {
    let mut reader = BufReader::new(&stream);
    loop {
        let mut line = String::new();
        io_result(reader.read_line(&mut line))?;
        // Empty read means the peer closed the connection.
        if line.is_empty() {
            println!("Disconnect");
            return Ok(());
        }
        let parts: Vec<&str> = line.splitn(2, ' ').collect();
        let command = parts[0].trim().to_lowercase();
        let rest = parts.get(1).map_or("", |part| part.trim());
        let output = output::pipe();
        match command.as_str() {
            "exit" => {
                println!("Exiting");
                return Ok(());
            }
            "reindex" => {
                handle_reindex(&output, repository, &stream)?;
            }
            "restore" => {
                handle_restore(&output, repository, &stream, rest)?;
                return Ok(());
            }
            "status" => {
                handle_status(&output, repository, &stream)?;
                return Ok(());
            }
            _ => {
                handle_command_not_recognised(&stream, &command)?;
            }
        }
    }
}
/// Reloads the repository indexes and acknowledges with `OK` on the wire.
fn handle_reindex(
    output: &Output,
    repository: &Repository,
    stream: &TcpStream,
) -> Result<(), String> {
    output.message("Will reindex");
    let mut writer = BufWriter::new(stream);
    repository
        .reload_indexes(output)
        .map_err(|error| format!("Error during reindex: {}", error))?;
    io_result(writer.write_fmt(format_args!("OK\n")))?;
    Ok(())
}
/// Streams the archive at `path` back to the client.
fn handle_restore(
    output: &Output,
    repository: &Repository,
    stream: &TcpStream,
    path: &str,
) -> Result<(), String> {
    output.message_format(format_args!("Will restore: {}", path));
    let mut writer = BufWriter::new(stream);
    // Acknowledge first; the restored bytes follow on the same stream.
    io_result(writer.write_fmt(format_args!("OK\n")))?;
    repository.restore(output, path, &mut writer)?;
    Ok(())
}
/// Writes repository status to the client: an `OK` line followed by a
/// YAML-ish report of chunk-cache and bundle-loader statistics.  The exact
/// byte layout of the report is preserved from the original implementation.
fn handle_status(
    output: &Output,
    repository: &Repository,
    stream: &TcpStream,
) -> Result<(), String> {
    output.message_format(format_args!("Will return status"));
    let mut writer = BufWriter::new(stream);
    let status = repository.status();
    io_result(write!(writer, "OK\n\nchunk-cache:\n\n"))?;
    io_result(write!(
        writer,
        " uncompressed-memory-items: {}\n compressed-memory-items: {}\n live-filesystem-items: {}\n dead-filesystem-items: {}\n\n",
        status.chunk_cache.uncompressed_memory_items,
        status.chunk_cache.compressed_memory_items,
        status.chunk_cache.live_filesystem_items,
        status.chunk_cache.dead_filesystem_items,
    ))?;
    io_result(write!(
        writer,
        " uncompressed-memory-hits: {}\n compressed-memory-hits: {}\n live-filesystem-hits: {}\n dead-filesystem-hits: {}\n misses: {}\n\n",
        status.chunk_cache.uncompressed_memory_hits,
        status.chunk_cache.compressed_memory_hits,
        status.chunk_cache.live_filesystem_hits,
        status.chunk_cache.dead_filesystem_hits,
        status.chunk_cache.misses,
    ))?;
    io_result(write!(
        writer,
        "bundle-loader:\n\n num-loads: {}\n\n",
        status.bundle_loader.num_loads,
    ))?;
    // BUG FIX: the original gated this section on `loading_later.is_empty()`,
    // so the `loading-now` list was rendered/suppressed based on the wrong
    // collection.
    if status.bundle_loader.loading_now.is_empty() {
        io_result(write!(writer, " loading-now: []\n"))?;
    } else {
        io_result(write!(writer, " loading-now:\n\n"))?;
        for bundle_id in status.bundle_loader.loading_now.iter() {
            io_result(write!(writer, " - \"{}\"\n", bundle_id))?;
        }
    }
    io_result(write!(writer, "\n"))?;
    if status.bundle_loader.loading_later.is_empty() {
        io_result(write!(writer, " loading-later: []\n"))?;
    } else {
        io_result(write!(writer, " loading-later:\n\n"))?;
        for bundle_id in status.bundle_loader.loading_later.iter() {
            io_result(write!(writer, " - \"{}\"\n", bundle_id))?;
        }
    }
    io_result(write!(writer, "\n"))?;
    Ok(())
}
/// Logs an unknown command locally and reports it back to the client.
fn handle_command_not_recognised(
    stream: &TcpStream,
    command_name: &str,
) -> Result<(), String> {
    println!("Command not recognised: {}", command_name);
    let mut writer = BufWriter::new(stream);
    io_result(writer.write_fmt(format_args!(
        "ERROR Command not recognised: {}\n",
        command_name
    )))?;
    Ok(())
}
// ex: noet ts=4 filetype=rust
|
use crate::{qjs, Actual, Artifact, ArtifactStore, Input, Output, Ref, Result};
use relative_path::{RelativePath, RelativePathBuf};
// Shared state behind a `Directory` handle: the artifact store it belongs to
// plus the directory's path relative to the store root.
pub struct Internal {
    artifacts: ArtifactStore,
    path: RelativePathBuf,
}
#[derive(Clone)]
#[repr(transparent)]
// Cheap-to-clone handle to a directory inside an `ArtifactStore`.
pub struct Directory(Ref<Internal>);
// Borrow the owning store.
impl AsRef<ArtifactStore> for Directory {
    fn as_ref(&self) -> &ArtifactStore {
        &self.0.artifacts
    }
}
// Borrow the store-relative path (owned form).
impl AsRef<RelativePathBuf> for Directory {
    fn as_ref(&self) -> &RelativePathBuf {
        &self.0.path
    }
}
// Borrow the store-relative path (slice form).
impl AsRef<RelativePath> for Directory {
    fn as_ref(&self) -> &RelativePath {
        &self.0.path
    }
}
impl Directory {
    /// Creates a directory handle rooted in `artifacts` at `path`.
    pub fn new(artifacts: impl AsRef<ArtifactStore>, path: impl Into<RelativePathBuf>) -> Self {
        let internal = Internal {
            artifacts: artifacts.as_ref().clone(),
            path: path.into(),
        };
        Self(Ref::new(internal))
    }
    /// Returns a handle to the sub-directory `path` below this one.
    pub fn child(&self, path: impl AsRef<RelativePath>) -> Self {
        let child_path = self.0.path.join(path);
        Self::new(self, child_path)
    }
    /// Registers the input artifact `name` inside this directory.
    pub async fn input(&self, name: impl AsRef<RelativePath>) -> Result<Artifact<Input, Actual>> {
        let full_name = self.0.path.join(name).to_string();
        Artifact::<Input, Actual>::new_init(self, full_name, "").await
    }
    /// Registers the output artifact `name` inside this directory.
    pub async fn output(&self, name: impl AsRef<RelativePath>) -> Result<Artifact<Output, Actual>> {
        let full_name = self.0.path.join(name).to_string();
        Artifact::<Output, Actual>::new_init(self, full_name, "").await
    }
}
#[qjs::bind(module, public)]
#[quickjs(bare)]
// JavaScript bindings for `Directory` generated by the qjs/quickjs
// proc-macros.  NOTE(review): attribute placement is significant to the
// macros, so the code is left untouched; comments only.
mod js {
    pub use super::*;
    #[quickjs(cloneable)]
    impl Directory {
        // JS `new Directory()` is deliberately unsupported; instances are
        // created on the Rust side and handed to scripts.
        #[quickjs(rename = "new")]
        pub fn ctor() -> Self {
            unimplemented!()
        }
        // `directory.path` getter: the store-relative path as a string.
        #[quickjs(get)]
        pub fn path(&self) -> &str {
            self.0.path.as_ref()
        }
        // `directory.child(path)` — see `Directory::child`.
        #[quickjs(rename = "child")]
        pub fn _child(&self, path: String) -> Self {
            self.child(path)
        }
        // `directory.parent` getter: `None` (JS `undefined`) at the root.
        #[quickjs(get)]
        pub fn parent(&self) -> Option<Self> {
            self.0
                .path
                .parent()
                .map(|path| Self::new(self, path.to_owned()))
        }
        #[doc(hidden)]
        #[quickjs(rename = "input")]
        pub async fn input_js(self, name: String) -> Result<Artifact<Input, Actual>> {
            self.input(name).await
        }
        #[doc(hidden)]
        #[quickjs(rename = "output")]
        pub async fn output_js(self, name: String) -> Result<Artifact<Output, Actual>> {
            self.output(name).await
        }
    }
}
|
//! Node implementation for a Merkle hash tree.
//!
use std::{
fmt::Display,
rc::Rc,
};
use crate::hash::*;
/// A node of a Merkle (hash) tree.
///
/// `Mid` nodes cache the hash of their two children, `Leaf` nodes hash the
/// payload they own, and `Empty` marks an absent subtree with a sentinel
/// hash.
///
/// The original declared `T: ToString + Display` on the type itself; the
/// bound is dropped here because trait bounds belong on the impl blocks that
/// need them (and `ToString` was redundant anyway — every `Display` type is
/// `ToString` via the blanket impl).  Removing bounds is backward-compatible.
#[derive(Clone, Debug)]
pub enum Node<T> {
    /// Interior node; `hash` covers the two child hashes.
    Mid {
        left: Box<Node<T>>,
        right: Box<Node<T>>,
        hash: String,
    },
    /// Leaf node; `hash` covers the payload.
    Leaf {
        data: Rc<T>,
        hash: String,
    },
    /// Absent subtree carrying the "empty" sentinel hash.
    Empty { hash: String },
}
impl<T: Display> Node<T> {
    /// Returns the empty sentinel node.
    pub fn empty() -> Self {
        Node::Empty { hash: hash_empty() }
    }
    /// Returns the cached hash of this node.
    ///
    /// Every variant carries a hash, so this is always `Some`; the `Option`
    /// is kept for caller compatibility.
    pub fn hash(&self) -> Option<&String> {
        match *self {
            Node::Mid { ref hash, .. }
            | Node::Leaf { ref hash, .. }
            | Node::Empty { ref hash, .. } => Some(hash),
        }
    }
    /// Builds a leaf whose hash covers `val`.
    pub fn create_leaf(val: Rc<T>) -> Node<T> {
        let hash = hash_leaf(val.as_ref());
        // Field-init shorthand (was the redundant `hash: hash`).
        Node::Leaf { data: val, hash }
    }
    /// Builds an interior node from two children, hashing their hashes.
    pub fn create_mid(left: Node<T>, right: Node<T>) -> Node<T> {
        let hash = hash_mid(left.hash().unwrap(), right.hash().unwrap());
        Node::Mid {
            left: Box::new(left),
            right: Box::new(right),
            hash,
        }
    }
}
|
use core::marker::PhantomPinned;
use std::mem;
use std::pin::Pin;
// Demonstration of a self-referential struct and pinning.
// NOTE(review): this mirrors the well-known pinning example from the
// blog_os "Async/Await" post — TODO confirm provenance.
fn main() {
    let mut heap_value = Box::pin(SelfReferential {
        self_ptr: 0 as *const _,
        _pin: PhantomPinned,
    });
    let ptr = &*heap_value as *const SelfReferential;
    // safe because modifying a field doesn't move the whole struct
    unsafe {
        let mut_ref = Pin::as_mut(&mut heap_value);
        Pin::get_unchecked_mut(mut_ref).self_ptr = ptr;
    }
    println!("heap value at: {:p}", heap_value);
    println!("internal reference: {:p}", heap_value.self_ptr);
    // NOTE(review): with `_pin: PhantomPinned` the struct is !Unpin, so this
    // `mem::replace` through the pinned pointer should be rejected by the
    // compiler — the example appears intended to demonstrate exactly that
    // error.  Confirm whether this file is meant to compile as-is.
    let stack_value = mem::replace(
        &mut *heap_value,
        SelfReferential {
            self_ptr: 0 as *const _,
            _pin: PhantomPinned,
        },
    );
    println!("value at: {:p}", &stack_value);
    println!("internal reference: {:p}", stack_value.self_ptr);
}
// Stores a raw pointer to itself; moving the value would invalidate
// `self_ptr`, hence the `PhantomPinned` marker opting out of `Unpin`.
struct SelfReferential {
    self_ptr: *const Self,
    _pin: PhantomPinned,
}
|
use std::collections::HashMap;
use std::fmt;
// Sum of 1..=9; a segment whose entries sum to this is treated as filled
// (see `segment_complete`).
const COMPLETED_SEGMENT_SIZE: i32 = 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9;
// Dimensions of a standard 9x9 sudoku with 3x3 squares.
const WIDTH: usize = 9;
const HEIGHT: usize = 9;
const SQUARE_SIZE: usize = 3;
/// A segment (row, column or square) is valid when its non-zero entries
/// (zero = unfilled cell) contain no duplicate digits.
///
/// Rewritten from three needless clones plus sort/dedup into a single
/// O(n) pass: `HashSet::insert` returns `false` on the first duplicate.
fn segment_valid(segment: &Vec<i32>) -> bool {
    use std::collections::HashSet;
    let mut seen = HashSet::new();
    segment
        .iter()
        .filter(|&&value| value > 0)
        .all(|&value| seen.insert(value))
}
/// True when every segment in `segments` is individually valid.
///
/// Simplified from `.map(..).all(|x| x == true)` to a direct `all`.
fn segments_valid(segments: Vec<Vec<i32>>) -> bool {
    segments.iter().all(|segment| segment_valid(segment))
}
// A segment counts as "complete" when its entries sum to 45 (= 1+..+9).
// NOTE(review): the sum test alone does not guarantee a legal fill (e.g.
// [9,9,9,9,9,0,0,0,0] also sums to 45); callers are expected to pair this
// with the duplicate checks in `segment_valid`.
fn segment_complete(segment: &Vec<i32>) -> bool {
    segment.iter().sum::<i32>() == COMPLETED_SEGMENT_SIZE
}
/// True when every segment in `segments` sums to the completed total.
///
/// Simplified from `.map(..).all(|x| x == true)` to a direct `all`.
fn segments_complete(segments: Vec<Vec<i32>>) -> bool {
    segments.iter().all(|segment| segment_complete(segment))
}
// 9x9 sudoku grid; 0 marks an unfilled cell.  Row-major: board[row][col].
pub struct Board {
    pub board: Vec<Vec<i32>>,
    // pub possible_values: HashMap<(usize, usize), Vec<i32>>,
}
impl Board {
    /// Creates an empty board (all cells 0 / unfilled).
    pub fn new() -> Self {
        Self {
            board: vec![vec![0; WIDTH]; HEIGHT],
        }
    }
    /// Parses a board from an 81-character digit string, row-major, with
    /// `0` for blanks.
    ///
    /// # Panics
    /// Panics if the string contains a non-digit character.
    pub fn from_string(board_string: &str) -> Self {
        let board = board_string
            .chars()
            .map(|character| character.to_digit(10).unwrap() as i32)
            .collect::<Vec<i32>>()
            .chunks(WIDTH)
            .map(|chunk| chunk.to_vec())
            .collect();
        // Field-init shorthand (was `board: board`); the commented-out
        // possible_values cache scaffolding was dead and has been removed.
        Self { board }
    }
}
// Read/validate/solve operations over a 9x9 sudoku `Board`.
pub trait Sudoku {
    // Segment accessors; each returns owned copies of the 9 rows, columns,
    // or 3x3 squares.
    fn rows(&self) -> Vec<Vec<i32>>;
    fn cols(&self) -> Vec<Vec<i32>>;
    fn squares(&self) -> Vec<Vec<i32>>;
    fn get_row(&self, i: usize) -> Vec<i32>;
    fn get_col(&self, j: usize) -> Vec<i32>;
    // (i, j) are cell coordinates; returns the 3x3 square containing them.
    fn get_square(&self, i: usize, j: usize) -> Vec<i32>;
    // Validity: no duplicate non-zero digit within any segment.
    fn rows_valid(&self) -> bool;
    fn cols_valid(&self) -> bool;
    fn squares_valid(&self) -> bool;
    fn board_valid(&self) -> bool;
    // Completeness: each segment sums to 45 (see `segment_complete`).
    fn rows_complete(&self) -> bool;
    fn cols_complete(&self) -> bool;
    fn squares_complete(&self) -> bool;
    fn board_complete(&self) -> bool;
    fn set_value(&mut self, i: usize, j: usize, value: i32);
    // Candidate digits per cell; filled cells map to their single value.
    fn possible_values(&self) -> HashMap<(usize, usize), Vec<i32>>;
    // One constraint-propagation pass; returns the number of cells filled.
    fn solve_tick(&mut self) -> i32;
    // Repeats `solve_tick` until complete; returns the number of ticks.
    fn solve(&mut self) -> i32;
}
impl Sudoku for Board {
    // All segment accessors return freshly cloned Vec data, never views;
    // mutation happens only through `set_value`.
    fn rows(&self) -> Vec<Vec<i32>> {
        self.board.clone()
    }
    // Transpose: element i of every row forms column i.
    fn cols(&self) -> Vec<Vec<i32>> {
        (0..WIDTH)
            .map(|i| {
                self.board
                    .clone()
                    .into_iter()
                    .map(|row| row[i].clone())
                    .collect::<Vec<i32>>()
            })
            .collect::<Vec<Vec<i32>>>()
    }
    // Returns the nine 3x3 squares in row-major order (squares 0..=2 are the
    // top band, left to right).
    fn squares(&self) -> Vec<Vec<i32>> {
        // Chunk rows into vertical SQUARE_SIZE
        let chunked_rows = self
            .board
            .chunks(SQUARE_SIZE)
            .map(|chunk| chunk.to_vec())
            .collect::<Vec<Vec<Vec<i32>>>>();
        // Chunk the rows into horizontal SQUARE_SIZE
        let chunked_squares = chunked_rows
            .iter()
            .map(|chunk| {
                chunk
                    .iter()
                    .map(|row| {
                        row.chunks(SQUARE_SIZE)
                            .map(|chunk| chunk.to_vec())
                            .collect::<Vec<Vec<i32>>>()
                    })
                    .collect::<Vec<Vec<Vec<i32>>>>()
            })
            .collect::<Vec<Vec<Vec<Vec<i32>>>>>();
        // Get vectors of the numbers in each square by gathering the i-th
        // horizontal chunk of each row in the band and concatenating.
        let squares = chunked_squares
            .iter()
            .map(|square_row| {
                (0..SQUARE_SIZE)
                    .map(|i| {
                        square_row
                            .iter()
                            .map(|chunked_row| chunked_row.iter().nth(i).unwrap().clone())
                            .collect::<Vec<Vec<i32>>>()
                            .concat()
                    })
                    .collect::<Vec<Vec<i32>>>()
            })
            .collect::<Vec<Vec<Vec<i32>>>>()
            .concat();
        squares
    }
    fn get_row(&self, row_index: usize) -> Vec<i32> {
        self.rows()[row_index].clone()
    }
    fn get_col(&self, col_index: usize) -> Vec<i32> {
        self.cols()[col_index].clone()
    }
    // Maps cell coordinates to the containing square via integer division.
    fn get_square(&self, row_index: usize, col_index: usize) -> Vec<i32> {
        self.squares()
            .chunks(SQUARE_SIZE)
            .nth(row_index / SQUARE_SIZE)
            .unwrap()
            .iter()
            .nth(col_index / SQUARE_SIZE)
            .unwrap()
            .clone()
    }
    fn rows_valid(&self) -> bool {
        segments_valid(self.rows())
    }
    fn cols_valid(&self) -> bool {
        segments_valid(self.cols())
    }
    fn squares_valid(&self) -> bool {
        segments_valid(self.squares())
    }
    fn board_valid(&self) -> bool {
        self.rows_valid() && self.cols_valid() && self.squares_valid()
    }
    fn rows_complete(&self) -> bool {
        segments_complete(self.rows())
    }
    fn cols_complete(&self) -> bool {
        segments_complete(self.cols())
    }
    fn squares_complete(&self) -> bool {
        segments_complete(self.squares())
    }
    fn board_complete(&self) -> bool {
        self.rows_complete() && self.cols_complete() && self.squares_complete()
    }
    fn set_value(&mut self, row_index: usize, col_index: usize, value: i32) {
        self.board[row_index][col_index] = value
    }
    // For every cell, the digits that do not conflict with its row, column,
    // or square; an already-filled cell maps to just its own value.
    fn possible_values(&self) -> HashMap<(usize, usize), Vec<i32>> {
        let mut possible_values: HashMap<(usize, usize), Vec<i32>> = HashMap::new();
        (0..HEIGHT).for_each(|row_index| {
            (0..WIDTH).for_each(|col_index| {
                let current_value = self
                    .board
                    .iter()
                    .nth(row_index)
                    .unwrap()
                    .iter()
                    .nth(col_index)
                    .unwrap();
                let key = (row_index, col_index);
                let value = match current_value {
                    x if x > &0 => vec![*x],
                    _ => vec![1, 2, 3, 4, 5, 6, 7, 8, 9]
                        .into_iter()
                        .filter(|number| {
                            (self.get_row(row_index).iter().all(|i| i != number))
                                && (self.get_col(col_index).iter().all(|i| i != number))
                                && (self
                                    .get_square(row_index, col_index)
                                    .iter()
                                    .all(|i| i != number))
                        })
                        .collect(),
                };
                possible_values.insert(key, value);
            });
        });
        possible_values
    }
    // For values with only one possible value, fill them in
    fn solve_tick(&mut self) -> i32 {
        let mut values_set = 0;
        // NOTE: candidates are computed once per tick, so cells filled during
        // this pass do not constrain later cells until the next tick.
        let possible_values = self.possible_values();
        let keys = possible_values.keys();
        keys.for_each(|key| {
            let possible = possible_values.get(&key);
            match possible {
                Some(possible) => {
                    if possible.len() == 1 && self.board[key.0][key.1] == 0 {
                        self.set_value(key.0, key.1, possible[0]);
                        values_set += 1;
                        println!("set a value")
                    }
                }
                None => panic!("Tried to solve an invalid cell"),
            }
        });
        values_set
    }
    // Naked-singles-only solver: panics when a tick makes no progress, which
    // happens on puzzles that need more advanced techniques or backtracking.
    fn solve(&mut self) -> i32 {
        let mut steps_taken = 0;
        while !self.board_complete() {
            let values_set = self.solve_tick();
            steps_taken += 1;
            if values_set == 0 {
                println!("Failed board state");
                println!("{:?}", self.board);
                panic!("Could not solve: ran out of definitive possible values")
            }
        }
        steps_taken
    }
}
impl fmt::Display for Board {
    /// Renders the board as nine lines of space-separated digits.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut lines: Vec<String> = Vec::with_capacity(self.board.len());
        for row in &self.board {
            let cells: Vec<String> = row.iter().map(|cell| cell.to_string()).collect();
            lines.push(cells.join(" "));
        }
        write!(f, "{}", lines.join("\n"))
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn blank_board_from_new() {
let board = Board::new();
assert_eq!(board.board, vec![vec![0; 9]; 9]);
}
#[test]
fn blank_board_is_valid() {
let board = Board::new();
assert_eq!(board.board_valid(), true);
assert_eq!(board.board_complete(), false);
}
#[test]
fn set_value() {
let mut board = Board::new();
board.set_value(0, 0, 1);
assert_eq!(
board.board,
[
[1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
]
);
board.set_value(3, 5, 9);
assert_eq!(
board.board,
[
[1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 9, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
]
);
}
#[test]
fn correct_board_from_string() {
let board = Board::from_string(
"379000014060010070080009005435007000090040020000800436900700080040080050850000249",
);
assert_eq!(
board.board,
[
[3, 7, 9, 0, 0, 0, 0, 1, 4],
[0, 6, 0, 0, 1, 0, 0, 7, 0],
[0, 8, 0, 0, 0, 9, 0, 0, 5],
[4, 3, 5, 0, 0, 7, 0, 0, 0],
[0, 9, 0, 0, 4, 0, 0, 2, 0],
[0, 0, 0, 8, 0, 0, 4, 3, 6],
[9, 0, 0, 7, 0, 0, 0, 8, 0],
[0, 4, 0, 0, 8, 0, 0, 5, 0],
[8, 5, 0, 0, 0, 0, 2, 4, 9]
]
);
}
#[test]
fn correct_rows() {
let board = Board::from_string(
"379000014060010070080009005435007000090040020000800436900700080040080050850000249",
);
assert_eq!(board.rows()[0], [3, 7, 9, 0, 0, 0, 0, 1, 4]);
assert_eq!(board.rows()[1], [0, 6, 0, 0, 1, 0, 0, 7, 0]);
assert_eq!(board.rows()[2], [0, 8, 0, 0, 0, 9, 0, 0, 5]);
assert_eq!(board.rows()[3], [4, 3, 5, 0, 0, 7, 0, 0, 0]);
assert_eq!(board.rows()[4], [0, 9, 0, 0, 4, 0, 0, 2, 0]);
assert_eq!(board.rows()[5], [0, 0, 0, 8, 0, 0, 4, 3, 6]);
assert_eq!(board.rows()[6], [9, 0, 0, 7, 0, 0, 0, 8, 0]);
assert_eq!(board.rows()[7], [0, 4, 0, 0, 8, 0, 0, 5, 0]);
assert_eq!(board.rows()[8], [8, 5, 0, 0, 0, 0, 2, 4, 9]);
}
// All tests below share the same partially-filled puzzle string; each test
// re-parses it so the tests stay independent of one another.
#[test]
fn correct_cols() {
    // Columns are read top-to-bottom out of the row-major input string.
    let board = Board::from_string(
        "379000014060010070080009005435007000090040020000800436900700080040080050850000249",
    );
    assert_eq!(board.cols()[0], [3, 0, 0, 4, 0, 0, 9, 0, 8]);
    assert_eq!(board.cols()[1], [7, 6, 8, 3, 9, 0, 0, 4, 5]);
    assert_eq!(board.cols()[2], [9, 0, 0, 5, 0, 0, 0, 0, 0]);
    assert_eq!(board.cols()[3], [0, 0, 0, 0, 0, 8, 7, 0, 0]);
    assert_eq!(board.cols()[4], [0, 1, 0, 0, 4, 0, 0, 8, 0]);
    assert_eq!(board.cols()[5], [0, 0, 9, 7, 0, 0, 0, 0, 0]);
    assert_eq!(board.cols()[6], [0, 0, 0, 0, 0, 4, 0, 0, 2]);
    assert_eq!(board.cols()[7], [1, 7, 0, 0, 2, 3, 8, 5, 4]);
    assert_eq!(board.cols()[8], [4, 0, 5, 0, 0, 6, 0, 0, 9]);
}
#[test]
fn correct_squares() {
    // Squares (3x3 boxes) are numbered left-to-right, top-to-bottom, and each
    // square's cells are flattened in row-major order.
    let board = Board::from_string(
        "379000014060010070080009005435007000090040020000800436900700080040080050850000249",
    );
    assert_eq!(board.squares()[0], [3, 7, 9, 0, 6, 0, 0, 8, 0]);
    assert_eq!(board.squares()[1], [0, 0, 0, 0, 1, 0, 0, 0, 9]);
    assert_eq!(board.squares()[2], [0, 1, 4, 0, 7, 0, 0, 0, 5]);
    assert_eq!(board.squares()[3], [4, 3, 5, 0, 9, 0, 0, 0, 0]);
    assert_eq!(board.squares()[4], [0, 0, 7, 0, 4, 0, 8, 0, 0]);
    assert_eq!(board.squares()[5], [0, 0, 0, 0, 2, 0, 4, 3, 6]);
    assert_eq!(board.squares()[6], [9, 0, 0, 0, 4, 0, 8, 5, 0]);
    assert_eq!(board.squares()[7], [7, 0, 0, 0, 8, 0, 0, 0, 0]);
    assert_eq!(board.squares()[8], [0, 8, 0, 0, 5, 0, 2, 4, 9]);
}
#[test]
fn incomplete_valid_board_string_is_valid() {
    // A board with empty cells (zeros) can still be valid, just not complete.
    let board = Board::from_string(
        "379000014060010070080009005435007000090040020000800436900700080040080050850000249",
    );
    assert_eq!(board.board_valid(), true);
    assert_eq!(board.board_complete(), false);
}
#[test]
#[ignore]
// NOTE(review): marked #[ignore] — the reason is not recorded here; confirm
// the completeness checks pass before re-enabling.
fn complete_valid_board_string_is_valid_and_complete() {
    // A fully solved board must satisfy every completeness predicate.
    let board = Board::from_string(
        "845632179732918654196745328683574912457291836219863547361429785574186293928357461",
    );
    assert_eq!(board.board_valid(), true);
    assert_eq!(board.rows_complete(), true);
    assert_eq!(board.cols_complete(), true);
    assert_eq!(board.squares_complete(), true);
    assert_eq!(board.board_complete(), true);
}
#[test]
fn test_squares() {
    // Same expectations as `correct_squares`, but comparing the whole
    // collection at once rather than square-by-square.
    let board = Board::from_string(
        "379000014060010070080009005435007000090040020000800436900700080040080050850000249",
    );
    assert_eq!(
        board.squares(),
        vec![
            vec![3, 7, 9, 0, 6, 0, 0, 8, 0],
            vec![0, 0, 0, 0, 1, 0, 0, 0, 9],
            vec![0, 1, 4, 0, 7, 0, 0, 0, 5],
            vec![4, 3, 5, 0, 9, 0, 0, 0, 0],
            vec![0, 0, 7, 0, 4, 0, 8, 0, 0],
            vec![0, 0, 0, 0, 2, 0, 4, 3, 6],
            vec![9, 0, 0, 0, 4, 0, 8, 5, 0],
            vec![7, 0, 0, 0, 8, 0, 0, 0, 0],
            vec![0, 8, 0, 0, 5, 0, 2, 4, 9]
        ]
    );
}
#[test]
fn test_get_row() {
    // get_row(i) returns the i-th row, left-to-right.
    let board = Board::from_string(
        "379000014060010070080009005435007000090040020000800436900700080040080050850000249",
    );
    assert_eq!(board.get_row(0), vec![3, 7, 9, 0, 0, 0, 0, 1, 4]);
    assert_eq!(board.get_row(1), vec![0, 6, 0, 0, 1, 0, 0, 7, 0]);
    assert_eq!(board.get_row(2), vec![0, 8, 0, 0, 0, 9, 0, 0, 5]);
    assert_eq!(board.get_row(3), vec![4, 3, 5, 0, 0, 7, 0, 0, 0]);
    assert_eq!(board.get_row(4), vec![0, 9, 0, 0, 4, 0, 0, 2, 0]);
    assert_eq!(board.get_row(5), vec![0, 0, 0, 8, 0, 0, 4, 3, 6]);
    assert_eq!(board.get_row(6), vec![9, 0, 0, 7, 0, 0, 0, 8, 0]);
    assert_eq!(board.get_row(7), vec![0, 4, 0, 0, 8, 0, 0, 5, 0]);
    assert_eq!(board.get_row(8), vec![8, 5, 0, 0, 0, 0, 2, 4, 9]);
}
#[test]
fn test_get_col() {
    // get_col(i) returns the i-th column, top-to-bottom.
    let board = Board::from_string(
        "379000014060010070080009005435007000090040020000800436900700080040080050850000249",
    );
    assert_eq!(board.get_col(0), vec![3, 0, 0, 4, 0, 0, 9, 0, 8]);
    assert_eq!(board.get_col(1), vec![7, 6, 8, 3, 9, 0, 0, 4, 5]);
    assert_eq!(board.get_col(2), vec![9, 0, 0, 5, 0, 0, 0, 0, 0]);
    assert_eq!(board.get_col(3), vec![0, 0, 0, 0, 0, 8, 7, 0, 0]);
    assert_eq!(board.get_col(4), vec![0, 1, 0, 0, 4, 0, 0, 8, 0]);
    assert_eq!(board.get_col(5), vec![0, 0, 9, 7, 0, 0, 0, 0, 0]);
    assert_eq!(board.get_col(6), vec![0, 0, 0, 0, 0, 4, 0, 0, 2]);
    assert_eq!(board.get_col(7), vec![1, 7, 0, 0, 2, 3, 8, 5, 4]);
    assert_eq!(board.get_col(8), vec![4, 0, 5, 0, 0, 6, 0, 0, 9]);
}
#[test]
fn test_get_square() {
    // get_square(row, col) returns the 3x3 box containing cell (row, col),
    // flattened row-major — any cell inside the same box yields the same square.
    let board = Board::from_string(
        "379000014060010070080009005435007000090040020000800436900700080040080050850000249",
    );
    assert_eq!(board.get_square(0, 0), vec![3, 7, 9, 0, 6, 0, 0, 8, 0]);
    assert_eq!(board.get_square(8, 8), vec![0, 8, 0, 0, 5, 0, 2, 4, 9]);
    assert_eq!(board.get_square(4, 4), vec![0, 0, 7, 0, 4, 0, 8, 0, 0]);
    assert_eq!(board.get_square(4, 2), vec![4, 3, 5, 0, 9, 0, 0, 0, 0]);
    assert_eq!(board.get_square(7, 3), vec![7, 0, 0, 0, 8, 0, 0, 0, 0]);
}
#[test]
fn test_possible_values() {
    // possible_values maps (row, col) to the candidate digits for that cell;
    // presumably filled cells map to a single-element list — confirm in Board.
    let board = Board::from_string(
        "379000014060010070080009005435007000090040020000800436900700080040080050850000249",
    );
    assert_eq!(board.possible_values().get(&(0, 0)), Some(&vec![3]));
    assert_eq!(board.possible_values().get(&(1, 0)), Some(&vec![2, 5]));
    assert_eq!(board.possible_values().get(&(0, 3)), Some(&vec![2, 5, 6]));
}
#[test]
fn test_solve_tick() {
    // Each solve_tick() pass fills in every cell that has exactly one
    // candidate; the return value is the number of cells filled that pass.
    let mut board = Board::from_string(
        "379000014060010070080009005435007000090040020000800436900700080040080050850000249",
    );
    assert_eq!(board.solve_tick(), 2);
    assert_eq!(
        board.board,
        [
            [3, 7, 9, 0, 0, 0, 0, 1, 4],
            [0, 6, 0, 0, 1, 0, 0, 7, 0],
            [0, 8, 0, 0, 0, 9, 0, 6, 5],
            [4, 3, 5, 0, 0, 7, 0, 9, 0],
            [0, 9, 0, 0, 4, 0, 0, 2, 0],
            [0, 0, 0, 8, 0, 0, 4, 3, 6],
            [9, 0, 0, 7, 0, 0, 0, 8, 0],
            [0, 4, 0, 0, 8, 0, 0, 5, 0],
            [8, 5, 0, 0, 0, 0, 2, 4, 9],
        ]
    );
    assert_eq!(board.solve_tick(), 2);
    assert_eq!(
        board.board,
        [
            [3, 7, 9, 0, 0, 0, 8, 1, 4],
            [0, 6, 0, 0, 1, 0, 0, 7, 0],
            [0, 8, 0, 0, 0, 9, 3, 6, 5],
            [4, 3, 5, 0, 0, 7, 0, 9, 0],
            [0, 9, 0, 0, 4, 0, 0, 2, 0],
            [0, 0, 0, 8, 0, 0, 4, 3, 6],
            [9, 0, 0, 7, 0, 0, 0, 8, 0],
            [0, 4, 0, 0, 8, 0, 0, 5, 0],
            [8, 5, 0, 0, 0, 0, 2, 4, 9]
        ]
    );
    assert_eq!(board.solve_tick(), 3);
    assert_eq!(
        board.board,
        [
            [3, 7, 9, 0, 0, 0, 8, 1, 4],
            [0, 6, 0, 0, 1, 0, 9, 7, 2],
            [0, 8, 0, 0, 0, 9, 3, 6, 5],
            [4, 3, 5, 0, 0, 7, 1, 9, 0],
            [0, 9, 0, 0, 4, 0, 0, 2, 0],
            [0, 0, 0, 8, 0, 0, 4, 3, 6],
            [9, 0, 0, 7, 0, 0, 0, 8, 0],
            [0, 4, 0, 0, 8, 0, 0, 5, 0],
            [8, 5, 0, 0, 0, 0, 2, 4, 9],
        ]
    );
    assert_eq!(board.solve_tick(), 4);
    assert_eq!(
        board.board,
        [
            [3, 7, 9, 0, 0, 0, 8, 1, 4],
            [5, 6, 4, 0, 1, 0, 9, 7, 2],
            [0, 8, 0, 0, 0, 9, 3, 6, 5],
            [4, 3, 5, 0, 0, 7, 1, 9, 8],
            [0, 9, 0, 0, 4, 0, 0, 2, 0],
            [0, 0, 0, 8, 0, 0, 4, 3, 6],
            [9, 0, 0, 7, 0, 0, 6, 8, 0],
            [0, 4, 0, 0, 8, 0, 0, 5, 0],
            [8, 5, 0, 0, 0, 0, 2, 4, 9]
        ]
    );
    assert_eq!(board.solve_tick(), 3);
    assert_eq!(
        board.board,
        [
            [3, 7, 9, 0, 0, 0, 8, 1, 4],
            [5, 6, 4, 3, 1, 0, 9, 7, 2],
            [0, 8, 0, 0, 0, 9, 3, 6, 5],
            [4, 3, 5, 0, 0, 7, 1, 9, 8],
            [0, 9, 0, 0, 4, 0, 0, 2, 7],
            [0, 0, 0, 8, 0, 0, 4, 3, 6],
            [9, 0, 0, 7, 0, 0, 6, 8, 0],
            [0, 4, 0, 0, 8, 0, 7, 5, 0],
            [8, 5, 0, 0, 0, 0, 2, 4, 9]
        ]
    );
}
#[test]
fn test_solve_easy() {
    // End-to-end: solve() returns the number of passes taken and leaves the
    // board fully solved and valid.
    let mut board = Board::from_string(
        "002000500010705020400090007049000730801030409036000210200080004080902060007000800",
    );
    assert_eq!(board.solve(), 12);
    assert_eq!(
        board.board,
        [
            [9, 7, 2, 8, 6, 3, 5, 4, 1],
            [6, 1, 8, 7, 4, 5, 9, 2, 3],
            [4, 5, 3, 2, 9, 1, 6, 8, 7],
            [5, 4, 9, 1, 2, 8, 7, 3, 6],
            [8, 2, 1, 6, 3, 7, 4, 5, 9],
            [7, 3, 6, 4, 5, 9, 2, 1, 8],
            [2, 9, 5, 3, 8, 6, 1, 7, 4],
            [1, 8, 4, 9, 7, 2, 3, 6, 5],
            [3, 6, 7, 5, 1, 4, 8, 9, 2]
        ]
    );
    assert_eq!(board.board_valid(), true);
    assert_eq!(board.board_complete(), true);
}
// #[test]
// fn test_solve() {
// let mut board = Board::from_string(
// "379000014060010070080009005435007000090040020000800436900700080040080050850000249",
// );
// assert_eq!(board.solve(), 20);
// assert_eq!(
// board.board,
// [
// [3, 7, 9, 0, 0, 0, 0, 1, 4],
// [0, 6, 0, 0, 1, 0, 0, 7, 0],
// [0, 8, 0, 0, 0, 9, 0, 6, 5],
// [4, 3, 5, 0, 0, 7, 0, 9, 0],
// [0, 9, 0, 0, 4, 0, 0, 2, 0],
// [0, 0, 0, 8, 0, 0, 4, 3, 6],
// [9, 0, 0, 7, 0, 0, 0, 8, 0],
// [0, 4, 0, 0, 8, 0, 0, 5, 0],
// [8, 5, 0, 0, 0, 0, 2, 4, 9],
// ]
// );
// }
}
|
use std::env;
use serenity::prelude::*;
use toaster_core::{
toaster_framework::ToasterFramework,
dynamic_loading::{
PluginManager,
},
handler,
share_map_hack::{
ToasterHack,
}
};
/// Entry point: builds the Discord client, wires up the plugin-backed command
/// framework, stores a clone of it in the client's shared data map, and runs
/// the client until it stops or errors.
fn main()
{
    println!("Hello, world!");
    // The bot token comes from the DISCORD_TOKEN environment variable;
    // startup aborts with a panic if it is missing or the client fails to build.
    let mut client = Client::new(&env::var("DISCORD_TOKEN").expect("Didn't find DISCORD_TOKEN env variable!"), handler::Handler)
        .expect("Error creating client!");
    // Command framework backed by a dynamically loaded plugin library.
    // NOTE(review): both paths are hard-coded absolute paths specific to one
    // deployment — consider making them configurable before running elsewhere.
    let framework = {
        let plugin_manager = PluginManager::new("/home/toaster/fracking-toaster/target/release/libtoaster_commands.so", "/home/toaster/plugin_temp_dir");
        ToasterFramework::new(plugin_manager.unwrap(), |c| c)
    };
    framework.add_all_groups().unwrap();
    {
        // Scoped block so the write lock on client.data is released before start().
        let mut data = client.data.write();
        data.insert_toaster(framework.clone());
    }
    client.with_framework(framework);
    // Blocks until the client shuts down; errors are reported, not retried.
    if let Err(why) = client.start()
    {
        println!("An error occurred while starting the client: {:?}", why);
    }
}
use ethers::{
providers::Middleware,
types::{Address, BlockId, Bytes, H256, U256},
};
use tokio::runtime::Runtime;
#[derive(Debug)]
/// Blocking wrapper around an Ethers middleware, for use in synchronous contexts
/// (powered by a tokio runtime)
pub struct BlockingProvider<M> {
    // The wrapped asynchronous middleware all calls are delegated to.
    provider: M,
    // Dedicated runtime used to drive the provider's futures to completion.
    runtime: Runtime,
}
impl<M: Clone> Clone for BlockingProvider<M> {
    // Manual impl: the provider is cloned, but each clone gets its own freshly
    // constructed runtime (tokio's `Runtime` cannot be cloned).
    fn clone(&self) -> Self {
        Self { provider: self.provider.clone(), runtime: Runtime::new().unwrap() }
    }
}
#[cfg(feature = "sputnik")]
use sputnik::backend::MemoryVicinity;
impl<M: Middleware> BlockingProvider<M> {
    /// Wraps `provider` with a new single-purpose tokio runtime.
    ///
    /// Panics if the runtime cannot be created.
    pub fn new(provider: M) -> Self {
        Self { provider, runtime: Runtime::new().unwrap() }
    }
    /// Assembles a sputnik `MemoryVicinity` from live chain data, pinned to
    /// `pin_block` when given, otherwise to the current block number.
    ///
    /// Panics if the chosen block cannot be found.
    #[cfg(feature = "sputnik")]
    pub fn vicinity(&self, pin_block: Option<u64>) -> Result<MemoryVicinity, M::Error> {
        let block_number = if let Some(pin_block) = pin_block {
            pin_block
        } else {
            self.block_on(self.provider.get_block_number())?.as_u64()
        };
        let gas_price = self.block_on(self.provider.get_gas_price())?;
        let chain_id = self.block_on(self.provider.get_chainid())?;
        let block = self.block_on(self.provider.get_block(block_number))?.expect("block not found");
        Ok(MemoryVicinity {
            // Origin and block hashes are not derived from chain state here;
            // callers get a default origin and no historical hashes.
            origin: Address::default(),
            chain_id,
            block_hashes: Vec::new(),
            block_number: block.number.expect("block number not found").as_u64().into(),
            block_coinbase: block.author,
            block_difficulty: block.difficulty,
            block_gas_limit: block.gas_limit,
            block_timestamp: block.timestamp,
            gas_price,
        })
    }
    // Drives any future to completion on this wrapper's private runtime.
    fn block_on<F: std::future::Future>(&self, f: F) -> F::Output {
        self.runtime.block_on(f)
    }
    /// Blocking version of `Middleware::get_balance`.
    pub fn get_balance(&self, address: Address, block: Option<BlockId>) -> Result<U256, M::Error> {
        self.block_on(self.provider.get_balance(address, block))
    }
    /// Blocking version of `Middleware::get_transaction_count`.
    pub fn get_transaction_count(
        &self,
        address: Address,
        block: Option<BlockId>,
    ) -> Result<U256, M::Error> {
        self.block_on(self.provider.get_transaction_count(address, block))
    }
    /// Blocking version of `Middleware::get_code`.
    pub fn get_code(&self, address: Address, block: Option<BlockId>) -> Result<Bytes, M::Error> {
        self.block_on(self.provider.get_code(address, block))
    }
    /// Blocking version of `Middleware::get_storage_at`.
    pub fn get_storage_at(
        &self,
        address: Address,
        slot: H256,
        block: Option<BlockId>,
    ) -> Result<H256, M::Error> {
        self.block_on(self.provider.get_storage_at(address, slot, block))
    }
}
|
use serde::{Deserialize, Serialize};
use strum_macros::Display;
use utility_types::{omit, partial, pick};
/// The kind of account, with the per-kind name fields carried inline.
#[derive(Debug, Serialize, Deserialize, Display, Clone)]
#[serde(tag = "type")] // to flatten the enum to the parent struct
pub enum AccountType {
    // Staff account — named like an individual.
    Admin {
        first_name: String,
        last_name: String,
    },
    // Regular person account.
    Individual {
        first_name: String,
        last_name: String,
    },
    // Organization account — has a single company name instead of first/last.
    Company {
        company_name: String,
    },
    // JobSeeker,
}
// The utility_types macros below derive three sibling types from this struct:
// - DBAccount: Account without `id` (presumably for pre-insert rows — confirm);
// - CompactAccount: only `id`, `slug`, `r#type`;
// - PartialAccount: every field wrapped in Option.
#[omit(DBAccount, [id], [Serialize, Deserialize, Clone])]
#[pick(CompactAccount, [id, slug, r#type], [Debug, Serialize, Deserialize, Clone])]
#[partial(PartialAccount)]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Account {
    pub id: u32,
    pub slug: String,
    pub email: String,
    // The variant's fields are flattened into the account's JSON representation.
    #[serde(flatten)]
    pub r#type: AccountType,
}
/// Conversion to the trimmed-down `CompactAccount` projection.
pub trait AccountTrait {
    fn to_compact_account(&self) -> CompactAccount;
}
impl AccountTrait for Account {
    // Copies the id and clones the slug/type; email is intentionally dropped.
    fn to_compact_account(&self) -> CompactAccount {
        CompactAccount {
            id: self.id,
            slug: self.slug.clone(),
            r#type: self.r#type.clone(),
        }
    }
}
/// Name accessors shared by `Account`, `DBAccount` and `CompactAccount`.
pub trait AccountNameTrait {
    // Human-readable name: "first last" for people, the company name otherwise.
    fn get_display_name(&self) -> String;
    // (first_name, last_name, company_name) — exactly one side is populated.
    fn get_names(&self) -> (Option<&String>, Option<&String>, Option<&String>);
}
impl AccountNameTrait for Account {
    /// "first last" for person-like variants, the company name for companies.
    fn get_display_name(&self) -> String {
        match &self.r#type {
            AccountType::Admin {
                first_name,
                last_name,
            }
            | AccountType::Individual {
                first_name,
                last_name,
            } => format!("{} {}", first_name, last_name),
            AccountType::Company { company_name } => company_name.clone(),
        }
    }
    /// Borrowed (first, last, company) triple; person variants fill the first
    /// two slots, companies only the last.
    fn get_names(&self) -> (Option<&String>, Option<&String>, Option<&String>) {
        match &self.r#type {
            AccountType::Company { company_name } => (None, None, Some(company_name)),
            AccountType::Admin {
                first_name,
                last_name,
            }
            | AccountType::Individual {
                first_name,
                last_name,
            } => (Some(first_name), Some(last_name), None),
        }
    }
}
impl AccountNameTrait for DBAccount {
    /// "first last" for person-like variants, the company name for companies.
    fn get_display_name(&self) -> String {
        match &self.r#type {
            AccountType::Admin {
                first_name,
                last_name,
            }
            | AccountType::Individual {
                first_name,
                last_name,
            } => format!("{} {}", first_name, last_name),
            AccountType::Company { company_name } => company_name.clone(),
        }
    }
    /// Borrowed (first, last, company) triple; person variants fill the first
    /// two slots, companies only the last.
    fn get_names(&self) -> (Option<&String>, Option<&String>, Option<&String>) {
        match &self.r#type {
            AccountType::Company { company_name } => (None, None, Some(company_name)),
            AccountType::Admin {
                first_name,
                last_name,
            }
            | AccountType::Individual {
                first_name,
                last_name,
            } => (Some(first_name), Some(last_name), None),
        }
    }
}
impl AccountNameTrait for CompactAccount {
    /// "first last" for person-like variants, the company name for companies.
    fn get_display_name(&self) -> String {
        match &self.r#type {
            AccountType::Admin {
                first_name,
                last_name,
            }
            | AccountType::Individual {
                first_name,
                last_name,
            } => format!("{} {}", first_name, last_name),
            AccountType::Company { company_name } => company_name.clone(),
        }
    }
    /// Borrowed (first, last, company) triple; person variants fill the first
    /// two slots, companies only the last.
    fn get_names(&self) -> (Option<&String>, Option<&String>, Option<&String>) {
        match &self.r#type {
            AccountType::Company { company_name } => (None, None, Some(company_name)),
            AccountType::Admin {
                first_name,
                last_name,
            }
            | AccountType::Individual {
                first_name,
                last_name,
            } => (Some(first_name), Some(last_name), None),
        }
    }
}
/// Merging a sparse `PartialAccount` over a complete fallback `Account`.
pub trait PartialAccountTrait {
    fn to_account(&self, fallback_account: Account) -> Account;
}
impl PartialAccountTrait for PartialAccount {
    /// Builds a full `Account`: each field comes from `self` when present,
    /// otherwise from the corresponding field of `fallback_account`.
    fn to_account(&self, fallback_account: Account) -> Account {
        let Account { id, slug, email, r#type } = fallback_account;
        Account {
            id: self.id.unwrap_or(id),
            slug: self.slug.clone().unwrap_or(slug),
            email: self.email.clone().unwrap_or(email),
            r#type: self.r#type.clone().unwrap_or(r#type),
        }
    }
}
|
#[macro_use]
extern crate lazy_static;
extern crate regex;
use pyo3::prelude::*;
use pyo3::wrap_pyfunction;
use regex::Regex;
lazy_static! {
    // Matches any mention of US dollars; input lines are uppercased before
    // matching, so the pattern only needs the uppercase forms.
    static ref DOLLAR_SEARCH: Regex = Regex::new(r"DOLLAR|USD|\$").unwrap();
}
/// Finds the multiplier within a couple of lines that dollar is mentioned.
///
/// Scans `table_header` for the first line referencing dollars, then looks in
/// a 3-line window around that line for "thousand"/"million"/"billion"
/// (case-insensitive) and returns the corresponding factor. Returns 1 when no
/// dollar reference or no magnitude word is found.
#[pyfunction]
pub fn find_multiplier(table_header: Vec<String>) -> u32 {
    // Index of the first header line that mentions dollars, if any.
    let line_index = table_header
        .iter()
        .position(|line| DOLLAR_SEARCH.is_match(&line.to_uppercase()));
    // Fix: the window start uses saturating_sub — the old `index - 1`
    // underflowed usize when the dollar term was on the first line (panic in
    // debug builds, silently empty window in release builds).
    if let Some(index) = line_index {
        table_header
            .iter()
            .skip(index.saturating_sub(1))
            .take(3)
            .find_map(|line| {
                let upper = line.to_uppercase();
                [
                    ("THOUSAND", 1_000),
                    ("MILLION", 1_000_000),
                    ("BILLION", 1_000_000_000),
                ]
                .iter()
                .find_map(|&(key, val)| if upper.contains(key) { Some(val) } else { None })
            })
            .unwrap_or(1)
    } else {
        1
    }
}
/// Python module definition: exposes `find_multiplier` to Python as
/// `librust_vs_python.find_multiplier`.
#[pymodule]
fn librust_vs_python(_py: Python, m: &PyModule) -> PyResult<()> {
    m.add_wrapped(wrap_pyfunction!(find_multiplier))?;
    Ok(())
}
|
/// Counts the trees visible from outside the grid (Advent of Code 2022, day 8,
/// part 1) and returns the count as a string.
///
/// A tree is visible when every tree between it and at least one edge of the
/// grid is strictly shorter; all edge trees are visible by definition.
/// Panics on empty input or non-digit characters.
pub fn process_part1(input: &str) -> String {
    // Grid dimensions: width from the last line, height from the line count.
    // (Idiom fix: `.count()` replaces the roundabout `.map(|_| 1).sum()`.)
    let cols = input.lines().last().unwrap().chars().count();
    let lines = input.lines().count();
    // Row-major grid of tree heights.
    let input_rows = input
        .lines()
        .map(|row| {
            row.chars()
                .map(|num| num.to_digit(10).unwrap() as usize)
                .collect::<Vec<usize>>()
        })
        .collect::<Vec<Vec<usize>>>();
    // Column-major copy so vertical scans read contiguous vectors.
    let input_cols = (0..cols)
        .map(|col| input_rows.iter().map(|line| line[col]).collect::<Vec<usize>>())
        .collect::<Vec<Vec<usize>>>();
    // Interior trees only; edge trees are added back below.
    let visible_trees = (1..cols - 1)
        .map(|col| {
            (1..lines - 1)
                .map(|row| {
                    let current_height = input_rows[row][col];
                    let lower_in_row = input_rows[row]
                        .iter()
                        .map(|&height| height < current_height)
                        .collect::<Vec<bool>>();
                    let lower_in_col = input_cols[col]
                        .iter()
                        .map(|&height| height < current_height)
                        .collect::<Vec<bool>>();
                    // Visible when all trees toward any one edge are lower.
                    let visible = lower_in_col[..row].iter().all(|&lower| lower)
                        || lower_in_col[row + 1..].iter().all(|&lower| lower)
                        || lower_in_row[..col].iter().all(|&lower| lower)
                        || lower_in_row[col + 1..].iter().all(|&lower| lower);
                    usize::from(visible)
                })
                .sum::<usize>()
        })
        .sum::<usize>();
    // Edge trees: two full rows plus two full columns, minus the four
    // double-counted corners.
    (visible_trees + 2 * cols + 2 * lines - 4).to_string()
}
/// Returns the highest "scenic score" of any interior tree (Advent of Code
/// 2022, day 8, part 2) as a string.
///
/// A tree's score is the product of its viewing distances in the four
/// directions; a view stops at the first tree of equal or greater height.
/// Panics on empty input, non-digit characters, or grids smaller than 3x3.
pub fn process_part2(input: &str) -> String {
    // Grid dimensions (idiom fix: `.count()` replaces `.map(|_| 1).sum()`).
    let cols = input.lines().last().unwrap().chars().count();
    let lines = input.lines().count();
    // Row-major grid of tree heights.
    let input_rows = input
        .lines()
        .map(|row| {
            row.chars()
                .map(|num| num.to_digit(10).unwrap() as usize)
                .collect::<Vec<usize>>()
        })
        .collect::<Vec<Vec<usize>>>();
    // Column-major copy so vertical scans read contiguous vectors.
    let input_cols = (0..cols)
        .map(|col| input_rows.iter().map(|line| line[col]).collect::<Vec<usize>>())
        .collect::<Vec<Vec<usize>>>();
    (1..cols - 1)
        .map(|col| {
            (1..lines - 1)
                .map(|row| {
                    let current_height = input_rows[row][col];
                    // (up, down, left, right) viewing distances.
                    let mut score = (0usize, 0usize, 0usize, 0usize);
                    // top: walk upward until a tree at least as tall blocks the view
                    for i in (0..row).rev() {
                        score.0 += 1;
                        if input_cols[col][i] >= current_height {
                            break;
                        }
                    }
                    // bottom
                    for i in row + 1..input_cols[col].len() {
                        score.1 += 1;
                        if input_cols[col][i] >= current_height {
                            break;
                        }
                    }
                    // left
                    for i in (0..col).rev() {
                        score.2 += 1;
                        if input_rows[row][i] >= current_height {
                            break;
                        }
                    }
                    // right — bug fix: the bound is the row's length. The old
                    // code used `input_cols[row].len()` (the grid height),
                    // which is only correct for square grids.
                    for i in col + 1..input_rows[row].len() {
                        score.3 += 1;
                        if input_rows[row][i] >= current_height {
                            break;
                        }
                    }
                    score.0 * score.1 * score.2 * score.3
                })
                .max()
                .unwrap()
        })
        .max()
        .unwrap()
        .to_string()
}
#[cfg(test)]
mod tests {
    use super::*;
    // Sample grid from the day-8 puzzle statement; expected answers are the
    // published example results (21 visible trees, best scenic score 8).
    const INPUT: &str = "30373
25512
65332
33549
35390";
    #[test]
    fn part1() {
        assert_eq!(process_part1(INPUT), "21");
    }
    #[test]
    fn part2() {
        assert_eq!(process_part2(INPUT), "8");
    }
}
|
mod dynamics;
mod properties;
pub use self::{dynamics::Dynamics, properties::PhysicalProperties};
|
use criterion::{criterion_group, criterion_main, Criterion};
use scc::HashIndex;
/// Benchmarks `HashIndex::read` on a single pre-inserted key.
fn read(c: &mut Criterion) {
    let index: HashIndex<usize, usize> = HashIndex::default();
    assert!(index.insert(1, 1).is_ok());
    c.bench_function("HashIndex: read", |b| {
        b.iter(|| index.read(&1, |_, value| assert_eq!(*value, 1)))
    });
}
criterion_group!(hash_index, read);
criterion_main!(hash_index);
|
pub mod app_config;
pub mod log;
|
// svd2rust-generated accessor for the OTG_DOEPMSK (device OUT endpoint common
// interrupt mask) register. Do not hand-edit beyond comments: this file is
// normally regenerated from the SVD description.
#[doc = "Register `OTG_DOEPMSK` reader"]
pub type R = crate::R<OTG_DOEPMSK_SPEC>;
#[doc = "Register `OTG_DOEPMSK` writer"]
pub type W = crate::W<OTG_DOEPMSK_SPEC>;
#[doc = "Field `XFRCM` reader - XFRCM"]
pub type XFRCM_R = crate::BitReader;
#[doc = "Field `XFRCM` writer - XFRCM"]
pub type XFRCM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EPDM` reader - EPDM"]
pub type EPDM_R = crate::BitReader;
#[doc = "Field `EPDM` writer - EPDM"]
pub type EPDM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AHBERRM` reader - AHBERRM"]
pub type AHBERRM_R = crate::BitReader;
#[doc = "Field `AHBERRM` writer - AHBERRM"]
pub type AHBERRM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `STUPM` reader - STUPM"]
pub type STUPM_R = crate::BitReader;
#[doc = "Field `STUPM` writer - STUPM"]
pub type STUPM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OTEPDM` reader - OTEPDM"]
pub type OTEPDM_R = crate::BitReader;
#[doc = "Field `OTEPDM` writer - OTEPDM"]
pub type OTEPDM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `STSPHSRXM` reader - STSPHSRXM"]
pub type STSPHSRXM_R = crate::BitReader;
#[doc = "Field `STSPHSRXM` writer - STSPHSRXM"]
pub type STSPHSRXM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2BSTUPM` reader - B2BSTUPM"]
pub type B2BSTUPM_R = crate::BitReader;
#[doc = "Field `B2BSTUPM` writer - B2BSTUPM"]
pub type B2BSTUPM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OUTPKTERRM` reader - OUTPKTERRM"]
pub type OUTPKTERRM_R = crate::BitReader;
#[doc = "Field `OUTPKTERRM` writer - OUTPKTERRM"]
pub type OUTPKTERRM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BNAM` reader - BNAM"]
pub type BNAM_R = crate::BitReader;
#[doc = "Field `BNAM` writer - BNAM"]
pub type BNAM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BERRM` reader - BERRM"]
pub type BERRM_R = crate::BitReader;
#[doc = "Field `BERRM` writer - BERRM"]
pub type BERRM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `NAKMSK` reader - NAKMSK"]
pub type NAKMSK_R = crate::BitReader;
#[doc = "Field `NAKMSK` writer - NAKMSK"]
pub type NAKMSK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `NYETMSK` reader - NYETMSK"]
pub type NYETMSK_R = crate::BitReader;
#[doc = "Field `NYETMSK` writer - NYETMSK"]
pub type NYETMSK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read proxy: one accessor per field, each extracting a single bit at the
// offset stated in its doc comment (note the gaps at bits 7, 10-11).
impl R {
    #[doc = "Bit 0 - XFRCM"]
    #[inline(always)]
    pub fn xfrcm(&self) -> XFRCM_R {
        XFRCM_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - EPDM"]
    #[inline(always)]
    pub fn epdm(&self) -> EPDM_R {
        EPDM_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - AHBERRM"]
    #[inline(always)]
    pub fn ahberrm(&self) -> AHBERRM_R {
        AHBERRM_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - STUPM"]
    #[inline(always)]
    pub fn stupm(&self) -> STUPM_R {
        STUPM_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - OTEPDM"]
    #[inline(always)]
    pub fn otepdm(&self) -> OTEPDM_R {
        OTEPDM_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - STSPHSRXM"]
    #[inline(always)]
    pub fn stsphsrxm(&self) -> STSPHSRXM_R {
        STSPHSRXM_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - B2BSTUPM"]
    #[inline(always)]
    pub fn b2bstupm(&self) -> B2BSTUPM_R {
        B2BSTUPM_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 8 - OUTPKTERRM"]
    #[inline(always)]
    pub fn outpkterrm(&self) -> OUTPKTERRM_R {
        OUTPKTERRM_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - BNAM"]
    #[inline(always)]
    pub fn bnam(&self) -> BNAM_R {
        BNAM_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 12 - BERRM"]
    #[inline(always)]
    pub fn berrm(&self) -> BERRM_R {
        BERRM_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - NAKMSK"]
    #[inline(always)]
    pub fn nakmsk(&self) -> NAKMSK_R {
        NAKMSK_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - NYETMSK"]
    #[inline(always)]
    pub fn nyetmsk(&self) -> NYETMSK_R {
        NYETMSK_R::new(((self.bits >> 14) & 1) != 0)
    }
}
// Write proxy: one builder per field; the bit offset is carried in the
// writer's const generic parameter.
impl W {
    #[doc = "Bit 0 - XFRCM"]
    #[inline(always)]
    #[must_use]
    pub fn xfrcm(&mut self) -> XFRCM_W<OTG_DOEPMSK_SPEC, 0> {
        XFRCM_W::new(self)
    }
    #[doc = "Bit 1 - EPDM"]
    #[inline(always)]
    #[must_use]
    pub fn epdm(&mut self) -> EPDM_W<OTG_DOEPMSK_SPEC, 1> {
        EPDM_W::new(self)
    }
    #[doc = "Bit 2 - AHBERRM"]
    #[inline(always)]
    #[must_use]
    pub fn ahberrm(&mut self) -> AHBERRM_W<OTG_DOEPMSK_SPEC, 2> {
        AHBERRM_W::new(self)
    }
    #[doc = "Bit 3 - STUPM"]
    #[inline(always)]
    #[must_use]
    pub fn stupm(&mut self) -> STUPM_W<OTG_DOEPMSK_SPEC, 3> {
        STUPM_W::new(self)
    }
    #[doc = "Bit 4 - OTEPDM"]
    #[inline(always)]
    #[must_use]
    pub fn otepdm(&mut self) -> OTEPDM_W<OTG_DOEPMSK_SPEC, 4> {
        OTEPDM_W::new(self)
    }
    #[doc = "Bit 5 - STSPHSRXM"]
    #[inline(always)]
    #[must_use]
    pub fn stsphsrxm(&mut self) -> STSPHSRXM_W<OTG_DOEPMSK_SPEC, 5> {
        STSPHSRXM_W::new(self)
    }
    #[doc = "Bit 6 - B2BSTUPM"]
    #[inline(always)]
    #[must_use]
    pub fn b2bstupm(&mut self) -> B2BSTUPM_W<OTG_DOEPMSK_SPEC, 6> {
        B2BSTUPM_W::new(self)
    }
    #[doc = "Bit 8 - OUTPKTERRM"]
    #[inline(always)]
    #[must_use]
    pub fn outpkterrm(&mut self) -> OUTPKTERRM_W<OTG_DOEPMSK_SPEC, 8> {
        OUTPKTERRM_W::new(self)
    }
    #[doc = "Bit 9 - BNAM"]
    #[inline(always)]
    #[must_use]
    pub fn bnam(&mut self) -> BNAM_W<OTG_DOEPMSK_SPEC, 9> {
        BNAM_W::new(self)
    }
    #[doc = "Bit 12 - BERRM"]
    #[inline(always)]
    #[must_use]
    pub fn berrm(&mut self) -> BERRM_W<OTG_DOEPMSK_SPEC, 12> {
        BERRM_W::new(self)
    }
    #[doc = "Bit 13 - NAKMSK"]
    #[inline(always)]
    #[must_use]
    pub fn nakmsk(&mut self) -> NAKMSK_W<OTG_DOEPMSK_SPEC, 13> {
        NAKMSK_W::new(self)
    }
    #[doc = "Bit 14 - NYETMSK"]
    #[inline(always)]
    #[must_use]
    pub fn nyetmsk(&mut self) -> NYETMSK_W<OTG_DOEPMSK_SPEC, 14> {
        NYETMSK_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// Register metadata consumed by the generic Reg API (32-bit, readable,
// writable with no set/clear-to-modify fields, resets to 0).
#[doc = "This register works with each of the OTG_DOEPINTx registers for all endpoints to generate an interrupt per OUT endpoint. The OUT endpoint interrupt for a specific status in the OTG_DOEPINTx register can be masked by writing into the corresponding bit in this register. Status bits are masked by default.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`otg_doepmsk::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`otg_doepmsk::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct OTG_DOEPMSK_SPEC;
impl crate::RegisterSpec for OTG_DOEPMSK_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`otg_doepmsk::R`](R) reader structure"]
impl crate::Readable for OTG_DOEPMSK_SPEC {}
#[doc = "`write(|w| ..)` method takes [`otg_doepmsk::W`](W) writer structure"]
impl crate::Writable for OTG_DOEPMSK_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets OTG_DOEPMSK to value 0"]
impl crate::Resettable for OTG_DOEPMSK_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// This file is part of dpdk. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/dpdk/master/COPYRIGHT. No part of dpdk, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2017 The developers of dpdk. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/dpdk/master/COPYRIGHT.
/// Extension trait adding a safe-looking wrapper for posting receive work
/// requests to a raw `*mut ibv_wq` verbs work queue.
pub trait ibv_wqEx: HasContextPointer
{
    // Delegates to the work queue's `post_recv` function pointer; on failure
    // `bad_recv_wr` receives the offending work request (per the verbs API).
    #[inline(always)]
    fn ibv_post_wq_recv(self, recv_wr: *mut ibv_recv_wr, bad_recv_wr: *mut *mut ibv_recv_wr) -> c_int;
}
impl ibv_wqEx for *mut ibv_wq
{
    #[inline(always)]
    fn ibv_post_wq_recv(self, recv_wr: *mut ibv_recv_wr, bad_recv_wr: *mut *mut ibv_recv_wr) -> c_int
    {
        // Null checks are debug-only; release builds trust the caller.
        debug_assert!(!self.is_null(), "self is null");
        debug_assert!(!recv_wr.is_null(), "recv_wr is null");
        debug_assert!(!bad_recv_wr.is_null(), "bad_recv_wr is null");
        // `post_recv` is an Option-wrapped function pointer in the generated
        // bindings; unwrap panics if the driver did not populate it.
        unsafe { (*self).post_recv.unwrap()(self, recv_wr, bad_recv_wr) }
    }
}
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::io::net::ip;
use std::mem;
use libc;
use raw::{mod, Loop, Handle, Allocated, Raw};
use {uvll, UvResult};
// NOTE(review): this module targets pre-1.0 Rust (`int`/`uint` types, `try!`,
// `mod` inside a use list) and will not compile on modern toolchains.
pub struct Udp {
    // Raw libuv UDP handle; see `new` for the (manual) deallocation contract.
    handle: *mut uvll::uv_udp_t,
}
impl Udp {
    /// Create a new uv_udp_t handle.
    ///
    /// This function is unsafe as a successful return value is not
    /// automatically deallocated.
    pub unsafe fn new(uv_loop: &Loop) -> UvResult<Udp> {
        let raw = Raw::new();
        try!(call!(uvll::uv_udp_init(uv_loop.raw(), raw.get())));
        Ok(Udp { handle: raw.unwrap() })
    }
    // Adopt an existing OS socket into this libuv handle.
    pub fn open(&mut self, sock: uvll::uv_os_socket_t) -> UvResult<()> {
        unsafe {
            try!(call!(uvll::uv_udp_open(self.handle, sock)));
            Ok(())
        }
    }
    // Bind the handle to a local address; bind flags are hard-coded to 0.
    pub fn bind(&mut self, addr: ip::SocketAddr) -> UvResult<()> {
        unsafe {
            let mut raw_addr: libc::sockaddr_storage = mem::zeroed();
            raw::addr_to_sockaddr(addr, &mut raw_addr);
            try!(call!(uvll::uv_udp_bind(self.handle,
                                         &raw_addr as *const _ as *const _,
                                         0)));
            Ok(())
        }
    }
    // Return the locally bound address of this socket.
    pub fn getsockname(&mut self) -> UvResult<ip::SocketAddr> {
        unsafe { raw::socket_name(&*self.handle, uvll::uv_udp_getsockname) }
    }
    // Join or leave a multicast group (direction chosen by `membership`);
    // the interface address is left null so libuv picks a default.
    pub fn set_membership(&mut self, addr: ip::IpAddr,
                          membership: uvll::uv_membership) -> UvResult<()> {
        let addr = addr.to_string().to_c_str();
        unsafe {
            try!(call!(uvll::uv_udp_set_membership(self.handle, addr.as_ptr(),
                                                   0 as *const _,
                                                   membership)));
            Ok(())
        }
    }
    // Toggle looping of outgoing multicast packets back to this host.
    pub fn set_multicast_loop(&mut self, on: bool) -> UvResult<()> {
        unsafe {
            let on = on as libc::c_int;
            try!(call!(uvll::uv_udp_set_multicast_loop(self.handle, on)));
            Ok(())
        }
    }
    // Set the TTL for outgoing multicast packets.
    pub fn set_multicast_ttl(&mut self, ttl: int) -> UvResult<()> {
        unsafe {
            let ttl = ttl as libc::c_int;
            try!(call!(uvll::uv_udp_set_multicast_ttl(self.handle, ttl)));
            Ok(())
        }
    }
    // Allow or disallow sending to broadcast addresses.
    pub fn set_broadcast(&mut self, on: bool) -> UvResult<()> {
        unsafe {
            let on = on as libc::c_int;
            try!(call!(uvll::uv_udp_set_broadcast(self.handle, on)));
            Ok(())
        }
    }
    // Set the TTL for outgoing unicast packets.
    pub fn set_ttl(&mut self, ttl: int) -> UvResult<()> {
        unsafe {
            let ttl = ttl as libc::c_int;
            try!(call!(uvll::uv_udp_set_ttl(self.handle, ttl)));
            Ok(())
        }
    }
    // Attempt a non-blocking send of one buffer to `addr`.
    pub fn try_send(&mut self, buf: &[u8],
                    addr: ip::SocketAddr) -> UvResult<()> {
        unsafe {
            let mut raw_addr: libc::sockaddr_storage = mem::zeroed();
            raw::addr_to_sockaddr(addr, &mut raw_addr);
            try!(call!(uvll::uv_udp_try_send(self.handle,
                                             &raw::slice_to_uv_buf(buf),
                                             1,
                                             &raw_addr as *const _ as *const _)));
            Ok(())
        }
    }
    // Start receiving; `alloc` supplies buffers, `recv` is invoked per datagram.
    pub fn recv_start(&mut self, alloc: uvll::uv_alloc_cb,
                      recv: uvll::uv_udp_recv_cb) -> UvResult<()> {
        unsafe {
            try!(call!(uvll::uv_udp_recv_start(self.handle, alloc, recv)));
            Ok(())
        }
    }
    // Stop receiving; the callbacks registered in recv_start are no longer called.
    pub fn recv_stop(&mut self) -> UvResult<()> {
        unsafe {
            try!(call!(uvll::uv_udp_recv_stop(self.handle)));
            Ok(())
        }
    }
}
// Tells the allocator framework how large a uv_udp_t is (queried from libuv).
impl Allocated for uvll::uv_udp_t {
    fn size(_self: Option<uvll::uv_udp_t>) -> uint {
        unsafe { uvll::uv_handle_size(uvll::UV_UDP) as uint }
    }
}
// Raw-handle round-tripping used by the generic Handle machinery.
impl Handle<uvll::uv_udp_t> for Udp {
    fn raw(&self) -> *mut uvll::uv_udp_t { self.handle }
    fn from_raw(t: *mut uvll::uv_udp_t) -> Udp { Udp { handle: t } }
}
|
use std::ops::{Add, Sub, Mul, Div, Index};
use std::cmp::{PartialEq};
/// A simple 3-component float vector with the usual arithmetic operators.
#[derive(Copy, Clone)]
struct Vector3 {
    x: f32,
    y: f32,
    z: f32,
}
impl Vector3 {
    //
    // Constructor
    //
    fn new(x: f32, y: f32, z: f32) -> Vector3 { Vector3 { x, y, z } }
    //
    // Basic Forms (unit/basis vectors; +y is up, +z is forward, +x is right)
    //
    fn back() -> Vector3 { Vector3 { x: 0.0f32, y: 0.0f32, z: -1.0f32 } }
    fn down() -> Vector3 { Vector3 { x: 0.0f32, y: -1.0f32, z: 0.0f32 } }
    fn left() -> Vector3 { Vector3 { x: -1.0f32, y: 0.0f32, z: 0.0f32 } }
    // Fix: previously returned the zero vector; forward is the opposite of back().
    fn forward() -> Vector3 { Vector3 { x: 0.0f32, y: 0.0f32, z: 1.0f32 } }
    fn up() -> Vector3 { Vector3 { x: 0.0f32, y: 1.0f32, z: 0.0f32 } }
    fn right() -> Vector3 { Vector3 { x: 1.0f32, y: 0.0f32, z: 0.0f32 } }
    fn one() -> Vector3 { Vector3 { x: 1.0f32, y: 1.0f32, z: 1.0f32 } }
    fn zero() -> Vector3 { Vector3 { x: 0.0f32, y: 0.0f32, z: 0.0f32 } }
    //
    // Set the components of an existing vector; returns &mut self for chaining
    //
    fn set(&mut self, x: f32, y: f32, z: f32) -> &mut Vector3 {
        self.x = x;
        self.y = y;
        self.z = z;
        self
    }
    //
    // Normalize the vector (unit length). NOTE: yields NaN components for the
    // zero vector since the length is 0.
    //
    fn normalize(&self) -> Vector3 {
        let len = (self.x * self.x + self.y * self.y + self.z * self.z).sqrt();
        Vector3 { x: self.x / len, y: self.y / len, z: self.z / len }
    }
    //
    // Calculate the Euclidean distance between two vectors.
    // Fix: the previous version omitted the square root and actually returned
    // the squared distance despite its name.
    //
    fn distance(a: Vector3, b: Vector3) -> f32 {
        ((b.x - a.x) * (b.x - a.x) + (b.y - a.y) * (b.y - a.y) + (b.z - a.z) * (b.z - a.z)).sqrt()
    }
    //
    // Component-wise maximum of two vectors
    //
    fn max(a: Vector3, b: Vector3) -> Vector3 {
        Vector3 {
            x: a.x.max(b.x),
            y: a.y.max(b.y),
            z: a.z.max(b.z),
        }
    }
    //
    // Component-wise minimum of two vectors
    //
    fn min(a: Vector3, b: Vector3) -> Vector3 {
        Vector3 {
            x: a.x.min(b.x),
            y: a.y.min(b.y),
            z: a.z.min(b.z),
        }
    }
    //
    // Linear interpolation: f = 0 gives a, f = 1 gives b (f is not clamped)
    //
    fn lerp(a: Vector3, b: Vector3, f: f32) -> Vector3 {
        Vector3 {
            x: a.x + f * (b.x - a.x),
            y: a.y + f * (b.y - a.y),
            z: a.z + f * (b.z - a.z),
        }
    }
    //
    // Calculate the dot product
    //
    fn dot(a: Vector3, b: Vector3) -> f32 {
        a.x * b.x + a.y * b.y + a.z * b.z
    }
    //
    // Calculate the cross product (right-handed)
    //
    fn cross(a: Vector3, b: Vector3) -> Vector3 {
        Vector3 {
            x: a.y * b.z - a.z * b.y,
            y: a.z * b.x - a.x * b.z,
            z: a.x * b.y - a.y * b.x,
        }
    }
}
//
// Vector3 + Vector3 operator (component-wise)
//
impl Add<Vector3> for Vector3 {
    type Output = Vector3;
    fn add(self, v: Vector3) -> Self::Output {
        Vector3 { x: self.x + v.x, y: self.y + v.y, z: self.z + v.z }
    }
}
//
// Vector3 - Vector3 operator (component-wise)
//
impl Sub<Vector3> for Vector3 {
    type Output = Vector3;
    fn sub(self, v: Vector3) -> Self::Output {
        Vector3 { x: self.x - v.x, y: self.y - v.y, z: self.z - v.z }
    }
}
//
// Vector3 * f32 operator (uniform scale).
// Fix: the y component previously scaled self.x instead of self.y.
//
impl Mul<f32> for Vector3 {
    type Output = Vector3;
    fn mul(self, f: f32) -> Self::Output {
        Vector3 { x: self.x * f, y: self.y * f, z: self.z * f }
    }
}
//
// Vector3 / f32 operator (uniform inverse scale; division by 0 yields inf/NaN)
//
impl Div<f32> for Vector3 {
    type Output = Vector3;
    fn div(self, f: f32) -> Self::Output {
        Vector3 { x: self.x / f, y: self.y / f, z: self.z / f }
    }
}
//
// Comparison operators (exact float equality, component-wise)
//
impl PartialEq<Vector3> for Vector3 {
    fn eq(&self, v: &Vector3) -> bool { self.x == v.x && self.y == v.y && self.z == v.z }
    fn ne(&self, v: &Vector3) -> bool { self.x != v.x || self.y != v.y || self.z != v.z }
}
//
// The index operator: 0 -> x, 1 -> y, 2 -> z.
// Fix: `Index::index` must return a reference (&f32); the previous arms
// returned f32 values and did not compile.
//
impl Index<i32> for Vector3 {
    type Output = f32;
    fn index(&self, index: i32) -> &Self::Output {
        match index {
            0 => &self.x,
            1 => &self.y,
            2 => &self.z,
            _ => panic!("Index out of bounds"),
        }
    }
}
// TODO: Add PartialOrd/Ord trait
// TODO: Add PartialOrd/Ord trait
|
#![allow(clippy::reversed_empty_ranges)]
use crate::pattern::Pattern;
use crate::{Offsets, Result};
use std::ops::{Bound, RangeBounds};
use unicode_normalization_alignments::UnicodeNormalization;
/// The possible offsets referential: whether an offsets pair refers to the
/// original string or to the normalized one.
pub enum OffsetReferential {
/// Offsets are expressed relative to the original string
Original,
/// Offsets are expressed relative to the normalized string
Normalized,
}
/// Represents a Range usable by the NormalizedString to index its content.
/// A Range can use indices relative to either the `Original` or the `Normalized` string
#[derive(Debug, Clone)]
pub enum Range<T: RangeBounds<usize> + Clone> {
/// A range of char indices over the original string
Original(T),
/// A range of char indices over the normalized string
Normalized(T),
}
impl<T> Range<T>
where
    T: RangeBounds<usize> + Clone,
{
    /// Extract the wrapped range, discarding the referential.
    fn unwrap(self) -> T {
        match self {
            Range::Original(r) | Range::Normalized(r) => r,
        }
    }
    /// Converts the current Range to a `std::ops::Range<usize>`. This requires the `max_len`
    /// of the represented string (in chars, not bytes) in order to cover the case where the
    /// original provided range was unbounded.
    fn into_full_range(self, max_len: usize) -> std::ops::Range<usize> {
        let bounds = self.unwrap();
        // Resolve the start bound to an inclusive start index.
        let start = match bounds.start_bound() {
            Bound::Unbounded => 0,
            Bound::Included(i) => *i,
            Bound::Excluded(i) => *i + 1,
        };
        // Resolve the end bound to an exclusive end index.
        let end = match bounds.end_bound() {
            Bound::Unbounded => max_len,
            Bound::Included(i) => *i + 1,
            Bound::Excluded(i) => *i,
        };
        start..end
    }
}
/// Defines the expected behavior for the delimiter of a Split Pattern
/// When splitting on `'-'` for example, with input `the-final--countdown`:
/// - Removed => `[ "the", "final", "countdown" ]`
/// - Isolated => `[ "the", "-", "final", "-", "-", "countdown" ]`
/// - MergedWithPrevious => `[ "the-", "final-", "-", "countdown" ]`
/// - MergedWithNext => `[ "the", "-final", "-", "-countdown" ]`
pub enum SplitDelimiterBehavior {
/// Discard the matched delimiter entirely
Removed,
/// Keep the delimiter as a standalone part
Isolated,
/// Glue the delimiter onto the end of the preceding part
MergedWithPrevious,
/// Glue the delimiter onto the start of the following part
MergedWithNext,
}
/// A `NormalizedString` takes care of processing an "original" string to modify
/// it and obtain a "normalized" string. It keeps both version of the string,
/// alignments information between both and provides an interface to retrieve
/// ranges of each string, using offsets from any of them.
///
/// It is possible to retrieve a part of the original string, by indexing it with
/// offsets from the normalized one, and the other way around too. It is also
/// possible to convert offsets from one referential to the other one easily.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct NormalizedString {
/// The original version of the string, before any modification
original: String,
/// The normalized version of the string, after all modifications
normalized: String,
/// Mapping from normalized string to original one: (start, end) for each
/// character of the normalized string. Indices are char indices (not bytes),
/// so `alignments.len()` always equals the normalized char count.
alignments: Vec<(usize, usize)>,
}
impl NormalizedString {
/// Return the normalized string
pub fn get(&self) -> &str {
&self.normalized
}
/// Return the original string
pub fn get_original(&self) -> &str {
&self.original
}
/// Return the offsets of the normalized part
pub fn offsets(&self) -> Offsets {
(0, self.len())
}
/// Return the offsets of the original part
pub fn offsets_original(&self) -> Offsets {
(0, self.len_original())
}
/// Convert the given offsets range from one referential to the other one:
/// `Original => Normalized` or `Normalized => Original`
///
/// Returns `None` when the range cannot be represented in the target
/// referential.
pub fn convert_offsets<T>(&self, range: Range<T>) -> Option<std::ops::Range<usize>>
where
T: RangeBounds<usize> + Clone,
{
match range {
Range::Original(_) => {
// `start`/`end` accumulate the answer; the bindings inside the two
// `if let` guards below intentionally shadow them only locally.
let (mut start, mut end) = (None, None);
let target = range.into_full_range(self.len_original());
// If we target before the start of the normalized string
if let Some((start, _)) = self.alignments.first() {
if target.end <= *start {
return Some(0..0);
}
}
// If we target after the end of the normalized string
if let Some((_, end)) = self.alignments.last() {
if target.start >= *end {
let len = self.len();
return Some(len..len);
}
}
// Otherwise let's find the range: scan alignments while they still
// overlap the target
self.alignments
.iter()
.enumerate()
.take_while(|(_, alignment)| target.end >= alignment.1)
.for_each(|(i, alignment)| {
if alignment.0 >= target.start && start.is_none() {
// Here we want to keep the first char in the normalized string
// that is on or *after* the target start.
start = Some(i);
}
if alignment.1 <= target.end {
end = Some(i + 1);
}
});
// If we didn't find the start, let's use the end of the normalized string
let start = start.unwrap_or_else(|| self.len());
// The end must be greater or equal to start, and might be None otherwise
let end = end.filter(|e| *e >= start);
Some(start..end?)
}
Range::Normalized(_) => {
// If we target 0..0 on an empty normalized string, we want to return the
// entire original one
let range = range.into_full_range(self.len());
if self.alignments.is_empty() && range == (0..0) {
Some(0..self.len_original())
} else {
// The alignments of the selected normalized chars directly give
// the covered span of the original string.
self.alignments
.get(range)
.map(|alignments| {
if alignments.is_empty() {
None
} else {
let start = alignments[0].0;
let end = alignments[alignments.len() - 1].1;
Some(start..end)
}
})
.flatten()
}
}
}
}
/// Return a range of the normalized string (indexing on char, not bytes)
pub fn get_range<T>(&self, range: Range<T>) -> Option<&str>
where
T: RangeBounds<usize> + Clone,
{
match range {
Range::Original(_) => self
.convert_offsets(range)
.map(|r| get_range_of(&self.normalized, r))
.flatten(),
Range::Normalized(r) => get_range_of(&self.normalized, r),
}
}
/// Return a range of the original string (indexing on char, not bytes)
pub fn get_range_original<T>(&self, range: Range<T>) -> Option<&str>
where
T: RangeBounds<usize> + Clone,
{
match range {
Range::Original(r) => get_range_of(&self.original, r),
Range::Normalized(_) => self
.convert_offsets(range)
.map(|r| get_range_of(&self.original, r))
.flatten(),
}
}
/// Return a new NormalizedString that contains only the specified range,
/// indexing on bytes. Any range that splits a UTF-8 char will return None.
///
/// If we want a slice of the `NormalizedString` based on a `Range::Normalized`,
/// the original part of the `NormalizedString` will contain any "additional"
/// content on the right, and also on the left. The left will be included
/// only if we are retrieving the very beginning of the string, since there
/// is no previous part. The right is always included, up to what's covered
/// by the next part of the normalized string. This is important to be able
/// to build a new `NormalizedString` from multiple contiguous slices
pub fn slice_bytes<T>(&self, range: Range<T>) -> Option<NormalizedString>
where
T: RangeBounds<usize> + Clone,
{
let (r, s) = match range {
Range::Original(_) => (
range.clone().into_full_range(self.original.len()),
&self.original,
),
Range::Normalized(_) => (
range.clone().into_full_range(self.normalized.len()),
&self.normalized,
),
};
let (mut start, mut end) = if r == (0..0) {
(Some(0), Some(0))
} else {
(None, None)
};
// Translate the byte range into a char range; `start`/`end` stay None if
// the byte offsets fall inside a multi-byte char, making the whole call
// return None below.
s.char_indices()
.enumerate()
.take_while(|(_, (b, _))| *b < r.end)
.filter(|(_, (b, _))| *b >= r.start)
.for_each(|(i, (b, c))| {
if b == r.start {
start = Some(i);
}
if b + c.len_utf8() == r.end {
end = Some(i + 1);
}
});
match range {
Range::Original(_) => self.slice(Range::Original(start?..end?)),
Range::Normalized(_) => self.slice(Range::Normalized(start?..end?)),
}
}
/// Return a new NormalizedString that contains only the specified range,
/// indexing on char
///
/// If we want a slice of the `NormalizedString` based on a `Range::Normalized`,
/// the original part of the `NormalizedString` will contain any "additional"
/// content on the right, and also on the left. The left will be included
/// only if we are retrieving the very beginning of the string, since there
/// is no previous part. The right is always included, up to what's covered
/// by the next part of the normalized string. This is important to be able
/// to build a new `NormalizedString` from multiple contiguous slices
pub fn slice<T>(&self, range: Range<T>) -> Option<NormalizedString>
where
T: RangeBounds<usize> + Clone,
{
let len_original = self.len_original();
let len_normalized = self.len();
// Find out the part of the normalized string we should keep
let r_normalized = match range {
Range::Original(_) => self.convert_offsets(range.clone())?,
Range::Normalized(_) => range.clone().into_full_range(len_normalized),
};
let r_original = match range {
Range::Original(_) => range.into_full_range(len_original),
Range::Normalized(_) => {
let end_range = self.convert_offsets(Range::Normalized(r_normalized.end..));
let mut range = self.convert_offsets(range)?;
// If we take the very beginning of the normalized string, we should take
// all the beginning of the original too
if r_normalized.start == 0 && range.start != 0 {
range.start = 0;
}
// If there is a void between the `end` char we target and the next one, we
// want to include everything in-between from the original string
match end_range {
Some(r) if r.start > range.end => range.end = r.start,
_ => {}
}
// If we target the end of the normalized but the original is longer
if r_normalized.end == self.alignments.len() && len_original > range.end {
range.end = len_original;
}
range
}
};
// We need to shift the alignments according to the part of the original string that we
// keep
let alignment_shift = r_original.start;
Some(Self {
original: get_range_of(&self.original, r_original)
.unwrap_or_default()
.into(),
normalized: get_range_of(&self.normalized, r_normalized.clone())
.unwrap_or_default()
.into(),
alignments: self
.alignments
.get(r_normalized)?
.to_vec()
.iter()
.map(|(start, end)| (start - alignment_shift, end - alignment_shift))
.collect(),
})
}
/// Applies transformations to the current normalized version, updating the current
/// alignments with the new ones.
/// This method expect an Iterator yielding each char of the new normalized string
/// with a `change` isize equals to:
/// - `1` if this is a new char
/// - `-N` if the char is right before N removed chars
/// - `0` if this char represents the old one (even if changed)
/// Since it is possible that the normalized string doesn't include some of the characters at
/// the beginning of the original one, we need an `initial_offset` which represents the number
/// of removed chars at the very beginning.
///
/// `change` should never be more than `1`. If multiple chars are added, each of
/// them has a `change` of `1`, but more doesn't make any sense.
/// We treat any value above `1` as `1`.
pub fn transform<I: Iterator<Item = (char, isize)>>(&mut self, dest: I, initial_offset: usize) {
let mut offset = -(initial_offset as isize);
let (normalized, alignments): (String, Vec<_>) = dest
.enumerate()
.map(|(index, (c, changes))| {
// A positive offset means we added characters. So we need to remove this offset
// from the current index to find out the previous id
let idx = (index as isize - offset) as usize;
offset += changes;
let align = if changes.is_positive() {
if idx < 1 {
// Insertion at the very front: nothing precedes it, so it
// aligns to the start of the original string.
(0, 0)
} else {
// This is a newly inserted character, so we use the alignment from the
// previous one
self.alignments[idx - 1]
}
} else {
self.alignments[idx]
};
// Then we keep only the char for string reconstruction
(c, align)
})
.unzip();
self.alignments = alignments;
self.normalized = normalized;
}
/// Applies NFD normalization
pub fn nfd(&mut self) -> &mut Self {
self.transform(self.get().to_owned().nfd(), 0);
self
}
/// Applies NFKD normalization
pub fn nfkd(&mut self) -> &mut Self {
self.transform(self.get().to_owned().nfkd(), 0);
self
}
/// Applies NFC normalization
pub fn nfc(&mut self) -> &mut Self {
self.transform(self.get().to_owned().nfc(), 0);
self
}
/// Applies NFKC normalization
pub fn nfkc(&mut self) -> &mut Self {
self.transform(self.get().to_owned().nfkc(), 0);
self
}
/// Applies filtering over our characters
pub fn filter<F: Fn(char) -> bool>(&mut self, keep: F) -> &mut Self {
// Iterate in reverse so each run of removed chars is attributed (as a
// negative `change`) to the kept char immediately before it; removals at
// the very front end up in `removed` and become the `initial_offset`.
let mut removed = 0;
let filtered = self
.normalized
.chars()
.rev()
.map(|c| {
if keep(c) {
if removed > 0 {
let res = (c, -(removed as isize));
removed = 0;
Some(res)
} else {
Some((c, 0))
}
} else {
removed += 1;
None
}
})
.collect::<Vec<_>>();
self.transform(filtered.into_iter().rev().filter_map(|o| o), removed);
self
}
/// Prepend the given string to ourself
pub fn prepend(&mut self, s: &str) -> &mut Self {
self.normalized.insert_str(0, s);
// Prepended chars have no original counterpart: align them to (0, 0).
self.alignments.splice(0..0, s.chars().map(|_| (0, 0)));
self
}
/// Append the given string to ourself
pub fn append(&mut self, s: &str) -> &mut Self {
self.normalized.push_str(s);
// Appended chars align to the (empty) position right after the last
// original char.
let last_offset = self.alignments.last().map_or((0, 0), |o| (o.1, o.1));
self.alignments.extend(s.chars().map(|_| last_offset));
self
}
/// Map our characters
pub fn map<F: Fn(char) -> char>(&mut self, map: F) -> &mut Self {
// 1:1 char mapping: the char count is unchanged, so the char-indexed
// `alignments` remain valid as-is.
self.normalized = self.normalized.chars().map(map).collect::<String>();
self
}
/// Calls the given function for each characters
pub fn for_each<F: FnMut(char)>(&mut self, foreach: F) -> &mut Self {
self.normalized.chars().for_each(foreach);
self
}
/// Lowercase
pub fn lowercase(&mut self) -> &mut Self {
let mut new_chars: Vec<(char, isize)> = vec![];
self.for_each(|c| {
// `to_lowercase` may yield several chars; extras are marked as insertions.
c.to_lowercase().enumerate().for_each(|(index, c)| {
new_chars.push((c, if index > 0 { 1 } else { 0 }));
})
});
self.transform(new_chars.into_iter(), 0);
self
}
/// Uppercase
pub fn uppercase(&mut self) -> &mut Self {
let mut new_chars: Vec<(char, isize)> = vec![];
self.for_each(|c| {
// `to_uppercase` may yield several chars; extras are marked as insertions.
c.to_uppercase().enumerate().for_each(|(index, c)| {
new_chars.push((c, if index > 0 { 1 } else { 0 }));
})
});
self.transform(new_chars.into_iter(), 0);
self
}
/// Replace anything that matches the pattern with the given content.
pub fn replace<P: Pattern>(&mut self, pattern: P, content: &str) -> Result<()> {
let matches = pattern.find_matches(&self.normalized)?;
let (normalized, alignments): (String, Vec<Offsets>) = matches
.into_iter()
.flat_map(|((start, end), is_match)| {
let len = end - start;
if is_match {
let original_offsets = self
.convert_offsets(Range::Normalized(start..end))
.expect("Bad offsets when replacing");
// Here, since we don't know the exact alignment, each character in
// the new normalized part will align to the whole replaced one.
itertools::Either::Left(content.chars().zip(std::iter::repeat((
original_offsets.start,
original_offsets.end,
))))
} else {
// No need to replace anything, just zip the relevant parts
itertools::Either::Right(
self.normalized
.chars()
.skip(start)
.take(len)
.zip(self.alignments.iter().skip(start).take(len).copied()),
)
}
})
.unzip();
self.normalized = normalized;
self.alignments = alignments;
Ok(())
}
/// Clear the normalized part of the string
pub fn clear(&mut self) {
self.normalized = "".into();
self.alignments = vec![];
}
/// Split the current string in many subparts. Specify what to do with the
/// delimiter.
///
/// This method will always ensure that the entire `self` is covered in the
/// produced subparts. This means that the delimiter parts will also be included,
/// and will appear empty if we don't want to include them (their `original`
/// part will still be present). It should always be possible to merge all the
/// subparts back to the original `NormalizedString`
///
/// ## Splitting Behavior for the delimiter
///
/// The behavior can be one of the followings:
/// When splitting on `'-'` for example, with input `the-final--countdown`:
/// - Removed => `[ "the", "", "final", "", "", "countdown" ]`
/// - Isolated => `[ "the", "-", "final", "-", "-", "countdown" ]`
/// - MergedWithPrevious => `[ "the-", "final-", "-", "countdown" ]`
/// - MergedWithNext => `[ "the", "-final", "-", "-countdown" ]`
pub fn split<P: Pattern>(
self,
pattern: P,
behavior: SplitDelimiterBehavior,
) -> Result<Vec<NormalizedString>> {
let matches = pattern.find_matches(&self.normalized)?;
// Process the matches according to the selected behavior: Vec<(Offsets, should_remove)>
use SplitDelimiterBehavior::*;
let splits = match behavior {
Isolated => matches
.into_iter()
.map(|(offsets, _)| (offsets, false))
.collect(),
Removed => matches,
MergedWithPrevious => {
// Extend the previous non-match part to swallow the first delimiter
// of each run; subsequent delimiters in a run stay standalone.
let mut previous_match = false;
matches
.into_iter()
.fold(vec![], |mut acc, (offsets, is_match)| {
if is_match && !previous_match {
if let Some(((_, end), _)) = acc.last_mut() {
*end = offsets.1;
} else {
acc.push((offsets, false));
}
} else {
acc.push((offsets, false));
}
previous_match = is_match;
acc
})
}
MergedWithNext => {
// Same as MergedWithPrevious but scanning from the right, so the
// delimiter attaches to the following part instead.
let mut previous_match = false;
let mut matches =
matches
.into_iter()
.rev()
.fold(vec![], |mut acc, (offsets, is_match)| {
if is_match && !previous_match {
if let Some(((start, _), _)) = acc.last_mut() {
*start = offsets.0;
} else {
acc.push((offsets, false));
}
} else {
acc.push((offsets, false));
}
previous_match = is_match;
acc
});
matches.reverse();
matches
}
};
// Then we split according to the computed splits
Ok(splits
.into_iter()
.map(|(offsets, remove)| {
let mut slice = self
.slice(Range::Normalized(offsets.0..offsets.1))
.expect("NormalizedString bad split");
if remove {
slice.clear();
}
slice
})
.collect())
}
/// Split off ourselves, returning a new Self that contains the range [at, len).
/// self will then contain the range [0, at).
/// The provided `at` indexes on `char` not bytes.
pub fn split_off(&mut self, at: usize) -> Self {
if at > self.len() {
return NormalizedString::from("");
}
// Split normalized: translate the char index `at` to a byte index first.
let byte_index = self.normalized.chars().enumerate().fold(0, |acc, (i, c)| {
if i < at {
acc + c.len_utf8()
} else {
acc
}
});
let normalized = self.normalized.split_off(byte_index);
let alignments = self.alignments.split_off(at);
// Split original: the end of the last retained alignment tells us where
// the original string must be cut.
let original_at = self.alignments.last().map(|(_, end)| *end).unwrap_or(0);
let original_byte_index = self.original.chars().enumerate().fold(0, |acc, (i, c)| {
if i < original_at {
acc + c.len_utf8()
} else {
acc
}
});
let original = self.original.split_off(original_byte_index);
NormalizedString {
original,
normalized,
alignments,
}
}
/// Merge with the given NormalizedString by appending it to self
pub fn merge_with(&mut self, other: &NormalizedString) {
let shift_len = self.len_original();
self.original.push_str(&other.original);
self.normalized.push_str(&other.normalized);
// The appended alignments must be shifted by our original length so they
// keep pointing at the right chars of the concatenated original.
self.alignments.extend(
other
.alignments
.iter()
.map(|(start, end)| (start + shift_len, end + shift_len)),
);
}
/// Remove any leading space(s) of the normalized string
pub fn lstrip(&mut self) -> &mut Self {
self.lrstrip(true, false)
}
/// Remove any trailing space(s) of the normalized string
pub fn rstrip(&mut self) -> &mut Self {
self.lrstrip(false, true)
}
/// Remove any leading and trailing space(s) of the normalized string
pub fn strip(&mut self) -> &mut Self {
self.lrstrip(true, true)
}
/// Shared implementation of lstrip/rstrip/strip.
fn lrstrip(&mut self, left: bool, right: bool) -> &mut Self {
let leading_spaces = if left {
self.get().chars().take_while(|c| c.is_whitespace()).count()
} else {
0
};
let trailing_spaces = if right {
self.get()
.chars()
.rev()
.take_while(|c| c.is_whitespace())
.count()
} else {
0
};
if leading_spaces > 0 || trailing_spaces > 0 {
// Drop the stripped chars; the trailing removals are attributed to the
// last kept char, the leading ones become `initial_offset`.
let transformation = self
.normalized
.chars()
.enumerate()
.filter_map(|(i, c)| {
if i < leading_spaces || i >= self.len() - trailing_spaces {
None
} else if i == self.len() - trailing_spaces - 1 {
Some((c, -(trailing_spaces as isize)))
} else {
Some((c, 0))
}
})
.collect::<Vec<_>>();
self.transform(transformation.into_iter(), leading_spaces);
}
self
}
/// Returns the length of the normalized string (counting chars not bytes)
pub fn len(&self) -> usize {
self.normalized.chars().count()
}
/// Returns the length of the original string (counting chars not bytes)
pub fn len_original(&self) -> usize {
self.original.chars().count()
}
/// Whether the normalized string is empty
pub fn is_empty(&self) -> bool {
self.normalized.len() == 0
}
}
/// Returns a range of the given string slice, by indexing chars instead of bytes.
///
/// Returns `None` when the resolved char range is empty or out of bounds
/// (note that this means any range over an empty string yields `None`).
pub fn get_range_of<T: RangeBounds<usize>>(s: &str, range: T) -> Option<&str> {
    let len = s.chars().count();
    // Resolve the generic bounds into a concrete [start, end) char range.
    let start = match range.start_bound() {
        Bound::Unbounded => 0,
        Bound::Included(i) => *i,
        Bound::Excluded(i) => *i + 1,
    };
    let end = match range.end_bound() {
        Bound::Unbounded => len,
        Bound::Included(i) => *i + 1,
        Bound::Excluded(i) => *i,
    };
    if start >= len || end > len || start >= end {
        None
    } else {
        // Map char indices back to byte offsets. The indices were already
        // `usize`, so the previous `as usize` casts were redundant; and
        // `s.len()` is O(1), so the eager `unwrap_or` is fine here.
        // `nth(end)` is `None` only when `end == len`, in which case the
        // slice extends to the end of `s`.
        let start_b = s.char_indices().map(|(i, _)| i).nth(start).unwrap_or(0);
        let end_b = s
            .char_indices()
            .map(|(i, _)| i)
            .nth(end)
            .unwrap_or(s.len());
        Some(&s[start_b..end_b])
    }
}
impl From<String> for NormalizedString {
    /// Build a NormalizedString whose normalized part equals the original,
    /// with the identity alignment (char `i` maps to `(i, i + 1)`).
    fn from(s: String) -> Self {
        let alignments = s.chars().enumerate().map(|(i, _)| (i, i + 1)).collect();
        Self {
            original: s.clone(),
            normalized: s,
            alignments,
        }
    }
}
impl From<&str> for NormalizedString {
    /// Delegates to the `From<String>` implementation.
    fn from(s: &str) -> Self {
        String::from(s).into()
    }
}
impl std::iter::FromIterator<NormalizedString> for NormalizedString {
fn from_iter<I: IntoIterator<Item = NormalizedString>>(iter: I) -> NormalizedString {
let mut normalized: NormalizedString = "".into();
for sub in iter {
normalized.merge_with(&sub)
}
normalized
}
}
#[cfg(test)]
mod tests {
#![allow(clippy::reversed_empty_ranges)]
// Unit tests covering alignment bookkeeping through normalization,
// filtering, stripping, slicing, splitting, replacing and merging.
use super::*;
use regex::Regex;
use unicode_categories::UnicodeCategories;
#[test]
fn new_chars() {
// NFD decomposes each accented char into base char + combining mark,
// both of which must align back to the same original char.
let mut n = NormalizedString::from("élégant");
n.nfd();
assert_eq!(
&n.alignments,
&[
(0, 1),
(0, 1),
(1, 2),
(2, 3),
(2, 3),
(3, 4),
(4, 5),
(5, 6),
(6, 7)
]
);
}
#[test]
fn unchanged() {
// Decompose then drop the combining marks: alignments return to identity.
let mut n = NormalizedString::from("élégant");
n.nfd().filter(|c| !c.is_mark_nonspacing());
assert_eq!(
&n.alignments,
&[(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7)]
);
}
#[test]
fn removed_chars() {
let mut n = NormalizedString::from("élégant");
n.filter(|c| c != 'n');
assert_eq!(
&n.alignments,
&[(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (6, 7)]
);
}
#[test]
fn mixed_addition_and_removal() {
let mut n = NormalizedString::from("élégant");
n.nfd().filter(|c| !c.is_mark_nonspacing() && c != 'n');
assert_eq!(
&n.alignments,
&[(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (6, 7)]
);
}
#[test]
fn range_conversion() {
let mut n = NormalizedString::from(" __Hello__ ");
n.filter(|c| !c.is_whitespace()).lowercase();
let hello_n = n.convert_offsets(Range::Original(6..11));
assert_eq!(hello_n, Some(2..7));
assert_eq!(
n.get_range(Range::Normalized(hello_n.clone().unwrap())),
Some("hello")
);
assert_eq!(
n.get_range_original(Range::Normalized(hello_n.unwrap())),
Some("Hello")
);
assert_eq!(n.get_range(Range::Original(6..11)), Some("hello"));
assert_eq!(n.get_range_original(Range::Original(6..11)), Some("Hello"));
}
#[test]
fn original_range() {
let mut n = NormalizedString::from("Hello_______ World!");
n.filter(|c| c != '_').lowercase();
let world_n = n.get_range(Range::Normalized(6..11)).unwrap();
let world_o = n.get_range_original(Range::Normalized(6..11)).unwrap();
assert_eq!(world_n, "world");
assert_eq!(world_o, "World");
let original_range = Range::Original(n.convert_offsets(Range::Normalized(6..11)).unwrap());
assert_eq!(n.get_range(original_range.clone()).unwrap(), "world");
assert_eq!(
n.get_range_original(original_range.clone()).unwrap(),
"World"
);
assert_eq!(original_range.into_full_range(n.len_original()), 13..18);
}
#[test]
fn added_around_edges() {
// Inserted chars at the edges align to the empty positions (0,0)/(n,n).
let mut n = NormalizedString::from("Hello");
n.transform(
vec![
(' ', 1),
('H', 0),
('e', 0),
('l', 0),
('l', 0),
('o', 0),
(' ', 1),
]
.into_iter(),
0,
);
assert_eq!(&n.normalized, " Hello ");
assert_eq!(
n.get_range_original(Range::Normalized(1..n.normalized.len() - 1)),
Some("Hello")
);
}
#[test]
fn remove_at_beginning() {
let mut n = NormalizedString::from(" Hello");
n.filter(|c| !c.is_whitespace());
assert_eq!(
n.get_range_original(Range::Normalized(1.."Hello".len())),
Some("ello")
);
assert_eq!(
n.get_range_original(Range::Normalized(0..n.normalized.len())),
Some("Hello")
);
}
#[test]
fn remove_at_end() {
let mut n = NormalizedString::from("Hello ");
n.filter(|c| !c.is_whitespace());
assert_eq!(n.get_range_original(Range::Normalized(0..4)), Some("Hell"));
assert_eq!(
n.get_range_original(Range::Normalized(0..n.normalized.len())),
Some("Hello")
);
}
#[test]
fn removed_around_both_edges() {
let mut n = NormalizedString::from(" Hello ");
n.filter(|c| !c.is_whitespace());
assert_eq!(&n.normalized, "Hello");
assert_eq!(
n.get_range_original(Range::Normalized(0.."Hello".len())),
Some("Hello")
);
assert_eq!(
n.get_range_original(Range::Normalized(1.."Hell".len())),
Some("ell")
);
}
#[test]
fn lstrip() {
let mut n = NormalizedString::from(" This is an example ");
n.lstrip();
assert_eq!(&n.normalized, "This is an example ");
assert_eq!(
n.get_range_original(Range::Normalized(0..n.normalized.len())),
Some("This is an example ")
);
}
#[test]
fn rstrip() {
let mut n = NormalizedString::from(" This is an example ");
n.rstrip();
assert_eq!(&n.normalized, " This is an example");
assert_eq!(
n.get_range_original(Range::Normalized(0..n.normalized.len())),
Some(" This is an example")
);
}
#[test]
fn strip() {
let mut n = NormalizedString::from(" This is an example ");
n.strip();
assert_eq!(&n.normalized, "This is an example");
assert_eq!(
n.get_range_original(Range::Normalized(0..n.normalized.len())),
Some("This is an example")
);
}
#[test]
fn prepend() {
let mut n = NormalizedString::from("there");
n.prepend("Hey ");
assert_eq!(&n.normalized, "Hey there");
assert_eq!(
n.alignments,
vec![
(0, 0),
(0, 0),
(0, 0),
(0, 0),
(0, 1),
(1, 2),
(2, 3),
(3, 4),
(4, 5)
]
);
assert_eq!(n.convert_offsets(Range::Normalized(0..4)), Some(0..0));
}
#[test]
fn append() {
let mut n = NormalizedString::from("Hey");
n.append(" there");
assert_eq!(&n.normalized, "Hey there");
assert_eq!(
n.alignments,
vec![
(0, 1),
(1, 2),
(2, 3),
(3, 3),
(3, 3),
(3, 3),
(3, 3),
(3, 3),
(3, 3)
]
);
assert_eq!(
n.convert_offsets(Range::Normalized(3.." there".len())),
Some(3..3)
);
}
#[test]
fn get_range() {
let s = String::from("Hello my name is John 👋");
assert_eq!(get_range_of(&s, ..), Some(&s[..]));
assert_eq!(get_range_of(&s, 17..), Some("John 👋"));
}
#[test]
fn merge() {
// Merge unmodified
let s = NormalizedString::from("A sentence that will be merged");
let mut merged = NormalizedString::from("A sentence");
let s2 = NormalizedString::from(" that will");
let s3 = NormalizedString::from(" be merged");
merged.merge_with(&s2);
merged.merge_with(&s3);
assert_eq!(s, merged);
// Merge grown normalized
let mut s = NormalizedString::from("A sentence that will be merged");
s.prepend(" ");
let mut merged = NormalizedString::from("A sentence");
let s2 = NormalizedString::from(" that will");
let s3 = NormalizedString::from(" be merged");
merged.prepend(" ");
merged.merge_with(&s2);
merged.merge_with(&s3);
assert_eq!(s, merged);
// Merge shrinked normalized
let mut s = NormalizedString::from(" A sentence that will be merged ");
s.strip();
let mut merged = NormalizedString::from(" A sentence");
merged.strip();
let s2 = NormalizedString::from(" that will");
let mut s3 = NormalizedString::from(" be merged ");
s3.rstrip();
merged.merge_with(&s2);
merged.merge_with(&s3);
assert_eq!(s, merged);
}
#[test]
fn slice() {
let mut s = NormalizedString::from("𝔾𝕠𝕠𝕕 𝕞𝕠𝕣𝕟𝕚𝕟𝕘");
s.nfkc();
assert_eq!(
s.slice(Range::Original(0..4)),
Some(NormalizedString {
original: "𝔾𝕠𝕠𝕕".to_string(),
normalized: "Good".to_string(),
alignments: vec![(0, 1), (1, 2), (2, 3), (3, 4)]
})
);
assert_eq!(
s.slice(Range::Normalized(0..4)),
Some(NormalizedString {
original: "𝔾𝕠𝕠𝕕".to_string(),
normalized: "Good".to_string(),
alignments: vec![(0, 1), (1, 2), (2, 3), (3, 4)]
})
);
// Make sure the sliced NormalizedString is still aligned as expected
let mut s = NormalizedString::from(" Good Morning! ");
s.strip();
// If we keep the whole slice
let slice = s.slice(Range::Original(..)).unwrap();
assert_eq!(
slice.get_range_original(Range::Normalized(0..4)),
Some("Good")
);
let slice = s.slice(Range::Normalized(..)).unwrap();
assert_eq!(
slice.get_range_original(Range::Normalized(0..4)),
Some("Good")
);
// If we keep after the modified piece
let slice = s.slice(Range::Original(4..15)).unwrap();
assert_eq!(
slice.get_range_original(Range::Normalized(0..3)),
Some("ood")
);
// If we keep only the modified piece
let slice = s.slice(Range::Original(3..16)).unwrap();
assert_eq!(
slice.get_range_original(Range::Normalized(0..4)),
Some("Good")
);
}
#[test]
fn slice_bytes() {
let mut s = NormalizedString::from("𝔾𝕠𝕠𝕕 𝕞𝕠𝕣𝕟𝕚𝕟𝕘");
s.nfkc();
assert_eq!(
s.slice_bytes(Range::Original(0..16)),
Some(NormalizedString {
original: "𝔾𝕠𝕠𝕕".to_string(),
normalized: "Good".to_string(),
alignments: vec![(0, 1), (1, 2), (2, 3), (3, 4)]
})
);
assert_eq!(
s.slice_bytes(Range::Original(17..)),
Some(NormalizedString {
original: "𝕞𝕠𝕣𝕟𝕚𝕟𝕘".to_string(),
normalized: "morning".to_string(),
alignments: vec![(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7)]
})
);
assert_eq!(
s.slice_bytes(Range::Normalized(0..4)),
Some(NormalizedString {
original: "𝔾𝕠𝕠𝕕".to_string(),
normalized: "Good".to_string(),
alignments: vec![(0, 1), (1, 2), (2, 3), (3, 4)]
})
);
assert_eq!(s.slice_bytes(Range::Original(0..10)), None);
// Check that we get a `None` if we try to split chars
for cut_at in 1..s.len() {
let res = s.slice_bytes(Range::Original(..cut_at));
// The chars in the original string all take 4 bytes.
assert!(if cut_at % 4 == 0 {
res.is_some()
} else {
res.is_none()
});
}
}
#[test]
fn slice_coverage() {
// Any two complementary slices must merge back into the original string.
let mut s = NormalizedString::from(" Hello friend ");
s.filter(|c| !c.is_whitespace());
assert_eq!(s.get(), "Hellofriend");
// Multiple slices with Normalized range
for cut_at in 1..s.len() {
let mut slices = vec![];
slices.push(s.slice(Range::Normalized(..cut_at)).unwrap());
slices.push(s.slice(Range::Normalized(cut_at..)).unwrap());
let rebuilt: NormalizedString = slices.into_iter().collect();
assert_eq!(rebuilt, s);
}
// Multiple slices with Original range
for cut_at in 1..s.len_original() {
let mut slices = vec![];
slices.push(s.slice(Range::Original(..cut_at)).unwrap());
slices.push(s.slice(Range::Original(cut_at..)).unwrap());
let rebuilt: NormalizedString = slices.into_iter().collect();
assert_eq!(rebuilt, s);
}
}
#[test]
fn replace() {
// Simple
let mut s = NormalizedString::from(" Hello friend ");
s.replace(' ', "_").unwrap();
assert_eq!(s.get(), "_Hello___friend_");
let mut s = NormalizedString::from("aaaab");
s.replace('a', "b").unwrap();
assert_eq!(s.get(), "bbbbb");
// Overlapping
let mut s = NormalizedString::from("aaaab");
s.replace("aaa", "b").unwrap();
assert_eq!(s.get(), "bab");
// Regex
let mut s = NormalizedString::from(" Hello friend ");
let re = Regex::new(r"\s+").unwrap();
s.replace(&re, "_").unwrap();
assert_eq!(s.get(), "_Hello_friend_");
}
#[test]
fn split() {
use SplitDelimiterBehavior::*;
let s = NormalizedString::from("The-final--countdown");
let test = |behavior: SplitDelimiterBehavior, result: Vec<&str>| {
let splits = s.clone().split('-', behavior).unwrap();
assert_eq!(splits.iter().map(|n| n.get()).collect::<Vec<_>>(), result);
};
test(Removed, vec!["The", "", "final", "", "", "countdown"]);
test(Isolated, vec!["The", "-", "final", "-", "-", "countdown"]);
test(MergedWithPrevious, vec!["The-", "final-", "-", "countdown"]);
test(MergedWithNext, vec!["The", "-final", "-", "-countdown"]);
}
}
|
use byteorder::*;
use crate::midi::MidiHandler;
use std::error::Error;
use std::fs::*;
use std::io::{Cursor, Write};
use std::path::*;
use serde_derive::{Serialize, Deserialize};
use serde_json;
mod command;
pub mod instruments;
mod seqtree;
mod track;
use self::command::*;
use self::seqtree::*;
use self::track::*;
/// Command bytes emitted at the start of track 0: percussion offset plus
/// global and channel volume (opcode/value pairs — presumably sequencer
/// opcodes; the meanings mirror the inline comments. TODO confirm against
/// the target format spec).
const PREAMBLE_TRACK_0: [u8; 6] = [
0xfa, 0x19, // percussion offset
0xe5, 0xc8, // global volume
0xed, 0xc8, // channel volume
];
/// Command bytes emitted at the start of every track other than track 0:
/// only the channel volume is set.
const PREAMBLE_OTHER_TRACK: [u8; 2] = [
0xed, 0xc8, // channel volume
];
/// Reference from a call-loop command to the position it targets.
#[derive(Copy, Clone, Debug)]
pub struct CallLoopRef {
// Index of the track the loop calls into
pub target_track: usize,
// Byte/command position inside the target track — TODO confirm the unit
pub ref_pos: u64,
}
/// A part of a song: a group of tracks, stored as indices into `Song::tracks`.
#[derive(Debug, Serialize, Deserialize)]
struct Part {
tracks: Vec<usize>,
}
/// A complete song: the parts (track groupings) and the flat list of tracks
/// they reference (top-level tracks first, call-loop subroutine tracks after).
#[derive(Debug, Serialize, Deserialize)]
pub struct Song {
parts: Vec<Part>,
tracks: Vec<Track>,
}
impl Song {
pub fn from_midi(
midi: &MidiHandler,
tempo_factor: f32,
optimize_loops: bool,
verbose: bool,
) -> Result<Song, Box<Error>> {
let tracks: Result<Vec<Track>, Box<Error>> = (0..8)
.filter_map(|voice| {
match Track::new(
midi.events_for_voice(voice),
midi.ticks_per_beat,
midi.max_time,
tempo_factor,
voice,
) {
Ok(track) => {
if track.commands.is_empty() {
None
} else {
Some(Ok(track))
}
}
Err(err) => Some(Err(err)),
}
})
.collect();
match tracks {
Ok(tracks) => {
let mut parts = Vec::new();
let part = Part {
tracks: tracks.iter().enumerate().map(|(i, _)| i).collect(),
};
parts.push(part);
if optimize_loops {
let top_level_tracks = tracks.len();
Ok(Song {
parts,
tracks: Song::optimize_call_loops(tracks, top_level_tracks, verbose),
})
} else {
Ok(Song { parts, tracks })
}
}
Err(err) => Err(err),
}
}
fn optimize_call_loops(
tracks: Vec<Track>,
top_level_tracks: usize,
verbose: bool,
) -> Vec<Track> {
let mut seqtree = SeqTree::new();
for (i, track) in tracks.iter().take(top_level_tracks).enumerate() {
seqtree.add_track(track, i);
}
let best_sequence = seqtree.best_sequence();
if verbose {
println!("optimal call loop sequence {:?}", best_sequence);
};
match best_sequence {
None => tracks,
Some(seq) => Song::optimize_call_loops(
Song::extract_sequence(tracks, seq, top_level_tracks),
top_level_tracks,
verbose,
),
}
}
fn extract_sequence(
tracks: Vec<Track>,
sequence: Sequence,
top_level_tracks: usize,
) -> Vec<Track> {
let sequence_length = sequence.commands.len();
let mut new_tracks = Vec::new();
for (i, track) in tracks.iter().take(top_level_tracks).enumerate() {
let mut new_track = Track {
commands: Vec::new(),
};
let locations = sequence.locations.iter().filter(|loc| loc.track_idx == i);
let mut last_index: Option<usize> = None;
for location in locations {
new_track
.commands
.extend_from_slice(&track.commands[last_index.unwrap_or(0)..location.cmd_idx]);
new_track.commands.push(ParameterizedCommand::new(
Some(0),
Some(0),
Some(0),
Command::CallLoop(tracks.len(), location.repeat_count),
));
last_index =
Some(location.cmd_idx + sequence_length * location.repeat_count as usize);
}
if last_index.is_none() || last_index.unwrap() < track.commands.len() {
new_track.commands.extend_from_slice(
&track.commands[last_index.unwrap_or(0)..track.commands.len()],
);
}
new_tracks.push(new_track);
}
if tracks.len() > top_level_tracks {
new_tracks.extend_from_slice(&tracks[top_level_tracks..tracks.len()]);
}
new_tracks.push(Track {
commands: sequence.commands,
});
new_tracks
}
pub fn from_json(path: &Path) -> Song {
let file = File::open(path).unwrap();
serde_json::from_reader(file).unwrap()
}
pub fn write_to_json(&self, path: &Path) {
let out = File::create(path).unwrap();
serde_json::to_writer_pretty(out, self).unwrap()
}
pub fn empty() -> Result<Song, Box<Error>> {
Ok(Song {
parts: vec![Part { tracks: vec![0] }],
tracks: vec![Track::new(&vec![], 24, 0, 0.3, 0)?],
})
}
pub fn get_part_tracks(&self, part_idx: usize) -> &[usize] {
self.parts[part_idx].tracks.as_slice()
}
pub fn get_num_tracks(&self) -> usize {
self.tracks.len()
}
pub fn write_track(
&self,
out: &mut Cursor<Vec<u8>>,
track_idx: usize,
call_loops: &mut Vec<CallLoopRef>,
) -> Result<(), Box<Error>> {
let track = &self.tracks[track_idx];
if !track.commands.is_empty() {
if self.parts.iter().any(|part| part.tracks[0] == track_idx) {
out.write(&PREAMBLE_TRACK_0)?;
} else if self
.parts
.iter()
.any(|part| part.tracks.contains(&track_idx))
{
out.write(&PREAMBLE_OTHER_TRACK)?;
}
track.write(out, call_loops)?;
out.write_u8(0x00)?;
}
Ok(())
}
}
|
use std::cmp::Ordering;
use std::collections::BinaryHeap;
use std::collections::HashMap;
use colored::*;
/// Solves the Day 23 Part 1 puzzle with respect to the given input.
pub fn part_1(input: String) {
    let mut lines = input.lines().skip(2);
    let top = lines.next().unwrap();
    let bot = lines.next().unwrap();
    // Rooms A-D occupy positions 11-14, 15-18, 19-22 and 23-26; their letters
    // sit at text columns 3, 5, 7 and 9. The two upper slots of each room come
    // from the input, while the two lower slots already hold the room's own
    // colour and start out finished.
    let owners = ['A', 'B', 'C', 'D'];
    let mut pods = [Pod { pos: 0, clr: 'A', fin: false }; 16];
    for room in 0..4 {
        let column = 3 + 2 * room;
        let base = 11 + 4 * room;
        pods[4 * room] = Pod {
            pos: base,
            clr: top.chars().nth(column).unwrap(),
            fin: false,
        };
        pods[4 * room + 1] = Pod {
            pos: base + 1,
            clr: bot.chars().nth(column).unwrap(),
            fin: false,
        };
        pods[4 * room + 2] = Pod {
            pos: base + 2,
            clr: owners[room],
            fin: true,
        };
        pods[4 * room + 3] = Pod {
            pos: base + 3,
            clr: owners[room],
            fin: true,
        };
    }
    let cost = solve(State { cost: 0, pods }).unwrap();
    println!("{}", cost);
}
/// Solves the Day 23 Part 2 puzzle with respect to the given input.
pub fn part_2(input: String) {
    let mut lines = input.lines().skip(2);
    let top = lines.next().unwrap();
    let bot = lines.next().unwrap();
    // Part 2 unfolds the diagram: two fixed extra rows are inserted between
    // the input's top and bottom rows. No pod starts in a finished position.
    let inserted = [['D', 'D'], ['C', 'B'], ['B', 'A'], ['A', 'C']];
    let mut pods = [Pod { pos: 0, clr: 'A', fin: false }; 16];
    for room in 0..4 {
        let column = 3 + 2 * room;
        let base = 11 + 4 * room;
        pods[4 * room] = Pod {
            pos: base,
            clr: top.chars().nth(column).unwrap(),
            fin: false,
        };
        pods[4 * room + 1] = Pod {
            pos: base + 1,
            clr: inserted[room][0],
            fin: false,
        };
        pods[4 * room + 2] = Pod {
            pos: base + 2,
            clr: inserted[room][1],
            fin: false,
        };
        pods[4 * room + 3] = Pod {
            pos: base + 3,
            clr: bot.chars().nth(column).unwrap(),
            fin: false,
        };
    }
    let cost = solve(State { cost: 0, pods }).unwrap();
    println!("{}", cost);
}
/// Solves the Day 23 puzzle with respect to the given input.
fn solve(start: State) -> Option<usize> {
    // Dijkstra over board configurations, keyed by their string encoding.
    let mut costs: HashMap<String, usize> = HashMap::new();
    costs.insert(start.to_string(), start.cost);
    let mut heap = BinaryHeap::new();
    heap.push(start);
    while let Some(state) = heap.pop() {
        if is_goal_state(&state) {
            return Some(state.cost);
        }
        // Skip stale heap entries superseded by a cheaper path.
        let repr = state.to_string();
        if state.cost > costs[&repr] {
            continue;
        }
        for next in get_next_states(&state) {
            let key = next.to_string();
            if let Some(&best) = costs.get(&key) {
                if next.cost >= best {
                    continue;
                }
            }
            costs.insert(key, next.cost);
            heap.push(next);
        }
    }
    None
}
/// Represents a state of the diagram, ordered by cumulative energy cost.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
struct State {
    cost: usize,
    pods: [Pod; 16],
}
// Cost ordering is reversed so that `BinaryHeap` (a max-heap) pops the
// cheapest state first; ties are broken by the pod layout for determinism.
impl Ord for State {
    fn cmp(&self, other: &Self) -> Ordering {
        self.cost
            .cmp(&other.cost)
            .reverse()
            .then_with(|| self.pods.cmp(&other.pods))
    }
}
impl PartialOrd for State {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(Ord::cmp(self, other))
    }
}
impl State {
    /// Converts this state into an array of characters.
    fn to_chars(&self) -> [char; 27] {
        let mut board = ['.'; 27];
        for pod in self.pods {
            board[pod.pos] = pod.clr;
        }
        board
    }
    /// Converts this state into a string.
    fn to_string(&self) -> String {
        self.to_chars().iter().collect()
    }
    /// Draws a (colourful!) representation of the given state.
    #[allow(dead_code)]
    fn draw(&self) {
        let chars = self.to_chars();
        let tinted = |pos: usize| match chars[pos] {
            'A' => "A".red(),
            'B' => "B".green(),
            'C' => "C".blue(),
            'D' => "D".yellow(),
            _ => ".".white(),
        };
        println!("Cost = {}.\n", self.cost);
        // Hallway row.
        print!(" ");
        for i in 0..11 {
            print!("{}", tinted(i));
        }
        println!();
        // Four room rows, one cell per room.
        for row in 0..4 {
            print!(" ");
            for room in 0..4 {
                print!(" {}", tinted(4 * room + 11 + row));
            }
            println!();
        }
        println!("{}", "-".repeat(40));
    }
}
/// Represents an amphipod (or "pod") instance in the diagram.
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
struct Pod {
    pos: usize,
    clr: char,
    fin: bool,
}
impl Pod {
    /// Returns the energy required to move this pod to an adjacent cell.
    fn energy(&self) -> usize {
        const STEP_COSTS: [usize; 4] = [1, 10, 100, 1000];
        match "ABCD".find(self.clr) {
            Some(kind) => STEP_COSTS[kind],
            // Unknown colours are a bug in state construction.
            None => panic!(),
        }
    }
}
/// Reports whether all the pods in the given state are in their respective rooms.
fn is_goal_state(state: &State) -> bool {
    // Empty hallway, then each room (4 cells) filled with its own colour.
    const GOAL: &str = "...........AAAABBBBCCCCDDDD";
    state.to_string() == GOAL
}
/// Returns all the successors to the given state.
fn get_next_states(state: &State) -> Vec<State> {
    let mut next_states = Vec::new();
    for (i, pod) in state.pods.iter().enumerate() {
        // Finished pods never move again.
        if pod.fin {
            continue;
        }
        if pod.pos <= 10 {
            // A hallway pod can only move into its destination room.
            next_states.extend(get_next_state_from_hallway(state, i));
        } else {
            // A room pod can only move out into the hallway.
            next_states.extend(get_next_state_from_room(state, i));
        }
    }
    next_states
}
/// Returns all the successors to the given state where the provided hallway pod moves.
fn get_next_state_from_hallway(state: &State, index: usize) -> Option<State> {
    let chars = state.to_chars();
    let pod = state.pods[index];
    let room = "ABCD".find(pod.clr).unwrap();
    let door = 2 + 2 * room;
    let beg = 11 + 4 * room;
    let end = beg + 4;
    // The destination room must contain only empty cells or matching pods.
    if !chars[beg..end].iter().all(|&c| c == '.' || c == pod.clr) {
        return None;
    }
    // The hallway between the pod and the room's door must be clear.
    let path = if pod.pos < door {
        (pod.pos + 1)..door
    } else {
        door..pod.pos
    };
    if chars[path].iter().any(|&c| c != '.') {
        return None;
    }
    // Move into the deepest free slot of the room and mark the pod finished.
    let slot = chars[beg..end].iter().rposition(|&c| c == '.').unwrap();
    let dest = beg + slot;
    let mut pods = state.pods;
    pods[index] = Pod {
        pos: dest,
        clr: pod.clr,
        fin: true,
    };
    Some(State {
        cost: state.cost + pod.energy() * dist(pod.pos, dest),
        pods,
    })
}
/// Returns all the successors to the given state where the provided room pod moves.
fn get_next_state_from_room(state: &State, index: usize) -> Vec<State> {
    let chars = state.to_chars();
    let pod = state.pods[index];
    let room = (pod.pos - 11) / 4;
    let door = 2 + 2 * room;
    let beg = 11 + 4 * room;
    // Every cell above the pod inside its room must be clear.
    if chars[beg..pod.pos].iter().any(|&c| c != '.') {
        return vec![];
    }
    let mut next_states = Vec::new();
    // Walks the hallway in one direction, stopping at the first blocker and
    // skipping the cells directly above doors (pods may not rest there).
    let mut walk = |positions: &mut dyn Iterator<Item = usize>| {
        for pos in positions {
            if chars[pos] != '.' {
                break;
            }
            if matches!(pos, 2 | 4 | 6 | 8) {
                continue;
            }
            let mut pods = state.pods;
            pods[index] = Pod {
                pos,
                clr: pod.clr,
                fin: false,
            };
            next_states.push(State {
                cost: state.cost + pod.energy() * dist(pod.pos, pos),
                pods,
            });
        }
    };
    // Rightwards from the door, then leftwards.
    walk(&mut (door + 1..11));
    walk(&mut (0..door).rev());
    next_states
}
/// Returns the L1 norm between the given positions.
fn dist(pos_1: usize, pos_2: usize) -> usize {
    let (r1, c1) = coords(pos_1);
    let (r2, c2) = coords(pos_2);
    let dr = if r1 >= r2 { r1 - r2 } else { r2 - r1 };
    let dc = if c1 >= c2 { c1 - c2 } else { c2 - c1 };
    dr + dc
}
/// Returns the Cartesian coordinates of the given position.
fn coords(pos: usize) -> (usize, usize) {
    match pos {
        // Hallway cells sit on row 0.
        0..=10 => (0, pos),
        // Room cells: 4 per room, rooms at columns 2, 4, 6, 8, rows 1-4.
        _ => (1 + (pos - 11) % 4, 2 + 2 * ((pos - 11) / 4)),
    }
}
|
use std::collections::{HashMap, HashSet};
use std::hash::{Hash, Hasher};
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
pub struct Point(pub usize, pub usize);
impl Point {
    /// Builds the line through `self` and `other`. Vertical lines (infinite
    /// slope) record their shared x-coordinate separately.
    fn line(&self, other: &Point) -> Line {
        let rise = other.1 as f64 - self.1 as f64;
        let run = other.0 as f64 - self.0 as f64;
        let m = rise / run;
        let b = self.1 as f64 - self.0 as f64 * m;
        let x = match other.0 == self.0 {
            true => Some(self.0),
            false => None,
        };
        Line { m, b, x }
    }
}
#[derive(Copy, Clone, Debug)]
pub struct Line {
    m: f64,
    b: f64,
    x: Option<usize>,
}
impl PartialEq for Line {
    fn eq(&self, other: &Self) -> bool {
        // Vertical lines compare by x-coordinate; sloped lines compare slope
        // and intercept within a small tolerance.
        const EPS: f64 = 0.000_000_1;
        match (self.x, other.x) {
            (Some(a), Some(b)) => a == b,
            (None, None) => (self.m - other.m).abs() < EPS && (self.b - other.b).abs() < EPS,
            _ => false,
        }
    }
}
impl Eq for Line {}
impl Hash for Line {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.x.hash(state);
        // Only sloped lines hash their coefficients; f64 is not Hash, so use
        // the raw bit patterns.
        if self.x.is_none() {
            self.m.to_bits().hash(state);
            self.b.to_bits().hash(state);
        }
    }
}
/// An asteroid field: the set of occupied grid points.
pub struct Map {
    points: Vec<Point>,
}
impl Map {
    pub fn new<F: Iterator<Item = String>>(iter: F) -> Self {
        Map {
            points: Self::parse(iter),
        }
    }
    /// Collects a `Point(x, y)` for every '#' cell of the grid.
    fn parse<F: Iterator<Item = String>>(iter: F) -> Vec<Point> {
        let mut points = Vec::new();
        for (j, row) in iter.enumerate() {
            for (i, c) in row.chars().enumerate() {
                if c == '#' {
                    points.push(Point(i, j));
                }
            }
        }
        points
    }
    pub fn visible_from(&self) -> HashMap<Point, usize> {
        // The algorithm is as follows. We compare each point with each other point. If the
        // second point is on the same line and in the same direction as another point, then only
        // one of those points is visible. We therefore use the pair (line, ordering) as a
        // unique key to de-duplicate to find the number of other points visible from this one.
        let mut counts = HashMap::new();
        for p in &self.points {
            let directions: HashSet<_> = self
                .points
                .iter()
                .filter(|q| *q != p)
                .map(|q| (p.line(q), p.cmp(q)))
                .collect();
            counts.insert(*p, directions.len());
        }
        counts
    }
}
#[cfg(test)]
mod tests {
    use super::{Line, Map, Point};
    use std::f64::INFINITY;
    use std::io;
    use std::io::BufRead;
    // Parses a trimmed ASCII grid into a `Map`.
    fn map(s: &str) -> Map {
        Map::new(io::Cursor::new(s.trim()).lines().map(|r| r.unwrap()))
    }
    // Returns the point that sees the most other asteroids, with its count.
    fn best_point(s: &str) -> (Point, usize) {
        map(s)
            .visible_from()
            .iter()
            .fold((Point(0, 0), 0), |(p, pcount), (&q, &qcount)| {
                if qcount > pcount {
                    (q, qcount)
                } else {
                    (p, pcount)
                }
            })
    }
    #[test]
    fn lines() {
        assert_eq!(
            Point(0, 0).line(&Point(1, 1)),
            Line {
                m: 1.0,
                b: 0.0,
                x: None
            }
        );
        assert_eq!(
            Point(0, 1).line(&Point(1, 3)),
            Line {
                m: 2.0,
                b: 1.0,
                x: None
            }
        );
        assert_eq!(
            Point(1, 10).line(&Point(3, 17)),
            Line {
                m: 3.5,
                b: 6.5,
                x: None
            }
        );
        // The specific infinities don't matter here because they won't be compared.
        assert_eq!(
            Point(1, 10).line(&Point(1, 20)),
            Line {
                m: INFINITY,
                b: INFINITY,
                x: Some(1)
            }
        );
    }
    #[test]
    fn examples() {
        // NOTE(review): maps must stay flush-left — each row is consumed
        // verbatim and leading spaces would shift the parsed x-coordinates.
        let map = "
......#.#.
#..#.#....
..#######.
.#.#.###..
.#..#.....
..#....#.#
#..#....#.
.##.#..###
##...#..#.
.#....####
";
        assert_eq!(best_point(map), (Point(5, 8), 33));
        let map = "
#.#...#.#.
.###....#.
.#....#...
##.#.#.#.#
....#.#.#.
.##..###.#
..#...##..
..##....##
......#...
.####.###.
";
        assert_eq!(best_point(map), (Point(1, 2), 35));
        let map = "
.#..#..###
####.###.#
....###.#.
..###.##.#
##.##.#.#.
....###..#
..#.#..#.#
#..#.#.###
.##...##.#
.....#.#..
";
        assert_eq!(best_point(map), (Point(6, 3), 41));
        let map = "
.#..##.###...#######
##.############..##.
.#.######.########.#
.###.#######.####.#.
#####.##.#.##.###.##
..#####..#.#########
####################
#.####....###.#.#.##
##.#################
#####.##.###..####..
..######..##.#######
####.##.####...##..#
.#####..#.######.###
##...#.##########...
#.##########.#######
.####.#.###.###.#.##
....##.##.###..#####
.#.#.###########.###
#.#.#.#####.####.###
###.##.####.##.#..##
";
        assert_eq!(best_point(map), (Point(11, 13), 210));
    }
}
|
extern crate smpl;
extern crate find_folder;
extern crate toml;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate irmatch;
#[macro_use]
extern crate failure;
mod input;
mod script_lib;
mod library;
mod game;
mod assets;
mod display;
mod game_screen;
mod title_screen;
use failure::Error;
fn main() {
    let library = library::scan_library();
    let story = title_screen::title_screen(&library);
    // Report failures on stderr rather than stdout so piped/game output stays
    // clean; the trailing `return` in the old match was redundant.
    if let Err(e) = run(story) {
        eprintln!("{}", e);
    }
}
/// Starts the selected story, if any; returns early with the first error.
fn run(story: Option<&library::StoryHandle>) -> Result<(), Error> {
    // No story selected means the user backed out of the title screen.
    if let Some(story) = story {
        let game_instance = game::GameInstance::new(story)?;
        game_screen::game_screen(game_instance)?;
    }
    Ok(())
}
|
pub mod channel;
pub mod gateway;
pub mod guild;
pub mod user;
pub mod voice;
use std::hash::Hash;
/// Efficient cacheable entities mapping to the models returned from Discord's
/// API.
///
/// For example, the [`EmojiEntity`] does not contain the user data within it,
/// but contains only the ID of the user. This can act similar to foreign keys
/// in a relational database.
///
/// [`EmojiEntity`]: emoji/struct.EmojiEntity.html
pub trait Entity: Send + Sync {
    /// The key this entity is cached under; cheap to copy and hashable so it
    /// can serve as a map key.
    type Id: Copy + Eq + Hash + Send + Sync;
    /// Return the ID of the entity.
    ///
    /// For entities like the [`EmojiEntity`] this will return an ID consisting
    /// of the emoji's ID, while for entities like the [`MemberEntity`] this
    /// will return a tuple pair of the member's guild ID and member's user ID.
    ///
    /// [`EmojiEntity`]: emoji/struct.EmojiEntity.html
    /// [`MemberEntity`]: member/struct.MemberEntity.html
    fn id(&self) -> Self::Id;
}
|
use pnet::packet::Packet;
use pnet::packet::ethernet::{EtherTypes, EthernetPacket};
use pnet::packet::ethernet::EtherType;
/// Classifies an Ethernet frame, logging frames of unhandled ethertypes.
/// Returns the frame's ethertype for IPv4, or `EtherType(0x0000)` otherwise.
pub fn handler(interface_name: &str, ethernet: &EthernetPacket) -> EtherType {
    match ethernet.get_ethertype() {
        //EtherTypes::Ipv4 => handle_ipv4_packet(interface_name, ethernet, ip_defrag_hash_map),
        EtherTypes::Ipv4 => EtherTypes::Ipv4,
        other => {
            println!("[{}]: DataLink!: {} > {}; ethertype: {:?} length: {}",
                     interface_name,
                     ethernet.get_source(),
                     ethernet.get_destination(),
                     other,
                     ethernet.packet().len());
            EtherType(0x0000)
        }
    }
}
|
//!
//! This crate can be used for tests that accompany hacspecs.
//!
pub mod prelude;
pub mod rand;
pub mod test_vectors;
/// Convert a hex string to a byte vector.
pub fn hex_to_bytes(hex: &str) -> Vec<u8> {
    // Hex encoding always uses exactly two characters per byte.
    assert!(hex.len() % 2 == 0);
    hex.as_bytes()
        .chunks(2)
        .map(|pair| {
            let digits = std::str::from_utf8(pair).unwrap();
            u8::from_str_radix(digits, 16).unwrap()
        })
        .collect()
}
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
// reinject stat* as fstatat unittest
use reverie::Tool;
/// Reverie tool with no state of its own; the default `Tool` behavior is all
/// these reinjection tests need.
#[derive(Debug, Default, Clone)]
struct LocalState;
#[reverie::tool]
impl Tool for LocalState {
    type GlobalState = ();
    type ThreadState = ();
}
#[cfg(all(not(sanitized), test))]
mod tests {
    use std::mem::MaybeUninit;
    use reverie_ptrace::testing::check_fn;
    use super::*;
    #[test]
    fn stat_can_be_reinjected() {
        check_fn::<LocalState, _>(|| {
            // NUL-terminated path, passed straight to the raw libc calls.
            let path = "/proc/self/exe\0".as_ptr() as _;
            let fd = unsafe { libc::open(path, libc::O_RDONLY) };
            assert!(fd > 0);
            let mut stat_result: MaybeUninit<libc::stat> = MaybeUninit::uninit();
            let mut lstat_result: MaybeUninit<libc::stat> = MaybeUninit::uninit();
            let mut fstat_result: MaybeUninit<libc::stat> = MaybeUninit::uninit();
            assert_eq!(0, unsafe { libc::stat(path, stat_result.as_mut_ptr()) });
            let stat_result = unsafe { stat_result.assume_init() };
            assert_eq!(0, unsafe { libc::lstat(path, lstat_result.as_mut_ptr()) });
            let lstat_result = unsafe { lstat_result.assume_init() };
            assert_eq!(0, unsafe { libc::fstat(fd, fstat_result.as_mut_ptr()) });
            let fstat_result = unsafe { fstat_result.assume_init() };
            // stat follows the link, so it must agree with fstat on the open file…
            assert_eq!(stat_result.st_ino, fstat_result.st_ino);
            // …while lstat reports the link entry itself (assumes
            // /proc/self/exe is a symlink, which it is on Linux).
            assert_ne!(stat_result.st_ino, lstat_result.st_ino);
        })
    }
    // glibc doesn't provide wrapper for statx
    unsafe fn statx(
        dirfd: i32,
        path: *const libc::c_char,
        flags: i32,
        mask: u32,
        statxbuf: *mut libc::statx,
    ) -> i64 {
        libc::syscall(libc::SYS_statx, dirfd, path, flags, mask, statxbuf)
    }
    #[test]
    fn statx_fstat_returns_same_ino() {
        check_fn::<LocalState, _>(|| {
            let path = "/proc/self/exe\0".as_ptr() as _;
            let dirfd = libc::AT_FDCWD;
            let mut fstatat_result: MaybeUninit<libc::stat> = MaybeUninit::uninit();
            let mut statx_result: MaybeUninit<libc::statx> = MaybeUninit::uninit();
            assert_eq!(0, unsafe {
                libc::fstatat(
                    dirfd,
                    path,
                    fstatat_result.as_mut_ptr(),
                    libc::AT_SYMLINK_NOFOLLOW,
                )
            });
            let fstatat_result = unsafe { fstatat_result.assume_init() };
            assert_eq!(0, unsafe {
                statx(
                    dirfd,
                    path,
                    libc::AT_SYMLINK_NOFOLLOW,
                    libc::STATX_INO,
                    statx_result.as_mut_ptr(),
                )
            });
            let statx_result = unsafe { statx_result.assume_init() };
            // Both calls used AT_SYMLINK_NOFOLLOW, so they must see the same inode.
            assert_eq!(fstatat_result.st_ino, statx_result.stx_ino);
        })
    }
}
|
// Field reader aliases for the TEMP_SR register.
// NOTE(review): this file looks machine-generated (svd2rust style); prefer
// regenerating from the SVD description over editing by hand.
#[doc = "Reader of register TEMP_SR"]
pub type R = crate::R<u32, super::TEMP_SR>;
#[doc = "Reader of field `TS1_ITEF`"]
pub type TS1_ITEF_R = crate::R<bool, bool>;
#[doc = "Reader of field `TS1_ITLF`"]
pub type TS1_ITLF_R = crate::R<bool, bool>;
#[doc = "Reader of field `TS1_ITHF`"]
pub type TS1_ITHF_R = crate::R<bool, bool>;
#[doc = "Reader of field `TS1_AITEF`"]
pub type TS1_AITEF_R = crate::R<bool, bool>;
#[doc = "Reader of field `TS1_AITLF`"]
pub type TS1_AITLF_R = crate::R<bool, bool>;
#[doc = "Reader of field `TS1_AITHF`"]
pub type TS1_AITHF_R = crate::R<bool, bool>;
#[doc = "Reader of field `TS1_RDY`"]
pub type TS1_RDY_R = crate::R<bool, bool>;
// Bit accessors for TEMP_SR. Bit 3 and bits 7-14 are not exposed here —
// presumably reserved; confirm against the device reference manual.
impl R {
    #[doc = "Bit 0 - TS1_ITEF"]
    #[inline(always)]
    pub fn ts1_itef(&self) -> TS1_ITEF_R {
        TS1_ITEF_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - TS1_ITLF"]
    #[inline(always)]
    pub fn ts1_itlf(&self) -> TS1_ITLF_R {
        TS1_ITLF_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - TS1_ITHF"]
    #[inline(always)]
    pub fn ts1_ithf(&self) -> TS1_ITHF_R {
        TS1_ITHF_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 4 - TS1_AITEF"]
    #[inline(always)]
    pub fn ts1_aitef(&self) -> TS1_AITEF_R {
        TS1_AITEF_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - TS1_AITLF"]
    #[inline(always)]
    pub fn ts1_aitlf(&self) -> TS1_AITLF_R {
        TS1_AITLF_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - TS1_AITHF"]
    #[inline(always)]
    pub fn ts1_aithf(&self) -> TS1_AITHF_R {
        TS1_AITHF_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 15 - TS1_RDY"]
    #[inline(always)]
    pub fn ts1_rdy(&self) -> TS1_RDY_R {
        TS1_RDY_R::new(((self.bits >> 15) & 0x01) != 0)
    }
}
|
use anyhow::{anyhow, Result};
use chrono::{DateTime, Local};
use itertools::{Itertools, MinMaxResult};
use log::debug;
use sun::Position;
use crate::{
geo::{Coords, Hemisphere},
wallpaper::properties::SolarItem,
};
/// Get the index of the image which should be displayed for given datetime and location.
pub fn current_image_index_solar(
    solar_items: &[SolarItem],
    datetime: &DateTime<Local>,
    coords: &Coords,
) -> Result<usize> {
    let radians = sun::pos(datetime.timestamp_millis(), coords.lat, coords.lon);
    // Downstream comparisons are done in degrees.
    let degrees = Position {
        azimuth: radians.azimuth.to_degrees(),
        altitude: radians.altitude.to_degrees(),
    };
    debug!("sun position: {:?}", degrees);
    current_image_index_from_sun_pos(solar_items, &degrees, &coords.hemisphere())
}
/// Get the index of image which should be displayed for a given sun position.
fn current_image_index_from_sun_pos(
    solar_items: &[SolarItem],
    sun_pos: &Position,
    hemisphere: &Hemisphere,
) -> Result<usize> {
    let item = current_item_solar_from_sun_pos(solar_items, sun_pos, hemisphere)?;
    Ok(item.index)
}
/// Get the solar item which should be displayed for a given sun position.
/// Sun position is expected in degrees!
fn current_item_solar_from_sun_pos<'i>(
    solar_items: &'i [SolarItem],
    sun_pos: &Position,
    hemisphere: &Hemisphere,
) -> Result<&'i SolarItem> {
    // The extreme-altitude items bound the rising and setting halves of the day.
    let (min_alt_item, max_alt_item) = get_minmax_alt_items(solar_items)?;
    let sorted_items = sort_solar_items(solar_items);
    let current_phase_items = match is_rising(sun_pos.azimuth, hemisphere) {
        // If sun is rising, get items from the lowest altitude to the highest altitude
        true => get_items_between(&sorted_items, min_alt_item, max_alt_item),
        // If sun is setting, get items from the highest altitude to the lowest altitude
        false => get_items_between(&sorted_items, max_alt_item, min_alt_item),
    };
    // Pick the item whose configured altitude is closest to the actual sun
    // altitude. `get_items_between` always returns at least one element, so
    // the unwrap cannot fail.
    let current_item = current_phase_items
        .iter()
        .min_by_key(|item| not_nan!((item.altitude - sun_pos.altitude).abs()))
        .unwrap();
    Ok(current_item)
}
/// Get items with lowest and highest altitude.
fn get_minmax_alt_items(solar_items: &[SolarItem]) -> Result<(&SolarItem, &SolarItem)> {
    match solar_items.iter().minmax_by_key(|item| item.altitude) {
        MinMaxResult::NoElements => Err(anyhow!("no solar items to choose from")),
        // A single item serves as both extremes.
        MinMaxResult::OneElement(only) => Ok((only, only)),
        MinMaxResult::MinMax(lowest, highest) => Ok((lowest, highest)),
    }
}
/// Get all items between 'first' and 'last', inclusive.
/// Items are cycled so we can wrap around the end and start from the beginning again.
fn get_items_between<'i>(
    solar_items: &[&'i SolarItem],
    first: &SolarItem,
    last: &SolarItem,
) -> Vec<&'i SolarItem> {
    // Rotate the (conceptually circular) list so iteration begins at `first`.
    // Assumes `first` is present in `solar_items`; otherwise the cycle would
    // never terminate — callers pass items obtained from the same slice.
    let mut starting_from_first = solar_items
        .iter()
        .cycle()
        .skip_while(|item| ***item != *first)
        .peekable();
    // Take everything strictly before `last`…
    let mut items_between = starting_from_first
        .peeking_take_while(|item| ***item != *last)
        .cloned()
        .collect_vec();
    // …then append `last` itself (peeking_take_while left it un-consumed).
    items_between.push(*starting_from_first.next().unwrap());
    items_between
}
/// Check whether given sun azimuth corresponds with rising or setting sun position
/// on given hemisphere.
fn is_rising(azimuth: f64, hemishphere: &Hemisphere) -> bool {
    // On the northern hemisphere the sun rises while its azimuth is at most
    // 180 degrees; on the southern hemisphere the relation is inverted.
    let rising_in_north = azimuth <= 180.0;
    match hemishphere {
        Hemisphere::Northern => rising_in_north,
        Hemisphere::Southern => !rising_in_north,
    }
}
/// Get indices of images in appearance order.
pub fn get_image_index_order_solar(solar_items: &[SolarItem]) -> Vec<usize> {
    let mut indices = Vec::with_capacity(solar_items.len());
    for item in sort_solar_items(solar_items) {
        indices.push(item.index);
    }
    indices
}
/// Sort solar items by their occurrence order in a day.
pub fn sort_solar_items(solar_items: &[SolarItem]) -> Vec<&SolarItem> {
    // We assume Northern Hemisphere and just sort by azimuth value.
    // There is no localization metadata in images so I don't see other option.
    let mut by_azimuth: Vec<&SolarItem> = solar_items.iter().collect();
    by_azimuth.sort_by_key(|item| item.azimuth);
    by_azimuth
}
#[cfg(test)]
mod tests {
    use rstest::*;
    use super::*;
    // Fixture: six items covering a full day, deliberately listed out of order.
    #[fixture]
    #[rustfmt::skip]
    fn solar_items_1() -> Vec<SolarItem> {
        // -50, -10, 10, 80, 30, -60, intentionally unordered
        vec![
            SolarItem { index: 2, azimuth: not_nan!(100.0), altitude: not_nan!(10.0) },
            SolarItem { index: 0, azimuth: not_nan!(30.0), altitude: not_nan!(-50.0) },
            SolarItem { index: 1, azimuth: not_nan!(50.0), altitude: not_nan!(-10.0) },
            SolarItem { index: 3, azimuth: not_nan!(190.0), altitude: not_nan!(80.0) },
            SolarItem { index: 5, azimuth: not_nan!(350.0), altitude: not_nan!(-60.0) },
            SolarItem { index: 4, azimuth: not_nan!(250.0), altitude: not_nan!(30.0) },
        ]
    }
    // Fixture: two items, to exercise wrap-around at the day boundary.
    #[fixture]
    #[rustfmt::skip]
    fn solar_items_2() -> Vec<SolarItem> {
        vec![
            SolarItem { index: 0, azimuth: not_nan!(100.0), altitude: not_nan!(-50.0) },
            SolarItem { index: 1, azimuth: not_nan!(250.0), altitude: not_nan!(-44.0) },
        ]
    }
    // Fixture: degenerate single-item list.
    #[fixture]
    #[rustfmt::skip]
    fn solar_items_3() -> Vec<SolarItem> {
        vec![
            SolarItem { index: 0, azimuth: not_nan!(100.0), altitude: not_nan!(50.0) },
        ]
    }
    // Normal, expected cases.
    #[rstest]
    #[case(Position { azimuth: 100.0, altitude: -70.0 }, 5)] // wrap around to last item
    #[case(Position { azimuth: 100.0, altitude: -58.0 }, 5)] // wrap around to last item
    #[case(Position { azimuth: 100.0, altitude: -54.0 }, 0)]
    #[case(Position { azimuth: 100.0, altitude: -45.0 }, 0)]
    #[case(Position { azimuth: 100.0, altitude: -31.0 }, 0)]
    #[case(Position { azimuth: 100.0, altitude: -29.0 }, 1)]
    #[case(Position { azimuth: 100.0, altitude: -10.0 }, 1)]
    #[case(Position { azimuth: 100.0, altitude: 01.0 }, 2)]
    #[case(Position { azimuth: 100.0, altitude: 70.0 }, 3)]
    #[case(Position { azimuth: 170.0, altitude: 80.0 }, 3)] // peak value before noon
    #[case(Position { azimuth: 200.0, altitude: 80.0 }, 3)] // peak value after noon
    #[case(Position { azimuth: 250.0, altitude: 70.0 }, 3)]
    #[case(Position { azimuth: 250.0, altitude: 40.0 }, 4)]
    #[case(Position { azimuth: 250.0, altitude: 00.0 }, 4)]
    #[case(Position { azimuth: 250.0, altitude: -50.0 }, 5)]
    #[case(Position { azimuth: 250.0, altitude: -70.0 }, 5)]
    fn test_current_image_index_from_sun_pos_1(
        solar_items_1: Vec<SolarItem>,
        #[case] sun_pos: Position,
        #[case] expected_index: usize,
    ) {
        let result =
            current_image_index_from_sun_pos(&solar_items_1, &sun_pos, &Hemisphere::Northern);
        assert_eq!(result.unwrap(), expected_index);
    }
    // Only two items, test wrapping around.
    #[rstest]
    #[case(Position { azimuth: 100.0, altitude: -60.0 }, 0)]
    #[case(Position { azimuth: 100.0, altitude: -40.0 }, 1)]
    #[case(Position { azimuth: 250.0, altitude: -40.0 }, 1)]
    #[case(Position { azimuth: 250.0, altitude: -60.0 }, 0)] // wrap around to first item
    fn test_current_image_index_from_sun_pos_2(
        solar_items_2: Vec<SolarItem>,
        #[case] sun_pos: Position,
        #[case] expected_index: usize,
    ) {
        let result =
            current_image_index_from_sun_pos(&solar_items_2, &sun_pos, &Hemisphere::Northern);
        assert_eq!(result.unwrap(), expected_index);
    }
    // Single item so just should return it for every given position.
    #[rstest]
    #[case(Position { azimuth: 100.0, altitude: -60.0 }, 0)]
    #[case(Position { azimuth: 100.0, altitude: -40.0 }, 0)]
    #[case(Position { azimuth: 250.0, altitude: -40.0 }, 0)]
    #[case(Position { azimuth: 250.0, altitude: -60.0 }, 0)]
    fn test_current_image_index_from_sun_pos_3(
        solar_items_3: Vec<SolarItem>,
        #[case] sun_pos: Position,
        #[case] expected_index: usize,
    ) {
        let result =
            current_image_index_from_sun_pos(&solar_items_3, &sun_pos, &Hemisphere::Northern);
        assert_eq!(result.unwrap(), expected_index);
    }
    #[rstest]
    fn test_get_image_index_order_solar(solar_items_1: Vec<SolarItem>) {
        let result = get_image_index_order_solar(&solar_items_1);
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5]);
    }
}
|
use crate::io::{Load, Save};
use crate::layers::loss_layer::*;
use crate::layers::time_layers::*;
use crate::layers::Dropout;
use crate::math::Norm;
use crate::optimizer::*;
use crate::params::*;
use crate::types::*;
use crate::util::{randarr2d, remove_axis};
use ndarray::{Array, Array2, Axis, Ix2, Ix3, RemoveAxis};
use std::rc::Rc;
/// A recurrent language model trained on (input, target) word-id batches.
pub trait Rnnlm {
    /// Runs a forward pass over the batch and returns the loss.
    fn forward(&mut self, x: Array2<usize>, t: Array2<usize>) -> f32;
    /// Forward pass used during evaluation; defaults to the training forward,
    /// but implementations may override it (e.g. to bypass dropout).
    fn eval_forward(&mut self, x: Array2<usize>, t: Array2<usize>) -> f32 {
        self.forward(x, t)
    }
    /// Backpropagates through the most recent forward pass.
    fn backward(&mut self);
    /// Clears hidden state carried between batches; panics unless overridden.
    fn reset_state(&mut self) {
        unimplemented!();
    }
}
pub trait RnnlmParams {
fn update_lr(&self, lr: f32) {
for param in self.params() {
param.update_lr(lr);
}
}
fn update_clip_lr(&self, clip: f32, lr: f32) {
for param in self.params() {
param.update_clip_lr(clip, lr);
}
}
fn update_clipgrads(&self, clip: f32, lr: f32) {
let norm = self
.params()
.iter()
.map(|x| x.grads_norm_squared())
.sum::<f32>()
.sqrt();
let rate = (clip / (norm + 1e-6)).min(1.0) * lr;
self.update_lr(rate);
}
fn reset_grads(&self) {
for param in self.params() {
param.reset_grads();
}
}
fn params(&self) -> Vec<&Update>;
}
impl RnnlmParams for SimpleRnnlmParams {
    /// All six trainable parameters, in a fixed order.
    fn params(&self) -> Vec<&Update> {
        vec![
            &self.embed_w,
            &self.rnn_wx,
            &self.rnn_wh,
            &self.rnn_b,
            &self.affine_w,
            &self.affine_b,
        ]
    }
}
/// Trainable parameters shared by the RNN/LSTM language models: an embedding
/// matrix, the recurrent cell's weights and bias, and the output projection.
pub struct SimpleRnnlmParams {
    /// Word embedding matrix, (vocab_size, wordvec_size).
    pub embed_w: P1<Arr2d>,
    /// Input-to-hidden weights of the recurrent cell.
    pub rnn_wx: P1<Arr2d>,
    /// Hidden-to-hidden (recurrent) weights.
    pub rnn_wh: P1<Arr2d>,
    /// Recurrent cell bias.
    pub rnn_b: P1<Arr1d>,
    /// Output projection weights.
    pub affine_w: P1<Arr2d>,
    /// Output projection bias.
    pub affine_b: P1<Arr1d>,
}
impl SimpleRnnlmParams {
pub fn new(vocab_size: usize, wordvec_size: usize, hidden_size: usize) -> Self {
let embed_w = P1::new(randarr2d(vocab_size, wordvec_size) / 100.0);
let mat_init = |m, n| randarr2d(m, n) / (m as f32).sqrt();
let rnn_wx = P1::new(mat_init(wordvec_size, hidden_size));
let rnn_wh = P1::new(mat_init(hidden_size, hidden_size));
let rnn_b = P1::new(Arr1d::zeros((hidden_size,)));
let affine_w = P1::new(mat_init(hidden_size, vocab_size));
let affine_b = P1::new(Arr1d::zeros((vocab_size,)));
Self {
embed_w,
rnn_wx,
rnn_wh,
rnn_b,
affine_w,
affine_b,
}
}
pub fn new_for_LSTM(vocab_size: usize, wordvec_size: usize, hidden_size: usize) -> Self {
let embed_w = P1::new(randarr2d(vocab_size, wordvec_size) / 100.0);
let mat_init = |m, n| randarr2d(m, n) / (m as f32).sqrt();
let rnn_wx = P1::new(mat_init(wordvec_size, 4 * hidden_size));
let rnn_wh = P1::new(mat_init(hidden_size, 4 * hidden_size));
let rnn_b = P1::new(Arr1d::zeros((4 * hidden_size,)));
let affine_w = P1::new(mat_init(hidden_size, vocab_size));
let affine_b = P1::new(Arr1d::zeros((vocab_size,)));
Self {
embed_w,
rnn_wx,
rnn_wh,
rnn_b,
affine_w,
affine_b,
}
}
pub fn new_for_Decoder(vocab_size: usize, wordvec_size: usize, hidden_size: usize) -> Self {
Self::new_for_LSTM(vocab_size, wordvec_size, hidden_size)
}
pub fn new_for_PeekyDecoder(
vocab_size: usize,
wordvec_size: usize,
hidden_size: usize,
) -> Self {
let embed_w = P1::new(randarr2d(vocab_size, wordvec_size) / 100.0);
// ランダムベクトル初期化用のクロージャ
let mat_init = |m, n| randarr2d(m, n) / (m as f32).sqrt();
// rnnへの入力はembedからの(b, wordvec)とEncoderからの(b, hidden)
let rnn_wx = P1::new(mat_init(wordvec_size + hidden_size, 4 * hidden_size));
let rnn_wh = P1::new(mat_init(hidden_size, 4 * hidden_size));
let rnn_b = P1::new(Arr1d::zeros((4 * hidden_size,)));
// rnnからの(b, hidden)とEncoderからの(b, hidden)
let affine_w = P1::new(mat_init(hidden_size * 2, vocab_size));
let affine_b = P1::new(Arr1d::zeros((vocab_size,)));
Self {
embed_w,
rnn_wx,
rnn_wh,
rnn_b,
affine_w,
affine_b,
}
}
pub fn new_for_AttentionDecoder(
vocab_size: usize,
wordvec_size: usize,
hidden_size: usize,
) -> Self {
let embed_w = P1::new(randarr2d(vocab_size, wordvec_size) / 100.0);
// ランダムベクトル初期化用のクロージャ
let mat_init = |m, n| randarr2d(m, n) / (m as f32).sqrt();
// rnnへの入力はembedの(b, wordvec)
let rnn_wx = P1::new(mat_init(wordvec_size, 4 * hidden_size));
let rnn_wh = P1::new(mat_init(hidden_size, 4 * hidden_size));
let rnn_b = P1::new(Arr1d::zeros((4 * hidden_size,)));
// attentionからの(b, hidden)と、attention前のrnnの(b, hidden)
let affine_w = P1::new(mat_init(hidden_size * 2, vocab_size));
let affine_b = P1::new(Arr1d::zeros((vocab_size,)));
Self {
embed_w,
rnn_wx,
rnn_wh,
rnn_b,
affine_w,
affine_b,
}
}
}
/// Plain-RNN language model: embedding -> RNN -> affine -> softmax loss.
/// Borrows its weights from a `SimpleRnnlmParams`.
pub struct SimpleRnnlm<'a> {
    vocab_size: usize,
    wordvec_size: usize,
    hidden_size: usize,
    embed: TimeEmbedding<'a>,
    rnn: TimeRNN<'a>,
    affine: TimeAffine<'a>,
    loss_layer: SoftMaxWithLoss,
}
impl<'a> Rnnlm for SimpleRnnlm<'a> {
    /// Full forward pass over a (batch, time) batch of word ids; returns
    /// the softmax cross-entropy loss against targets `t`.
    fn forward(&mut self, x: Array2<usize>, t: Array2<usize>) -> f32 {
        let x = self.embed.forward(x);
        // Collapse (batch, time, hidden) to (batch*time, hidden) for the affine layer.
        let x = remove_axis(self.rnn.forward(x));
        let x = self.affine.forward(x);
        let batch_time_size = t.len();
        let t = t.into_shape((batch_time_size,)).unwrap();
        self.loss_layer.forward2(x, t)
    }
    /// Back-propagates through the layers in reverse order of `forward`.
    fn backward(&mut self) {
        let dout = self.loss_layer.backward();
        let dout = self.affine.backward(dout);
        let dout = self.rnn.backward(dout);
        self.embed.backward(dout);
    }
}
impl<'a> SimpleRnnlm<'a> {
pub fn new(
vocab_size: usize,
wordvec_size: usize,
hidden_size: usize,
time_size: usize,
params: &'a SimpleRnnlmParams,
) -> Self {
let embed = TimeEmbedding::new(¶ms.embed_w);
let rnn = TimeRNN::new(¶ms.rnn_wx, ¶ms.rnn_wh, ¶ms.rnn_b, time_size);
let affine = TimeAffine::new(¶ms.affine_w, ¶ms.affine_b);
Self {
vocab_size,
wordvec_size,
hidden_size,
embed,
rnn,
affine,
loss_layer: Default::default(),
}
}
}
/// Single-layer LSTM language model; same pipeline as `SimpleRnnlm` with
/// the recurrent layer swapped for an LSTM.
pub struct SimpleRnnlmLSTM<'a> {
    vocab_size: usize,
    wordvec_size: usize,
    hidden_size: usize,
    embed: TimeEmbedding<'a>,
    rnn: TimeLSTM<'a>,
    affine: TimeAffine<'a>,
    loss_layer: SoftMaxWithLoss,
}
impl<'a> Rnnlm for SimpleRnnlmLSTM<'a> {
    /// Forward pass; returns the softmax cross-entropy loss for targets `t`.
    fn forward(&mut self, x: Array2<usize>, t: Array2<usize>) -> f32 {
        let x = self.embed.forward(x);
        // Collapse (batch, time, hidden) to (batch*time, hidden).
        let x = remove_axis(self.rnn.forward(x));
        let x = self.affine.forward(x);
        let batch_time_size = t.len();
        let t = t.into_shape((batch_time_size,)).unwrap();
        self.loss_layer.forward2(x, t)
    }
    fn backward(&mut self) {
        let dout = self.loss_layer.backward();
        let dout = self.affine.backward(dout);
        // Restore the time axis before back-propagating through the LSTM.
        let dout = self.rnn.conv_2d_3d(dout);
        let dout = self.rnn.backward(dout);
        self.embed.backward(dout);
    }
    /// Clears the LSTM hidden/cell state between sequences.
    fn reset_state(&mut self) {
        self.rnn.reset_state();
    }
}
impl<'a> SimpleRnnlmLSTM<'a> {
pub fn new(
vocab_size: usize,
wordvec_size: usize,
hidden_size: usize,
time_size: usize,
params: &'a SimpleRnnlmParams,
) -> Self {
let embed = TimeEmbedding::new(¶ms.embed_w);
let rnn = TimeLSTM::new(
¶ms.rnn_wx,
¶ms.rnn_wh,
¶ms.rnn_b,
time_size,
true,
);
let affine = TimeAffine::new(¶ms.affine_w, ¶ms.affine_b);
Self {
vocab_size,
wordvec_size,
hidden_size,
embed,
rnn,
affine,
loss_layer: Default::default(),
}
}
}
/// Two-layer LSTM language model with dropout between layers and tied
/// embedding/output weights (see `RnnlmLSTMParams`).
pub struct RnnlmLSTM<'a> {
    vocab_size: usize,
    time_size: usize,
    embed: TimeEmbedding<'a>,
    dropouts: [Dropout<Ix3>; 3], // before lstm1, before lstm2, before affine
    rnn: [TimeLSTM<'a>; 2],
    affine: TimeAffine<'a>,
    loss_layer: SoftMaxWithLoss,
}
// NOTE: the original `impl<'a>` declared a lifetime parameter that was
// never used anywhere in the impl; it has been removed.
impl RnnlmParams for RnnlmLSTMParams {
    /// All trainable parameters. The embedding matrix is the transposed
    /// view of `affine_w` (tied weights), so it is listed via `.t()`.
    fn params(&self) -> Vec<&Update> {
        vec![
            self.affine_w.t(),
            &self.lstm_wx1,
            &self.lstm_wh1,
            &self.lstm_b1,
            &self.lstm_wx2,
            &self.lstm_wh2,
            &self.lstm_b2,
            &self.affine_b,
        ]
    }
}
/// Parameters for the two-layer LSTM LM. There is no separate embedding
/// matrix: it is obtained as the transpose of `affine_w` (weight tying).
pub struct RnnlmLSTMParams {
    lstm_wx1: P1<Arr2d>, // layer-1 input weights
    lstm_wh1: P1<Arr2d>, // layer-1 recurrent weights
    lstm_b1: P1<Arr1d>,  // layer-1 bias
    lstm_wx2: P1<Arr2d>, // layer-2 input weights
    lstm_wh2: P1<Arr2d>, // layer-2 recurrent weights
    lstm_b2: P1<Arr1d>,  // layer-2 bias
    affine_w: P2<Arr2d>, // output projection; its transpose is the embedding
    affine_b: P1<Arr1d>,
}
// NOTE: the unused `'a` lifetime parameter on this impl has been removed.
impl RnnlmLSTMParams {
    /// Creates parameters for the two-layer LSTM LM.
    ///
    /// Unlike `SimpleRnnlmParams` there is no separate `wordvec_size`: it
    /// is forced equal to `hidden_size` so the embedding matrix and the
    /// (transposed) output projection can share storage.
    pub fn new(vocab_size: usize, hidden_size: usize) -> Self {
        let h = hidden_size;
        let embed_w = P1::new(randarr2d(vocab_size, h) / 100.0);
        // (h, 4h) matrices scaled by 1/sqrt(h); the 4 slices are the LSTM gates.
        let mat_h4h = || randarr2d(h, 4 * h) / (h as f32).sqrt();
        let lstm_wx1 = P1::new(mat_h4h());
        let lstm_wh1 = P1::new(mat_h4h());
        let lstm_b1 = P1::new(Arr1d::zeros((4 * h,)));
        let lstm_wx2 = P1::new(mat_h4h());
        let lstm_wh2 = P1::new(mat_h4h());
        let lstm_b2 = P1::new(Arr1d::zeros((4 * h,)));
        // Weight tying: the output projection is the transposed embedding.
        let affine_w = embed_w.t();
        let affine_b = P1::new(Arr1d::zeros((vocab_size,)));
        Self {
            lstm_wx1,
            lstm_wh1,
            lstm_b1,
            lstm_wx2,
            lstm_wh2,
            lstm_b2,
            affine_w, // (hidden_size, vocab_size)
            affine_b,
        }
    }
    /// Prints the dimensions of the main matrices (debugging aid).
    pub fn summary(&self) {
        putsd!(self.affine_w.p().dim());
        putsd!(self.affine_w.t().p().dim());
        putsd!(self.lstm_b1.p().dim());
        putsd!(self.lstm_wx1.p().dim());
        putsd!(self.lstm_b2.p().dim());
        putsd!(self.lstm_wx2.p().dim());
    }
}
impl<'a> RnnlmLSTM<'a> {
    /// Builds the two-layer LSTM LM on top of `params`. The embedding
    /// layer reuses the transposed affine weights (weight tying), and
    /// both LSTMs are stateful (`true`) so state carries across batches.
    pub fn new(time_size: usize, dropout_ratio: f32, params: &'a RnnlmLSTMParams) -> Self {
        let embed = TimeEmbedding::new(params.affine_w.t());
        // affine_w is (hidden, vocab), so dim().1 is the vocabulary size.
        let vocab_size = params.affine_w.p().dim().1;
        let lstm1 = TimeLSTM::new(
            &params.lstm_wx1,
            &params.lstm_wh1,
            &params.lstm_b1,
            time_size,
            true,
        );
        let lstm2 = TimeLSTM::new(
            &params.lstm_wx2,
            &params.lstm_wh2,
            &params.lstm_b2,
            time_size,
            true,
        );
        let affine = TimeAffine::new(&params.affine_w, &params.affine_b);
        Self {
            vocab_size,
            time_size,
            embed,
            dropouts: [
                Dropout::new(dropout_ratio),
                Dropout::new(dropout_ratio),
                Dropout::new(dropout_ratio),
            ],
            rnn: [lstm1, lstm2],
            affine,
            loss_layer: Default::default(),
        }
    }
    /// Inference-only forward pass (no dropout): returns per-position
    /// word probabilities, shape (batch*time, vocab).
    pub fn predict(&mut self, x: Array2<usize>) -> Arr2d {
        let mut x = self.embed.forward(x);
        for i in 0..2 {
            // no dropout at inference time
            x = self.rnn[i].forward(x);
        }
        let x = remove_axis(x); // (batch*time, hidden)
        let x = self.affine.forward(x); // (batch*time, vocab): a score per word
        self.loss_layer.predict(x) // softmax -> probability per word
    }
}
impl<'a> Rnnlm for RnnlmLSTM<'a> {
    /// Training forward pass: dropout before each LSTM and before the
    /// affine layer; returns the softmax cross-entropy loss.
    fn forward(&mut self, x: Array2<usize>, t: Array2<usize>) -> f32 {
        let mut x = self.embed.forward(x);
        for i in 0..2 {
            x = self.dropouts[i].train_forward(x);
            x = self.rnn[i].forward(x);
        }
        let x = remove_axis(self.dropouts[2].train_forward(x));
        let x = self.affine.forward(x);
        let batch_time_size = t.len();
        let t = t.into_shape((batch_time_size,)).unwrap();
        self.loss_layer.forward2(x, t)
    }
    /// Back-propagates through affine, then both dropout/LSTM pairs in
    /// reverse order, and finally the embedding.
    fn backward(&mut self) {
        let dout2d = self.loss_layer.backward();
        let dout2d = self.affine.backward(dout2d);
        // Restore the (batch, time, hidden) shape before the LSTM stack.
        let mut dout3d = self.rnn[1].conv_2d_3d(dout2d);
        dout3d = self.dropouts[2].backward(dout3d);
        for i in (0..2).rev() {
            dout3d = self.rnn[i].backward(dout3d);
            dout3d = self.dropouts[i].backward(dout3d);
        }
        self.embed.backward(dout3d);
    }
    /// Evaluation forward pass: identical to `forward` but without dropout.
    fn eval_forward(&mut self, x: Array2<usize>, t: Array2<usize>) -> f32 {
        let mut x = self.embed.forward(x);
        for i in 0..2 {
            x = self.rnn[i].forward(x);
        }
        let x = remove_axis(x);
        let x = self.affine.forward(x);
        let batch_time_size = t.len();
        let t = t.into_shape((batch_time_size,)).unwrap();
        self.loss_layer.forward2(x, t)
    }
    /// Clears the hidden/cell state of both LSTM layers.
    fn reset_state(&mut self) {
        for _r in self.rnn.iter_mut() {
            _r.reset_state();
        }
    }
}
/// Serialization support: file-name stems and handles for persisting
/// model parameters.
pub trait SavableParams {
    /// File-name stems for the saved parameters, as
    /// (1-d bias names, 2-d weight names) — the same split and order that
    /// `load_new` consumes. The previous default listed "lstm_wx2" twice,
    /// omitted "lstm_wh1", and used the opposite tuple order from both
    /// implementors; both implementors override this anyway.
    fn param_names() -> (Vec<&'static str>, Vec<&'static str>) {
        (
            vec!["lstm_b1", "lstm_b2", "affine_b"],
            vec!["embed_w", "lstm_wx1", "lstm_wh1", "lstm_wx2", "lstm_wh2"],
        )
    }
    /// Pairs each saveable parameter with its file-name stem.
    fn params_to_save(&self) -> Vec<(&Save, &str)>;
    /// Rebuilds the struct from loaded 1-d and 2-d parameters, in the
    /// order given by `param_names`.
    fn load_new(params1: Vec<P1<Arr1d>>, params2: Vec<P1<Arr2d>>) -> Self;
}
impl SavableParams for RnnlmLSTMParams {
    /// (bias names, weight names) in the exact order `load_new` expects.
    /// The first entry used to be "rnn_b", which does not match the
    /// "lstm1_b" tag written by `params_to_save`, so a save/load
    /// round-trip would look for a file that was never written.
    fn param_names() -> (Vec<&'static str>, Vec<&'static str>) {
        (
            vec!["lstm1_b", "lstm2_b", "affine_b"],
            vec!["embed_w", "lstm1_wx", "lstm1_wh", "lstm2_wx", "lstm2_wh"],
        )
    }
    fn params_to_save(&self) -> Vec<(&Save, &str)> {
        vec![
            (self.affine_w.t(), "embed_w"), // tied weight saved via its transpose
            (&self.lstm_wx1, "lstm1_wx"),
            (&self.lstm_wh1, "lstm1_wh"),
            (&self.lstm_b1, "lstm1_b"),
            (&self.lstm_wx2, "lstm2_wx"),
            (&self.lstm_wh2, "lstm2_wh"),
            (&self.lstm_b2, "lstm2_b"),
            (&self.affine_b, "affine_b"),
        ]
    }
    /// Reassembles the struct from loaded parameters; `affine_w` is the
    /// transpose of the loaded embedding (weight tying).
    fn load_new(params1: Vec<P1<Arr1d>>, params2: Vec<P1<Arr2d>>) -> Self {
        let mut params1 = params1.into_iter();
        let mut params2 = params2.into_iter();
        let embed_w = params2.next().unwrap();
        Self {
            lstm_wx1: params2.next().unwrap(),
            lstm_wh1: params2.next().unwrap(),
            lstm_b1: params1.next().unwrap(),
            lstm_wx2: params2.next().unwrap(),
            lstm_wh2: params2.next().unwrap(),
            lstm_b2: params1.next().unwrap(),
            affine_w: embed_w.t(),
            affine_b: params1.next().unwrap(),
        }
    }
}
impl SavableParams for SimpleRnnlmParams {
    /// (bias names, weight names) in the order `load_new` consumes them.
    fn param_names() -> (Vec<&'static str>, Vec<&'static str>) {
        (
            vec!["rnn_b", "affine_b"],
            vec!["embed_w", "rnn_wx", "rnn_wh", "affine_w"],
        )
    }
    /// Pairs every parameter with its file-name stem.
    fn params_to_save(&self) -> Vec<(&Save, &str)> {
        vec![
            (&self.embed_w, "embed_w"),
            (&self.rnn_wx, "rnn_wx"),
            (&self.rnn_wh, "rnn_wh"),
            (&self.rnn_b, "rnn_b"),
            (&self.affine_w, "affine_w"),
            (&self.affine_b, "affine_b"),
        ]
    }
    /// Rebuilds the struct from loaded 1-d (`params1`) and 2-d
    /// (`params2`) parameters, in `param_names` order.
    fn load_new(params1: Vec<P1<Arr1d>>, params2: Vec<P1<Arr2d>>) -> Self {
        let mut params1 = params1.into_iter();
        let mut params2 = params2.into_iter();
        Self {
            embed_w: params2.next().unwrap(),
            rnn_wx: params2.next().unwrap(),
            rnn_wh: params2.next().unwrap(),
            rnn_b: params1.next().unwrap(),
            affine_w: params2.next().unwrap(),
            affine_b: params1.next().unwrap(),
        }
    }
}
/// Text generation on top of a trained language model.
pub trait RnnlmGen {
    /// Samples `sample_size` word ids, starting from `start_id` and never
    /// emitting any id listed in `skip_ids`.
    /// (Parameter renamed from `start_ids`: it is a single id, and the
    /// implementation already called it `start_id`.)
    fn generate(
        &mut self,
        start_id: usize,
        skip_ids: Vec<usize>,
        sample_size: usize,
    ) -> Vec<usize>;
}
use std::collections::HashMap;
impl RnnlmGen for RnnlmLSTM<'_> {
    /// Samples a sequence one word at a time: feed the previously sampled
    /// word, zero out forbidden ids, then draw the next word from the
    /// remaining probability mass.
    ///
    /// # Panics
    /// Panics if `time_size != 1`, and if `skip_ids` zeroes out the whole
    /// distribution (`WeightedIndex::new` rejects all-zero weights).
    fn generate(
        &mut self,
        start_id: usize,
        skip_ids: Vec<usize>,
        sample_size: usize,
    ) -> Vec<usize> {
        assert_eq!(self.time_size, 1, "for rnnlmgen, self.time_size must be 1!");
        let mut word_ids = vec![start_id]; // generated ids are appended here
        let mut rng = thread_rng();
        for _ in 0..sample_size {
            // Predict the next-word distribution from the previous sample.
            let last = *word_ids.last().unwrap();
            let mut prob = self
                .predict(Array2::from_elem((1, 1), last))
                .into_shape((self.vocab_size,))
                .unwrap();
            for i in &skip_ids {
                // Forbidden words get probability zero.
                prob[[*i]] = 0.0;
            }
            let dist = WeightedIndex::new(&prob).unwrap();
            word_ids.push(dist.sample(&mut rng));
        }
        word_ids
    }
}
use rand::distributions::{Distribution, WeightedIndex};
use rand::prelude::thread_rng;
#[test]
fn rand_test() {
    let choices = ['a', 'b', 'c'];
    let weights = [2, 1, 0];
    let dist = WeightedIndex::new(&weights).unwrap();
    let mut rng = thread_rng();
    for _ in 0..100 {
        // Weights [2, 1, 0] give 'a' with probability 2/3, 'b' with 1/3,
        // and never 'c'. (The original comment claiming 50%/25%/25% was wrong.)
        println!("{}", choices[dist.sample(&mut rng)]);
    }
}
|
// **************************************
// 多线程
// **************************************
use std::process;
use std::thread;
use std::time::Duration;
/// Demonstrates the two thread-spawning patterns defined below.
fn main() {
    simple_spawn();
    join_spawn();
}
// thread::spawn创建一个子线程闭包.
fn simple_spawn() {
println!("pid from main thread {}", process::id());
thread::spawn(|| {
println!("pid from child thread{}", process::id());
for i in 1..10 {
println!("hi number {} from the spawned thread!", i);
thread::sleep(Duration::from_millis(1));
}
});
for i in 1..5 {
println!("hi number {} from the main thread!", i);
thread::sleep(Duration::from_millis(1));
}
// 主线程和子线程是并发的, 可能在主线程退出的时候子线程还没有结束;
// 这里简单用睡眠延时, 正确方式是用子线程join()确保运行完成.
thread::sleep(Duration::from_millis(50));
}
// Spawn a worker thread and block on join() so it is guaranteed to have
// finished before the caller proceeds.
fn join_spawn() {
    let worker = thread::spawn(|| {
        thread::sleep(Duration::from_millis(50));
        println!("hello spawn thread");
    });
    // join() blocks until the closure returns.
    worker.join().unwrap();
    println!("exit main thread");
}
|
mod compress;
mod decompress;
mod file_io;
use std::time::Instant;
/// Compresses a sample text file to `testFile.hps`, reads it back,
/// decompresses it, and reports the decompression time.
fn main() {
    const FILE_NAME: &str = "testFile.hps";
    const BLOCK_SIZE: usize = 5;
    {
        // Scope so the input and compressed buffers are dropped before decompression.
        let mut to_compress: Vec<u8> = file_io::read_from_file("EnglishShortened.txt");
        // let mut to_compress: Vec<u8> = String::from("Hello world!Hello world!Hello world!Hello world!Hello world!Hello worl d!").into_bytes();
        let compressed: Vec<u32> = compress::compress(BLOCK_SIZE, &mut to_compress);
        file_io::write_to_file_serialized(&compressed, FILE_NAME);
    }
    let mut from_file = file_io::read_from_file_deserialized(FILE_NAME);
    let start = Instant::now();
    let decompressed = decompress::decompress(&mut from_file, BLOCK_SIZE);
    // Millisecond resolution, reported as fractional seconds.
    println!("Seconds to decompress: {}\n", start.elapsed().as_millis() as f64/1000 as f64);
    // file_io::write_vec8_to_file(&decompressed, "testFile.mp4");
    // println!("{}", String::from_utf8_lossy(&decompressed));
}
|
//! This module contains resources specific to a game.
//! They should be set up/added to the world when creating a new game, or loading a savegame,
//! and be removed when the player exits to the MainMenu or ends the application.
mod game_session;
mod savegame_path;
pub mod game_world;
//pub mod planet;
pub use self::{game_session::GameSessionData, savegame_path::SavegamePaths};
|
/// Transport-layer protocol parsed from its textual name.
///
/// Derives `PartialEq`/`Eq`/`Clone`/`Copy` so callers can compare with
/// `==` instead of pattern-matching (the original only derived `Debug`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Transport {
    Udp,
    Tcp,
    /// Anything other than the exact strings "UDP"/"TCP".
    Invalid,
}
impl From<&str> for Transport {
    /// Case-sensitive mapping; unrecognized input yields `Transport::Invalid`.
    fn from(transport: &str) -> Self {
        match transport {
            "UDP" => Self::Udp,
            "TCP" => Self::Tcp,
            _ => Self::Invalid,
        }
    }
}
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! Stream-based Fuchsia VFS directory watcher
#![deny(warnings)]
#![deny(missing_docs)]
#[macro_use]
extern crate fdio;
extern crate fuchsia_async as async;
#[macro_use]
extern crate fuchsia_zircon as zx;
#[macro_use]
extern crate futures;
use fdio::fdio_sys;
use futures::{Async, Stream, task};
use std::ffi::OsStr;
use std::fs::File;
use std::io;
use std::os::raw;
use std::os::unix::ffi::OsStrExt;
use std::os::unix::io::AsRawFd;
use std::path::PathBuf;
use zx::HandleBased;
/// Describes the type of event that occurred in the directory being watched.
#[repr(C)]
#[derive(Copy, Clone, Eq, PartialEq)]
pub struct WatchEvent(u8);
// Associates the raw VFS watch-event codes with named constants on WatchEvent.
assoc_values!(WatchEvent, [
    /// A file was added.
    ADD_FILE = VFS_WATCH_EVT_ADDED;
    /// A file was removed.
    REMOVE_FILE = VFS_WATCH_EVT_REMOVED;
    /// A file existed at the time the Watcher was created.
    EXISTING = VFS_WATCH_EVT_EXISTING;
    /// All existing files have been enumerated.
    IDLE = VFS_WATCH_EVT_IDLE;
]);
/// A message containing a `WatchEvent` and the filename (relative to the directory being watched)
/// that triggered the event. Produced by polling a `Watcher` stream.
#[derive(Debug)]
pub struct WatchMessage {
    /// The event that occurred.
    pub event: WatchEvent,
    /// The filename that triggered the message.
    pub filename: PathBuf,
}
/// Provides a Stream of WatchMessages corresponding to filesystem events for a given directory.
#[derive(Debug)]
#[must_use = "futures/streams must be polled"]
pub struct Watcher {
    // Async channel carrying batches of watch messages from the VFS.
    ch: async::Channel,
    // If idx >= buf.bytes().len(), you must call reset_buf() before get_next_msg().
    buf: zx::MessageBuf,
    // Byte offset of the next unparsed message within `buf`.
    idx: usize,
}
impl Watcher {
    /// Creates a new `Watcher` for the directory given by `dir`.
    ///
    /// Registers a watch channel with the VFS via ioctl: one channel end
    /// is handed to the filesystem, the other is read asynchronously here.
    pub fn new(dir: &File) -> Result<Watcher, zx::Status> {
        let (h0, h1) = zx::Channel::create()?;
        let vwd = vfs_watch_dir_t {
            h: h1.into_raw(),
            mask: VFS_WATCH_MASK_ALL,
            options: 0,
        };
        zx::Status::ok(
            // This is safe because no memory ownership is passed via fdio::ioctl.
            unsafe { fdio::ioctl_raw(dir.as_raw_fd(),
                              IOCTL_VFS_WATCH_DIR,
                              &vwd as *const _ as *const raw::c_void,
                              ::std::mem::size_of::<vfs_watch_dir_t>(),
                              std::ptr::null_mut(),
                              0) as i32 }
        )?;
        let mut buf = zx::MessageBuf::new();
        buf.ensure_capacity_bytes(VFS_WATCH_MSG_MAX);
        Ok(Watcher{ ch: async::Channel::from_channel(h0)?, buf: buf, idx: 0})
    }
    /// Clears the message buffer so the next poll reads a fresh batch.
    fn reset_buf(&mut self) {
        self.idx = 0;
        self.buf.clear();
    }
    /// Parses the next message out of the current buffer and advances `idx`.
    /// Precondition: `idx` is inside the buffer (see the struct comment).
    fn get_next_msg(&mut self) -> WatchMessage {
        assert!(self.idx < self.buf.bytes().len());
        let next_msg = VfsWatchMsg::from_raw(&self.buf.bytes()[self.idx..])
            .expect("Invalid buffer received by Watcher!");
        self.idx += next_msg.len();
        let mut pathbuf = PathBuf::new();
        pathbuf.push(OsStr::from_bytes(next_msg.name()));
        let event = next_msg.event();
        WatchMessage { event: event, filename: pathbuf }
    }
}
impl Stream for Watcher {
    type Item = WatchMessage;
    type Error = io::Error;
    /// Yields one `WatchMessage` per poll. A single channel read may carry
    /// several messages; they are consumed from `buf` until exhausted,
    /// then the next channel read is awaited.
    fn poll_next(&mut self, cx: &mut task::Context) -> futures::Poll<Option<Self::Item>, Self::Error> {
        if self.idx >= self.buf.bytes().len() {
            self.reset_buf();
        }
        if self.idx == 0 {
            // Buffer drained: wait for the next batch from the channel.
            try_ready!(self.ch.recv_from(&mut self.buf, cx));
        }
        Ok(Async::Ready(Some(self.get_next_msg())))
    }
}
// ioctl payload registering a watch: channel handle, event mask, options.
#[repr(C)]
#[derive(Debug)]
struct vfs_watch_dir_t {
    h: zx::sys::zx_handle_t,
    mask: u32,
    options: u32,
}
// Wire format of one watch message: event code, name length, then
// `len` bytes of filename (no NUL terminator).
#[repr(C)]
#[derive(Debug)]
struct vfs_watch_msg_t {
    event: u8,
    len: u8,
    name: fdio_sys::__IncompleteArrayField<u8>,
}
// A validated, borrowed view of a `vfs_watch_msg_t` inside a byte buffer.
#[derive(Debug)]
struct VfsWatchMsg<'a> {
    inner: &'a vfs_watch_msg_t,
}
impl<'a> VfsWatchMsg<'a> {
    /// Validates that `buf` holds a complete message (header plus `len`
    /// name bytes) and returns a typed view into it, or `None`.
    fn from_raw(buf: &'a [u8]) -> Option<VfsWatchMsg<'a>> {
        if buf.len() < ::std::mem::size_of::<vfs_watch_msg_t>() {
            return None;
        }
        // This is safe as long as the buffer is at least as large as a vfs_watch_msg_t, which we
        // just verified. Further, we verify that the buffer has enough bytes to hold the
        // "incomplete array field" member.
        let m = unsafe { VfsWatchMsg{ inner: &*(buf.as_ptr() as *const vfs_watch_msg_t) } };
        if buf.len() < ::std::mem::size_of::<vfs_watch_msg_t>() + m.namelen() {
            return None;
        }
        Some(m)
    }
    /// Total size of this message in the buffer (header + name bytes).
    fn len(&self) -> usize {
        ::std::mem::size_of::<vfs_watch_msg_t>() + self.namelen()
    }
    /// The event code wrapped in the public `WatchEvent` type.
    fn event(&self) -> WatchEvent {
        WatchEvent(self.inner.event)
    }
    /// Length in bytes of the filename payload.
    fn namelen(&self) -> usize {
        self.inner.len as usize
    }
    /// The filename bytes that follow the header.
    fn name(&self) -> &'a [u8] {
        // This is safe because we verified during construction that the inner name field has at
        // least namelen() bytes in it.
        unsafe { self.inner.name.as_slice(self.namelen()) }
    }
}
// Raw VFS watch protocol constants; values mirror the C-side definitions.
const VFS_WATCH_EVT_ADDED: u8 = 1;
const VFS_WATCH_EVT_REMOVED: u8 = 2;
const VFS_WATCH_EVT_EXISTING: u8 = 3;
const VFS_WATCH_EVT_IDLE: u8 = 4;
// Subscribe to every event kind.
const VFS_WATCH_MASK_ALL: u32 = 0x1fu32;
// Maximum size in bytes of one batch of watch messages.
const VFS_WATCH_MSG_MAX: usize = 8192;
const IOCTL_VFS_WATCH_DIR: raw::c_int = make_ioctl!(
    fdio_sys::IOCTL_KIND_SET_HANDLE,
    fdio_sys::IOCTL_FAMILY_VFS,
    8
);
#[cfg(test)]
mod tests {
    extern crate tempdir;
    use super::*;
    use async::TimeoutExt;
    use futures::prelude::*;
    use self::tempdir::TempDir;
    use std::fmt::Debug;
    use std::path::Path;
    use zx::prelude::*;
    /// Drives the executor until the stream yields one item, panicking
    /// after a 500ms timeout; returns the item and the rest of the stream.
    fn one_step<S: Stream>(exec: &mut async::Executor, s: S) -> (S::Item, S)
    where S::Error: Debug
    {
        let f = s.next();
        let f = f.on_timeout(
            500.millis().after_now(),
            || panic!("timeout waiting for watcher")
        ).unwrap();
        let (next, stream) =
            exec.run_singlethreaded(f)
                .unwrap_or_else(|(e, _s)| panic!("Error waiting for watcher: {:?}", e));
        (next.expect("the stream yielded no next item"), stream)
    }
    // Pre-existing entries are reported as EXISTING, then IDLE.
    #[test]
    fn test_existing() {
        let tmp_dir = TempDir::new("vfs-watcher-test-existing").unwrap();
        let _ = File::create(tmp_dir.path().join("file1")).unwrap();
        let exec = &mut async::Executor::new().unwrap();
        let dir = File::open(tmp_dir.path()).unwrap();
        let w = Watcher::new(&dir).unwrap();
        // TODO(tkilbourn): this assumes "." always comes before "file1". If this test ever starts
        // flaking, handle the case of unordered EXISTING files.
        let (msg, rest) = one_step(exec, w);
        assert_eq!(WatchEvent::EXISTING, msg.event);
        assert_eq!(Path::new("."), msg.filename);
        let (msg, rest) = one_step(exec, rest);
        assert_eq!(WatchEvent::EXISTING, msg.event);
        assert_eq!(Path::new("file1"), msg.filename);
        let (msg, _) = one_step(exec, rest);
        assert_eq!(WatchEvent::IDLE, msg.event);
    }
    // A file created after IDLE is reported as ADD_FILE.
    #[test]
    fn test_add() {
        let tmp_dir = TempDir::new("vfs-watcher-test-add").unwrap();
        let exec = &mut async::Executor::new().unwrap();
        let dir = File::open(tmp_dir.path()).unwrap();
        let mut w = Watcher::new(&dir).unwrap();
        // Drain the EXISTING/IDLE preamble first.
        loop {
            let (msg, rest) = one_step(exec, w);
            w = rest;
            match msg.event {
                WatchEvent::EXISTING => continue,
                WatchEvent::IDLE => break,
                _ => panic!("Unexpected watch event!"),
            }
        }
        let _ = File::create(tmp_dir.path().join("file1")).unwrap();
        let (msg, _) = one_step(exec, w);
        assert_eq!(WatchEvent::ADD_FILE, msg.event);
        assert_eq!(Path::new("file1"), msg.filename);
    }
    // Removing a file is reported as REMOVE_FILE.
    #[test]
    fn test_remove() {
        let tmp_dir = TempDir::new("vfs-watcher-test-remove").unwrap();
        let filename = "file1";
        let filepath = tmp_dir.path().join(filename);
        let _ = File::create(&filepath).unwrap();
        let exec = &mut async::Executor::new().unwrap();
        let dir = File::open(tmp_dir.path()).unwrap();
        let mut w = Watcher::new(&dir).unwrap();
        // Drain the EXISTING/IDLE preamble first.
        loop {
            let (msg, rest) = one_step(exec, w);
            w = rest;
            match msg.event {
                WatchEvent::EXISTING => continue,
                WatchEvent::IDLE => break,
                _ => panic!("Unexpected watch event!"),
            }
        }
        ::std::fs::remove_file(&filepath).unwrap();
        let (msg, _) = one_step(exec, w);
        assert_eq!(WatchEvent::REMOVE_FILE, msg.event);
        assert_eq!(Path::new(filename), msg.filename);
    }
    // Sanity-check that one_step's timeout actually fires.
    #[test]
    #[should_panic]
    fn test_timeout() {
        let tmp_dir = TempDir::new("vfs-watcher-test-timeout").unwrap();
        let exec = &mut async::Executor::new().unwrap();
        let dir = File::open(tmp_dir.path()).unwrap();
        let mut w = Watcher::new(&dir).unwrap();
        loop {
            let (msg, rest) = one_step(exec, w);
            w = rest;
            match msg.event {
                WatchEvent::EXISTING => continue,
                WatchEvent::IDLE => break,
                _ => panic!("Unexpected watch event!"),
            }
        }
        // Ensure that our test timeouts actually work by waiting for another event that will never
        // arrive.
        let _ = one_step(exec, w);
    }
}
|
//! Streaming SIMD Extensions 2 (SSE2)
pub use arch::_mm_stream_si64;
use mem::transmute;
use simd::*;
/// Convert the lower double-precision (64-bit) floating-point element in a to
/// a 64-bit integer.
///
/// Thin wrapper: transmutes the portable `f64x2` into the arch vector type
/// and forwards to the arch intrinsic.
#[inline]
#[target_feature(enable = "sse2")]
pub unsafe fn _mm_cvtsd_si64(a: f64x2) -> i64 {
    ::arch::_mm_cvtsd_si64(transmute(a))
}
/// Alias for `_mm_cvtsd_si64`; forwards to the corresponding arch alias.
#[inline]
#[target_feature(enable = "sse2")]
pub unsafe fn _mm_cvtsd_si64x(a: f64x2) -> i64 {
    ::arch::_mm_cvtsd_si64x(transmute(a))
}
/// Convert the lower double-precision (64-bit) floating-point element in `a`
/// to a 64-bit integer with truncation.
///
/// Thin wrapper over the arch intrinsic via transmute.
#[inline]
#[target_feature(enable = "sse2")]
pub unsafe fn _mm_cvttsd_si64(a: f64x2) -> i64 {
    ::arch::_mm_cvttsd_si64(transmute(a))
}
/// Alias for `_mm_cvttsd_si64`; forwards to the corresponding arch alias.
#[inline]
#[target_feature(enable = "sse2")]
pub unsafe fn _mm_cvttsd_si64x(a: f64x2) -> i64 {
    ::arch::_mm_cvttsd_si64x(transmute(a))
}
/// Return a vector whose lowest element is `a` and all higher elements are `0`.
///
/// The arch intrinsic's result is transmuted back into the portable `i64x2`.
#[inline]
#[target_feature(enable = "sse2")]
pub unsafe fn _mm_cvtsi64_si128(a: i64) -> i64x2 {
    transmute(::arch::_mm_cvtsi64_si128(a))
}
/// Return a vector whose lowest element is `a` and all higher elements are
/// `0`. Alias of `_mm_cvtsi64_si128`, forwarded to the arch alias.
#[inline]
#[target_feature(enable = "sse2")]
pub unsafe fn _mm_cvtsi64x_si128(a: i64) -> i64x2 {
    transmute(::arch::_mm_cvtsi64x_si128(a))
}
/// Return the lowest element of `a`.
///
/// Thin wrapper over the arch intrinsic via transmute.
#[inline]
#[target_feature(enable = "sse2")]
pub unsafe fn _mm_cvtsi128_si64(a: i64x2) -> i64 {
    ::arch::_mm_cvtsi128_si64(transmute(a))
}
/// Return the lowest element of `a`.
///
/// Alias of `_mm_cvtsi128_si64`. Now forwards to the arch alias
/// `_mm_cvtsi128_si64x` for consistency with `_mm_cvtsd_si64x` /
/// `_mm_cvttsd_si64x`, which call their own `x` counterparts (the
/// previous forwarding to `_mm_cvtsi128_si64` was behaviorally identical
/// but inconsistent with the file's pattern).
#[inline]
#[target_feature(enable = "sse2")]
pub unsafe fn _mm_cvtsi128_si64x(a: i64x2) -> i64 {
    ::arch::_mm_cvtsi128_si64x(transmute(a))
}
/// Return `a` with its lower element replaced by `b` after converting it to
/// an `f64`.
///
/// Thin wrapper over the arch intrinsic via transmute.
#[inline]
#[target_feature(enable = "sse2")]
pub unsafe fn _mm_cvtsi64_sd(a: f64x2, b: i64) -> f64x2 {
    transmute(::arch::_mm_cvtsi64_sd(transmute(a), b))
}
/// Return `a` with its lower element replaced by `b` after converting it to
/// an `f64`. Alias of `_mm_cvtsi64_sd`, forwarded to the arch alias.
#[inline]
#[target_feature(enable = "sse2")]
pub unsafe fn _mm_cvtsi64x_sd(a: f64x2, b: i64) -> f64x2 {
    transmute(::arch::_mm_cvtsi64x_sd(transmute(a), b))
}
|
/// Greatest common divisor via the iterative Euclidean algorithm.
///
/// Handles zero operands (`gcd(0, b) == b`, `gcd(a, 0) == a`), which the
/// previous recursive version turned into a divide-by-zero panic
/// (`b % a` with `a == 0`).
pub fn gcd(a: u64, b: u64) -> u64 {
    let (mut x, mut y) = (a, b);
    while y != 0 {
        let r = x % y;
        x = y;
        y = r;
    }
    x
}
/// Least common multiple. Returns 0 when either input is 0.
pub fn lcm(a: u64, b: u64) -> u64 {
    if a == 0 || b == 0 {
        return 0;
    }
    // Divide before multiplying to reduce overflow risk (a/gcd is exact).
    a / gcd(a, b) * b
}
|
use std::{fs, io};
/// Returns true when `passport` contains every required field key
/// (`cid` is deliberately optional). Fields are `key:value` tokens
/// separated by spaces or newlines.
///
/// Collects the present keys into a set once, then tests membership —
/// the original rescanned the whole key `Vec` once per required field.
fn check_passport(passport: &str) -> bool {
    let keys: std::collections::HashSet<&str> = passport
        .split(|c| c == ' ' || c == '\n')
        // split always yields at least one piece, so next() cannot fail.
        .filter_map(|field| field.split(':').next())
        .collect();
    ["byr", "iyr", "eyr", "hgt", "hcl", "ecl", "pid"]
        .iter()
        .all(|required| keys.contains(required))
}
/// Counts valid passports in a batch; passports are separated by blank
/// lines (i.e. "\n\n").
fn check_passports(passports: &str) -> usize {
    let mut valid = 0;
    for passport in passports.split("\n\n") {
        if check_passport(passport) {
            valid += 1;
        }
    }
    valid
}
/// Reads the puzzle input and prints the number of valid passports.
fn main() -> io::Result<()> {
    let input = fs::read_to_string("input.txt")?;
    println!("{:?}", check_passports(&input));
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use test_case::test_case;
    // Examples from the puzzle statement; the `=> n` value is the
    // expected count of valid passports in each batch.
    #[test_case("ecl:gry pid:860033327 eyr:2020 hcl:#fffffd
byr:1937 iyr:2017 cid:147 hgt:183cm

iyr:2013 ecl:amb cid:350 eyr:2023 pid:028048884
hcl:#cfa07d byr:1929

hcl:#ae17e1 iyr:2013
eyr:2024
ecl:brn pid:760753108 byr:1931
hgt:179cm

hcl:#cfa07d eyr:2025 pid:166559648
iyr:2011 ecl:brn hgt:59in" => 2)]
    #[test_case("ecl:gry pid:860033327 eyr:2020 hcl:#fffffd
byr:1937 iyr:2017 cid:147 hgt:183cm" => 1)]
    #[test_case("iyr:2013 ecl:amb cid:350 eyr:2023 pid:028048884
hcl:#cfa07d byr:1929" => 0)]
    #[test_case("hcl:#ae17e1 iyr:2013
eyr:2024
ecl:brn pid:760753108 byr:1931
hgt:179cm" => 1)]
    #[test_case("hcl:#cfa07d eyr:2025 pid:166559648
iyr:2011 ecl:brn hgt:59in" => 0)]
    fn first(input: &str) -> usize {
        check_passports(input)
    }
}
|
mod common;
use std::sync::mpsc::Receiver;
use std::sync::mpsc::Sender;
/// Arithmetic operations (intcode opcodes 1 and 2).
#[derive(PartialEq, Eq, Copy, Clone)]
enum Op {
    Add,
    Mul,
}
/// Jump conditions (intcode opcodes 5 and 6).
#[derive(PartialEq, Eq, Copy, Clone)]
enum Cnd {
    True,
    False,
}
/// Comparison operations (intcode opcodes 7 and 8).
#[derive(PartialEq, Eq, Copy, Clone)]
enum Cmp {
    LessThan,
    Equal,
}
/// Decoded instruction kind.
#[derive(PartialEq, Eq, Copy, Clone)]
enum OpCode {
    Arith(Op),
    Input,
    Output,
    JumpIf(Cnd),
    Compare(Cmp),
    Halt,
}
/// An opcode together with its three parameter-mode digits
/// (0 = position mode, non-zero = immediate mode; see `fetch_value`).
struct ParamOpCode {
    opcode: OpCode,
    params: [u8; 3],
}
/// Decodes a raw instruction word: the two low decimal digits are the
/// opcode and the next three digits are the per-operand parameter modes
/// (0 = position, 1 = immediate).
///
/// # Panics
/// Panics on an unrecognized opcode, reporting the offending value
/// (the original message omitted it, making corrupt programs hard to debug).
fn parse_opcode(code: i32) -> ParamOpCode {
    let param1 = ((code / 100) % 10) as u8;
    let param2 = ((code / 1000) % 10) as u8;
    let param3 = ((code / 10000) % 10) as u8;
    let opcode = match code % 100 {
        1 => OpCode::Arith(Op::Add),
        2 => OpCode::Arith(Op::Mul),
        3 => OpCode::Input,
        4 => OpCode::Output,
        5 => OpCode::JumpIf(Cnd::True),
        6 => OpCode::JumpIf(Cnd::False),
        7 => OpCode::Compare(Cmp::LessThan),
        8 => OpCode::Compare(Cmp::Equal),
        99 => OpCode::Halt,
        other => panic!("unknown opcode {}", other),
    };
    ParamOpCode {
        opcode,
        params: [param1, param2, param3],
    }
}
/// Mutable interpreter state: program memory plus the instruction pointer.
struct ProgramState {
    mem: Vec<i32>,
    pointer: usize,
}
impl ProgramState {
    /// The raw instruction word at the current pointer.
    fn fetch_opcode(&self) -> i32 {
        self.mem[self.pointer]
    }
    /// Advances the instruction pointer by `position` cells.
    fn increase_pointer(&mut self, position: usize) {
        self.pointer += position;
    }
    /// Jumps the instruction pointer to an absolute position.
    fn set_pointer(&mut self, position: usize) {
        self.pointer = position;
    }
    /// Reads operand `position` (0-based) honoring its parameter mode:
    /// 0 = position mode (indirect), anything else = immediate.
    fn fetch_value(&self, position: usize, p: &ParamOpCode) -> i32 {
        if p.params[position] == 0 {
            self.mem[self.mem[self.pointer + position + 1] as usize]
        } else {
            self.mem[self.pointer + position + 1]
        }
    }
    /// Writes `value` through operand `position`; write targets are
    /// always interpreted in position mode in this machine.
    fn write_value(&mut self, position: usize, value: i32) {
        let target = self.mem[self.pointer + position + 1] as usize;
        self.mem[target] = value;
    }
}
/// Caller-side channel endpoints: send inputs to the program, receive
/// outputs; `None` on the output channel signals the program halted.
struct ProgramIO {
    is: Sender<i32>,
    or: Receiver<Option<i32>>,
}
/// Iterator over program outputs; ends when the halt marker arrives.
struct OutputIterator<'a> {
    io: &'a ProgramIO,
}
impl<'a> Iterator for OutputIterator<'a> {
    type Item = i32;
    /// Yields outputs until the halt marker (`None`) is received.
    fn next(&mut self) -> Option<Self::Item> {
        self.io.next_output()
    }
}
impl ProgramIO {
    /// Bundles the caller-side channel endpoints.
    fn new(is: Sender<i32>, or: Receiver<Option<i32>>) -> ProgramIO {
        ProgramIO { is, or }
    }
    /// Queues one input value for the program.
    fn send_input(&self, i: i32) {
        self.is.send(i).expect("could not send input");
    }
    /// Blocks for the next output; `None` means the program halted.
    fn next_output(&self) -> Option<i32> {
        // The original expect message said "could not send input",
        // which mislabeled a receive failure.
        self.or.recv().expect("could not receive output")
    }
    /// Iterator view over the remaining outputs.
    fn output_iter(&self) -> OutputIterator {
        // `self` is already a reference; the original's `&self` borrowed it again.
        OutputIterator { io: self }
    }
    /// Drains all outputs until the program halts.
    fn collect_outputs(&self) -> Vec<i32> {
        self.output_iter().collect()
    }
}
/// An intcode program plus the program-side channel endpoints
/// (receives inputs, sends outputs; a final `None` output marks the halt).
struct Program {
    program: Vec<i32>,
    ir: Receiver<i32>,
    os: Sender<Option<i32>>,
}
impl Program {
    /// Copies the program image and takes the program-side channel ends.
    fn new(program: &[i32], ir: Receiver<i32>, os: Sender<Option<i32>>) -> Program {
        Program {
            program: program.to_vec(),
            ir,
            os,
        }
    }
    /// Interprets the program to completion on the calling thread.
    /// Inputs block on the channel; outputs are sent as `Some(v)`, with a
    /// final `None` when the program halts.
    fn run(&self) {
        // Each run starts from a fresh copy of memory, so the same
        // Program can be run repeatedly.
        let mut state = ProgramState {
            mem: self.program.clone(),
            pointer: 0,
        };
        loop {
            let p = parse_opcode(state.fetch_opcode());
            match p.opcode {
                OpCode::Arith(op) => {
                    let x = state.fetch_value(0, &p);
                    let y = state.fetch_value(1, &p);
                    let z = match op {
                        Op::Add => x + y,
                        Op::Mul => x * y,
                    };
                    state.write_value(2, z);
                    state.increase_pointer(4);
                }
                OpCode::Input => {
                    // Blocks until the caller supplies an input value.
                    let i = self.ir.recv().unwrap();
                    state.write_value(0, i);
                    state.increase_pointer(2);
                }
                OpCode::Output => {
                    let o = state.fetch_value(0, &p);
                    self.os.send(Some(o)).unwrap();
                    state.increase_pointer(2);
                }
                OpCode::JumpIf(condition) => {
                    let x = state.fetch_value(0, &p);
                    let matched = match condition {
                        Cnd::True => x != 0,
                        Cnd::False => x == 0,
                    };
                    if matched {
                        let y = state.fetch_value(1, &p);
                        state.set_pointer(y as usize);
                    } else {
                        // Skip opcode + two operands when the jump is not taken.
                        state.increase_pointer(3);
                    }
                }
                OpCode::Compare(comparison) => {
                    let x = state.fetch_value(0, &p);
                    let y = state.fetch_value(1, &p);
                    let result = match comparison {
                        Cmp::LessThan => x < y,
                        Cmp::Equal => x == y,
                    };
                    if result {
                        state.write_value(2, 1);
                    } else {
                        state.write_value(2, 0);
                    }
                    state.increase_pointer(4);
                }
                OpCode::Halt => {
                    // Tell the consumer that no more outputs are coming.
                    self.os.send(None).expect("could not send halt output");
                    break;
                }
            }
        }
    }
}
/// Owns a program and the caller-side IO used to drive it.
struct ProgramRunner {
    program: Program,
    io: ProgramIO,
}
impl ProgramRunner {
    /// Wires a program and its caller-side IO through two mpsc channels.
    fn new(program: &[i32]) -> ProgramRunner {
        let (is, ir) = std::sync::mpsc::channel::<i32>();
        let (os, or) = std::sync::mpsc::channel::<Option<i32>>();
        ProgramRunner {
            program: Program::new(program, ir, os),
            io: ProgramIO::new(is, or),
        }
    }
    /// Queues all `inputs` up front, runs the program to halt on the
    /// current thread, then drains every output. Inputs must be queued
    /// first because `run` blocks this thread until halt.
    fn run_with(&self, inputs: &[i32]) -> Vec<i32> {
        for &i in inputs {
            self.io.send_input(i);
        }
        self.program.run();
        self.io.collect_outputs()
    }
}
/// Reads comma-separated intcode programs (one per line) and runs each
/// with input 1 (part 1) and input 5 (part 2).
fn main() {
    let input: Vec<Vec<i32>> = common::get_lines()
        .into_iter()
        .map(|l| {
            l.split(',')
                .map(|i| i.parse::<i32>().expect("could not parse number"))
                .collect()
        })
        .collect();
    for program in input {
        let p = ProgramRunner::new(&program);
        let output1 = p.run_with(&[1]);
        println!("Part1: Program output is: {:?}", output1);
        let output2 = p.run_with(&[5]);
        println!("Part2: Program output is: {:?}", output2);
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Program [3,0,4,0,99] echoes its single input; run it on a worker
    // thread and talk to it over the channels.
    #[test]
    fn test_threaded_run() {
        let ProgramRunner { program, io } = ProgramRunner::new(&[3, 0, 4, 0, 99]);
        let thread = std::thread::spawn(move || {
            program.run();
        });
        io.send_input(21);
        let out = io.next_output();
        let halt = io.next_output();
        assert_eq!(out, Some(21));
        assert_eq!(halt, None);
        thread.join().expect("could not join thread");
    }
    // Same echo program via the synchronous run_with helper.
    #[test]
    fn test_io() {
        let p = ProgramRunner::new(&[3, 0, 4, 0, 99]);
        let output = p.run_with(&[42]);
        assert_eq!(output, vec![42]);
    }
    // Immediate-mode arithmetic (1101 = add with both operands immediate).
    #[test]
    fn test_param() {
        let p = ProgramRunner::new(&[1101, 100, -1, 4, 0]);
        let output = p.run_with(&[]);
        assert_eq!(output, vec![]);
    }
    // Outputs 1 iff the input equals 8 (position mode).
    #[test]
    fn test_compare_equal_position_mode() {
        let p = ProgramRunner::new(&[3, 9, 8, 9, 10, 9, 4, 9, 99, -1, 8]);
        let output1 = p.run_with(&[7]);
        let output2 = p.run_with(&[8]);
        let output3 = p.run_with(&[9]);
        assert_eq!(output1, vec![0]);
        assert_eq!(output2, vec![1]);
        assert_eq!(output3, vec![0]);
    }
    // Outputs 1 iff the input is less than 8 (position mode).
    #[test]
    fn test_compare_less_than_position_mode() {
        let p = ProgramRunner::new(&[3, 9, 7, 9, 10, 9, 4, 9, 99, -1, 8]);
        let output1 = p.run_with(&[7]);
        let output2 = p.run_with(&[8]);
        let output3 = p.run_with(&[9]);
        assert_eq!(output1, vec![1]);
        assert_eq!(output2, vec![0]);
        assert_eq!(output3, vec![0]);
    }
    // Outputs 1 iff the input equals 8 (immediate mode).
    #[test]
    fn test_compare_equal_immediate_mode() {
        let p = ProgramRunner::new(&[3, 3, 1108, -1, 8, 3, 4, 3, 99]);
        let output1 = p.run_with(&[7]);
        let output2 = p.run_with(&[8]);
        let output3 = p.run_with(&[9]);
        assert_eq!(output1, vec![0]);
        assert_eq!(output2, vec![1]);
        assert_eq!(output3, vec![0]);
    }
    // Outputs 1 iff the input is less than 8 (immediate mode).
    #[test]
    fn test_compare_less_than_immediate_mode() {
        let p = ProgramRunner::new(&[3, 3, 1107, -1, 8, 3, 4, 3, 99]);
        let output1 = p.run_with(&[7]);
        let output2 = p.run_with(&[8]);
        let output3 = p.run_with(&[9]);
        assert_eq!(output1, vec![1]);
        assert_eq!(output2, vec![0]);
        assert_eq!(output3, vec![0]);
    }
    // Outputs 0 for input 0, 1 otherwise (jump test, position mode).
    #[test]
    fn test_jump_position() {
        let p = ProgramRunner::new(&[3, 12, 6, 12, 15, 1, 13, 14, 13, 4, 13, 99, -1, 0, 1, 9]);
        let output1 = p.run_with(&[0]);
        let output2 = p.run_with(&[2]);
        assert_eq!(output1, vec![0]);
        assert_eq!(output2, vec![1]);
    }
    // Outputs 0 for input 0, 1 otherwise (jump test, immediate mode).
    #[test]
    fn test_jump_immediate() {
        let p = ProgramRunner::new(&[3, 3, 1105, -1, 9, 1101, 0, 0, 12, 4, 12, 99, 1]);
        let output1 = p.run_with(&[0]);
        let output2 = p.run_with(&[2]);
        assert_eq!(output1, vec![0]);
        assert_eq!(output2, vec![1]);
    }
    // Outputs 999/1000/1001 for input below/equal to/above 8.
    #[test]
    fn test_large() {
        let p = ProgramRunner::new(&[
            3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31, 1106, 0, 36, 98, 0,
            0, 1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104, 999, 1105, 1, 46, 1101, 1000, 1, 20, 4,
            20, 1105, 1, 46, 98, 99,
        ]);
        let output1 = p.run_with(&[5]);
        let output2 = p.run_with(&[8]);
        let output3 = p.run_with(&[13]);
        assert_eq!(output1, vec![999]);
        assert_eq!(output2, vec![1000]);
        assert_eq!(output3, vec![1001]);
    }
    // A quine: the program outputs its own source.
    #[test]
    fn test_quine() {
        let intcode = vec![
            4, 0, 1005, 1, 19, 104, 0, 104, 1005, 104, 1, 104, 19, 104, 104, 101, 5, 1, 1, 101, 1,
            1, 1, 1007, 1, 31, 5, 1005, 5, 0, 99,
        ];
        let p = ProgramRunner::new(&intcode);
        let output = p.run_with(&[]);
        assert_eq!(output, intcode);
    }
}
|
/// Re-orders the words of `sentence` by the number embedded in each word.
///
/// Each word carries a position marker (possibly negative, possibly
/// multi-digit); words are sorted by that marker's *numeric* value.
/// Words are joined back with single spaces; an empty input yields "".
///
/// Bug fixed: the previous version sorted by the marker's *string* form,
/// which mis-ordered multi-digit markers (e.g. "a2 b10" became "b10 a2"
/// because "10" < "2" lexicographically).
fn order(sentence: &str) -> String {
    let mut str_vec: Vec<&str> = sentence.split_whitespace().collect();
    // Strip surrounding letters, then parse the remaining marker numerically.
    // Words without a parsable marker sort first (stable sort keeps their
    // relative order), matching the old behavior for marker-less words.
    str_vec.sort_by_key(|s| {
        s.trim_matches(char::is_alphabetic)
            .parse::<i64>()
            .unwrap_or(std::i64::MIN)
    });
    str_vec.join(" ")
}
// Table-driven check of `order` against the expected outputs, covering
// single-digit, multi-digit, and negative markers plus the empty string.
#[test]
fn returns_expected() {
    let cases = [
        ("is2 Thi1s T4est 3a", "Thi1s is2 3a T4est"),
        ("is200 Thi12s T488908est -23a", "-23a Thi12s is200 T488908est"),
        ("test10 test1 test-10", "test-10 test1 test10"),
        ("", ""),
    ];
    for &(input, expected) in cases.iter() {
        assert_eq!(order(input), expected);
    }
}
|
/// Spawns a fresh entity and attaches a `Generator` component to it.
///
/// The generator starts with its `timer` at zero; all other settings come
/// straight from the arguments.
pub fn create_generator<'a>(
    pos: ::na::Vector3<f32>,
    generated_entity: ::component::GeneratedEntity,
    salvo: usize,
    time_between_salvo: f32,
    eraser_probability: f32,
    generators: &mut ::specs::WriteStorage<'a, ::component::Generator>,
    entities: &::specs::Entities,
) {
    // Build the component first, then attach it to a newly created entity.
    let generator = ::component::Generator {
        pos,
        entity: generated_entity,
        salvo,
        timer: 0.0,
        time_between_salvo,
        eraser_probability,
    };
    generators.insert(entities.create(), generator);
}
|
use actix_web::{guard, web, App, HttpResponse, HttpServer};
use async_graphql::http::{playground_source, GraphQLPlaygroundConfig, MultipartOptions};
use async_graphql::{EmptySubscription, Schema};
use async_graphql_actix_web::{Request, Response};
use files::{FilesSchema, MutationRoot, QueryRoot, Storage};
/// GraphQL endpoint: unwraps the incoming request and executes it against
/// the shared schema, converting the result into an actix response.
async fn index(schema: web::Data<FilesSchema>, req: Request) -> Response {
    let request = req.into_inner();
    let result = schema.execute(request).await;
    result.into()
}
/// Serves the GraphQL Playground HTML UI, configured to query the "/" endpoint.
/// (Function name typo kept: it is referenced by the router in `main`.)
async fn gql_playgound() -> HttpResponse {
    let html = playground_source(GraphQLPlaygroundConfig::new("/"));
    HttpResponse::Ok()
        .content_type("text/html; charset=utf-8")
        .body(html)
}
// Entry point: builds the GraphQL schema once and serves it over HTTP.
#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    // The schema (with its file storage) is built once and cloned into each
    // actix worker by the closure below.
    let schema = Schema::build(QueryRoot, MutationRoot, EmptySubscription)
        .data(Storage::default())
        .finish();
    println!("Playground: http://localhost:8000");
    HttpServer::new(move || {
        App::new()
            .data(schema.clone())
            .service(
                // POST / -> GraphQL execution; multipart uploads are capped
                // at 3 files per request.
                web::resource("/")
                    .guard(guard::Post())
                    .to(index)
                    .app_data(MultipartOptions::default().max_num_files(3)),
            )
            // GET / -> interactive playground UI.
            .service(web::resource("/").guard(guard::Get()).to(gql_playgound))
    })
    .bind("127.0.0.1:8000")?
    .run()
    .await
}
|
use std::fmt;
use std::error::{self, Error};
///////////////////////////////////////////
// Aux Macro for creating the error type
///////////////////////////////////////////
// Declares a unit-struct error type implementing `std::error::Error`.
// The one-argument form uses the type's own name (via `stringify!`) as its
// description; the two-argument form takes an explicit description string.
// A matching `Display` impl is generated via `impl_display_for_error!`.
macro_rules! new_error_type {
    ($Error: ident) => (
        new_error_type!($Error, stringify!($Error));
    );
    ($Error: ident, $Description: expr) => (
        #[derive(Debug)]
        pub struct $Error;
        impl error::Error for $Error {
            // NOTE(review): `Error::description` is deprecated in modern
            // Rust; kept because `impl_display_for_error!` builds on it.
            fn description(&self) -> &str {
                $Description
            }
        }
        impl_display_for_error!($Error);
    );
}
// Generates a `Display` impl that simply forwards to the error's
// `description()` text, so the message is defined in exactly one place.
macro_rules! impl_display_for_error {
    ($Error: ty) => (
        impl fmt::Display for $Error {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                self.description().fmt(f)
            }
        }
    );
}
/// Error produced when evaluating an expression fails.
#[derive(Debug)]
pub struct EvalError {
    /// The specific failure that occurred.
    pub kind: EvalErrorKind,
}
/// The kinds of failure an evaluation can produce.
#[derive(Debug)]
pub enum EvalErrorKind {
    /// Attempted division by zero.
    DivideByZero,
    /// The evaluation result overflowed.
    Overflow,
}
impl error::Error for EvalError {
    // NOTE(review): `description` is deprecated in favor of `Display`, but
    // the `impl_display_for_error!` macro in this file builds `Display` on
    // top of it, so it is kept.
    fn description(&self) -> &str {
        match self.kind {
            EvalErrorKind::DivideByZero => "cannot divide by zero",
            EvalErrorKind::Overflow => "evaluation overflow",
        }
    }
}
impl_display_for_error!(EvalError); |
use std::fmt::Debug;
use crate::Fingerprint;
/// An alias for [`Result`](std::result::Result)'s with [`StorageError`].
pub type Result<T> = std::result::Result<T, StorageError>;
/// A sum type for all error conditions that can arise in this crate.
#[derive(Debug, thiserror::Error)]
pub enum StorageError {
    /// The storage layer contains some dirty changes that must be either
    /// saved or rolled back before attempting such an operation.
    #[error("Please checkout dirty changes or rollback to avoid data loss.")]
    DirtyChanges,
    /// Two checkpoints have resulted in a key collision, which prevents
    /// unordered transaction replaying.
    #[error("Key collision from two different checkpoints.")]
    KeyCollision {
        /// The Blake3 hash of the key that caused this collision.
        key_hash: Fingerprint,
    },
    /// A SQLite error happened.
    #[error("SQLite error.")]
    Sqlite(#[from] sqlx::Error),
}
|
//! `Context` is a top level module contains static context and dynamic context for each request
use std::sync::Arc;
use diesel::connection::AnsiTransactionManager;
use diesel::pg::Pg;
use diesel::Connection;
use futures_cpupool::CpuPool;
use r2d2::{ManageConnection, Pool};
use stq_http::client::ClientHandle;
use stq_router::RouteParser;
use stq_types::UserId;
use super::routes::*;
use config::Config;
use repos::repo_factory::*;
/// Static context shared by the whole application: database pool, worker
/// thread pool, configuration, routing table, HTTP client handle and the
/// repository factory.
pub struct StaticContext<T, M, F>
where
    T: Connection<Backend = Pg, TransactionManager = AnsiTransactionManager> + 'static,
    M: ManageConnection<Connection = T>,
    F: ReposFactory<T>,
{
    /// r2d2 pool of PostgreSQL connections.
    pub db_pool: Pool<M>,
    /// futures_cpupool thread pool (presumably for blocking DB work — confirm).
    pub cpu_pool: CpuPool,
    /// Immutable application configuration.
    pub config: Arc<Config>,
    /// Shared parser dispatching request paths to `Route`s.
    pub route_parser: Arc<RouteParser<Route>>,
    /// Handle to the shared HTTP client.
    pub client_handle: ClientHandle,
    /// Factory producing repository instances.
    pub repo_factory: F,
}
impl<
T: Connection<Backend = Pg, TransactionManager = AnsiTransactionManager> + 'static,
M: ManageConnection<Connection = T>,
F: ReposFactory<T>,
> StaticContext<T, M, F>
{
/// Create a new static context
pub fn new(db_pool: Pool<M>, cpu_pool: CpuPool, client_handle: ClientHandle, config: Arc<Config>, repo_factory: F) -> Self {
let route_parser = Arc::new(create_route_parser());
Self {
route_parser,
db_pool,
cpu_pool,
client_handle,
config,
repo_factory,
}
}
}
// Manual `Clone`: a derive would add `Clone` bounds on the type parameters
// that the fields don't actually require.
// NOTE(review): `repo_factory.clone()` compiles only if `F: Clone` —
// presumably `ReposFactory` has `Clone` as a supertrait; confirm in the
// repos module.
impl<
    T: Connection<Backend = Pg, TransactionManager = AnsiTransactionManager> + 'static,
    M: ManageConnection<Connection = T>,
    F: ReposFactory<T>,
> Clone for StaticContext<T, M, F>
{
    fn clone(&self) -> Self {
        Self {
            cpu_pool: self.cpu_pool.clone(),
            db_pool: self.db_pool.clone(),
            route_parser: self.route_parser.clone(),
            client_handle: self.client_handle.clone(),
            config: self.config.clone(),
            repo_factory: self.repo_factory.clone(),
        }
    }
}
/// Dynamic context created fresh for each request.
#[derive(Clone)]
pub struct DynamicContext {
    /// Authenticated user for this request, if any.
    pub user_id: Option<UserId>,
    /// Token used to correlate this request across services — TODO confirm
    /// exact semantics with callers.
    pub correlation_token: String,
}
impl DynamicContext {
/// Create a new dynamic context for each request
pub fn new(user_id: Option<UserId>, correlation_token: String) -> Self {
Self {
user_id,
correlation_token,
}
}
}
|
mod covid19;
mod config;
use actix_web::{HttpServer, App, get, Responder, HttpResponse};
// Liveness probe: always answers `200 OK` with an empty body.
#[get("/health-check")]
async fn health_check() -> impl Responder {
    HttpResponse::Ok()
}
// Entry point: serves the health check and the covid19 endpoint on all
// interfaces at the port taken from the static configuration.
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            .service(health_check)
            .service(covid19::presenter::get)
    })
    .bind(format!("0.0.0.0:{}", &config::CONFIG.port))?
    .run()
    .await
}
|
use core::*;
/// Extension trait that applies a `Framer` to a byte stream.
pub trait Frame {
    /// Re-frames the whole stream, yielding every frame the framer produces.
    fn frame<F: Framer>(self, framer: F) -> Stream<Bytes>;
    /// Extracts only the first frame; any remaining data stays available
    /// through the returned stream.
    fn frame_one<F: Framer>(self, framer: F) -> Stream<Bytes>;
}
impl Frame for Stream<Bytes> {
    fn frame<F: Framer>(self, framer: F) -> Stream<Bytes> {
        // Channel pair: frames are pumped into `tx` while the caller
        // consumes `rx`.
        let (tx, rx) = Stream::pair();
        // Start work only once the consumer shows interest; an Err means
        // the consumer went away, so nothing is done.
        tx.receive(move |res| {
            if let Ok(tx) = res {
                frame(self, tx, framer);
            }
        });
        rx
    }
    fn frame_one<F: Framer>(self, framer: F) -> Stream<Bytes> {
        // A future that resolves to the first frame (plus the remainder),
        // exposed to the caller as a stream.
        let (tx, rx) = Future::pair();
        tx.receive(move |res| {
            if let Ok(tx) = res {
                frame_one(self, tx, framer);
            }
        });
        rx.to_stream()
    }
}
/// Recursively pumps frames from `src` through `framer` into `dst`.
///
/// Each step either sends the next complete frame or, when the framer has
/// no full frame buffered, pulls one more chunk from the source. The
/// recursion happens inside completion callbacks, so the "loop" is
/// asynchronous and never blocks.
pub fn frame<F>(src: Stream<Bytes>,
                dst: Sender<Bytes>,
                mut framer: F)
    where F: Framer {
    match framer.next() {
        // A complete frame is available: send it and continue once the
        // receiver is ready again (an Err means it hung up — stop).
        Some(bytes) => {
            dst.send(bytes).receive(move |res| {
                if let Ok(dst) = res {
                    frame(src, dst, framer);
                }
            });
        }
        // No complete frame buffered: read more data from the source.
        None => {
            src.receive(move |res| {
                match res {
                    Ok(Some((chunk, rest))) => {
                        framer.buffer(chunk);
                        frame(rest, dst, framer);
                    }
                    // Source exhausted: emit any leftover partial data as a
                    // final frame.
                    Ok(None) => {
                        if let Some(bytes) = framer.flush() {
                            dst.send(bytes);
                        }
                    }
                    // Propagate upstream failure to the consumer.
                    Err(AsyncError::Failed(e)) => {
                        dst.fail(e);
                    }
                    // Consumer aborted; nothing left to do.
                    Err(AsyncError::Aborted) => {
                    }
                }
            });
        }
    }
}
/// Completes `dst` with the first frame produced from `src` (together with
/// a stream of the remaining data), or `None` if the source ends before a
/// complete frame can be assembled.
pub fn frame_one<F>(src: Stream<Bytes>,
                    dst: Complete<Option<(Bytes, Stream<Bytes>)>>,
                    mut framer: F)
    where F: Framer {
    src.receive(move |res| {
        match res {
            Ok(Some((chunk, rest))) => {
                framer.buffer(chunk);
                match framer.next() {
                    Some(bytes) => {
                        // Figure out if there is any data still buffered;
                        // if so, push it back in front of the rest of the
                        // stream so no bytes are lost.
                        let rest = match framer.flush() {
                            Some(bytes) => Future::of(Some((bytes, rest))).to_stream(),
                            None => rest,
                        };
                        dst.complete(Some((bytes, rest)));
                    }
                    // Not enough data for a frame yet — keep reading.
                    None => frame_one(rest, dst, framer),
                }
            }
            // End of stream: one last complete frame may still be buffered.
            // NOTE(review): unlike `frame`, leftover *partial* data is not
            // flushed here and is silently dropped — confirm intended.
            Ok(None) => {
                match framer.next() {
                    Some(bytes) => dst.complete(Some((bytes, Stream::empty()))),
                    None => dst.complete(None),
                }
            }
            Err(AsyncError::Failed(e)) => {
                dst.fail(e);
            }
            Err(AsyncError::Aborted) => {}
        }
    });
}
/// Splits an incoming byte stream into discrete frames.
pub trait Framer : Send + 'static {
    /// Buffer more data into the framer
    fn buffer(&mut self, bytes: Bytes);
    /// Get the next frame, if enough data has been buffered for one
    fn next(&mut self) -> Option<Bytes>;
    /// Get all buffered up data, leaving the framer empty
    fn flush(&mut self) -> Option<Bytes>;
}
/// A `Framer` producing fixed-length frames of exactly `len` bytes.
pub struct Len {
    // Frame size in bytes.
    len: usize,
    // Data buffered but not yet emitted (`None` when empty).
    buf: Option<Bytes>,
}
impl Len {
    /// Creates a fixed-length framer that emits frames of `len` bytes.
    pub fn new(len: usize) -> Len {
        Len { len: len, buf: None }
    }
}
impl Framer for Len {
    fn buffer(&mut self, bytes: Bytes) {
        // Append the new chunk to whatever is already buffered.
        let merged = match self.buf.take() {
            Some(existing) => existing.concat(&bytes),
            None => Some(bytes).unwrap(),
        };
        self.buf = Some(merged);
    }
    fn next(&mut self) -> Option<Bytes> {
        match self.buf.take() {
            None => None,
            Some(bytes) => {
                if bytes.len() < self.len {
                    // Not enough data for a full frame yet; put it back.
                    self.buf = Some(bytes);
                    None
                } else if bytes.len() == self.len {
                    // Exactly one frame; the buffer stays empty.
                    Some(bytes)
                } else {
                    // Carve off one frame and retain the remainder.
                    let (frame, remainder) = bytes.split_at(self.len);
                    self.buf = Some(remainder);
                    Some(frame)
                }
            }
        }
    }
    fn flush(&mut self) -> Option<Bytes> {
        // Hand back whatever is buffered, emptying the framer.
        self.buf.take()
    }
}
|
#[doc = "Register `SMPR2` reader"]
pub type R = crate::R<SMPR2_SPEC>;
#[doc = "Register `SMPR2` writer"]
pub type W = crate::W<SMPR2_SPEC>;
#[doc = "Field `SMP0` reader - Channel 0 sampling time selection"]
pub type SMP0_R = crate::FieldReader<SMP0_A>;
#[doc = "Channel 0 sampling time selection\n\nValue on reset: 0"]
// Auto-generated (svd2rust) enumeration of the sampling-time choices; the
// discriminants are the raw 3-bit values held in the register field.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum SMP0_A {
    #[doc = "0: 3 cycles"]
    Cycles3 = 0,
    #[doc = "1: 15 cycles"]
    Cycles15 = 1,
    #[doc = "2: 28 cycles"]
    Cycles28 = 2,
    #[doc = "3: 56 cycles"]
    Cycles56 = 3,
    #[doc = "4: 84 cycles"]
    Cycles84 = 4,
    #[doc = "5: 112 cycles"]
    Cycles112 = 5,
    #[doc = "6: 144 cycles"]
    Cycles144 = 6,
    #[doc = "7: 480 cycles"]
    Cycles480 = 7,
}
impl From<SMP0_A> for u8 {
    #[doc = "Converts the enum variant into its raw 3-bit field value."]
    #[inline(always)]
    fn from(variant: SMP0_A) -> Self {
        variant as u8
    }
}
// The SMP0 field is read and written as a 3-bit `u8` value.
impl crate::FieldSpec for SMP0_A {
    type Ux = u8;
}
// Auto-generated (svd2rust) reader helpers for the 3-bit SMP0 field.
impl SMP0_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SMP0_A {
        match self.bits {
            0 => SMP0_A::Cycles3,
            1 => SMP0_A::Cycles15,
            2 => SMP0_A::Cycles28,
            3 => SMP0_A::Cycles56,
            4 => SMP0_A::Cycles84,
            5 => SMP0_A::Cycles112,
            6 => SMP0_A::Cycles144,
            7 => SMP0_A::Cycles480,
            // A 3-bit field can only hold 0..=7.
            _ => unreachable!(),
        }
    }
    #[doc = "3 cycles"]
    #[inline(always)]
    pub fn is_cycles3(&self) -> bool {
        *self == SMP0_A::Cycles3
    }
    #[doc = "15 cycles"]
    #[inline(always)]
    pub fn is_cycles15(&self) -> bool {
        *self == SMP0_A::Cycles15
    }
    #[doc = "28 cycles"]
    #[inline(always)]
    pub fn is_cycles28(&self) -> bool {
        *self == SMP0_A::Cycles28
    }
    #[doc = "56 cycles"]
    #[inline(always)]
    pub fn is_cycles56(&self) -> bool {
        *self == SMP0_A::Cycles56
    }
    #[doc = "84 cycles"]
    #[inline(always)]
    pub fn is_cycles84(&self) -> bool {
        *self == SMP0_A::Cycles84
    }
    #[doc = "112 cycles"]
    #[inline(always)]
    pub fn is_cycles112(&self) -> bool {
        *self == SMP0_A::Cycles112
    }
    #[doc = "144 cycles"]
    #[inline(always)]
    pub fn is_cycles144(&self) -> bool {
        *self == SMP0_A::Cycles144
    }
    #[doc = "480 cycles"]
    #[inline(always)]
    pub fn is_cycles480(&self) -> bool {
        *self == SMP0_A::Cycles480
    }
}
#[doc = "Field `SMP0` writer - Channel 0 sampling time selection"]
pub type SMP0_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 3, O, SMP0_A>;
// Auto-generated (svd2rust) writer helpers: each method selects one of the
// enumerated sampling-time values for the SMP0 field.
impl<'a, REG, const O: u8> SMP0_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "3 cycles"]
    #[inline(always)]
    pub fn cycles3(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles3)
    }
    #[doc = "15 cycles"]
    #[inline(always)]
    pub fn cycles15(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles15)
    }
    #[doc = "28 cycles"]
    #[inline(always)]
    pub fn cycles28(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles28)
    }
    #[doc = "56 cycles"]
    #[inline(always)]
    pub fn cycles56(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles56)
    }
    #[doc = "84 cycles"]
    #[inline(always)]
    pub fn cycles84(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles84)
    }
    #[doc = "112 cycles"]
    #[inline(always)]
    pub fn cycles112(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles112)
    }
    #[doc = "144 cycles"]
    #[inline(always)]
    pub fn cycles144(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles144)
    }
    #[doc = "480 cycles"]
    #[inline(always)]
    pub fn cycles480(self) -> &'a mut crate::W<REG> {
        self.variant(SMP0_A::Cycles480)
    }
}
#[doc = "Field `SMP1` reader - Channel 1 sampling time selection"]
pub use SMP0_R as SMP1_R;
#[doc = "Field `SMP2` reader - Channel 2 sampling time selection"]
pub use SMP0_R as SMP2_R;
#[doc = "Field `SMP3` reader - Channel 3 sampling time selection"]
pub use SMP0_R as SMP3_R;
#[doc = "Field `SMP4` reader - Channel 4 sampling time selection"]
pub use SMP0_R as SMP4_R;
#[doc = "Field `SMP5` reader - Channel 5 sampling time selection"]
pub use SMP0_R as SMP5_R;
#[doc = "Field `SMP6` reader - Channel 6 sampling time selection"]
pub use SMP0_R as SMP6_R;
#[doc = "Field `SMP7` reader - Channel 7 sampling time selection"]
pub use SMP0_R as SMP7_R;
#[doc = "Field `SMP8` reader - Channel 8 sampling time selection"]
pub use SMP0_R as SMP8_R;
#[doc = "Field `SMP9` reader - Channel 9 sampling time selection"]
pub use SMP0_R as SMP9_R;
#[doc = "Field `SMP1` writer - Channel 1 sampling time selection"]
pub use SMP0_W as SMP1_W;
#[doc = "Field `SMP2` writer - Channel 2 sampling time selection"]
pub use SMP0_W as SMP2_W;
#[doc = "Field `SMP3` writer - Channel 3 sampling time selection"]
pub use SMP0_W as SMP3_W;
#[doc = "Field `SMP4` writer - Channel 4 sampling time selection"]
pub use SMP0_W as SMP4_W;
#[doc = "Field `SMP5` writer - Channel 5 sampling time selection"]
pub use SMP0_W as SMP5_W;
#[doc = "Field `SMP6` writer - Channel 6 sampling time selection"]
pub use SMP0_W as SMP6_W;
#[doc = "Field `SMP7` writer - Channel 7 sampling time selection"]
pub use SMP0_W as SMP7_W;
#[doc = "Field `SMP8` writer - Channel 8 sampling time selection"]
pub use SMP0_W as SMP8_W;
#[doc = "Field `SMP9` writer - Channel 9 sampling time selection"]
pub use SMP0_W as SMP9_W;
// Auto-generated (svd2rust) register reader: each accessor extracts one
// 3-bit sampling-time field (SMPn occupies bits 3n..=3n+2).
impl R {
    #[doc = "Bits 0:2 - Channel 0 sampling time selection"]
    #[inline(always)]
    pub fn smp0(&self) -> SMP0_R {
        SMP0_R::new((self.bits & 7) as u8)
    }
    #[doc = "Bits 3:5 - Channel 1 sampling time selection"]
    #[inline(always)]
    pub fn smp1(&self) -> SMP1_R {
        SMP1_R::new(((self.bits >> 3) & 7) as u8)
    }
    #[doc = "Bits 6:8 - Channel 2 sampling time selection"]
    #[inline(always)]
    pub fn smp2(&self) -> SMP2_R {
        SMP2_R::new(((self.bits >> 6) & 7) as u8)
    }
    #[doc = "Bits 9:11 - Channel 3 sampling time selection"]
    #[inline(always)]
    pub fn smp3(&self) -> SMP3_R {
        SMP3_R::new(((self.bits >> 9) & 7) as u8)
    }
    #[doc = "Bits 12:14 - Channel 4 sampling time selection"]
    #[inline(always)]
    pub fn smp4(&self) -> SMP4_R {
        SMP4_R::new(((self.bits >> 12) & 7) as u8)
    }
    #[doc = "Bits 15:17 - Channel 5 sampling time selection"]
    #[inline(always)]
    pub fn smp5(&self) -> SMP5_R {
        SMP5_R::new(((self.bits >> 15) & 7) as u8)
    }
    #[doc = "Bits 18:20 - Channel 6 sampling time selection"]
    #[inline(always)]
    pub fn smp6(&self) -> SMP6_R {
        SMP6_R::new(((self.bits >> 18) & 7) as u8)
    }
    #[doc = "Bits 21:23 - Channel 7 sampling time selection"]
    #[inline(always)]
    pub fn smp7(&self) -> SMP7_R {
        SMP7_R::new(((self.bits >> 21) & 7) as u8)
    }
    #[doc = "Bits 24:26 - Channel 8 sampling time selection"]
    #[inline(always)]
    pub fn smp8(&self) -> SMP8_R {
        SMP8_R::new(((self.bits >> 24) & 7) as u8)
    }
    #[doc = "Bits 27:29 - Channel 9 sampling time selection"]
    #[inline(always)]
    pub fn smp9(&self) -> SMP9_R {
        SMP9_R::new(((self.bits >> 27) & 7) as u8)
    }
}
// Auto-generated (svd2rust) register writer: each accessor returns a field
// writer positioned at the SMPn field's bit offset (3n).
impl W {
    #[doc = "Bits 0:2 - Channel 0 sampling time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp0(&mut self) -> SMP0_W<SMPR2_SPEC, 0> {
        SMP0_W::new(self)
    }
    #[doc = "Bits 3:5 - Channel 1 sampling time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp1(&mut self) -> SMP1_W<SMPR2_SPEC, 3> {
        SMP1_W::new(self)
    }
    #[doc = "Bits 6:8 - Channel 2 sampling time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp2(&mut self) -> SMP2_W<SMPR2_SPEC, 6> {
        SMP2_W::new(self)
    }
    #[doc = "Bits 9:11 - Channel 3 sampling time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp3(&mut self) -> SMP3_W<SMPR2_SPEC, 9> {
        SMP3_W::new(self)
    }
    #[doc = "Bits 12:14 - Channel 4 sampling time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp4(&mut self) -> SMP4_W<SMPR2_SPEC, 12> {
        SMP4_W::new(self)
    }
    #[doc = "Bits 15:17 - Channel 5 sampling time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp5(&mut self) -> SMP5_W<SMPR2_SPEC, 15> {
        SMP5_W::new(self)
    }
    #[doc = "Bits 18:20 - Channel 6 sampling time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp6(&mut self) -> SMP6_W<SMPR2_SPEC, 18> {
        SMP6_W::new(self)
    }
    #[doc = "Bits 21:23 - Channel 7 sampling time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp7(&mut self) -> SMP7_W<SMPR2_SPEC, 21> {
        SMP7_W::new(self)
    }
    #[doc = "Bits 24:26 - Channel 8 sampling time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp8(&mut self) -> SMP8_W<SMPR2_SPEC, 24> {
        SMP8_W::new(self)
    }
    #[doc = "Bits 27:29 - Channel 9 sampling time selection"]
    #[inline(always)]
    #[must_use]
    pub fn smp9(&mut self) -> SMP9_W<SMPR2_SPEC, 27> {
        SMP9_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "sample time register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`smpr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`smpr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Marker type describing the SMPR2 register: 32 bits wide, readable,
// writable, and resetting to 0.
pub struct SMPR2_SPEC;
impl crate::RegisterSpec for SMPR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`smpr2::R`](R) reader structure"]
impl crate::Readable for SMPR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`smpr2::W`](W) writer structure"]
impl crate::Writable for SMPR2_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SMPR2 to value 0"]
impl crate::Resettable for SMPR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// RGB Rust Library
// Written in 2019 by
// Dr. Maxim Orlovsky <dr.orlovsky@gmail.com>
// basing on ideas from the original RGB rust library by
// Alekos Filini <alekos.filini@gmail.com>
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the MIT License
// along with this software.
// If not, see <https://opensource.org/licenses/MIT>.
use std::convert::From;
use std::fmt::{Display, Error, Formatter};
use std::io::{self, Cursor};
use std::rc::Rc;
use bitcoin::consensus::encode::*;
use bitcoin_hashes::error::Error as BitcoinHashError;
use bitcoin_hashes::sha256d;
use crate::*;
/// Error types for RGB protocol.
pub enum RgbError<'a, B: ContractBody> {
    /// A hashing error bubbled up from `bitcoin_hashes`.
    BitcoinHashError(BitcoinHashError),
    /// An I/O failure.
    IoError(io::Error),
    /// A root proof that does not reference a contract.
    ProofWithoutContract(&'a Proof<B>),
    /// A contract that does not reference a root proof.
    ContractWithoutRootProof(Rc<Contract<B>>),
    /// A non-root proof with no upstream proofs. (Variant-name typo kept
    /// for API compatibility with existing callers.)
    ProofWihoutInputs(&'a Proof<B>),
    /// A proof referencing an output index its transaction does not have.
    MissingVout(&'a Proof<B>, u32),
    /// The given output of the given transaction is not colored with the
    /// expected script.
    WrongScript(sha256d::Hash, u32),
    /// Input and output asset sets of a proof do not match.
    AssetsNotEqual(&'a Proof<B>),
    /// Input and output amounts for one asset of a proof are not equal.
    AmountsNotEqual(&'a Proof<B>, AssetId),
    /// A non-root proof with no transaction inputs.
    NoInputs(&'a Proof<B>),
    /// A contract using a no-longer-supported version.
    OutdatedContractVersion(Rc<Contract<B>>),
    /// A contract using an unknown (future) version.
    UnknownContractVersion(Rc<Contract<B>>),
    /// The commitment scheme id is not supported.
    UnsupportedCommitmentScheme(CommitmentScheme),
    /// Missing original public key for a pay-to-contract proof.
    NoOriginalPubKey(IdentityHash),
    /// Proof structure that does not match the contract's structure.
    ProofStructureNotMatchingContract(&'a Proof<B>),
    /// Internal inconsistency in a contract, with an explanation message.
    /// (Variant-name typo kept for API compatibility.)
    InternalContractIncosistency(Rc<Contract<B>>, &'a str),
}
/// Human-readable rendering of RGB errors; proof- and contract-carrying
/// variants print the offending object through its own `Display` impl.
impl<'a, T: ContractBody + Encodable<Cursor<Vec<u8>>>> Display for RgbError<'a, T> {
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        match self {
            RgbError::BitcoinHashError(err) => Display::fmt(err, f),
            RgbError::IoError(err) => Display::fmt(err, f),
            RgbError::ProofWithoutContract(id) => {
                write!(f, "Root proof {} does not reference a contract", **id)
            }
            RgbError::ContractWithoutRootProof(id) => {
                write!(f, "Contract {} does not reference a root proof", **id)
            }
            RgbError::ProofWihoutInputs(id) => write!(
                f,
                "Non-root proof {} does not have any upstream proofs",
                **id
            ),
            // Message typos fixed ("unexisting" -> "non-existing",
            // "bouding" -> "bounding").
            RgbError::MissingVout(id, vout) => write!(
                f,
                "Proof {} references non-existing output {} in its bounding tx",
                **id, vout
            ),
            // BUG FIX: arguments were passed as (txid, vout), printing the
            // transaction id in the "Output {}" slot and the output index in
            // the "transaction {}" slot; order now matches the message text.
            RgbError::WrongScript(txid, vout) => write!(
                f,
                "Output {} for the transaction {} is not colored with a proper script",
                vout, txid
            ),
            RgbError::AssetsNotEqual(id) => write!(
                f,
                "Input and output assets for the proof {} do not match",
                **id
            ),
            RgbError::AmountsNotEqual(proof, asset_id) => write!(
                f,
                "Input and output asset {} amounts for the proof {} are not equal",
                *asset_id, **proof
            ),
            RgbError::NoInputs(proof) => {
                write!(f, "Non-root proof {} has no transaction inputs", **proof)
            }
            RgbError::OutdatedContractVersion(contract) => write!(
                f,
                "Unsupported contract version for contract {}",
                **contract
            ),
            RgbError::UnknownContractVersion(contract) => {
                write!(f, "Unknown future version found in contract {}", **contract)
            }
            RgbError::UnsupportedCommitmentScheme(ref scheme) => {
                write!(f, "Unknown commitment scheme with id {}", {
                    let s: u8 = scheme.clone().into();
                    s
                })
            }
            // Message fixed: a missing "for" made the sentence ungrammatical.
            RgbError::NoOriginalPubKey(ref hash) => write!(
                f,
                "No original public key is found for pay-to-contract proof {}",
                *hash
            ),
            RgbError::ProofStructureNotMatchingContract(id) => write!(
                f,
                "Proof structure for {} does not match RGB contract structure",
                **id
            ),
            RgbError::InternalContractIncosistency(contract, msg) => write!(
                f,
                "Internal inconsistency found for the contract {}: {}",
                **contract, msg
            ),
        }
    }
}
/// Lets `?` convert raw `bitcoin_hashes` errors into `RgbError`.
impl<'a, B: ContractBody> From<BitcoinHashError> for RgbError<'a, B> {
    fn from(err: BitcoinHashError) -> Self {
        Self::BitcoinHashError(err)
    }
}
/// Lets `?` convert I/O errors into `RgbError`.
impl<'a, B: ContractBody> From<io::Error> for RgbError<'a, B> {
    fn from(err: io::Error) -> Self {
        Self::IoError(err)
    }
}
|
use super::helpers::{allocations, fixtures::get_language};
use tree_sitter::Parser;
// Regression test: parsing a short but pathologically ambiguous C++ input
// must terminate and not crash. It runs inside `allocations::record`, which
// presumably verifies that allocations are balanced — see helpers module.
#[test]
fn test_pathological_example_1() {
    let language = "cpp";
    let source = r#"*ss<s"ss<sqXqss<s._<s<sq<(qqX<sqss<s.ss<sqsssq<(qss<qssqXqss<s._<s<sq<(qqX<sqss<s.ss<sqsssq<(qss<sqss<sqss<s._<s<sq>(qqX<sqss<s.ss<sqsssq<(qss<sq&=ss<s<sqss<s._<s<sq<(qqX<sqss<s.ss<sqs"#;
    allocations::record(|| {
        let mut parser = Parser::new();
        parser.set_language(get_language(language)).unwrap();
        parser.parse(source, None).unwrap();
    });
}
|
#[doc = r"Register block"]
// Memory-mapped layout of the peripheral's register file; field order and
// the reserved gap (0x60..0x70) must match the hardware offsets given in
// each #[doc] attribute, hence #[repr(C)].
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - Control Register 1"]
    pub cr1: CR1,
    #[doc = "0x04 - Control Register 2"]
    pub cr2: CR2,
    #[doc = "0x08 - Interrupt Status Register"]
    pub isr: ISR,
    #[doc = "0x0c - Interrupt Clear Register"]
    pub icr: ICR,
    #[doc = "0x10 - Interrupt Enable Register"]
    pub ier: IER,
    #[doc = "0x14 - Output Enable Register"]
    pub oenr: OENR,
    #[doc = "0x18 - DISR"]
    pub disr: DISR,
    #[doc = "0x1c - Output Disable Status Register"]
    pub odsr: ODSR,
    #[doc = "0x20 - Burst Mode Control Register"]
    pub bmcr: BMCR,
    #[doc = "0x24 - BMTRG"]
    pub bmtrg: BMTRG,
    #[doc = "0x28 - BMCMPR6"]
    pub bmcmpr6: BMCMPR6,
    #[doc = "0x2c - Burst Mode Period Register"]
    pub bmper: BMPER,
    #[doc = "0x30 - Timer External Event Control Register 1"]
    pub eecr1: EECR1,
    #[doc = "0x34 - Timer External Event Control Register 2"]
    pub eecr2: EECR2,
    #[doc = "0x38 - Timer External Event Control Register 3"]
    pub eecr3: EECR3,
    #[doc = "0x3c - ADC Trigger 1 Register"]
    pub adc1r: ADC1R,
    #[doc = "0x40 - ADC Trigger 2 Register"]
    pub adc2r: ADC2R,
    #[doc = "0x44 - ADC Trigger 3 Register"]
    pub adc3r: ADC3R,
    #[doc = "0x48 - ADC Trigger 4 Register"]
    pub adc4r: ADC4R,
    #[doc = "0x4c - DLL Control Register"]
    pub dllcr: DLLCR,
    #[doc = "0x50 - HRTIM Fault Input Register 1"]
    pub fltinr1: FLTINR1,
    #[doc = "0x54 - HRTIM Fault Input Register 2"]
    pub fltinr2: FLTINR2,
    #[doc = "0x58 - BDMUPDR"]
    pub bdmupdr: BDMUPDR,
    #[doc = "0x5c - Burst DMA Timerx update Register"]
    pub bdtx_upr: BDTX_UPR,
    // Padding for the unused address range 0x60..0x70.
    _reserved24: [u8; 0x10],
    #[doc = "0x70 - Burst DMA Data Register"]
    pub bdmadr: BDMADR,
}
#[doc = "CR1 (rw) register accessor: Control Register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr1`]
module"]
pub type CR1 = crate::Reg<cr1::CR1_SPEC>;
#[doc = "Control Register 1"]
pub mod cr1;
#[doc = "CR2 (rw) register accessor: Control Register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr2`]
module"]
pub type CR2 = crate::Reg<cr2::CR2_SPEC>;
#[doc = "Control Register 2"]
pub mod cr2;
#[doc = "ISR (rw) register accessor: Interrupt Status Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`isr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`isr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`isr`]
module"]
pub type ISR = crate::Reg<isr::ISR_SPEC>;
#[doc = "Interrupt Status Register"]
pub mod isr;
#[doc = "ICR (rw) register accessor: Interrupt Clear Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`icr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`icr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`icr`]
module"]
pub type ICR = crate::Reg<icr::ICR_SPEC>;
#[doc = "Interrupt Clear Register"]
pub mod icr;
#[doc = "IER (rw) register accessor: Interrupt Enable Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ier::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ier::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ier`]
module"]
pub type IER = crate::Reg<ier::IER_SPEC>;
#[doc = "Interrupt Enable Register"]
pub mod ier;
#[doc = "OENR (w) register accessor: Output Enable Register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`oenr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`oenr`]
module"]
pub type OENR = crate::Reg<oenr::OENR_SPEC>;
#[doc = "Output Enable Register"]
pub mod oenr;
#[doc = "DISR (rw) register accessor: DISR\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`disr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`disr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`disr`]
module"]
pub type DISR = crate::Reg<disr::DISR_SPEC>;
#[doc = "DISR"]
pub mod disr;
#[doc = "ODSR (r) register accessor: Output Disable Status Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`odsr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`odsr`]
module"]
pub type ODSR = crate::Reg<odsr::ODSR_SPEC>;
#[doc = "Output Disable Status Register"]
pub mod odsr;
#[doc = "BMCR (rw) register accessor: Burst Mode Control Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bmcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bmcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bmcr`]
module"]
pub type BMCR = crate::Reg<bmcr::BMCR_SPEC>;
#[doc = "Burst Mode Control Register"]
pub mod bmcr;
// svd2rust-generated register declarations: each register is exposed as a
// type alias over `crate::Reg` plus a module holding its reader/writer types.
#[doc = "BMTRG (rw) register accessor: BMTRG\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bmtrg::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bmtrg::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bmtrg`]
module"]
pub type BMTRG = crate::Reg<bmtrg::BMTRG_SPEC>;
#[doc = "BMTRG"]
pub mod bmtrg;
#[doc = "BMCMPR6 (rw) register accessor: BMCMPR6\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bmcmpr6::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bmcmpr6::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bmcmpr6`]
module"]
pub type BMCMPR6 = crate::Reg<bmcmpr6::BMCMPR6_SPEC>;
#[doc = "BMCMPR6"]
pub mod bmcmpr6;
#[doc = "BMPER (rw) register accessor: Burst Mode Period Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bmper::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bmper::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bmper`]
module"]
pub type BMPER = crate::Reg<bmper::BMPER_SPEC>;
#[doc = "Burst Mode Period Register"]
pub mod bmper;
#[doc = "EECR1 (rw) register accessor: Timer External Event Control Register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`eecr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`eecr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`eecr1`]
module"]
pub type EECR1 = crate::Reg<eecr1::EECR1_SPEC>;
#[doc = "Timer External Event Control Register 1"]
pub mod eecr1;
#[doc = "EECR2 (rw) register accessor: Timer External Event Control Register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`eecr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`eecr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`eecr2`]
module"]
pub type EECR2 = crate::Reg<eecr2::EECR2_SPEC>;
#[doc = "Timer External Event Control Register 2"]
pub mod eecr2;
#[doc = "EECR3 (rw) register accessor: Timer External Event Control Register 3\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`eecr3::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`eecr3::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`eecr3`]
module"]
pub type EECR3 = crate::Reg<eecr3::EECR3_SPEC>;
#[doc = "Timer External Event Control Register 3"]
pub mod eecr3;
#[doc = "ADC1R (rw) register accessor: ADC Trigger 1 Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`adc1r::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`adc1r::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`adc1r`]
module"]
pub type ADC1R = crate::Reg<adc1r::ADC1R_SPEC>;
#[doc = "ADC Trigger 1 Register"]
pub mod adc1r;
#[doc = "ADC2R (rw) register accessor: ADC Trigger 2 Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`adc2r::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`adc2r::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`adc2r`]
module"]
pub type ADC2R = crate::Reg<adc2r::ADC2R_SPEC>;
#[doc = "ADC Trigger 2 Register"]
pub mod adc2r;
#[doc = "ADC3R (rw) register accessor: ADC Trigger 3 Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`adc3r::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`adc3r::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`adc3r`]
module"]
pub type ADC3R = crate::Reg<adc3r::ADC3R_SPEC>;
#[doc = "ADC Trigger 3 Register"]
pub mod adc3r;
#[doc = "ADC4R (rw) register accessor: ADC Trigger 4 Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`adc4r::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`adc4r::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`adc4r`]
module"]
pub type ADC4R = crate::Reg<adc4r::ADC4R_SPEC>;
#[doc = "ADC Trigger 4 Register"]
pub mod adc4r;
#[doc = "DLLCR (rw) register accessor: DLL Control Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dllcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dllcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dllcr`]
module"]
pub type DLLCR = crate::Reg<dllcr::DLLCR_SPEC>;
#[doc = "DLL Control Register"]
pub mod dllcr;
#[doc = "FLTINR1 (rw) register accessor: HRTIM Fault Input Register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fltinr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fltinr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`fltinr1`]
module"]
pub type FLTINR1 = crate::Reg<fltinr1::FLTINR1_SPEC>;
#[doc = "HRTIM Fault Input Register 1"]
pub mod fltinr1;
#[doc = "FLTINR2 (rw) register accessor: HRTIM Fault Input Register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fltinr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fltinr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`fltinr2`]
module"]
pub type FLTINR2 = crate::Reg<fltinr2::FLTINR2_SPEC>;
#[doc = "HRTIM Fault Input Register 2"]
pub mod fltinr2;
#[doc = "BDMUPDR (rw) register accessor: BDMUPDR\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bdmupdr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bdmupdr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bdmupdr`]
module"]
pub type BDMUPDR = crate::Reg<bdmupdr::BDMUPDR_SPEC>;
#[doc = "BDMUPDR"]
pub mod bdmupdr;
#[doc = "BDTxUPR (rw) register accessor: Burst DMA Timerx update Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bdtx_upr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bdtx_upr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bdtx_upr`]
module"]
pub type BDTX_UPR = crate::Reg<bdtx_upr::BDTX_UPR_SPEC>;
#[doc = "Burst DMA Timerx update Register"]
pub mod bdtx_upr;
#[doc = "BDMADR (rw) register accessor: Burst DMA Data Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bdmadr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bdmadr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bdmadr`]
module"]
pub type BDMADR = crate::Reg<bdmadr::BDMADR_SPEC>;
#[doc = "Burst DMA Data Register"]
pub mod bdmadr;
|
/* Borrowing demo. */
/* `foo` takes mutable references (&mut Vec<i32>) as arguments. */
/* &T — shared reference: borrows without taking ownership; any number may coexist. */
/* &mut T — mutable reference: at most one at a time, exclusive with shared refs. */
/// Pops the last element of each vector and returns the larger of the two.
///
/// Both vectors are mutated: their last element (if any) is removed.
/// Because `Option<i32>` orders `None` below every `Some`, an empty vector
/// only "wins" when both are empty, in which case `None` is returned.
fn foo(v1: &mut Vec<i32>, v2: &mut Vec<i32>) -> Option<i32> {
    // `Option` implements `Ord`, so `max` replaces the manual comparison.
    v1.pop().max(v2.pop())
}
fn main() {
    // Compare the tails of two vectors through mutable borrows.
    let mut v1 = vec![1, 2, 3];
    let mut v2 = vec![4, 5, 6];
    let answer = foo(&mut v1, &mut v2);
    println!("{:?}", answer);

    let mut x = 5;
    {
        // `y` mutably borrows `x` for the duration of this inner scope.
        let y = &mut x;
        *y += 1;
    } // the &mut borrow of `x` ends here
    // `x` can be read again now that no mutable borrow is outstanding.
    println!("{}", x);
}
|
use crate::blob::blob::requests::*;
use crate::blob::prelude::*;
use crate::core::prelude::*;
use azure_core::prelude::*;
use azure_core::HttpClient;
use bytes::Bytes;
use http::method::Method;
use http::request::{Builder, Request};
use std::sync::Arc;
/// Conversion helper for obtaining a lease-scoped blob client.
pub trait AsBlobLeaseClient {
    /// Builds a [`BlobLeaseClient`] that operates under the given `lease_id`.
    fn as_blob_lease_client(&self, lease_id: LeaseId) -> Arc<BlobLeaseClient>;
}
impl AsBlobLeaseClient for Arc<BlobClient> {
    // `self.clone()` is a cheap Arc refcount bump: the lease client shares
    // the same underlying BlobClient.
    fn as_blob_lease_client(&self, lease_id: LeaseId) -> Arc<BlobLeaseClient> {
        BlobLeaseClient::new(self.clone(), lease_id)
    }
}
/// A blob client bound to a specific lease.
///
/// Wraps a [`BlobClient`] together with the `LeaseId` that lease operations
/// (change/release/renew) are issued against.
#[derive(Debug, Clone)]
pub struct BlobLeaseClient {
    blob_client: Arc<BlobClient>,
    lease_id: LeaseId,
}
impl BlobLeaseClient {
    /// Creates a lease client sharing the given blob client.
    pub(crate) fn new(blob_client: Arc<BlobClient>, lease_id: LeaseId) -> Arc<Self> {
        Arc::new(Self {
            blob_client,
            lease_id,
        })
    }
    /// The lease this client operates under.
    pub fn lease_id(&self) -> &LeaseId {
        &self.lease_id
    }
    /// HTTP transport, delegated to the underlying blob client.
    pub(crate) fn http_client(&self) -> &dyn HttpClient {
        self.blob_client.http_client()
    }
    #[allow(dead_code)]
    pub(crate) fn storage_account_client(&self) -> &StorageAccountClient {
        self.blob_client.storage_account_client()
    }
    #[allow(dead_code)]
    pub(crate) fn container_client(&self) -> &ContainerClient {
        self.blob_client.container_client()
    }
    #[allow(dead_code)]
    pub(crate) fn blob_client(&self) -> &BlobClient {
        self.blob_client.as_ref()
    }
    /// Builds the blob URL with the extra path `segments` appended.
    pub(crate) fn url_with_segments<'a, I>(
        &'a self,
        segments: I,
    ) -> Result<url::Url, url::ParseError>
    where
        I: IntoIterator<Item = &'a str>,
    {
        self.blob_client.url_with_segments(segments)
    }
    /// Starts a "change lease" operation towards `proposed_lease_id`.
    pub fn change<'a>(&'a self, proposed_lease_id: &'a ProposedLeaseId) -> ChangeLeaseBuilder<'a> {
        ChangeLeaseBuilder::new(self, proposed_lease_id)
    }
    /// Starts a "release lease" operation.
    // Explicit `<'a>` removed: lifetime elision covers the single `&self`
    // borrow (clippy::needless_lifetimes); the signature is equivalent.
    pub fn release(&self) -> ReleaseLeaseBuilder {
        ReleaseLeaseBuilder::new(self)
    }
    /// Starts a "renew lease" operation.
    pub fn renew(&self) -> RenewLeaseBuilder {
        RenewLeaseBuilder::new(self)
    }
    /// Assembles an HTTP request via the underlying blob client, returning
    /// the built request together with the parsed URL.
    pub(crate) fn prepare_request(
        &self,
        url: &str,
        method: &Method,
        http_header_adder: &dyn Fn(Builder) -> Builder,
        request_body: Option<Bytes>,
    ) -> crate::Result<(Request<Bytes>, url::Url)> {
        self.blob_client
            .prepare_request(url, method, http_header_adder, request_body)
    }
}
|
extern crate wingui;
use ::window::WindowBuilder;
use super::Backend as AbsBackend;
mod window;
/// Windows GUI backend (stub).
pub struct Backend;
impl AbsBackend for Backend {
    type Window = window::Window;
    /// Backend entry point. Not implemented yet, so the builder is unused
    /// — the underscore silences the unused-parameter warning without
    /// changing the trait signature.
    fn start(_builder: WindowBuilder) {
        // TODO: create and run the window described by `_builder`.
    }
}
|
// svd2rust-generated accessor module for the DAC_DHR8R2 register.
#[doc = "Register `DAC_DHR8R2` reader"]
pub type R = crate::R<DAC_DHR8R2_SPEC>;
#[doc = "Register `DAC_DHR8R2` writer"]
pub type W = crate::W<DAC_DHR8R2_SPEC>;
#[doc = "Field `DACC2DHR` reader - DACC2DHR"]
pub type DACC2DHR_R = crate::FieldReader;
#[doc = "Field `DACC2DHR` writer - DACC2DHR"]
pub type DACC2DHR_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
impl R {
    #[doc = "Bits 0:7 - DACC2DHR"]
    #[inline(always)]
    pub fn dacc2dhr(&self) -> DACC2DHR_R {
        // The field occupies the low 8 bits of the 32-bit register.
        DACC2DHR_R::new((self.bits & 0xff) as u8)
    }
}
impl W {
    #[doc = "Bits 0:7 - DACC2DHR"]
    #[inline(always)]
    #[must_use]
    pub fn dacc2dhr(&mut self) -> DACC2DHR_W<DAC_DHR8R2_SPEC, 0> {
        DACC2DHR_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "This register is available only on dual-channel DACs. Refer to Section29.3: DAC implementation.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dac_dhr8r2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dac_dhr8r2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DAC_DHR8R2_SPEC;
impl crate::RegisterSpec for DAC_DHR8R2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`dac_dhr8r2::R`](R) reader structure"]
impl crate::Readable for DAC_DHR8R2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`dac_dhr8r2::W`](W) writer structure"]
impl crate::Writable for DAC_DHR8R2_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DAC_DHR8R2 to value 0"]
impl crate::Resettable for DAC_DHR8R2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
extern crate winit;
/// an integer 2d position
// `Eq`, `Hash` and `Default` are derivable for the two `i64` fields and let
// `Position` serve as a map/set key and start at the origin by default.
#[allow(missing_docs)]
#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug, Default)]
pub struct Position {
    pub x: i64,
    pub y: i64,
}
/// an integer 2d delta
pub type Delta = Position;
/// an integer 2d size
// Adds the `Debug` derive that `Position` already has (the two types were
// inconsistent), plus `Eq`, `Hash` and `Default`, all safe for `i64` fields.
#[allow(missing_docs)]
#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug, Default)]
pub struct Size {
    pub width: i64,
    pub height: i64,
}
impl From<(f64, f64)> for Size {
    /// Builds a `Size` from a `(width, height)` float pair, truncating
    /// each component toward zero via `as i64`.
    #[inline]
    fn from(dims: (f64, f64)) -> Self {
        let (w, h) = dims;
        Self {
            width: w as i64,
            height: h as i64,
        }
    }
}
// Implemented as `From` rather than `Into`: the standard blanket impl then
// provides `Size: Into<Extent2D>` for free, so existing `.into()` call
// sites keep compiling while the conversion gains a callable `From::from`.
impl From<Size> for hal::window::Extent2D {
    /// Converts a `Size` into a gfx-hal extent, truncating the `i64`
    /// dimensions to `u32`.
    #[inline]
    fn from(size: Size) -> hal::window::Extent2D {
        hal::window::Extent2D {
            width: size.width as u32,
            height: size.height as u32,
        }
    }
}
impl From<(f64, f64)> for Position {
    /// Builds a `Position` from an `(x, y)` float pair, truncating each
    /// component toward zero via `as i64`.
    #[inline]
    fn from(coords: (f64, f64)) -> Self {
        let (px, py) = coords;
        Self {
            x: px as i64,
            y: py as i64,
        }
    }
}
impl From<winit::dpi::LogicalPosition> for Position {
    /// Converts a logical (f64) winit position to integer coordinates.
    // NOTE(review): `as i64` truncates toward zero rather than rounding —
    // confirm truncation is acceptable for fractional hi-dpi positions.
    #[inline]
    fn from(pos: winit::dpi::LogicalPosition) -> Self {
        Self {
            x: pos.x as i64,
            y: pos.y as i64,
        }
    }
}
impl From<winit::event::MouseScrollDelta> for Position {
    /// Flattens either scroll-delta variant into an integer pair.
    // NOTE(review): `LineDelta` is measured in lines and `PixelDelta` in
    // pixels; both are passed through truncated with no unit scaling —
    // confirm callers handle the mixed units (LineDelta values < 1 line
    // truncate to 0).
    #[inline]
    fn from(pos: winit::event::MouseScrollDelta) -> Self {
        match pos {
            winit::event::MouseScrollDelta::LineDelta(x, y) => Self {
                x: x as i64,
                y: y as i64,
            },
            winit::event::MouseScrollDelta::PixelDelta(d) => Self {
                x: d.x as i64,
                y: d.y as i64,
            },
        }
    }
}
|
use std::{collections::HashMap, io::ErrorKind, net::SocketAddr, rc::Rc, time::Instant};
use bytes::BytesMut;
use mio::{event::Event, net::UdpSocket, Poll, Token};
use rustls::ClientSession;
use crate::{
config::Opts,
proto::{
TrojanRequest, UdpAssociate, UdpParseResult, MAX_BUFFER_SIZE, MAX_PACKET_SIZE,
UDP_ASSOCIATE,
},
proxy::{
idle_pool::IdlePool, next_index, udp_cache::UdpSvrCache, CHANNEL_CNT, CHANNEL_UDP,
MIN_INDEX,
},
resolver::DnsResolver,
sys,
tls_conn::{ConnStatus, TlsConn},
};
/// UDP front-end of the proxy: receives client datagrams from the listener
/// and relays them through per-client TLS connections.
pub struct UdpServer {
    udp_listener: UdpSocket,
    /// Active relays, keyed by connection index.
    conns: HashMap<usize, Connection>,
    /// Maps a client source address to its connection index in `conns`.
    src_map: HashMap<SocketAddr, usize>,
    /// Next connection index to hand out (advanced via `next_index`).
    next_id: usize,
    /// Scratch buffer reused for every received datagram.
    recv_buffer: Vec<u8>,
    opts: &'static Opts,
}
/// One client⇄server UDP relay multiplexed over a TLS connection.
struct Connection {
    index: usize,
    /// Client address datagrams arrived from (and are sent back to).
    src_addr: SocketAddr,
    /// Server→client bytes held back (e.g. an incomplete trailing packet).
    send_buffer: BytesMut,
    /// Scratch buffer for encoding requests to the server.
    recv_buffer: BytesMut,
    server_conn: TlsConn<ClientSession>,
    status: ConnStatus,
    /// Creation time; used to log the session duration on close.
    client_time: Instant,
    /// Cached UDP socket used to send server responses back to the client.
    socket: Rc<UdpSocket>,
    /// Current remote target address; may change per packet.
    dst_addr: SocketAddr,
    bytes_read: usize,
    bytes_sent: usize,
    opts: &'static Opts,
}
impl UdpServer {
    /// Creates a server around the UDP listener; the receive buffer is
    /// preallocated at the maximum packet size so each datagram fits.
    pub fn new(udp_listener: UdpSocket, opts: &'static Opts) -> UdpServer {
        UdpServer {
            udp_listener,
            conns: HashMap::new(),
            src_map: HashMap::new(),
            next_id: MIN_INDEX,
            recv_buffer: vec![0u8; MAX_PACKET_SIZE],
            opts,
        }
    }
    /// Drains every pending datagram from the listener.
    ///
    /// For each datagram the connection for its source address is looked
    /// up, or lazily created from the idle TLS `pool` plus a cached UDP
    /// socket for the destination; the payload is then forwarded.
    pub fn accept(
        &mut self,
        event: &Event,
        poll: &Poll,
        pool: &mut IdlePool,
        udp_cache: &mut UdpSvrCache,
        resolver: &DnsResolver,
    ) {
        if event.is_readable() {
            loop {
                match sys::recv_from_with_destination(
                    &self.udp_listener,
                    self.recv_buffer.as_mut_slice(),
                ) {
                    Ok((size, src_addr, dst_addr)) => {
                        log::debug!(
                            "udp received {} byte from {} to {}",
                            size,
                            src_addr,
                            dst_addr
                        );
                        let index = if let Some(index) = self.src_map.get(&src_addr) {
                            log::debug!(
                                "connection:{} already exists for address{}",
                                index,
                                src_addr
                            );
                            *index
                        } else {
                            log::debug!(
                                "address:{} not found, connecting to {}",
                                src_addr,
                                self.opts.back_addr.as_ref().unwrap()
                            );
                            if let Some(mut conn) = pool.get(poll, resolver) {
                                if let Some(socket) = udp_cache.get_socket(dst_addr) {
                                    // Tokens encode index * CHANNEL_CNT + CHANNEL_UDP;
                                    // Connection::token2index reverses this in `ready`.
                                    let index = next_index(&mut self.next_id);
                                    conn.reset_index(
                                        index,
                                        Token(index * CHANNEL_CNT + CHANNEL_UDP),
                                    );
                                    let mut conn =
                                        Connection::new(index, conn, src_addr, socket, self.opts);
                                    if conn.setup(poll) {
                                        let _ = self.conns.insert(index, conn);
                                        self.src_map.insert(src_addr, index);
                                        log::debug!("connection:{} is ready", index);
                                        index
                                    } else {
                                        conn.shutdown(poll);
                                        continue;
                                    }
                                } else {
                                    conn.shutdown(poll);
                                    continue;
                                }
                            } else {
                                log::error!("allocate connection failed");
                                continue;
                            }
                        };
                        if let Some(conn) = self.conns.get_mut(&index) {
                            let payload = &self.recv_buffer.as_slice()[..size];
                            conn.send_request(payload, &dst_addr);
                        } else {
                            log::error!("impossible, connection should be found now");
                        }
                    }
                    Err(err) if err.kind() == ErrorKind::WouldBlock => {
                        // Non-blocking socket fully drained; stop looping.
                        log::debug!("udp server got no more data");
                        break;
                    }
                    Err(err) => {
                        log::error!("recv from udp listener failed:{}", err);
                        break;
                    }
                }
            }
        }
    }
    /// Dispatches a poll event to the matching connection and reaps the
    /// connection (and its source-address mapping) once fully closed.
    pub fn ready(&mut self, event: &Event, poll: &Poll, udp_cache: &mut UdpSvrCache) {
        let index = Connection::token2index(event.token());
        if let Some(conn) = self.conns.get_mut(&index) {
            conn.ready(event, poll, udp_cache);
            if conn.destroyed() {
                let src_addr = conn.src_addr;
                self.conns.remove(&index);
                self.src_map.remove(&src_addr);
                log::debug!("connection:{} removed from list", index);
            }
        }
    }
}
impl Connection {
    /// Pairs a client source address with a pooled TLS connection to the
    /// server. `dst_addr` starts as the local address of the cached UDP
    /// socket and is updated when the remote target changes.
    fn new(
        index: usize,
        server_conn: TlsConn<ClientSession>,
        src_addr: SocketAddr,
        socket: Rc<UdpSocket>,
        opts: &'static Opts,
    ) -> Connection {
        let dst_addr = socket.local_addr().unwrap();
        Connection {
            index,
            src_addr,
            server_conn,
            socket,
            dst_addr,
            send_buffer: BytesMut::new(),
            recv_buffer: BytesMut::new(),
            status: ConnStatus::Established,
            client_time: Instant::now(),
            bytes_read: 0,
            bytes_sent: 0,
            opts,
        }
    }
    /// Inverse of the token layout `index * CHANNEL_CNT + CHANNEL_UDP`
    /// used when the connection was registered.
    fn token2index(token: Token) -> usize {
        token.0 / CHANNEL_CNT
    }
    /// Registers the TLS side with the poller and queues the trojan
    /// UDP-associate handshake; returns whether the write was accepted.
    fn setup(&mut self, poll: &Poll) -> bool {
        self.server_conn.setup(poll);
        // recv_buffer doubles as scratch space for the handshake bytes.
        self.recv_buffer.clear();
        TrojanRequest::generate(
            &mut self.recv_buffer,
            UDP_ASSOCIATE,
            self.opts.empty_addr.as_ref().unwrap(),
            self.opts,
        );
        self.server_conn.write_session(self.recv_buffer.as_ref())
    }
    /// True once both halves are closed; the owning `UdpServer` then
    /// removes this connection.
    fn destroyed(&self) -> bool {
        self.closed() && self.server_conn.closed()
    }
    fn closed(&self) -> bool {
        matches!(self.status, ConnStatus::Closed)
    }
    fn index(&self) -> usize {
        self.index
    }
    /// Encodes `payload` as a UDP-associate packet for `dst_addr` and
    /// queues it on the TLS session. When the session is backpressured the
    /// packet is dropped (not buffered) — normal for UDP semantics.
    fn send_request(&mut self, payload: &[u8], dst_addr: &SocketAddr) {
        if !self.server_conn.writable() {
            log::warn!(
                "connection:{} too many packets, drop udp packet",
                self.index
            );
            return;
        }
        self.bytes_read += payload.len();
        self.recv_buffer.clear();
        UdpAssociate::generate(&mut self.recv_buffer, dst_addr, payload.len() as u16);
        // Header and payload are written separately; failure of either
        // marks the connection for closing.
        if !self.server_conn.write_session(self.recv_buffer.as_ref())
            || !self.server_conn.write_session(payload)
        {
            self.status = ConnStatus::Closing;
        }
        self.try_send_server();
    }
    /// Begins teardown. Closes immediately when nothing is pending;
    /// otherwise defers (Shutdown) until `send_buffer` drains.
    fn shutdown(&mut self, poll: &Poll) {
        log::debug!("connection:{} shutdown now", self.index());
        if self.send_buffer.is_empty() {
            self.status = ConnStatus::Closing;
            self.check_close(poll);
            return;
        }
        self.status = ConnStatus::Shutdown;
    }
    /// Handles a readiness event for this connection's token, then keeps
    /// the client side and the TLS side's lifecycles in sync.
    fn ready(&mut self, event: &Event, poll: &Poll, udp_cache: &mut UdpSvrCache) {
        if event.is_readable() {
            self.try_read_server(udp_cache);
        }
        if event.is_writable() {
            self.try_send_server();
        }
        self.reregister(poll);
        self.check_close(poll);
        // Only resubscribe for server data while the client-bound backlog
        // is under the limit (see `readable`).
        self.server_conn.reregister(poll, self.readable());
        self.server_conn.check_close(poll);
        if self.closed() && !self.server_conn.closed() {
            self.server_conn.shutdown(poll);
        } else if !self.closed() && self.server_conn.closed() {
            self.shutdown(poll);
        }
    }
    /// Backpressure check: accept more server data only while the pending
    /// client-bound buffer is below `MAX_BUFFER_SIZE`.
    fn readable(&self) -> bool {
        self.send_buffer.len() < MAX_BUFFER_SIZE
    }
    fn check_close(&mut self, poll: &Poll) {
        if let ConnStatus::Closing = self.status {
            self.close_now(poll);
        }
    }
    /// Marks the connection closed and logs session statistics.
    fn close_now(&mut self, _: &Poll) {
        self.status = ConnStatus::Closed;
        let secs = self.client_time.elapsed().as_secs();
        log::info!(
            "connection:{} address:{} closed, time:{} read {} bytes, send {} bytes",
            self.index(),
            self.dst_addr,
            secs,
            self.bytes_read,
            self.bytes_sent
        );
    }
    // Intentional no-op: the shared UDP socket's registration is managed
    // by its owner, not per connection.
    fn reregister(&mut self, _: &Poll) {}
    fn try_send_server(&mut self) {
        self.server_conn.do_send();
    }
    /// Pulls decrypted data off the TLS session and relays it clientward.
    fn try_read_server(&mut self, udp_cache: &mut UdpSvrCache) {
        if let Some(buffer) = self.server_conn.do_read() {
            self.try_send_client(buffer.as_slice(), udp_cache);
        }
    }
    /// Sends server data to the client, first prepending any bytes left
    /// over from a previously incomplete packet.
    pub fn try_send_client(&mut self, buffer: &[u8], udp_cache: &mut UdpSvrCache) {
        if self.send_buffer.is_empty() {
            self.do_send_client(buffer, udp_cache);
        } else {
            self.send_buffer.extend_from_slice(buffer);
            // `split` takes ownership of the accumulated bytes and leaves
            // `send_buffer` empty for the next leftover.
            let buffer = self.send_buffer.split();
            self.do_send_client(buffer.as_ref(), udp_cache);
        }
    }
    /// Emits one datagram to the client, swapping the cached UDP socket
    /// when the packet's source (`dst_addr`) differs from the current one.
    fn do_send_udp(&mut self, dst_addr: SocketAddr, data: &[u8], udp_cache: &mut UdpSvrCache) {
        if self.dst_addr != dst_addr {
            log::warn!(
                "connection:{} udp target changed to {}",
                self.index,
                dst_addr
            );
            if let Some(socket) = udp_cache.get_socket(dst_addr) {
                self.socket = socket;
                self.dst_addr = dst_addr;
            } else {
                // No socket available for the new target: drop the packet.
                return;
            }
        }
        match self.socket.send_to(data, self.src_addr) {
            Ok(size) => {
                self.bytes_sent += size;
                log::debug!(
                    "send {} bytes upd data from {} to {}",
                    size,
                    dst_addr,
                    self.src_addr
                );
                if size != data.len() {
                    log::error!("send {} byte to client fragmented to {}", data.len(), size)
                }
            }
            Err(err) => {
                log::error!(
                    "send udp data from {} to {} failed {}",
                    dst_addr,
                    self.src_addr,
                    err
                );
            }
        }
    }
    /// Parses the trojan UDP-associate stream into packets and forwards
    /// each payload; an incomplete tail is kept in `send_buffer` until
    /// more data arrives.
    fn do_send_client(&mut self, mut buffer: &[u8], udp_cache: &mut UdpSvrCache) {
        loop {
            match UdpParseResult::Continued {
                _ => {}
            };
            match UdpAssociate::parse(buffer) {
                UdpParseResult::Continued => {
                    self.send_buffer.extend_from_slice(buffer);
                    break;
                }
                UdpParseResult::Packet(packet) => {
                    let payload = &packet.payload[..packet.length];
                    self.do_send_udp(packet.address, payload, udp_cache);
                    buffer = &packet.payload[packet.length..];
                }
                UdpParseResult::InvalidProtocol => {
                    log::error!("connection:{} got invalid protocol", self.index());
                    self.status = ConnStatus::Closing;
                    break;
                }
            }
        }
        // A deferred shutdown completes once everything has been flushed.
        if let ConnStatus::Shutdown = self.status {
            if self.send_buffer.is_empty() {
                self.status = ConnStatus::Closing;
                log::debug!("connection:{} is closing for no data to send", self.index);
            }
        }
    }
}
|
use ggez::{
GameResult, Context,
graphics::Color,
nalgebra::Point2,
input::mouse::MouseButton
};
use crate::utils::loading_screen;
use crate::text::Text;
use crate::button::Button;
use std::collections::HashMap;
use serde::Deserialize;
use urlencoding::encode;
use std:: { fs, env };
/// Top-level envelope returned by the GameJolt batch API.
#[derive(Deserialize, Debug)]
struct ApiResponse {
    response: Response
}
/// Batch result: overall status plus one entry per sub-request.
#[derive(Deserialize, Debug)]
struct Response {
    // NOTE(review): the API appears to encode booleans as the strings
    // "true"/"false" (compared against "true" elsewhere in this file).
    success: String,
    responses: Vec<FetchResponse>
}
/// Result of a single data-store fetch within the batch.
#[derive(Deserialize, Debug)]
struct FetchResponse {
    success: String,
    data: String
}
/// Which menu screen is currently shown.
enum MenuState {
    Main,
    Stats,
    Settings
}
/// Identifies each clickable menu button; used as a `HashMap` key,
/// hence the `Eq + Hash` derives.
#[derive(PartialEq, Eq, Hash)]
enum ButtonType {
    Back,
    Play,
    Stats,
    Settings,
    Quit
}
/// Main menu: owns its texts and buttons and tracks the visible screen.
pub struct Menu {
    state: MenuState,
    /// Keyed by "title" and "stats".
    texts: HashMap<String, Text>,
    buttons: HashMap<ButtonType, Button>
}
// Button layout constants, in pixels.
const BUTTON_WIDTH: f32 = 400.0;
const BUTTON_HEIGHT: f32 = 100.0;
// Horizontal gap between a button and the screen's center line.
const SPACING: f32 = 40.0;
impl Menu {
pub fn new(ctx: &mut Context, screen_size: (f32, f32)) -> GameResult<Menu> {
let font_path = "/Fonts/arial_narrow_7.ttf".to_string();
let mut title_text = Text::new(ctx, String::from("B-Hunt"), font_path.clone(), 200.0, Color::new(1.0, 1.0, 1.0, 1.0))?;
title_text.set_pos(Point2::new(screen_size.0 / 2.0 - title_text.width(ctx) / 2.0,
screen_size.1 / 3.5 - title_text.height(ctx) / 2.0));
let mut stats_text = Text::new(ctx, String::from(""), font_path.clone(), 40.0, Color::new(1.0, 1.0, 1.0, 1.0))?;
stats_text.set_pos(Point2::new(100.0, 100.0));
let color_not_hover = Color::from_rgb(255, 255, 255);
let color_hover = Color::from_rgb(160, 160, 160);
let play_button = Button::new(ctx, BUTTON_WIDTH, BUTTON_HEIGHT, screen_size.0 / 2.0 - BUTTON_WIDTH - SPACING,
screen_size.1 / 1.75 - BUTTON_HEIGHT / 2.0, color_not_hover, color_hover, 10.0, "Jouer".to_string())?;
let stats_button = Button::new(ctx, BUTTON_WIDTH, BUTTON_HEIGHT, screen_size.0 / 4.0 - BUTTON_WIDTH - SPACING,
screen_size.1 / 1.3 - BUTTON_HEIGHT / 2.0, color_not_hover, color_hover, 10.0, "Statistiques".to_string())?;
let back_button = Button::new(ctx, BUTTON_WIDTH, BUTTON_HEIGHT, screen_size.0 / 2.0 - BUTTON_WIDTH / 2.0,
screen_size.1 / 1.3 - BUTTON_HEIGHT / 2.0, color_not_hover, color_hover, 10.0, "Retour".to_string())?;
let quit_button = Button::new(ctx, BUTTON_WIDTH, BUTTON_HEIGHT, screen_size.0 / 2.0 + SPACING,
screen_size.1 / 1.3 - BUTTON_HEIGHT / 2.0, color_not_hover, color_hover, 10.0, "Quitter".to_string())?;
let set_button = Button::new(ctx, BUTTON_WIDTH, BUTTON_HEIGHT, screen_size.0 / 2.0 + SPACING,
screen_size.1 / 1.75 - BUTTON_HEIGHT / 2.0, color_not_hover, color_hover, 10.0, "Options".to_string())?;
let mut buttons = HashMap::new();
buttons.insert(ButtonType::Play, play_button);
buttons.insert(ButtonType::Stats, stats_button);
buttons.insert(ButtonType::Back, back_button);
buttons.insert(ButtonType::Quit, quit_button);
buttons.insert(ButtonType::Settings, set_button);
let mut texts = HashMap::new();
texts.insert("title".to_string(), title_text);
texts.insert("stats".to_string(), stats_text);
let menu = Menu {
state: MenuState::Main,
texts: texts,
buttons: buttons
};
Ok(menu)
}
pub fn update(&mut self) {
}
pub fn draw(&self, ctx: &mut Context) -> GameResult {
match self.state {
MenuState::Main => {
self.texts[&"title".to_string()].draw(ctx)?;
self.buttons[&ButtonType::Play].draw(ctx)?;
self.buttons[&ButtonType::Stats].draw(ctx)?;
self.buttons[&ButtonType::Quit].draw(ctx)?;
self.buttons[&ButtonType::Settings].draw(ctx)?;
},
MenuState::Stats => {
self.buttons[&ButtonType::Back].draw(ctx)?;
self.texts[&"stats".to_string()].draw(ctx)?;
},
MenuState::Settings => {
self.buttons[&ButtonType::Back].draw(ctx)?;
}
}
Ok(())
}
pub fn mouse_motion_event(&mut self, ctx: &mut Context, x: f32, y: f32) {
for (_, button) in self.buttons.iter_mut() {
button.mouse_motion_event(ctx, x, y);
}
}
pub fn mouse_button_down_event(&mut self, ctx: &mut Context, x: f32, y: f32,
mouse_button: MouseButton, screen_size: (f32, f32)) -> u8
{
if let MouseButton::Left = mouse_button {
for (which, button) in self.buttons.iter() {
if button.contains(x, y) {
match self.state {
MenuState::Main => {
match which {
ButtonType::Play => {
self.texts.get_mut(&"stats".to_string()).unwrap().set_string(String::from(""));
return 1;
},
ButtonType::Stats => {
if self.texts[&"stats".to_string()].contents() == "" {
loading_screen(ctx, screen_size);
self.get_stats();
}
self.state = MenuState::Stats;
break;
},
ButtonType::Quit => {
ggez::event::quit(ctx);
},
ButtonType::Settings => {
self.state = MenuState::Settings;
}
_ => {}
}
},
MenuState::Stats | MenuState::Settings => {
match which {
ButtonType::Back => {
self.state = MenuState::Main;
break;
},
_ => {}
}
}
}
}
}
}
0
}
fn get_stats(&mut self) {
let mut error_message = String::from("");
// Get the user info in the .gj_credentials file
let mut user_info = String::from("");
match fs::read_to_string(".gj-credentials") {
Ok(info) => user_info = info,
Err(e) => error_message = format!("Erreur : {}", e)
}
let user_info: Vec<&str> = user_info.split('\n').collect();
let mut username = String::from("");
let mut user_token = String::from("");
if user_info.len() < 2 {
error_message = "Vous n'avez pas lancé le jeu\navec le client GameJolt donc vous\n\
ne pouvez pas voir vos statistiqes.".to_string();
} else {
username = user_info[1].to_string();
user_token = user_info[2].to_string();
}
// Get the useful infos to access the GameJolt API
let mut game_id = String::from(""); let mut private_key = String::from("");
match env::var("GAME_ID") {
Ok(id) => game_id = id,
Err(_) => error_message = String::from("Variable manquante (GAME_ID)")
}
match env::var("PRIVATE_KEY") {
Ok(key) => private_key = key,
Err(_) => error_message = String::from("Variable manquante (PRIVATE_KEY)")
}
let keys: [&str; 6] = ["bertrand_killed", "shots", "powerups_activated",
"hits_taken", "time_played", "games_played"];
let mut api_url = format!("https://api.gamejolt.com/api/game/v1_2/batch?game_id={}", game_id);
for key in keys.iter() {
let mut url = format!("/data-store/?game_id={}&key={}&username={}&user_token={}",
game_id, key, username, user_token);
let mut hasher = sha1::Sha1::new();
hasher.update(format!("{}{}", url, private_key).as_bytes());
let signature = hasher.digest().to_string();
url = format!("{}&signature={}", url, signature);
url = encode(&url);
api_url = format!("{}&requests[]={}", api_url, url);
}
let mut hasher = sha1::Sha1::new();
hasher.update(format!("{}¶llel=true{}", api_url, private_key).as_bytes());
let signature = hasher.digest().to_string();
api_url = format!("{}¶llel=true&signature={}", api_url, signature);
let res;
match reqwest::blocking::get(&api_url) {
Ok(r) => res = r,
Err(e) => { self.texts.get_mut(&"stats".to_string()).unwrap().set_string(format!("Erreur : {}", e)); return; }
}
let response: ApiResponse;
match res.json() {
Ok(r) => response = r,
Err(e) => { self.texts.get_mut(&"stats".to_string()).unwrap().set_string(format!("Erreur : {}", e)); return; }
}
let mut stats;
if error_message != "" {
stats = error_message;
} else {
stats = format!("Statistiques pour {} :\n\n", username);
for (i, data) in response.response.responses.iter().enumerate() {
if i >= keys.len() { break; }
let stat_name = match keys[i] {
"bertrand_killed" => "Nombre d'ennemies tués",
"shots" => "Nombre de coup tirés",
"powerups_activated" => "Nombre de powerups activés",
"hits_taken" => "Nombre de coups pris",
"time_played" => "Temps joué (en secondes)",
"games_played" => "Nombre de parties jouées",
_ => unreachable!()
};
if data.success == "true" {
stats = format!("{}{} : {}\n", stats, stat_name, data.data);
}
}
}
self.texts.get_mut(&"stats".to_string()).unwrap().set_string(stats);
}
pub fn resize_event(&mut self, ctx: &mut Context, width: f32, height: f32) {
let title_text = self.texts.get_mut(&"title".to_string()).unwrap();
title_text.set_pos(Point2::new(width / 2.0 - title_text.width(ctx) / 2.0,
height / 3.5 - title_text.height(ctx) / 2.0));
let stats_text = self.texts.get_mut(&"stats".to_string()).unwrap();
stats_text.set_pos(Point2::new(100.0, 100.0));
let play_button = self.buttons.get_mut(&ButtonType::Play).unwrap();
play_button.set_pos(ctx, width / 2.0 - BUTTON_WIDTH - SPACING, height / 1.75 - BUTTON_HEIGHT / 2.0);
let stats_button = self.buttons.get_mut(&ButtonType::Stats).unwrap();
stats_button.set_pos(ctx, width / 2.0 - BUTTON_WIDTH - SPACING, height / 1.3 - BUTTON_HEIGHT / 2.0);
let back_button = self.buttons.get_mut(&ButtonType::Back).unwrap();
back_button.set_pos(ctx, width / 2.0 - BUTTON_WIDTH / 2.0, height / 1.3 - BUTTON_HEIGHT / 2.0);
let set_button = self.buttons.get_mut(&ButtonType::Settings).unwrap();
set_button.set_pos(ctx, width / 2.0 + SPACING, height / 1.75 - BUTTON_HEIGHT / 2.0);
let quit_button = self.buttons.get_mut(&ButtonType::Quit).unwrap();
quit_button.set_pos(ctx, width / 2.0 + SPACING, height / 1.3 - BUTTON_HEIGHT / 2.0);
}
}
|
use std::fmt;
use std::iter::Fuse;
use std::ops::RangeInclusive;
use std::vec;
use serde_derive::Serialize;
use super::ihex16::{IHex16File, IHex16Word};
/// A compared span between two IHex16 files.
///
/// `value_1`/`value_2` hold the word from the first and second file
/// respectively; serialized with a `type` tag of `"single"` or `"range"`.
#[derive(Copy, Clone, Serialize)]
#[serde(tag = "type")]
pub enum IHex16Diff {
    /// A single address.
    #[serde(rename(serialize = "single"))]
    Single {
        address: u32,
        value_1: u32,
        value_2: u32,
    },
    /// A contiguous address range sharing one value pair.
    #[serde(rename(serialize = "range"))]
    Range {
        start: u32,
        end: u32,
        value_1: u32,
        value_2: u32,
    },
}
impl IHex16Diff {
    /// Builds a `Range` entry covering addresses `start..=end`.
    fn range(start: u32, end: u32, value_1: u32, value_2: u32) -> IHex16Diff {
        IHex16Diff::Range {
            start,
            end,
            value_1,
            value_2,
        }
    }
    /// Builds a `Single` entry for one address.
    fn single(address: u32, value_1: u32, value_2: u32) -> IHex16Diff {
        IHex16Diff::Single {
            address,
            value_1,
            value_2,
        }
    }
    /// True when both files hold the same value over this entry.
    pub fn is_same(&self) -> bool {
        // Both variants carry the value pair, so an or-pattern with `..`
        // replaces the duplicated per-variant match arms.
        match self {
            IHex16Diff::Single { value_1, value_2, .. }
            | IHex16Diff::Range { value_1, value_2, .. } => value_1 == value_2,
        }
    }
    /// True when the two files differ over this entry.
    // Defined as the negation of `is_same` so the two can never disagree.
    pub fn is_diff(&self) -> bool {
        !self.is_same()
    }
    /// Whether this entry lies entirely inside `range`; for a `Range`
    /// entry both endpoints must be contained.
    pub fn in_range(&self, range: &RangeInclusive<u32>) -> bool {
        match self {
            IHex16Diff::Single { address, .. } => range.contains(address),
            IHex16Diff::Range { start, end, .. } => {
                range.contains(start) && range.contains(end)
            }
        }
    }
}
impl fmt::Display for IHex16Diff {
    /// Renders space-separated, zero-padded 6-digit hex columns:
    /// `address v1 v2` for `Single`, `start end v1 v2` for `Range`.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        match *self {
            IHex16Diff::Single {
                address,
                value_1,
                value_2,
            } => write!(f, "{:06X} {:06X} {:06X}", address, value_1, value_2),
            IHex16Diff::Range {
                start,
                end,
                value_1,
                value_2,
            } => {
                write!(f, "{:06X} {:06X} ", start, end)?;
                write!(f, "{:06X} {:06X}", value_1, value_2)
            }
        }
    }
}
/// Walks two IHex16 files in address order and yields their differences
/// as [`IHex16Diff`] entries.
pub struct IHex16DiffEngine {
    // Word streams of each file; `Fuse` guarantees `next()` keeps
    // returning `None` once a stream is exhausted.
    hex_1: Fuse<vec::IntoIter<IHex16Word>>,
    hex_2: Fuse<vec::IntoIter<IHex16Word>>,
    // Address bookmark updated by the `Iterator` impl between calls.
    address: u32,
    // One word of look-ahead per file (the next not-yet-emitted word).
    curr_1: Option<IHex16Word>,
    curr_2: Option<IHex16Word>,
}
impl IHex16DiffEngine {
    /// Creates a diff engine over the two files, priming one word of
    /// look-ahead from each stream.
    pub fn diff(hex_1: IHex16File, hex_2: IHex16File) -> IHex16DiffEngine {
        let mut hex_1 = hex_1.0.into_iter().fuse();
        let mut hex_2 = hex_2.0.into_iter().fuse();
        let curr_1 = hex_1.next();
        let curr_2 = hex_2.next();
        IHex16DiffEngine {
            hex_1,
            hex_2,
            address: 0,
            curr_1,
            curr_2,
        }
    }

    /// Compares the two look-ahead words and reports
    /// `(address, value_1, value_2)` for the lowest pending address.
    /// A file that has no word at that address contributes `0xFFFFFF`
    /// (presumably the erased/unprogrammed flash value — TODO confirm).
    /// Returns `None` once both streams are exhausted.
    fn compare(&self) -> Option<(u32, u32, u32)> {
        match (self.curr_1, self.curr_2) {
            (None, None) => None,
            (Some(l), None) => Some((l.address, l.value, 0xFFFFFF)),
            (None, Some(r)) => Some((r.address, 0xFFFFFF, r.value)),
            (Some(l), Some(r)) => {
                if l.address == r.address {
                    Some((l.address, l.value, r.value))
                } else if l.address < r.address {
                    Some((l.address, l.value, 0xFFFFFF))
                } else {
                    Some((r.address, 0xFFFFFF, r.value))
                }
            }
        }
    }

    /// Refills whichever look-ahead word(s) the last `compare` consumed:
    /// on an address tie both sides advance, otherwise only the side
    /// holding the lower address.
    fn advance(&mut self) {
        match (self.curr_1, self.curr_2) {
            (None, None) => {
                return;
            }
            (Some(_), None) => {
                self.curr_1 = self.hex_1.next();
            }
            (None, Some(_)) => {
                self.curr_2 = self.hex_2.next();
            }
            (Some(l), Some(r)) => {
                // `<=` in both conditions makes equal addresses advance
                // both streams in the same call.
                if l.address <= r.address {
                    self.curr_1 = self.hex_1.next();
                }
                if r.address <= l.address {
                    self.curr_2 = self.hex_2.next();
                }
            }
        }
    }
}
impl Iterator for IHex16DiffEngine {
    type Item = IHex16Diff;

    /// Produces the next diff entry, run-length-merging consecutive
    /// words (addresses step by 4) that repeat the same value pair into
    /// a single `Range` entry.
    fn next(&mut self) -> Option<Self::Item> {
        let (address, value_1, value_2) = self.compare()?;
        let mut next_address = address;
        let mut next_value_1 = value_1;
        let mut next_value_2 = value_2;
        // A gap between the previous run and this word means the skipped
        // addresses are present in neither file: treat them as the filler
        // value 0xFFFFFF (presumably erased flash — TODO confirm).
        if address > self.address {
            next_value_1 = 0xFFFFFF;
            next_value_2 = 0xFFFFFF;
        }
        // Extend the run while upcoming words carry the same value pair.
        while next_value_1 == value_1 && next_value_2 == value_2 {
            self.advance();
            if let Some((na, nv1, nv2)) = self.compare() {
                // Next word is not contiguous (addresses step by 4):
                if (na - 4) != next_address {
                    if next_value_1 == 0xFFFFFF && next_value_2 == 0xFFFFFF {
                        // Inside an all-filler run the gap is filler too,
                        // so jump to just before the next real word.
                        next_address = na - 4;
                    } else {
                        // Otherwise the gap terminates the run.
                        break;
                    }
                }
                next_address += 4;
                next_value_1 = nv1;
                next_value_2 = nv2;
            } else {
                // Both inputs exhausted; close out the current run.
                break;
            }
        }
        // Emitted addresses are halved — presumably converting file byte
        // offsets to word addresses (2 units per address) — TODO confirm.
        if address == next_address - 4 {
            // Run of exactly one word.
            let output = IHex16Diff::single(address / 2, value_1, value_2);
            self.address += 4;
            Some(output)
        } else {
            let output = IHex16Diff::range(address / 2, (next_address - 4) / 2, value_1, value_2);
            self.address = next_address;
            Some(output)
        }
    }
}
|
extern crate clap;
extern crate fuse;
extern crate secstr;
extern crate colorhash256;
extern crate interactor;
extern crate rusterpassword;
extern crate ansi_term;
extern crate rustc_serialize;
extern crate cbor;
extern crate freepass_core;
mod util;
mod openfile;
mod interact;
mod mergein;
use std::{env, fs};
use clap::{Arg, App, SubCommand};
use openfile::*;
use freepass_core::{import, vault};
/// CLI entry point: parses arguments, opens the vault read-write, and
/// dispatches to the selected subcommand. Any unrecognized or absent
/// subcommand falls through to interactive mode.
fn main() {
    // Build the CLI; most options fall back to environment variables
    // via `opt_or_env` below.
    let matches = App::new("freepass")
        .version(env!("CARGO_PKG_VERSION"))
        .author("Greg V <greg@unrelenting.technology>")
        .about("The free password manager for power users")
        .arg(Arg::with_name("FILE").short("f").long("file").takes_value(true)
            .help("The vault file to use, by default: $FREEPASS_FILE"))
        .arg(Arg::with_name("NAME").short("n").long("name").takes_value(true)
            .help("The user name to use (must be always the same for a vault file!), by default: $FREEPASS_NAME"))
        .arg(Arg::with_name("DEBUG").long("debug")
            .help("Enable logging of data structures for debugging (DO NOT USE ON YOUR REAL DATA)"))
        .subcommand(SubCommand::with_name("interact")
            .about("Launches interactive mode"))
        .subcommand(SubCommand::with_name("mergein")
            .about("Adds entires from a second file (possibly importing from a foreign format) that don't exist in the first file (e.g. to resolve file sync conflicts)")
            .arg(Arg::with_name("IMPORTTYPE").short("i").long("import").takes_value(true)
                .help("If you want to import from a foreign file format instead of merging a second freepass vault, the format of that file. Supported: kdbx"))
            .arg(Arg::with_name("SECONDFILE").short("F").long("secondfile").takes_value(true)
                .help("The vault file to get additional entries from, by default: $FREEPASS_SECOND_FILE"))
            .arg(Arg::with_name("SECONDNAME").short("N").long("secondname").takes_value(true)
                .help("The user name to use for the second file, by default: $FREEPASS_SECOND_NAME or the first file name")))
        .get_matches();
    let file_path = unwrap_for_opt(opt_or_env(&matches, "FILE", "FREEPASS_FILE"), "file");
    let user_name = unwrap_for_opt(opt_or_env(&matches, "NAME", "FREEPASS_NAME"), "name");
    let debug = matches.is_present("DEBUG");
    freepass_core::init();
    // Ensure we can write! Maybe someone somewhere would want to open the vault in read-only mode...
    // But the frustration of trying to save the vault while only having read permissions would be worse.
    let mut open_file = OpenFile::open(file_path, &user_name, util::read_password(), true);
    if debug {
        util::debug_output(&open_file.vault.data, "Vault");
    }
    match matches.subcommand() {
        ("mergein", submatches_opt) => {
            if let Some(submatches) = submatches_opt {
                let second_file_path = unwrap_for_opt(opt_or_env(submatches, "SECONDFILE", "FREEPASS_SECOND_FILE"), "secondfile");
                // The merge source is either a foreign-format import or a
                // second freepass vault, normalized behind a boxed Vault.
                let second_vault : Box<vault::Vault> = match submatches.value_of("IMPORTTYPE") {
                    Some("kdbx") => {
                        let mut second_file = match fs::OpenOptions::new().read(true).open(&second_file_path) {
                            Ok(file) => file,
                            Err(ref err) => panic!("Could not open file {}: {}", &second_file_path, err),
                        };
                        Box::new(import::kdbx(&mut second_file, &util::read_password()).expect("Could not read the file as kdbx"))
                    },
                    Some(x) => panic!("Unsupported import format {}", x),
                    None => {
                        // Second vault: the user name defaults to the first
                        // file's user name when not supplied.
                        let second_user_name = opt_or_env(submatches, "SECONDNAME", "FREEPASS_SECOND_NAME").unwrap_or(user_name);
                        let second_open_file = OpenFile::open(second_file_path, &second_user_name, util::read_password(), false);
                        if debug {
                            util::debug_output(&second_open_file.vault.data, "Second Vault");
                        }
                        Box::new(second_open_file.vault)
                    }
                };
                mergein::merge_in(&mut open_file.vault, &*second_vault);
                open_file.save();
            } else { panic!("No options for mergein") }
        },
        // Anything else (including no subcommand) launches interactive mode.
        ("interact", _) | _ => interact::interact_entries(&mut open_file, debug),
    }
}
/// Looks up `opt_name` on the command line, falling back to the
/// `env_name` environment variable (values that are not valid Unicode
/// are treated as absent).
///
/// Uses `or_else` so the environment is only consulted when the flag
/// was not given; the previous `or(...)` read the variable eagerly on
/// every call.
fn opt_or_env(matches: &clap::ArgMatches, opt_name: &str, env_name: &str) -> Option<String> {
    matches
        .value_of(opt_name)
        .map(|x| x.to_owned())
        .or_else(|| env::var_os(env_name).and_then(|s| s.into_string().ok()))
}
/// Unwraps a required option value, panicking with the option's `name`
/// when it was supplied neither on the command line nor via the
/// environment.
fn unwrap_for_opt(opt: Option<String>, name: &str) -> String {
    opt.unwrap_or_else(|| panic!("Option {} not found", name))
}
|
use aoc_runner_derive::{aoc, aoc_generator};
use std::num::ParseIntError;
#[aoc_generator(day10)]
fn parse_input_day10(input: &str) -> Result<Vec<usize>, ParseIntError> {
let mut adapters = input
.lines()
.map(|n| n.parse().unwrap_or(0))
.collect::<Vec<_>>();
adapters.sort_unstable();
Ok(adapters)
}
/// Counts 1-jolt and 3-jolt gaps along the sorted adapter chain
/// (starting from the 0-jolt outlet) and returns their product.
/// The `+ 1` accounts for the final 3-jolt step up to the device's
/// built-in adapter.
///
/// The needless `adapters.to_vec()` clone of the input slice was removed;
/// the fold reads the slice directly.
#[aoc(day10, part1)]
fn part1(adapters: &[usize]) -> usize {
    // Fold state: (count of 1-gaps, count of 3-gaps, previous joltage).
    let (ones, threes, _) = adapters.iter().fold((0, 0, 0), |(one, three, prev), &v| {
        match v - prev {
            1 => (one + 1, three, v),
            3 => (one, three + 1, v),
            // Other gap sizes count toward neither total; keep `prev`
            // unchanged, exactly as the original comparison did.
            _ => (one, three, prev),
        }
    });
    ones * (threes + 1)
}
/// Counts the distinct valid adapter arrangements via dynamic programming.
///
/// `count[j]` is the number of ways to reach joltage `j`: adapters
/// reachable directly from the 0-jolt outlet (joltage <= 3) each seed one
/// path, and every adapter accumulates the path counts of the joltages
/// 1..=3 below it. Relies on `adapters` being sorted ascending (the
/// generator sorts them).
#[aoc(day10, part2)]
fn part2(adapters: &[usize]) -> usize {
    let max = adapters.iter().max().copied().unwrap_or(0);
    let mut count = vec![0; max + 1];
    for &a in adapters.iter().filter(|&&a| a <= 3) {
        count[a] = 1;
    }
    for &a in adapters {
        for delta in 1..=3 {
            // `count` is non-zero only at adapter joltages, so the former
            // `adapters.contains(&(a - delta))` scan (O(n) per step) was
            // redundant: non-adapter slots just contribute 0.
            if a > delta {
                count[a] += count[a - delta];
            }
        }
    }
    count[max]
}
|
use fs::Resource;
use alloc::boxed::Box;
use system::error::Result;
/// Creates a new kernel-log resource handle positioned at the start of
/// the log.
pub fn resource() -> Result<Box<Resource>> {
    Ok(Box::new(SyslogResource {
        pos: 0
    }))
}
/// The kernel log resource.
pub struct SyslogResource {
    // Offset into the kernel log where the next read continues.
    pos: usize
}
impl Resource for SyslogResource {
    /// Duplicates this handle, preserving the current read offset.
    fn dup(&self) -> Result<Box<Resource>> {
        Ok(Box::new(SyslogResource {
            pos: self.pos
        }))
    }
    /// Fills `buf` with the kernel log. Each message is prefixed by its log level:
    /// - `CRIT`
    /// - `ERROR`
    /// - `WARN`
    /// - `INFO`
    /// - `DEBUG`
    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
        // SAFETY(review): takes a shared reference into the global kernel
        // environment's log cell; assumes no concurrent mutable access
        // while we read — TODO confirm against the kernel's model.
        let log = unsafe { & *::env().log.get() };
        let count = log.read_at(self.pos, buf);
        // Advance the cursor so sequential reads stream through the log.
        self.pos += count;
        Ok(count)
    }
}
|
/// Computes the greatest common divisor of two positive integers with
/// Euclid's algorithm.
///
/// # Panics
///
/// Panics if either argument is zero.
fn gcd(mut n: u64, mut m: u64) -> u64 {
    assert!(n != 0 && m != 0);
    while m != 0 {
        // Keep the smaller operand in `n` so the remainder step below
        // strictly shrinks `m`.
        if m < n {
            std::mem::swap(&mut m, &mut n);
        }
        m %= n;
    }
    n
}
/// Demo entry point: prints the gcd of 12 and 16.
fn main() {
    let divisor = gcd(12, 16);
    println!("greatest common divisor of 12 and 16 is {}", divisor);
}
|
use support::{decl_storage, decl_module, StorageValue, dispatch::Result, StorageMap};
use system::ensure_signed;
use parity_codec::{Encode, Decode};
use runtime_primitives::traits::{As, Hash};
/// Configuration trait for this module; inherits `balances::Trait` so a
/// kitty can carry a `T::Balance`-typed price.
pub trait Trait: balances::Trait {}
/// A kitty record stored on-chain.
#[derive(Encode, Decode, Default, Clone, PartialEq)]
#[cfg_attr(feature = "std", derive(Debug))]
pub struct Kitty<Hash, Balance> {
    // Unique id (set identical to `dna` in `create_cat`).
    id: Hash,
    // Genome hash derived from (random seed, sender, nonce).
    dna: Hash,
    // Sale price; initialized to zero — presumably "not for sale",
    // TODO confirm convention.
    price: Balance,
    // Generation number; 0 for directly created kitties.
    gen: u64
}
decl_storage! {
    trait Store for Module<T: Trait> as KittyStorage {
        // Kitty id (hash) -> the kitty record itself.
        Kitties: map T::Hash => Kitty<T::Hash, T::Balance>;
        //Kitties: map T::Hash => Kitty<T::Hash, T::Balance>;
        // Kitty id -> owning account.
        KittyOwner: map T::Hash => T::AccountId;
        // Account -> its kitty id. NOTE(review): a plain map means each
        // later `create_cat` overwrites the previous entry, effectively
        // tracking only one kitty per owner — confirm this is intended.
        OwnerKitty: map T::AccountId => T::Hash;
        // Monotonic counter mixed into the randomness input.
        Nonce: u64;
    }
}
decl_module! {
    pub struct Module<T: Trait> for enum Call where origin: T::Origin {
        /// Creates a generation-0 kitty for the signed caller with a
        /// pseudo-random id/dna and a zero price, and records ownership.
        fn create_cat(origin) -> Result {
            let _sender = ensure_signed(origin)?;
            let nonce = <Nonce<T>>::get();
            // NOTE(review): `random_seed` is influenceable by block
            // authors; acceptable for a tutorial, not for production
            // randomness.
            let random_seed = <system::Module<T>>::random_seed();
            let random_hash = (random_seed, &_sender, nonce).using_encoded(<T as system::Trait>::Hashing::hash);
            // Bump the nonce so the next creation hashes different input.
            <Nonce<T>>::mutate(|n| *n += 1);
            // NOTE(review): `hash_of_zero` is computed but never used —
            // confirm whether a sentinel/collision check was intended.
            let hash_of_zero = <T as system::Trait>::Hashing::hash_of(&0);
            let zero_balance = <T::Balance as As<u64>>::sa(0);
            let cat = Kitty {
                id: random_hash,
                dna: random_hash,
                price: zero_balance,
                gen: 0,
            };
            // Persist the kitty and both ownership indexes.
            <KittyOwner<T>>::insert(&random_hash, &_sender );
            <Kitties<T>>::insert(&random_hash, cat);
            <OwnerKitty<T>>::insert(&_sender, &random_hash);
            Ok(())
        }
        // Declare public functions here
    }
}
extern crate log;
extern crate png;
extern crate vk_sys as vk;
extern crate vulkano;
pub mod debug;
pub mod engine;
pub mod figure;
pub mod frame;
pub mod scene;
|
use bytes::Bytes;
use crate::error::Result;
use crate::node::NodeCollection;
use crate::node_types::StandardType;
use crate::reader::Reader;
/// Debug helper that dumps a binary node stream as an indented outline
/// on stderr.
pub struct Printer;

impl Printer {
    /// Reads node definitions from `input` until a `FileEnd` node (or the
    /// first read error), prints one outline line per definition, then
    /// prints the collected `NodeCollection`.
    ///
    /// # Errors
    ///
    /// Fails only if the `Reader` cannot be constructed from the input
    /// bytes; per-definition key errors are logged and rendered as a
    /// missing identifier instead of aborting.
    pub fn run(input: impl Into<Bytes>) -> Result<()> {
        let mut reader = Reader::new(input.into())?;
        let mut nodes = Vec::new();
        let mut definitions = Vec::new();
        while let Ok(def) = reader.read_node_definition() {
            trace!("definition: {:?}", def);
            let node_type = def.node_type;
            // A failing key lookup is logged but treated as "no identifier".
            let key = match def.key() {
                Ok(v) => v,
                Err(e) => {
                    error!("error processing key for definition {:?}: {}", def, e);
                    None
                },
            };
            nodes.push((node_type, def.is_array, key));
            definitions.push(def);
            if node_type == StandardType::FileEnd {
                break;
            }
        }
        // Render the outline, two spaces of indent per nesting level.
        let mut indent = 0;
        for (node_type, is_array, identifier) in nodes {
            eprint!(
                "{:indent$} - {:?} (is_array: {}",
                "",
                node_type,
                is_array,
                indent = indent
            );
            if let Some(identifier) = identifier {
                eprint!(", identifier: {}", identifier);
            }
            eprintln!(")");
            // Attributes stay at the current level; NodeEnd closes a
            // level; every other node type opens one.
            match node_type {
                StandardType::Attribute => {},
                StandardType::NodeEnd => indent -= 2,
                _ => indent += 2,
            };
        }
        // Collecting definitions yields `None` when they do not assemble
        // into a collection — presumably an incomplete/malformed tree;
        // TODO confirm against NodeCollection's FromIterator impl.
        let collection: Option<NodeCollection> = definitions.into_iter().collect();
        match collection {
            Some(collection) => eprintln!("collection: {:#}", collection),
            None => eprintln!("collection: {:?}", collection),
        };
        Ok(())
    }
}
|
/// Returns `input` with its characters in reverse order.
///
/// Reversal is per `char` (Unicode scalar value), not per grapheme
/// cluster, so combining sequences come out reordered.
pub fn process(input: String) -> String {
    let mut reversed = String::with_capacity(input.len());
    for c in input.chars().rev() {
        reversed.push(c);
    }
    reversed
}
|
use std::ptr;
/// An offscreen OpenGL framebuffer with a color texture attachment and a
/// depth renderbuffer. Handles are GL object names; 0 means "released".
pub struct FrameBuffer {
    frame_buffer: u32,
    render_buffer: u32,
    texture_color_buffer: u32,
}
impl FrameBuffer {
    /// Creates a `width` x `height` framebuffer with an RGB color texture
    /// (color attachment 0) and a 24-bit depth renderbuffer.
    ///
    /// Requires a current OpenGL context on the calling thread. An
    /// incomplete framebuffer is only reported on stdout; the object is
    /// returned regardless.
    pub fn new(width: u32, height: u32) -> FrameBuffer {
        let mut frame_buffer: u32 = 0;
        let mut render_buffer: u32 = 0;
        let mut texture_color_buffer: u32 = 0;
        unsafe {
            gl::GenFramebuffers(1, &mut frame_buffer);
            gl::BindFramebuffer(gl::FRAMEBUFFER, frame_buffer);
            // init a color attachment texture
            gl::GenTextures(1, &mut texture_color_buffer);
            gl::BindTexture(gl::TEXTURE_2D, texture_color_buffer);
            gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, gl::LINEAR as i32);
            gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, gl::LINEAR as i32);
            // Null data pointer: allocate storage without uploading pixels.
            gl::TexImage2D(
                gl::TEXTURE_2D,
                0,
                gl::RGB as i32,
                width as i32,
                height as i32,
                0,
                gl::RGB,
                gl::UNSIGNED_BYTE,
                ptr::null(),
            );
            gl::FramebufferTexture2D(
                gl::FRAMEBUFFER,
                gl::COLOR_ATTACHMENT0,
                gl::TEXTURE_2D,
                texture_color_buffer,
                0,
            );
            gl::BindTexture(gl::TEXTURE_2D, 0);
            // init render buffer object
            gl::GenRenderbuffers(1, &mut render_buffer);
            gl::BindRenderbuffer(gl::RENDERBUFFER, render_buffer);
            gl::RenderbufferStorage(
                gl::RENDERBUFFER,
                gl::DEPTH_COMPONENT24,
                width as i32,
                height as i32,
            );
            gl::FramebufferRenderbuffer(
                gl::FRAMEBUFFER,
                gl::DEPTH_ATTACHMENT,
                gl::RENDERBUFFER,
                render_buffer,
            );
            gl::BindRenderbuffer(gl::RENDERBUFFER, 0);
            // check frame buffer status
            if gl::CheckFramebufferStatus(gl::FRAMEBUFFER) != gl::FRAMEBUFFER_COMPLETE {
                println!("error: frame buffer is not complete");
            }
            // bind default frame buffer
            gl::BindFramebuffer(gl::FRAMEBUFFER, 0);
        }
        FrameBuffer {
            frame_buffer: frame_buffer,
            render_buffer: render_buffer,
            texture_color_buffer: texture_color_buffer,
        }
    }
    /// Binds this framebuffer as the current render target.
    pub fn bind_as_frame_buffer(&self) {
        unsafe {
            gl::BindFramebuffer(gl::FRAMEBUFFER, self.frame_buffer);
        }
    }
    /// Binds the color attachment so it can be sampled as a 2D texture.
    pub fn bind_as_texture(&self) {
        unsafe {
            gl::BindTexture(gl::TEXTURE_2D, self.texture_color_buffer);
        }
    }
}
impl Drop for FrameBuffer {
    /// Releases the GL objects. Each handle is zeroed after deletion so
    /// partially-initialized or already-released state is handled safely.
    /// Assumes a current GL context at drop time — TODO confirm callers
    /// guarantee this.
    fn drop(&mut self) {
        unsafe {
            if 0 != self.frame_buffer {
                gl::DeleteFramebuffers(1, &self.frame_buffer);
                self.frame_buffer = 0;
            }
            if 0 != self.texture_color_buffer {
                gl::DeleteTextures(1, &self.texture_color_buffer);
                self.texture_color_buffer = 0;
            }
            if 0 != self.render_buffer {
                gl::DeleteRenderbuffers(1, &self.render_buffer);
                self.render_buffer = 0;
            }
        }
    }
}
/// Reads an integer and prints the counter-word reading ("hon"/"pon"/
/// "bon") selected by its last digit.
fn main() {
    let n = read::<i32>();
    let suffix = match n % 10 {
        3 => "bon",
        0 | 1 | 6 | 8 => "pon",
        2 | 4 | 5 | 7 | 9 => "hon",
        _ => panic!("unreachable")
    };
    println!("{}", suffix);
}
/// Reads one line from stdin, trims it, and parses it into `T`,
/// panicking on malformed input.
fn read<T: std::str::FromStr>() -> T {
    let mut line = String::new();
    std::io::stdin().read_line(&mut line).ok();
    line.trim().parse().ok().unwrap()
}
/// Toggleable GUI overlay options. All options start disabled.
///
/// `Default` is derived: every field's type default (`false`) is the
/// desired initial state, so the previous hand-written `impl Default`
/// only duplicated the derive.
#[derive(Debug, Default)]
pub struct GuiSettings {
    /// Draw the frames-per-second counter.
    pub(crate) show_fps: bool,
    /// Draw graph statistics.
    pub(crate) show_graph_stats: bool,
}
impl GuiSettings {
    /// Renders one checkbox per setting into the given egui `Ui`,
    /// toggling the flags in place as the user clicks.
    pub fn ui(&mut self, ui: &mut egui::Ui) {
        ui.checkbox(&mut self.show_fps, "Display FPS");
        ui.checkbox(&mut self.show_graph_stats, "Display graph stats");
    }
}
|
use std::hash::Hash;
use serde::{Deserialize, Serialize};
use crate::evaluation::Evaluation;
/// A move that significantly worsened the evaluation.
///
/// Equality and hashing consider only `position`, so two blunders played
/// in the same position collapse to one entry in hashed collections.
#[derive(Serialize, Deserialize, Debug)]
pub struct Blunder {
    /// Position the move was played from — presumably a FEN string;
    /// TODO confirm against the producer.
    pub position: String,
    /// The blundering move (trailing underscore avoids the `move` keyword).
    pub move_: String,
    pub eval_before: Evaluation,
    pub eval_after: Evaluation,
}
impl PartialEq for Blunder {
    // Two blunders are equal when they occur in the same position; the
    // move and evaluations are deliberately ignored.
    fn eq(&self, other: &Self) -> bool {
        self.position == other.position
    }
}
/// `Blunder`'s `PartialEq` compares plain `String`s, which is a total
/// equivalence relation, so `Eq` holds.
///
/// The empty impl replaces an explicit override of
/// `assert_receiver_is_total_eq`, a `#[doc(hidden)]` internal detail of
/// the `Eq` trait that user code should never implement by hand.
impl Eq for Blunder {}
impl Hash for Blunder {
    // Hash only `position`, preserving the invariant that values equal
    // under the position-only `PartialEq` hash identically.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.position.hash(state);
    }
}
|
use std::fs;
use test_case::test_case;
use fnv::FnvHashMap;
use std::collections::{HashMap, HashSet};
use tf_demo_parser::demo::packet::datatable::{ParseSendTable, SendTableName, ServerClass};
use tf_demo_parser::demo::parser::MessageHandler;
use tf_demo_parser::demo::sendprop::{SendPropIdentifier, SendPropName};
use tf_demo_parser::{Demo, DemoParser, MessageType, ParserState};
/// Collects every send table and a prop-identifier -> (table name, prop
/// name) lookup while a demo is parsed.
#[derive(Default)]
pub struct SendPropAnalyser {
    // All data tables seen, in the order the demo declared them.
    tables: Vec<ParseSendTable>,
    // Reverse lookup from a prop's numeric identifier to its names.
    prop_names: FnvHashMap<SendPropIdentifier, (SendTableName, SendPropName)>,
}
impl SendPropAnalyser {
    /// Creates an empty analyser (equivalent to `Default::default()`).
    pub fn new() -> Self {
        SendPropAnalyser::default()
    }
}
impl MessageHandler for SendPropAnalyser {
    type Output = (
        Vec<ParseSendTable>,
        FnvHashMap<SendPropIdentifier, (SendTableName, SendPropName)>,
    );
    // No per-packet messages are needed; this analyser only consumes the
    // data-table block below.
    fn does_handle(_message_type: MessageType) -> bool {
        false
    }
    /// Records all tables and indexes every prop by its identifier.
    fn handle_data_tables(
        &mut self,
        tables: &[ParseSendTable],
        _server_classes: &[ServerClass],
        _parser_state: &ParserState,
    ) {
        for table in tables {
            for prop_def in &table.props {
                self.prop_names.insert(
                    prop_def.identifier(),
                    (table.name.clone(), prop_def.name.clone()),
                );
            }
        }
        self.tables = tables.to_vec()
    }
    /// Hands the collected tables and name lookup back to the parser.
    fn into_output(self, _state: &ParserState) -> Self::Output {
        (self.tables, self.prop_names)
    }
}
/// Snapshot test: flattens every send table parsed from the demo and
/// compares the resulting "Table.prop" name lists against the JSON
/// snapshot file.
#[test_case("test_data/gully.dem", "test_data/gully_props.json"; "gully.dem")]
fn flatten_test(input_file: &str, snapshot_file: &str) {
    let file = fs::read(input_file).expect("Unable to read file");
    let demo = Demo::new(&file);
    let (_, (send_tables, prop_names)) =
        DemoParser::new_with_analyser(demo.get_stream(), SendPropAnalyser::new())
            .parse()
            .expect("Failed to parse");
    // Flatten each table and render every prop as "Table.prop" using the
    // analyser's identifier -> name lookup.
    let flat_props: HashMap<SendTableName, Vec<String>> = send_tables
        .iter()
        .map(|table| {
            (
                table.name.clone(),
                table
                    .flatten_props(&send_tables)
                    .unwrap()
                    .into_iter()
                    .map(|prop| {
                        let (table_name, prop_name) = &prop_names[&prop.identifier];
                        format!("{}.{}", table_name, prop_name)
                    })
                    .collect(),
            )
        })
        .collect();
    let expected: HashMap<SendTableName, Vec<String>> = serde_json::from_slice(
        fs::read(snapshot_file)
            .expect("Unable to read file")
            .as_slice(),
    )
    .unwrap();
    // Compare the table sets first so a missing/extra table fails with a
    // clear diff, then compare each table's flattened prop list.
    let expected_tables: HashSet<_> = expected.keys().collect();
    let actual_tables: HashSet<_> = flat_props.keys().collect();
    pretty_assertions::assert_eq!(expected_tables, actual_tables);
    for table in expected_tables {
        pretty_assertions::assert_eq!(expected[table], flat_props[table]);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.